From 5dafe38db1df88aff5184788a085d40ae65bb563 Mon Sep 17 00:00:00 2001
From: tkuchiki
Date: Thu, 19 May 2016 17:28:12 +0900
Subject: [PATCH] use glide && vendoring

---
 .gitignore | 7 +-
 glide.lock | 24 +
 glide.yaml | 7 +
 vendor/github.com/alecthomas/template/LICENSE | 27 +
 .../github.com/alecthomas/template/README.md | 25 +
 vendor/github.com/alecthomas/template/doc.go | 406 +++
 .../alecthomas/template/example_test.go | 72 +
 .../alecthomas/template/examplefiles_test.go | 183 ++
 .../alecthomas/template/examplefunc_test.go | 55 +
 vendor/github.com/alecthomas/template/exec.go | 845 +++++
 .../alecthomas/template/exec_test.go | 1044 +++++++
 .../github.com/alecthomas/template/funcs.go | 598 ++++
 .../github.com/alecthomas/template/helper.go | 108 +
 .../alecthomas/template/multi_test.go | 293 ++
 .../alecthomas/template/parse/lex.go | 556 ++++
 .../alecthomas/template/parse/lex_test.go | 468 +++
 .../alecthomas/template/parse/node.go | 834 +++++
 .../alecthomas/template/parse/parse.go | 700 +++++
 .../alecthomas/template/parse/parse_test.go | 426 +++
 .../alecthomas/template/template.go | 218 ++
 .../alecthomas/template/testdata/file1.tmpl | 2 +
 .../alecthomas/template/testdata/file2.tmpl | 2 +
 .../alecthomas/template/testdata/tmpl1.tmpl | 3 +
 .../alecthomas/template/testdata/tmpl2.tmpl | 3 +
 vendor/github.com/alecthomas/units/COPYING | 19 +
 vendor/github.com/alecthomas/units/README.md | 11 +
 vendor/github.com/alecthomas/units/bytes.go | 83 +
 .../github.com/alecthomas/units/bytes_test.go | 49 +
 vendor/github.com/alecthomas/units/doc.go | 13 +
 vendor/github.com/alecthomas/units/si.go | 26 +
 vendor/github.com/alecthomas/units/util.go | 138 +
 .../github.com/mattn/go-runewidth/.travis.yml | 8 +
 .../github.com/mattn/go-runewidth/README.mkd | 26 +
 .../mattn/go-runewidth/runewidth.go | 464 +++
 .../mattn/go-runewidth/runewidth_js.go | 8 +
 .../mattn/go-runewidth/runewidth_posix.go | 69 +
 .../mattn/go-runewidth/runewidth_test.go | 229 ++
 .../mattn/go-runewidth/runewidth_windows.go | 24 +
 vendor/github.com/najeira/ltsv/LICENSE | 27 +
 vendor/github.com/najeira/ltsv/README.rst | 98 +
 vendor/github.com/najeira/ltsv/ltsv_test.go | 318 ++
 vendor/github.com/najeira/ltsv/reader.go | 240 ++
 vendor/github.com/najeira/ltsv/writer.go | 351 +++
 .../olekukonko/tablewriter/.travis.yml | 8 +
 .../olekukonko/tablewriter/LICENCE.md | 19 +
 .../olekukonko/tablewriter/README.md | 141 +
 .../github.com/olekukonko/tablewriter/csv.go | 52 +
 .../tablewriter/csv2table/README.md | 43 +
 .../tablewriter/csv2table/csv2table.go | 84 +
 .../olekukonko/tablewriter/table.go | 510 ++++
 .../olekukonko/tablewriter/table_test.go | 292 ++
 .../olekukonko/tablewriter/test.csv | 4 +
 .../olekukonko/tablewriter/test_info.csv | 4 +
 .../github.com/olekukonko/tablewriter/util.go | 72 +
 .../github.com/olekukonko/tablewriter/wrap.go | 103 +
 .../olekukonko/tablewriter/wrap_test.go | 55 +
 .../github.com/tkuchiki/go-timezone/LICENSE | 21 +
 .../github.com/tkuchiki/go-timezone/README.md | 69 +
 .../tkuchiki/go-timezone/timezone.go | 1155 +++++++
 .../github.com/tkuchiki/parsetime/.gitignore | 2 +
 .../github.com/tkuchiki/parsetime/.travis.yml | 17 +
 vendor/github.com/tkuchiki/parsetime/LICENSE | 22 +
 vendor/github.com/tkuchiki/parsetime/Makefile | 22 +
 .../github.com/tkuchiki/parsetime/README.md | 636 ++++
 vendor/github.com/tkuchiki/parsetime/const.go | 83 +
 .../tkuchiki/parsetime/example/parse.go | 276 ++
 .../tkuchiki/parsetime/parsetime.go | 539 ++++
 .../tkuchiki/parsetime/parsetime_test.go | 1021 +++++++
 .../alecthomas/kingpin.v2/.travis.yml | 4 +
 vendor/gopkg.in/alecthomas/kingpin.v2/COPYING | 19 +
 .../gopkg.in/alecthomas/kingpin.v2/README.md | 671 ++++
 .../kingpin.v2/_examples/chat1/main.go | 20 +
 .../kingpin.v2/_examples/chat2/main.go | 38 +
 .../kingpin.v2/_examples/completion/main.go | 96 +
 .../kingpin.v2/_examples/curl/main.go | 105 +
 .../kingpin.v2/_examples/modular/main.go | 30 +
 .../kingpin.v2/_examples/ping/main.go | 20 +
 .../gopkg.in/alecthomas/kingpin.v2/actions.go | 42 +
 vendor/gopkg.in/alecthomas/kingpin.v2/app.go | 686 +++++
 .../alecthomas/kingpin.v2/app_test.go | 349 +++
 vendor/gopkg.in/alecthomas/kingpin.v2/args.go | 118 +
 .../alecthomas/kingpin.v2/args_test.go | 57 +
 vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go | 244 ++
 .../kingpin.v2/cmd/genvalues/main.go | 134 +
 .../alecthomas/kingpin.v2/cmd_test.go | 276 ++
 .../alecthomas/kingpin.v2/completions.go | 33 +
 .../alecthomas/kingpin.v2/completions_test.go | 78 +
 vendor/gopkg.in/alecthomas/kingpin.v2/doc.go | 68 +
 .../alecthomas/kingpin.v2/examples_test.go | 46 +
 .../gopkg.in/alecthomas/kingpin.v2/flags.go | 329 ++
 .../alecthomas/kingpin.v2/flags_test.go | 319 ++
 .../gopkg.in/alecthomas/kingpin.v2/global.go | 94 +
 .../alecthomas/kingpin.v2/guesswidth.go | 9 +
 .../alecthomas/kingpin.v2/guesswidth_unix.go | 38 +
 .../gopkg.in/alecthomas/kingpin.v2/model.go | 225 ++
 .../gopkg.in/alecthomas/kingpin.v2/parser.go | 376 +++
 .../alecthomas/kingpin.v2/parser_test.go | 42 +
 .../gopkg.in/alecthomas/kingpin.v2/parsers.go | 212 ++
 .../alecthomas/kingpin.v2/parsers_test.go | 98 +
 .../alecthomas/kingpin.v2/templates.go | 262 ++
 .../gopkg.in/alecthomas/kingpin.v2/usage.go | 211 ++
 .../alecthomas/kingpin.v2/usage_test.go | 65 +
 .../gopkg.in/alecthomas/kingpin.v2/values.go | 466 +++
 .../alecthomas/kingpin.v2/values.json | 25 +
 .../alecthomas/kingpin.v2/values_generated.go | 821 +++++
 .../alecthomas/kingpin.v2/values_test.go | 89 +
 vendor/gopkg.in/yaml.v2/.travis.yml | 9 +
 vendor/gopkg.in/yaml.v2/LICENSE | 188 ++
 vendor/gopkg.in/yaml.v2/LICENSE.libyaml | 31 +
 vendor/gopkg.in/yaml.v2/README.md | 131 +
 vendor/gopkg.in/yaml.v2/apic.go | 742 +++++
 vendor/gopkg.in/yaml.v2/decode.go | 683 +++++
 vendor/gopkg.in/yaml.v2/decode_test.go | 988 ++++++
 vendor/gopkg.in/yaml.v2/emitterc.go | 1685 ++++++++++
 vendor/gopkg.in/yaml.v2/encode.go | 306 ++
 vendor/gopkg.in/yaml.v2/encode_test.go | 501 +++
 vendor/gopkg.in/yaml.v2/parserc.go | 1096 +++++++
 vendor/gopkg.in/yaml.v2/readerc.go | 394 +++
 vendor/gopkg.in/yaml.v2/resolve.go | 203 ++
 vendor/gopkg.in/yaml.v2/scannerc.go | 2710 +++++++++++++++++
 vendor/gopkg.in/yaml.v2/sorter.go | 104 +
 vendor/gopkg.in/yaml.v2/suite_test.go | 12 +
 vendor/gopkg.in/yaml.v2/writerc.go | 89 +
 vendor/gopkg.in/yaml.v2/yaml.go | 346 +++
 vendor/gopkg.in/yaml.v2/yamlh.go | 716 +++++
 vendor/gopkg.in/yaml.v2/yamlprivateh.go | 173 ++
 126 files changed, 32276 insertions(+), 3 deletions(-)
 create mode 100644 glide.lock
 create mode 100644 glide.yaml
 create mode 100644 vendor/github.com/alecthomas/template/LICENSE
 create mode 100644 vendor/github.com/alecthomas/template/README.md
 create mode 100644 vendor/github.com/alecthomas/template/doc.go
 create mode 100644 vendor/github.com/alecthomas/template/example_test.go
 create mode 100644 vendor/github.com/alecthomas/template/examplefiles_test.go
 create mode 100644 vendor/github.com/alecthomas/template/examplefunc_test.go
 create mode 100644 vendor/github.com/alecthomas/template/exec.go
 create mode 100644 vendor/github.com/alecthomas/template/exec_test.go
 create mode 100644 vendor/github.com/alecthomas/template/funcs.go
 create mode 100644 vendor/github.com/alecthomas/template/helper.go
 create mode 100644 vendor/github.com/alecthomas/template/multi_test.go
 create mode 100644 vendor/github.com/alecthomas/template/parse/lex.go
 create mode 100644 vendor/github.com/alecthomas/template/parse/lex_test.go
 create mode 100644 vendor/github.com/alecthomas/template/parse/node.go
 create mode 100644 vendor/github.com/alecthomas/template/parse/parse.go
 create mode 100644 vendor/github.com/alecthomas/template/parse/parse_test.go
 create mode 100644 vendor/github.com/alecthomas/template/template.go
 create mode 100644 vendor/github.com/alecthomas/template/testdata/file1.tmpl
 create mode 100644 vendor/github.com/alecthomas/template/testdata/file2.tmpl
 create mode 100644 vendor/github.com/alecthomas/template/testdata/tmpl1.tmpl
 create mode 100644 vendor/github.com/alecthomas/template/testdata/tmpl2.tmpl
 create mode 100644 vendor/github.com/alecthomas/units/COPYING
 create mode 100644 vendor/github.com/alecthomas/units/README.md
 create mode 100644 vendor/github.com/alecthomas/units/bytes.go
 create mode 100644 vendor/github.com/alecthomas/units/bytes_test.go
 create mode 100644 vendor/github.com/alecthomas/units/doc.go
 create mode 100644 vendor/github.com/alecthomas/units/si.go
 create mode 100644 vendor/github.com/alecthomas/units/util.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/.travis.yml
 create mode 100644 vendor/github.com/mattn/go-runewidth/README.mkd
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_js.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_posix.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_test.go
 create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_windows.go
 create mode 100644 vendor/github.com/najeira/ltsv/LICENSE
 create mode 100644 vendor/github.com/najeira/ltsv/README.rst
 create mode 100644 vendor/github.com/najeira/ltsv/ltsv_test.go
 create mode 100644 vendor/github.com/najeira/ltsv/reader.go
 create mode 100644 vendor/github.com/najeira/ltsv/writer.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/.travis.yml
 create mode 100644 vendor/github.com/olekukonko/tablewriter/LICENCE.md
 create mode 100644 vendor/github.com/olekukonko/tablewriter/README.md
 create mode 100644 vendor/github.com/olekukonko/tablewriter/csv.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/csv2table/README.md
 create mode 100644 vendor/github.com/olekukonko/tablewriter/csv2table/csv2table.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/table.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/table_test.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/test.csv
 create mode 100644 vendor/github.com/olekukonko/tablewriter/test_info.csv
 create mode 100644 vendor/github.com/olekukonko/tablewriter/util.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/wrap.go
 create mode 100644 vendor/github.com/olekukonko/tablewriter/wrap_test.go
 create mode 100644 vendor/github.com/tkuchiki/go-timezone/LICENSE
 create mode 100644 vendor/github.com/tkuchiki/go-timezone/README.md
 create mode 100644 vendor/github.com/tkuchiki/go-timezone/timezone.go
 create mode 100644 vendor/github.com/tkuchiki/parsetime/.gitignore
 create mode 100644 vendor/github.com/tkuchiki/parsetime/.travis.yml
 create mode 100644 vendor/github.com/tkuchiki/parsetime/LICENSE
 create mode 100644 vendor/github.com/tkuchiki/parsetime/Makefile
 create mode 100644 vendor/github.com/tkuchiki/parsetime/README.md
 create mode 100644 vendor/github.com/tkuchiki/parsetime/const.go
 create mode 100644 vendor/github.com/tkuchiki/parsetime/example/parse.go
 create mode 100644 vendor/github.com/tkuchiki/parsetime/parsetime.go
 create mode 100644 vendor/github.com/tkuchiki/parsetime/parsetime_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/.travis.yml
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/COPYING
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/README.md
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat1/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat2/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/completion/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/curl/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/modular/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/_examples/ping/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/actions.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/app.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/app_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/args.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/args_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/cmd/genvalues/main.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/cmd_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/completions.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/completions_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/examples_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/flags_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/global.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/model.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parser.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parser_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parsers_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/templates.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/usage.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/usage_test.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values.json
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go
 create mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values_test.go
 create mode 100644 vendor/gopkg.in/yaml.v2/.travis.yml
 create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE
 create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE.libyaml
 create mode 100644 vendor/gopkg.in/yaml.v2/README.md
 create mode 100644 vendor/gopkg.in/yaml.v2/apic.go
 create mode 100644 vendor/gopkg.in/yaml.v2/decode.go
 create mode 100644 vendor/gopkg.in/yaml.v2/decode_test.go
 create mode 100644 vendor/gopkg.in/yaml.v2/emitterc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/encode.go
 create mode 100644 vendor/gopkg.in/yaml.v2/encode_test.go
 create mode 100644 vendor/gopkg.in/yaml.v2/parserc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/readerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/resolve.go
 create mode 100644 vendor/gopkg.in/yaml.v2/scannerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/sorter.go
 create mode 100644 vendor/gopkg.in/yaml.v2/suite_test.go
 create mode 100644 vendor/gopkg.in/yaml.v2/writerc.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yaml.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yamlh.go
 create mode 100644 vendor/gopkg.in/yaml.v2/yamlprivateh.go

diff --git a/.gitignore b/.gitignore
index 20cfb23..350f557 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
-pkg
-src
-alp
\ No newline at end of file
+pkg/
+src/
+tmp/
+alp
diff --git a/glide.lock b/glide.lock
new file mode 100644
index 0000000..b81c900
--- /dev/null
+++ b/glide.lock
@@ -0,0 +1,24 @@
+hash: 8a8182c7a7f210fc8e39bcd3bbbcb848e38cfa99a05b2869dc40d40cb348efa5
+updated: 2016-05-19T17:24:29.834110992+09:00
+imports:
+- name: github.com/alecthomas/template
+  version: a0175ee3bccc567396460bf5acd36800cb10c49c
+  subpackages:
+  - parse
+- name: github.com/alecthomas/units
+  version: 2efee857e7cfd4f3d0138cc3cbb1b4966962b93a
+- name: github.com/mattn/go-runewidth
+  version: d6bea18f789704b5f83375793155289da36a3c7f
+- name: github.com/najeira/ltsv
+  version: 40eb84a5b5d21afa8f26f357445d910c62011166
+- name: github.com/olekukonko/tablewriter
+  version: 279fecc91ba77130f753d073259d7f9e246acdda
+- name: github.com/tkuchiki/go-timezone
+  version: ba4dfef1d8b25b2ce7fd59cddc62091391b171c5
+- name: github.com/tkuchiki/parsetime
+  version: bece749572a82d4846c5a18f56a9d1d28dade3e8
+- name: gopkg.in/alecthomas/kingpin.v2
+  version: 8cccfa8eb2e3183254457fb1749b2667fbc364c7
+- name: gopkg.in/yaml.v2
+  version: a83829b6f1293c91addabc89d0571c246397bbf4
+devImports: []
diff --git a/glide.yaml b/glide.yaml
new file mode 100644
index 0000000..b5b946f
--- /dev/null
+++ b/glide.yaml
@@ -0,0 +1,7 @@
+package: alp
+import:
+- package: github.com/najeira/ltsv
+- package: github.com/olekukonko/tablewriter
+- package: github.com/tkuchiki/parsetime
+- package: gopkg.in/alecthomas/kingpin.v2
+- package: gopkg.in/yaml.v2
diff --git a/vendor/github.com/alecthomas/template/LICENSE b/vendor/github.com/alecthomas/template/LICENSE
new file mode 100644
index 0000000..7448756
--- /dev/null
+++ b/vendor/github.com/alecthomas/template/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/alecthomas/template/README.md b/vendor/github.com/alecthomas/template/README.md new file mode 100644 index 0000000..ef6a8ee --- /dev/null +++ b/vendor/github.com/alecthomas/template/README.md @@ -0,0 +1,25 @@ +# Go's `text/template` package with newline elision + +This is a fork of Go 1.4's [text/template](http://golang.org/pkg/text/template/) package with one addition: a backslash immediately after a closing delimiter will delete all subsequent newlines until a non-newline. + +eg. + +``` +{{if true}}\ +hello +{{end}}\ +``` + +Will result in: + +``` +hello\n +``` + +Rather than: + +``` +\n +hello\n +\n +``` diff --git a/vendor/github.com/alecthomas/template/doc.go b/vendor/github.com/alecthomas/template/doc.go new file mode 100644 index 0000000..223c595 --- /dev/null +++ b/vendor/github.com/alecthomas/template/doc.go @@ -0,0 +1,406 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package template implements data-driven templates for generating textual output. + +To generate HTML output, see package html/template, which has the same interface +as this package but automatically secures HTML output against certain attacks. + +Templates are executed by applying them to a data structure. Annotations in the +template refer to elements of the data structure (typically a field of a struct +or a key in a map) to control execution and derive values to be displayed. +Execution of the template walks the structure and sets the cursor, represented +by a period '.' and called "dot", to the value at the current location in the +structure as execution proceeds. + +The input text for a template is UTF-8-encoded text in any format. +"Actions"--data evaluations or control structures--are delimited by +"{{" and "}}"; all text outside actions is copied to the output unchanged. +Actions may not span newlines, although comments can. + +Once parsed, a template may be executed safely in parallel. + +Here is a trivial example that prints "17 items are made of wool". + + type Inventory struct { + Material string + Count uint + } + sweaters := Inventory{"wool", 17} + tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}") + if err != nil { panic(err) } + err = tmpl.Execute(os.Stdout, sweaters) + if err != nil { panic(err) } + +More intricate examples appear below. + +Actions + +Here is the list of actions. "Arguments" and "pipelines" are evaluations of +data, defined in detail below. + +*/ +// {{/* a comment */}} +// A comment; discarded. May contain newlines. +// Comments do not nest and must start and end at the +// delimiters, as shown here. +/* + + {{pipeline}} + The default textual representation of the value of the pipeline + is copied to the output. + + {{if pipeline}} T1 {{end}} + If the value of the pipeline is empty, no output is generated; + otherwise, T1 is executed. 
The empty values are false, 0, any + nil pointer or interface value, and any array, slice, map, or + string of length zero. + Dot is unaffected. + + {{if pipeline}} T1 {{else}} T0 {{end}} + If the value of the pipeline is empty, T0 is executed; + otherwise, T1 is executed. Dot is unaffected. + + {{if pipeline}} T1 {{else if pipeline}} T0 {{end}} + To simplify the appearance of if-else chains, the else action + of an if may include another if directly; the effect is exactly + the same as writing + {{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}} + + {{range pipeline}} T1 {{end}} + The value of the pipeline must be an array, slice, map, or channel. + If the value of the pipeline has length zero, nothing is output; + otherwise, dot is set to the successive elements of the array, + slice, or map and T1 is executed. If the value is a map and the + keys are of basic type with a defined order ("comparable"), the + elements will be visited in sorted key order. + + {{range pipeline}} T1 {{else}} T0 {{end}} + The value of the pipeline must be an array, slice, map, or channel. + If the value of the pipeline has length zero, dot is unaffected and + T0 is executed; otherwise, dot is set to the successive elements + of the array, slice, or map and T1 is executed. + + {{template "name"}} + The template with the specified name is executed with nil data. + + {{template "name" pipeline}} + The template with the specified name is executed with dot set + to the value of the pipeline. + + {{with pipeline}} T1 {{end}} + If the value of the pipeline is empty, no output is generated; + otherwise, dot is set to the value of the pipeline and T1 is + executed. + + {{with pipeline}} T1 {{else}} T0 {{end}} + If the value of the pipeline is empty, dot is unaffected and T0 + is executed; otherwise, dot is set to the value of the pipeline + and T1 is executed. + +Arguments + +An argument is a simple value, denoted by one of the following. + + - A boolean, string, character, integer, floating-point, imaginary + or complex constant in Go syntax. These behave like Go's untyped + constants, although raw strings may not span newlines. + - The keyword nil, representing an untyped Go nil. + - The character '.' (period): + . + The result is the value of dot. + - A variable name, which is a (possibly empty) alphanumeric string + preceded by a dollar sign, such as + $piOver2 + or + $ + The result is the value of the variable. + Variables are described below. + - The name of a field of the data, which must be a struct, preceded + by a period, such as + .Field + The result is the value of the field. Field invocations may be + chained: + .Field1.Field2 + Fields can also be evaluated on variables, including chaining: + $x.Field1.Field2 + - The name of a key of the data, which must be a map, preceded + by a period, such as + .Key + The result is the map element value indexed by the key. + Key invocations may be chained and combined with fields to any + depth: + .Field1.Key1.Field2.Key2 + Although the key must be an alphanumeric identifier, unlike with + field names they do not need to start with an upper case letter. + Keys can also be evaluated on variables, including chaining: + $x.key1.key2 + - The name of a niladic method of the data, preceded by a period, + such as + .Method + The result is the value of invoking the method with dot as the + receiver, dot.Method(). Such a method must have one return value (of + any type) or two return values, the second of which is an error. 
+ If it has two and the returned error is non-nil, execution terminates
+ and an error is returned to the caller as the value of Execute.
+ Method invocations may be chained and combined with fields and keys
+ to any depth:
+ .Field1.Key1.Method1.Field2.Key2.Method2
+ Methods can also be evaluated on variables, including chaining:
+ $x.Method1.Field
+ - The name of a niladic function, such as
+ fun
+ The result is the value of invoking the function, fun(). The return
+ types and values behave as in methods. Functions and function
+ names are described below.
+ - A parenthesized instance of one of the above, for grouping. The result
+ may be accessed by a field or map key invocation.
+ print (.F1 arg1) (.F2 arg2)
+ (.StructValuedMethod "arg").Field
+
+Arguments may evaluate to any type; if they are pointers the implementation
+automatically indirects to the base type when required.
+If an evaluation yields a function value, such as a function-valued
+field of a struct, the function is not invoked automatically, but it
+can be used as a truth value for an if action and the like. To invoke
+it, use the call function, defined below.
+
+A pipeline is a possibly chained sequence of "commands". A command is a simple
+value (argument) or a function or method call, possibly with multiple arguments:
+
+ Argument
+ The result is the value of evaluating the argument.
+ .Method [Argument...]
+ The method can be alone or the last element of a chain but,
+ unlike methods in the middle of a chain, it can take arguments.
+ The result is the value of calling the method with the
+ arguments:
+ dot.Method(Argument1, etc.)
+ functionName [Argument...]
+ The result is the value of calling the function associated
+ with the name:
+ function(Argument1, etc.)
+ Functions and function names are described below.
+
+Pipelines
+
+A pipeline may be "chained" by separating a sequence of commands with pipeline
+characters '|'. In a chained pipeline, the result of each command is
+passed as the last argument of the following command. The output of the final
+command in the pipeline is the value of the pipeline.
+
+The output of a command will be either one value or two values, the second of
+which has type error. If that second value is present and evaluates to
+non-nil, execution terminates and the error is returned to the caller of
+Execute.
+
+Variables
+
+A pipeline inside an action may initialize a variable to capture the result.
+The initialization has syntax
+
+ $variable := pipeline
+
+where $variable is the name of the variable. An action that declares a
+variable produces no output.
+
+If a "range" action initializes a variable, the variable is set to the
+successive elements of the iteration. Also, a "range" may declare two
+variables, separated by a comma:
+
+ range $index, $element := pipeline
+
+in which case $index and $element are set to the successive values of the
+array/slice index or map key and element, respectively. Note that if there is
+only one variable, it is assigned the element; this is opposite to the
+convention in Go range clauses.
+
+A variable's scope extends to the "end" action of the control structure ("if",
+"with", or "range") in which it is declared, or to the end of the template if
+there is no such control structure. A template invocation does not inherit
+variables from the point of its invocation.
+
+When execution begins, $ is set to the data argument passed to Execute, that is,
+to the starting value of dot.
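+
+As a brief added illustration (the Items field here is hypothetical), the
+two-variable form of "range" can number the elements of a slice:
+
+ {{range $i, $v := .Items}}{{$i}}={{$v}} {{end}}
+
+With dot bound to a value whose Items field is []string{"a", "b"}, this
+produces "0=a 1=b ".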
+ +Examples + +Here are some example one-line templates demonstrating pipelines and variables. +All produce the quoted word "output": + + {{"\"output\""}} + A string constant. + {{`"output"`}} + A raw string constant. + {{printf "%q" "output"}} + A function call. + {{"output" | printf "%q"}} + A function call whose final argument comes from the previous + command. + {{printf "%q" (print "out" "put")}} + A parenthesized argument. + {{"put" | printf "%s%s" "out" | printf "%q"}} + A more elaborate call. + {{"output" | printf "%s" | printf "%q"}} + A longer chain. + {{with "output"}}{{printf "%q" .}}{{end}} + A with action using dot. + {{with $x := "output" | printf "%q"}}{{$x}}{{end}} + A with action that creates and uses a variable. + {{with $x := "output"}}{{printf "%q" $x}}{{end}} + A with action that uses the variable in another action. + {{with $x := "output"}}{{$x | printf "%q"}}{{end}} + The same, but pipelined. + +Functions + +During execution functions are found in two function maps: first in the +template, then in the global function map. By default, no functions are defined +in the template but the Funcs method can be used to add them. + +Predefined global functions are named as follows. + + and + Returns the boolean AND of its arguments by returning the + first empty argument or the last argument, that is, + "and x y" behaves as "if x then y else x". All the + arguments are evaluated. + call + Returns the result of calling the first argument, which + must be a function, with the remaining arguments as parameters. + Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where + Y is a func-valued field, map entry, or the like. + The first argument must be the result of an evaluation + that yields a value of function type (as distinct from + a predefined function such as print). The function must + return either one or two result values, the second of which + is of type error. If the arguments don't match the function + or the returned error value is non-nil, execution stops. + html + Returns the escaped HTML equivalent of the textual + representation of its arguments. + index + Returns the result of indexing its first argument by the + following arguments. Thus "index x 1 2 3" is, in Go syntax, + x[1][2][3]. Each indexed item must be a map, slice, or array. + js + Returns the escaped JavaScript equivalent of the textual + representation of its arguments. + len + Returns the integer length of its argument. + not + Returns the boolean negation of its single argument. + or + Returns the boolean OR of its arguments by returning the + first non-empty argument or the last argument, that is, + "or x y" behaves as "if x then x else y". All the + arguments are evaluated. + print + An alias for fmt.Sprint + printf + An alias for fmt.Sprintf + println + An alias for fmt.Sprintln + urlquery + Returns the escaped value of the textual representation of + its arguments in a form suitable for embedding in a URL query. + +The boolean functions take any zero value to be false and a non-zero +value to be true. 
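+
+For instance (an added sketch; .Ready and .Items are hypothetical fields),
+several of the functions above combine in a single action:
+
+ {{if and .Ready .Items}}{{printf "%q" (index .Items 0)}}{{end}}
+
+This prints the first element of .Items, quoted, only when .Ready is true
+and the slice is non-empty (a non-empty slice counts as true).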
+ +There is also a set of binary comparison operators defined as +functions: + + eq + Returns the boolean truth of arg1 == arg2 + ne + Returns the boolean truth of arg1 != arg2 + lt + Returns the boolean truth of arg1 < arg2 + le + Returns the boolean truth of arg1 <= arg2 + gt + Returns the boolean truth of arg1 > arg2 + ge + Returns the boolean truth of arg1 >= arg2 + +For simpler multi-way equality tests, eq (only) accepts two or more +arguments and compares the second and subsequent to the first, +returning in effect + + arg1==arg2 || arg1==arg3 || arg1==arg4 ... + +(Unlike with || in Go, however, eq is a function call and all the +arguments will be evaluated.) + +The comparison functions work on basic types only (or named basic +types, such as "type Celsius float32"). They implement the Go rules +for comparison of values, except that size and exact type are +ignored, so any integer value, signed or unsigned, may be compared +with any other integer value. (The arithmetic value is compared, +not the bit pattern, so all negative integers are less than all +unsigned integers.) However, as usual, one may not compare an int +with a float32 and so on. + +Associated templates + +Each template is named by a string specified when it is created. Also, each +template is associated with zero or more other templates that it may invoke by +name; such associations are transitive and form a name space of templates. + +A template may use a template invocation to instantiate another associated +template; see the explanation of the "template" action above. The name must be +that of a template associated with the template that contains the invocation. + +Nested template definitions + +When parsing a template, another template may be defined and associated with the +template being parsed. Template definitions must appear at the top level of the +template, much like global variables in a Go program. + +The syntax of such definitions is to surround each template declaration with a +"define" and "end" action. + +The define action names the template being created by providing a string +constant. Here is a simple example: + + `{{define "T1"}}ONE{{end}} + {{define "T2"}}TWO{{end}} + {{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}} + {{template "T3"}}` + +This defines two templates, T1 and T2, and a third T3 that invokes the other two +when it is executed. Finally it invokes T3. If executed this template will +produce the text + + ONE TWO + +By construction, a template may reside in only one association. If it's +necessary to have a template addressable from multiple associations, the +template definition must be parsed multiple times to create distinct *Template +values, or must be copied with the Clone or AddParseTree method. + +Parse may be called multiple times to assemble the various associated templates; +see the ParseFiles and ParseGlob functions and methods for simple ways to parse +related templates stored in files. + +A template may be executed directly or through ExecuteTemplate, which executes +an associated template identified by name. 
To invoke our example above, we +might write, + + err := tmpl.Execute(os.Stdout, "no data needed") + if err != nil { + log.Fatalf("execution failed: %s", err) + } + +or to invoke a particular template explicitly by name, + + err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed") + if err != nil { + log.Fatalf("execution failed: %s", err) + } + +*/ +package template diff --git a/vendor/github.com/alecthomas/template/example_test.go b/vendor/github.com/alecthomas/template/example_test.go new file mode 100644 index 0000000..461ec05 --- /dev/null +++ b/vendor/github.com/alecthomas/template/example_test.go @@ -0,0 +1,72 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template_test + +import ( + "log" + "os" + + "github.com/alecthomas/template" +) + +func ExampleTemplate() { + // Define a template. + const letter = ` +Dear {{.Name}}, +{{if .Attended}} +It was a pleasure to see you at the wedding.{{else}} +It is a shame you couldn't make it to the wedding.{{end}} +{{with .Gift}}Thank you for the lovely {{.}}. +{{end}} +Best wishes, +Josie +` + + // Prepare some data to insert into the template. + type Recipient struct { + Name, Gift string + Attended bool + } + var recipients = []Recipient{ + {"Aunt Mildred", "bone china tea set", true}, + {"Uncle John", "moleskin pants", false}, + {"Cousin Rodney", "", false}, + } + + // Create a new template and parse the letter into it. + t := template.Must(template.New("letter").Parse(letter)) + + // Execute the template for each recipient. + for _, r := range recipients { + err := t.Execute(os.Stdout, r) + if err != nil { + log.Println("executing template:", err) + } + } + + // Output: + // Dear Aunt Mildred, + // + // It was a pleasure to see you at the wedding. + // Thank you for the lovely bone china tea set. + // + // Best wishes, + // Josie + // + // Dear Uncle John, + // + // It is a shame you couldn't make it to the wedding. + // Thank you for the lovely moleskin pants. + // + // Best wishes, + // Josie + // + // Dear Cousin Rodney, + // + // It is a shame you couldn't make it to the wedding. + // + // Best wishes, + // Josie +} diff --git a/vendor/github.com/alecthomas/template/examplefiles_test.go b/vendor/github.com/alecthomas/template/examplefiles_test.go new file mode 100644 index 0000000..0c7181d --- /dev/null +++ b/vendor/github.com/alecthomas/template/examplefiles_test.go @@ -0,0 +1,183 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template_test + +import ( + "io" + "io/ioutil" + "log" + "os" + "path/filepath" + + "github.com/alecthomas/template" +) + +// templateFile defines the contents of a template to be stored in a file, for testing. +type templateFile struct { + name string + contents string +} + +func createTestDir(files []templateFile) string { + dir, err := ioutil.TempDir("", "template") + if err != nil { + log.Fatal(err) + } + for _, file := range files { + f, err := os.Create(filepath.Join(dir, file.name)) + if err != nil { + log.Fatal(err) + } + defer f.Close() + _, err = io.WriteString(f, file.contents) + if err != nil { + log.Fatal(err) + } + } + return dir +} + +// Here we demonstrate loading a set of templates from a directory. 
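+// (Added note: as the comments below spell out, ParseGlob parses every file
+// matched by the pattern into one association, and the returned template is
+// named after the first file matched, T0.tmpl in this example.)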
+func ExampleTemplate_glob() { + // Here we create a temporary directory and populate it with our sample + // template definition files; usually the template files would already + // exist in some location known to the program. + dir := createTestDir([]templateFile{ + // T0.tmpl is a plain template file that just invokes T1. + {"T0.tmpl", `T0 invokes T1: ({{template "T1"}})`}, + // T1.tmpl defines a template, T1 that invokes T2. + {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`}, + // T2.tmpl defines a template T2. + {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`}, + }) + // Clean up after the test; another quirk of running as an example. + defer os.RemoveAll(dir) + + // pattern is the glob pattern used to find all the template files. + pattern := filepath.Join(dir, "*.tmpl") + + // Here starts the example proper. + // T0.tmpl is the first name matched, so it becomes the starting template, + // the value returned by ParseGlob. + tmpl := template.Must(template.ParseGlob(pattern)) + + err := tmpl.Execute(os.Stdout, nil) + if err != nil { + log.Fatalf("template execution: %s", err) + } + // Output: + // T0 invokes T1: (T1 invokes T2: (This is T2)) +} + +// This example demonstrates one way to share some templates +// and use them in different contexts. In this variant we add multiple driver +// templates by hand to an existing bundle of templates. +func ExampleTemplate_helpers() { + // Here we create a temporary directory and populate it with our sample + // template definition files; usually the template files would already + // exist in some location known to the program. + dir := createTestDir([]templateFile{ + // T1.tmpl defines a template, T1 that invokes T2. + {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`}, + // T2.tmpl defines a template T2. + {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`}, + }) + // Clean up after the test; another quirk of running as an example. + defer os.RemoveAll(dir) + + // pattern is the glob pattern used to find all the template files. + pattern := filepath.Join(dir, "*.tmpl") + + // Here starts the example proper. + // Load the helpers. + templates := template.Must(template.ParseGlob(pattern)) + // Add one driver template to the bunch; we do this with an explicit template definition. + _, err := templates.Parse("{{define `driver1`}}Driver 1 calls T1: ({{template `T1`}})\n{{end}}") + if err != nil { + log.Fatal("parsing driver1: ", err) + } + // Add another driver template. + _, err = templates.Parse("{{define `driver2`}}Driver 2 calls T2: ({{template `T2`}})\n{{end}}") + if err != nil { + log.Fatal("parsing driver2: ", err) + } + // We load all the templates before execution. This package does not require + // that behavior but html/template's escaping does, so it's a good habit. + err = templates.ExecuteTemplate(os.Stdout, "driver1", nil) + if err != nil { + log.Fatalf("driver1 execution: %s", err) + } + err = templates.ExecuteTemplate(os.Stdout, "driver2", nil) + if err != nil { + log.Fatalf("driver2 execution: %s", err) + } + // Output: + // Driver 1 calls T1: (T1 invokes T2: (This is T2)) + // Driver 2 calls T2: (This is T2) +} + +// This example demonstrates how to use one group of driver +// templates with distinct sets of helper templates. +func ExampleTemplate_share() { + // Here we create a temporary directory and populate it with our sample + // template definition files; usually the template files would already + // exist in some location known to the program. 
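+	// (Added note: a template may reside in only one association, so each
+	// variant of T2 below starts from a fresh Clone of the shared drivers.)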
+ dir := createTestDir([]templateFile{ + // T0.tmpl is a plain template file that just invokes T1. + {"T0.tmpl", "T0 ({{.}} version) invokes T1: ({{template `T1`}})\n"}, + // T1.tmpl defines a template, T1 that invokes T2. Note T2 is not defined + {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`}, + }) + // Clean up after the test; another quirk of running as an example. + defer os.RemoveAll(dir) + + // pattern is the glob pattern used to find all the template files. + pattern := filepath.Join(dir, "*.tmpl") + + // Here starts the example proper. + // Load the drivers. + drivers := template.Must(template.ParseGlob(pattern)) + + // We must define an implementation of the T2 template. First we clone + // the drivers, then add a definition of T2 to the template name space. + + // 1. Clone the helper set to create a new name space from which to run them. + first, err := drivers.Clone() + if err != nil { + log.Fatal("cloning helpers: ", err) + } + // 2. Define T2, version A, and parse it. + _, err = first.Parse("{{define `T2`}}T2, version A{{end}}") + if err != nil { + log.Fatal("parsing T2: ", err) + } + + // Now repeat the whole thing, using a different version of T2. + // 1. Clone the drivers. + second, err := drivers.Clone() + if err != nil { + log.Fatal("cloning drivers: ", err) + } + // 2. Define T2, version B, and parse it. + _, err = second.Parse("{{define `T2`}}T2, version B{{end}}") + if err != nil { + log.Fatal("parsing T2: ", err) + } + + // Execute the templates in the reverse order to verify the + // first is unaffected by the second. + err = second.ExecuteTemplate(os.Stdout, "T0.tmpl", "second") + if err != nil { + log.Fatalf("second execution: %s", err) + } + err = first.ExecuteTemplate(os.Stdout, "T0.tmpl", "first") + if err != nil { + log.Fatalf("first: execution: %s", err) + } + + // Output: + // T0 (second version) invokes T1: (T1 invokes T2: (T2, version B)) + // T0 (first version) invokes T1: (T1 invokes T2: (T2, version A)) +} diff --git a/vendor/github.com/alecthomas/template/examplefunc_test.go b/vendor/github.com/alecthomas/template/examplefunc_test.go new file mode 100644 index 0000000..8767cfd --- /dev/null +++ b/vendor/github.com/alecthomas/template/examplefunc_test.go @@ -0,0 +1,55 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template_test + +import ( + "log" + "os" + "strings" + + "github.com/alecthomas/template" +) + +// This example demonstrates a custom function to process template text. +// It installs the strings.Title function and uses it to +// Make Title Text Look Good In Our Template's Output. +func ExampleTemplate_func() { + // First we create a FuncMap with which to register the function. + funcMap := template.FuncMap{ + // The name "title" is what the function will be called in the template text. + "title": strings.Title, + } + + // A simple template definition to test our function. + // We print the input text several ways: + // - the original + // - title-cased + // - title-cased and then printed with %q + // - printed with %q and then title-cased. + const templateText = ` +Input: {{printf "%q" .}} +Output 0: {{title .}} +Output 1: {{title . | printf "%q"}} +Output 2: {{printf "%q" . | title}} +` + + // Create a template, add the function map, and parse the text. 
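+	// (Added note: Funcs must be called before Parse; parsing fails if the
+	// template text references a function that has not been registered yet.)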
+ tmpl, err := template.New("titleTest").Funcs(funcMap).Parse(templateText) + if err != nil { + log.Fatalf("parsing: %s", err) + } + + // Run the template to verify the output. + err = tmpl.Execute(os.Stdout, "the go programming language") + if err != nil { + log.Fatalf("execution: %s", err) + } + + // Output: + // Input: "the go programming language" + // Output 0: The Go Programming Language + // Output 1: "The Go Programming Language" + // Output 2: "The Go Programming Language" +} diff --git a/vendor/github.com/alecthomas/template/exec.go b/vendor/github.com/alecthomas/template/exec.go new file mode 100644 index 0000000..c3078e5 --- /dev/null +++ b/vendor/github.com/alecthomas/template/exec.go @@ -0,0 +1,845 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "bytes" + "fmt" + "io" + "reflect" + "runtime" + "sort" + "strings" + + "github.com/alecthomas/template/parse" +) + +// state represents the state of an execution. It's not part of the +// template so that multiple executions of the same template +// can execute in parallel. +type state struct { + tmpl *Template + wr io.Writer + node parse.Node // current node, for errors + vars []variable // push-down stack of variable values. +} + +// variable holds the dynamic value of a variable such as $, $x etc. +type variable struct { + name string + value reflect.Value +} + +// push pushes a new variable on the stack. +func (s *state) push(name string, value reflect.Value) { + s.vars = append(s.vars, variable{name, value}) +} + +// mark returns the length of the variable stack. +func (s *state) mark() int { + return len(s.vars) +} + +// pop pops the variable stack up to the mark. +func (s *state) pop(mark int) { + s.vars = s.vars[0:mark] +} + +// setVar overwrites the top-nth variable on the stack. Used by range iterations. +func (s *state) setVar(n int, value reflect.Value) { + s.vars[len(s.vars)-n].value = value +} + +// varValue returns the value of the named variable. +func (s *state) varValue(name string) reflect.Value { + for i := s.mark() - 1; i >= 0; i-- { + if s.vars[i].name == name { + return s.vars[i].value + } + } + s.errorf("undefined variable: %s", name) + return zero +} + +var zero reflect.Value + +// at marks the state to be on node n, for error reporting. +func (s *state) at(node parse.Node) { + s.node = node +} + +// doublePercent returns the string with %'s replaced by %%, if necessary, +// so it can be used safely inside a Printf format string. +func doublePercent(str string) string { + if strings.Contains(str, "%") { + str = strings.Replace(str, "%", "%%", -1) + } + return str +} + +// errorf formats the error and terminates processing. +func (s *state) errorf(format string, args ...interface{}) { + name := doublePercent(s.tmpl.Name()) + if s.node == nil { + format = fmt.Sprintf("template: %s: %s", name, format) + } else { + location, context := s.tmpl.ErrorContext(s.node) + format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format) + } + panic(fmt.Errorf(format, args...)) +} + +// errRecover is the handler that turns panics into returns from the top +// level of Parse. 
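+// (Added note: runtime errors indicate bugs and are re-panicked; any other
+// error, such as one raised by (*state).errorf, is assigned to *errp and
+// returned to the caller.)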
+func errRecover(errp *error) { + e := recover() + if e != nil { + switch err := e.(type) { + case runtime.Error: + panic(e) + case error: + *errp = err + default: + panic(e) + } + } +} + +// ExecuteTemplate applies the template associated with t that has the given name +// to the specified data object and writes the output to wr. +// If an error occurs executing the template or writing its output, +// execution stops, but partial results may already have been written to +// the output writer. +// A template may be executed safely in parallel. +func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error { + tmpl := t.tmpl[name] + if tmpl == nil { + return fmt.Errorf("template: no template %q associated with template %q", name, t.name) + } + return tmpl.Execute(wr, data) +} + +// Execute applies a parsed template to the specified data object, +// and writes the output to wr. +// If an error occurs executing the template or writing its output, +// execution stops, but partial results may already have been written to +// the output writer. +// A template may be executed safely in parallel. +func (t *Template) Execute(wr io.Writer, data interface{}) (err error) { + defer errRecover(&err) + value := reflect.ValueOf(data) + state := &state{ + tmpl: t, + wr: wr, + vars: []variable{{"$", value}}, + } + t.init() + if t.Tree == nil || t.Root == nil { + var b bytes.Buffer + for name, tmpl := range t.tmpl { + if tmpl.Tree == nil || tmpl.Root == nil { + continue + } + if b.Len() > 0 { + b.WriteString(", ") + } + fmt.Fprintf(&b, "%q", name) + } + var s string + if b.Len() > 0 { + s = "; defined templates are: " + b.String() + } + state.errorf("%q is an incomplete or empty template%s", t.Name(), s) + } + state.walk(value, t.Root) + return +} + +// Walk functions step through the major pieces of the template structure, +// generating output as they go. +func (s *state) walk(dot reflect.Value, node parse.Node) { + s.at(node) + switch node := node.(type) { + case *parse.ActionNode: + // Do not pop variables so they persist until next end. + // Also, if the action declares variables, don't print the result. + val := s.evalPipeline(dot, node.Pipe) + if len(node.Pipe.Decl) == 0 { + s.printValue(node, val) + } + case *parse.IfNode: + s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList) + case *parse.ListNode: + for _, node := range node.Nodes { + s.walk(dot, node) + } + case *parse.RangeNode: + s.walkRange(dot, node) + case *parse.TemplateNode: + s.walkTemplate(dot, node) + case *parse.TextNode: + if _, err := s.wr.Write(node.Text); err != nil { + s.errorf("%s", err) + } + case *parse.WithNode: + s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList) + default: + s.errorf("unknown node: %s", node) + } +} + +// walkIfOrWith walks an 'if' or 'with' node. The two control structures +// are identical in behavior except that 'with' sets dot. +func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) { + defer s.pop(s.mark()) + val := s.evalPipeline(dot, pipe) + truth, ok := isTrue(val) + if !ok { + s.errorf("if/with can't use %v", val) + } + if truth { + if typ == parse.NodeWith { + s.walk(val, list) + } else { + s.walk(dot, list) + } + } else if elseList != nil { + s.walk(dot, elseList) + } +} + +// isTrue reports whether the value is 'true', in the sense of not the zero of its type, +// and whether the value has a meaningful truth value. 
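+// (Added note: for example, a nil pointer, an empty string, and a zero-length
+// map are all false, while a struct value is always true whatever its fields
+// hold.)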
+func isTrue(val reflect.Value) (truth, ok bool) { + if !val.IsValid() { + // Something like var x interface{}, never set. It's a form of nil. + return false, true + } + switch val.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + truth = val.Len() > 0 + case reflect.Bool: + truth = val.Bool() + case reflect.Complex64, reflect.Complex128: + truth = val.Complex() != 0 + case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface: + truth = !val.IsNil() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + truth = val.Int() != 0 + case reflect.Float32, reflect.Float64: + truth = val.Float() != 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + truth = val.Uint() != 0 + case reflect.Struct: + truth = true // Struct values are always true. + default: + return + } + return truth, true +} + +func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) { + s.at(r) + defer s.pop(s.mark()) + val, _ := indirect(s.evalPipeline(dot, r.Pipe)) + // mark top of stack before any variables in the body are pushed. + mark := s.mark() + oneIteration := func(index, elem reflect.Value) { + // Set top var (lexically the second if there are two) to the element. + if len(r.Pipe.Decl) > 0 { + s.setVar(1, elem) + } + // Set next var (lexically the first if there are two) to the index. + if len(r.Pipe.Decl) > 1 { + s.setVar(2, index) + } + s.walk(elem, r.List) + s.pop(mark) + } + switch val.Kind() { + case reflect.Array, reflect.Slice: + if val.Len() == 0 { + break + } + for i := 0; i < val.Len(); i++ { + oneIteration(reflect.ValueOf(i), val.Index(i)) + } + return + case reflect.Map: + if val.Len() == 0 { + break + } + for _, key := range sortKeys(val.MapKeys()) { + oneIteration(key, val.MapIndex(key)) + } + return + case reflect.Chan: + if val.IsNil() { + break + } + i := 0 + for ; ; i++ { + elem, ok := val.Recv() + if !ok { + break + } + oneIteration(reflect.ValueOf(i), elem) + } + if i == 0 { + break + } + return + case reflect.Invalid: + break // An invalid value is likely a nil map, etc. and acts like an empty map. + default: + s.errorf("range can't iterate over %v", val) + } + if r.ElseList != nil { + s.walk(dot, r.ElseList) + } +} + +func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) { + s.at(t) + tmpl := s.tmpl.tmpl[t.Name] + if tmpl == nil { + s.errorf("template %q not defined", t.Name) + } + // Variables declared by the pipeline persist. + dot = s.evalPipeline(dot, t.Pipe) + newState := *s + newState.tmpl = tmpl + // No dynamic scoping: template invocations inherit no variables. + newState.vars = []variable{{"$", dot}} + newState.walk(dot, tmpl.Root) +} + +// Eval functions evaluate pipelines, commands, and their elements and extract +// values from the data structure by examining fields, calling methods, and so on. +// The printing of those values happens only through walk functions. + +// evalPipeline returns the value acquired by evaluating a pipeline. If the +// pipeline has a variable declaration, the variable will be pushed on the +// stack. Callers should therefore pop the stack after they are finished +// executing commands depending on the pipeline value. +func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) { + if pipe == nil { + return + } + s.at(pipe) + for _, cmd := range pipe.Cmds { + value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg. 
+ // If the object has type interface{}, dig down one level to the thing inside. + if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 { + value = reflect.ValueOf(value.Interface()) // lovely! + } + } + for _, variable := range pipe.Decl { + s.push(variable.Ident[0], value) + } + return value +} + +func (s *state) notAFunction(args []parse.Node, final reflect.Value) { + if len(args) > 1 || final.IsValid() { + s.errorf("can't give argument to non-function %s", args[0]) + } +} + +func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value { + firstWord := cmd.Args[0] + switch n := firstWord.(type) { + case *parse.FieldNode: + return s.evalFieldNode(dot, n, cmd.Args, final) + case *parse.ChainNode: + return s.evalChainNode(dot, n, cmd.Args, final) + case *parse.IdentifierNode: + // Must be a function. + return s.evalFunction(dot, n, cmd, cmd.Args, final) + case *parse.PipeNode: + // Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored. + return s.evalPipeline(dot, n) + case *parse.VariableNode: + return s.evalVariableNode(dot, n, cmd.Args, final) + } + s.at(firstWord) + s.notAFunction(cmd.Args, final) + switch word := firstWord.(type) { + case *parse.BoolNode: + return reflect.ValueOf(word.True) + case *parse.DotNode: + return dot + case *parse.NilNode: + s.errorf("nil is not a command") + case *parse.NumberNode: + return s.idealConstant(word) + case *parse.StringNode: + return reflect.ValueOf(word.Text) + } + s.errorf("can't evaluate command %q", firstWord) + panic("not reached") +} + +// idealConstant is called to return the value of a number in a context where +// we don't know the type. In that case, the syntax of the number tells us +// its type, and we use Go rules to resolve. Note there is no such thing as +// a uint ideal constant in this situation - the value must be of int type. +func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value { + // These are ideal constants but we don't know the type + // and we have no context. (If it was a method argument, + // we'd know what we need.) The syntax guides us to some extent. + s.at(constant) + switch { + case constant.IsComplex: + return reflect.ValueOf(constant.Complex128) // incontrovertible. + case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0: + return reflect.ValueOf(constant.Float64) + case constant.IsInt: + n := int(constant.Int64) + if int64(n) != constant.Int64 { + s.errorf("%s overflows int", constant.Text) + } + return reflect.ValueOf(n) + case constant.IsUint: + s.errorf("%s overflows int", constant.Text) + } + return zero +} + +func isHexConstant(s string) bool { + return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X') +} + +func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value { + s.at(field) + return s.evalFieldChain(dot, dot, field, field.Ident, args, final) +} + +func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value { + s.at(chain) + // (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields. 
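+	// (Added note: a chain node arises from a parenthesized pipeline followed
+	// by field or key accesses, as in (.StructValuedMethod "arg").Field.)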
+ pipe := s.evalArg(dot, nil, chain.Node) + if len(chain.Field) == 0 { + s.errorf("internal error: no fields in evalChainNode") + } + return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final) +} + +func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value { + // $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields. + s.at(variable) + value := s.varValue(variable.Ident[0]) + if len(variable.Ident) == 1 { + s.notAFunction(args, final) + return value + } + return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final) +} + +// evalFieldChain evaluates .X.Y.Z possibly followed by arguments. +// dot is the environment in which to evaluate arguments, while +// receiver is the value being walked along the chain. +func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value { + n := len(ident) + for i := 0; i < n-1; i++ { + receiver = s.evalField(dot, ident[i], node, nil, zero, receiver) + } + // Now if it's a method, it gets the arguments. + return s.evalField(dot, ident[n-1], node, args, final, receiver) +} + +func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value { + s.at(node) + name := node.Ident + function, ok := findFunction(name, s.tmpl) + if !ok { + s.errorf("%q is not a defined function", name) + } + return s.evalCall(dot, function, cmd, name, args, final) +} + +// evalField evaluates an expression like (.Field) or (.Field arg1 arg2). +// The 'final' argument represents the return value from the preceding +// value of the pipeline, if any. +func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value { + if !receiver.IsValid() { + return zero + } + typ := receiver.Type() + receiver, _ = indirect(receiver) + // Unless it's an interface, need to get to a value of type *T to guarantee + // we see all methods of T and *T. + ptr := receiver + if ptr.Kind() != reflect.Interface && ptr.CanAddr() { + ptr = ptr.Addr() + } + if method := ptr.MethodByName(fieldName); method.IsValid() { + return s.evalCall(dot, method, node, fieldName, args, final) + } + hasArgs := len(args) > 1 || final.IsValid() + // It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil. + receiver, isNil := indirect(receiver) + if isNil { + s.errorf("nil pointer evaluating %s.%s", typ, fieldName) + } + switch receiver.Kind() { + case reflect.Struct: + tField, ok := receiver.Type().FieldByName(fieldName) + if ok { + field := receiver.FieldByIndex(tField.Index) + if tField.PkgPath != "" { // field is unexported + s.errorf("%s is an unexported field of struct type %s", fieldName, typ) + } + // If it's a function, we must call it. + if hasArgs { + s.errorf("%s has arguments but cannot be invoked as function", fieldName) + } + return field + } + s.errorf("%s is not a field of struct type %s", fieldName, typ) + case reflect.Map: + // If it's a map, attempt to use the field name as a key. 
+ nameVal := reflect.ValueOf(fieldName) + if nameVal.Type().AssignableTo(receiver.Type().Key()) { + if hasArgs { + s.errorf("%s is not a method but has arguments", fieldName) + } + return receiver.MapIndex(nameVal) + } + } + s.errorf("can't evaluate field %s in type %s", fieldName, typ) + panic("not reached") +} + +var ( + errorType = reflect.TypeOf((*error)(nil)).Elem() + fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() +) + +// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so +// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0] +// as the function itself. +func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value { + if args != nil { + args = args[1:] // Zeroth arg is function name/node; not passed to function. + } + typ := fun.Type() + numIn := len(args) + if final.IsValid() { + numIn++ + } + numFixed := len(args) + if typ.IsVariadic() { + numFixed = typ.NumIn() - 1 // last arg is the variadic one. + if numIn < numFixed { + s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args)) + } + } else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() { + s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args)) + } + if !goodFunc(typ) { + // TODO: This could still be a confusing error; maybe goodFunc should provide info. + s.errorf("can't call method/function %q with %d results", name, typ.NumOut()) + } + // Build the arg list. + argv := make([]reflect.Value, numIn) + // Args must be evaluated. Fixed args first. + i := 0 + for ; i < numFixed && i < len(args); i++ { + argv[i] = s.evalArg(dot, typ.In(i), args[i]) + } + // Now the ... args. + if typ.IsVariadic() { + argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice. + for ; i < len(args); i++ { + argv[i] = s.evalArg(dot, argType, args[i]) + } + } + // Add final value if necessary. + if final.IsValid() { + t := typ.In(typ.NumIn() - 1) + if typ.IsVariadic() { + t = t.Elem() + } + argv[i] = s.validateType(final, t) + } + result := fun.Call(argv) + // If we have an error that is not nil, stop execution and return that error to the caller. + if len(result) == 2 && !result[1].IsNil() { + s.at(node) + s.errorf("error calling %s: %s", name, result[1].Interface().(error)) + } + return result[0] +} + +// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero. +func canBeNil(typ reflect.Type) bool { + switch typ.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return true + } + return false +} + +// validateType guarantees that the value is valid and assignable to the type. +func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value { + if !value.IsValid() { + if typ == nil || canBeNil(typ) { + // An untyped nil interface{}. Accept as a proper nil value. + return reflect.Zero(typ) + } + s.errorf("invalid value; expected %s", typ) + } + if typ != nil && !value.Type().AssignableTo(typ) { + if value.Kind() == reflect.Interface && !value.IsNil() { + value = value.Elem() + if value.Type().AssignableTo(typ) { + return value + } + // fallthrough + } + // Does one dereference or indirection work? We could do more, as we + // do with method receivers, but that gets messy and method receivers + // are much more constrained, so it makes more sense there than here. 
+ // Besides, one is almost always all you need. + switch { + case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ): + value = value.Elem() + if !value.IsValid() { + s.errorf("dereference of nil pointer of type %s", typ) + } + case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr(): + value = value.Addr() + default: + s.errorf("wrong type for value; expected %s; got %s", typ, value.Type()) + } + } + return value +} + +func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + switch arg := n.(type) { + case *parse.DotNode: + return s.validateType(dot, typ) + case *parse.NilNode: + if canBeNil(typ) { + return reflect.Zero(typ) + } + s.errorf("cannot assign nil to %s", typ) + case *parse.FieldNode: + return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ) + case *parse.VariableNode: + return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ) + case *parse.PipeNode: + return s.validateType(s.evalPipeline(dot, arg), typ) + case *parse.IdentifierNode: + return s.evalFunction(dot, arg, arg, nil, zero) + case *parse.ChainNode: + return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ) + } + switch typ.Kind() { + case reflect.Bool: + return s.evalBool(typ, n) + case reflect.Complex64, reflect.Complex128: + return s.evalComplex(typ, n) + case reflect.Float32, reflect.Float64: + return s.evalFloat(typ, n) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return s.evalInteger(typ, n) + case reflect.Interface: + if typ.NumMethod() == 0 { + return s.evalEmptyInterface(dot, n) + } + case reflect.String: + return s.evalString(typ, n) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return s.evalUnsignedInteger(typ, n) + } + s.errorf("can't handle %s for arg of type %s", n, typ) + panic("not reached") +} + +func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + if n, ok := n.(*parse.BoolNode); ok { + value := reflect.New(typ).Elem() + value.SetBool(n.True) + return value + } + s.errorf("expected bool; found %s", n) + panic("not reached") +} + +func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + if n, ok := n.(*parse.StringNode); ok { + value := reflect.New(typ).Elem() + value.SetString(n.Text) + return value + } + s.errorf("expected string; found %s", n) + panic("not reached") +} + +func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + if n, ok := n.(*parse.NumberNode); ok && n.IsInt { + value := reflect.New(typ).Elem() + value.SetInt(n.Int64) + return value + } + s.errorf("expected integer; found %s", n) + panic("not reached") +} + +func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + if n, ok := n.(*parse.NumberNode); ok && n.IsUint { + value := reflect.New(typ).Elem() + value.SetUint(n.Uint64) + return value + } + s.errorf("expected unsigned integer; found %s", n) + panic("not reached") +} + +func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value { + s.at(n) + if n, ok := n.(*parse.NumberNode); ok && n.IsFloat { + value := reflect.New(typ).Elem() + value.SetFloat(n.Float64) + return value + } + s.errorf("expected float; found %s", n) + panic("not reached") +} + +func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value { + if n, ok := n.(*parse.NumberNode); ok && n.IsComplex { + value := reflect.New(typ).Elem() + 
value.SetComplex(n.Complex128) + return value + } + s.errorf("expected complex; found %s", n) + panic("not reached") +} + +func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value { + s.at(n) + switch n := n.(type) { + case *parse.BoolNode: + return reflect.ValueOf(n.True) + case *parse.DotNode: + return dot + case *parse.FieldNode: + return s.evalFieldNode(dot, n, nil, zero) + case *parse.IdentifierNode: + return s.evalFunction(dot, n, n, nil, zero) + case *parse.NilNode: + // NilNode is handled in evalArg, the only place that calls here. + s.errorf("evalEmptyInterface: nil (can't happen)") + case *parse.NumberNode: + return s.idealConstant(n) + case *parse.StringNode: + return reflect.ValueOf(n.Text) + case *parse.VariableNode: + return s.evalVariableNode(dot, n, nil, zero) + case *parse.PipeNode: + return s.evalPipeline(dot, n) + } + s.errorf("can't handle assignment of %s to empty interface argument", n) + panic("not reached") +} + +// indirect returns the item at the end of indirection, and a bool to indicate if it's nil. +// We indirect through pointers and empty interfaces (only) because +// non-empty interfaces have methods we might need. +func indirect(v reflect.Value) (rv reflect.Value, isNil bool) { + for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() { + if v.IsNil() { + return v, true + } + if v.Kind() == reflect.Interface && v.NumMethod() > 0 { + break + } + } + return v, false +} + +// printValue writes the textual representation of the value to the output of +// the template. +func (s *state) printValue(n parse.Node, v reflect.Value) { + s.at(n) + iface, ok := printableValue(v) + if !ok { + s.errorf("can't print %s of type %s", n, v.Type()) + } + fmt.Fprint(s.wr, iface) +} + +// printableValue returns the, possibly indirected, interface value inside v that +// is best for a call to formatted printer. +func printableValue(v reflect.Value) (interface{}, bool) { + if v.Kind() == reflect.Ptr { + v, _ = indirect(v) // fmt.Fprint handles nil. + } + if !v.IsValid() { + return "", true + } + + if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) { + if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) { + v = v.Addr() + } else { + switch v.Kind() { + case reflect.Chan, reflect.Func: + return nil, false + } + } + } + return v.Interface(), true +} + +// Types to help sort the keys in a map for reproducible output. + +type rvs []reflect.Value + +func (x rvs) Len() int { return len(x) } +func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +type rvInts struct{ rvs } + +func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() } + +type rvUints struct{ rvs } + +func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() } + +type rvFloats struct{ rvs } + +func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() } + +type rvStrings struct{ rvs } + +func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() } + +// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys. 
+func sortKeys(v []reflect.Value) []reflect.Value { + if len(v) <= 1 { + return v + } + switch v[0].Kind() { + case reflect.Float32, reflect.Float64: + sort.Sort(rvFloats{v}) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + sort.Sort(rvInts{v}) + case reflect.String: + sort.Sort(rvStrings{v}) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + sort.Sort(rvUints{v}) + } + return v +} diff --git a/vendor/github.com/alecthomas/template/exec_test.go b/vendor/github.com/alecthomas/template/exec_test.go new file mode 100644 index 0000000..69c213e --- /dev/null +++ b/vendor/github.com/alecthomas/template/exec_test.go @@ -0,0 +1,1044 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "bytes" + "errors" + "flag" + "fmt" + "reflect" + "strings" + "testing" +) + +var debug = flag.Bool("debug", false, "show the errors produced by the tests") + +// T has lots of interesting pieces to use to test execution. +type T struct { + // Basics + True bool + I int + U16 uint16 + X string + FloatZero float64 + ComplexZero complex128 + // Nested structs. + U *U + // Struct with String method. + V0 V + V1, V2 *V + // Struct with Error method. + W0 W + W1, W2 *W + // Slices + SI []int + SIEmpty []int + SB []bool + // Maps + MSI map[string]int + MSIone map[string]int // one element, for deterministic output + MSIEmpty map[string]int + MXI map[interface{}]int + MII map[int]int + SMSI []map[string]int + // Empty interfaces; used to see if we can dig inside one. + Empty0 interface{} // nil + Empty1 interface{} + Empty2 interface{} + Empty3 interface{} + Empty4 interface{} + // Non-empty interface. + NonEmptyInterface I + // Stringer. + Str fmt.Stringer + Err error + // Pointers + PI *int + PS *string + PSI *[]int + NIL *int + // Function (not method) + BinaryFunc func(string, string) string + VariadicFunc func(...string) string + VariadicFuncInt func(int, ...string) string + NilOKFunc func(*int) bool + ErrFunc func() (string, error) + // Template to test evaluation of templates. + Tmpl *Template + // Unexported field; cannot be accessed by template. 
+ unexported int +} + +type U struct { + V string +} + +type V struct { + j int +} + +func (v *V) String() string { + if v == nil { + return "nilV" + } + return fmt.Sprintf("<%d>", v.j) +} + +type W struct { + k int +} + +func (w *W) Error() string { + if w == nil { + return "nilW" + } + return fmt.Sprintf("[%d]", w.k) +} + +var tVal = &T{ + True: true, + I: 17, + U16: 16, + X: "x", + U: &U{"v"}, + V0: V{6666}, + V1: &V{7777}, // leave V2 as nil + W0: W{888}, + W1: &W{999}, // leave W2 as nil + SI: []int{3, 4, 5}, + SB: []bool{true, false}, + MSI: map[string]int{"one": 1, "two": 2, "three": 3}, + MSIone: map[string]int{"one": 1}, + MXI: map[interface{}]int{"one": 1}, + MII: map[int]int{1: 1}, + SMSI: []map[string]int{ + {"one": 1, "two": 2}, + {"eleven": 11, "twelve": 12}, + }, + Empty1: 3, + Empty2: "empty2", + Empty3: []int{7, 8}, + Empty4: &U{"UinEmpty"}, + NonEmptyInterface: new(T), + Str: bytes.NewBuffer([]byte("foozle")), + Err: errors.New("erroozle"), + PI: newInt(23), + PS: newString("a string"), + PSI: newIntSlice(21, 22, 23), + BinaryFunc: func(a, b string) string { return fmt.Sprintf("[%s=%s]", a, b) }, + VariadicFunc: func(s ...string) string { return fmt.Sprint("<", strings.Join(s, "+"), ">") }, + VariadicFuncInt: func(a int, s ...string) string { return fmt.Sprint(a, "=<", strings.Join(s, "+"), ">") }, + NilOKFunc: func(s *int) bool { return s == nil }, + ErrFunc: func() (string, error) { return "bla", nil }, + Tmpl: Must(New("x").Parse("test template")), // "x" is the value of .X +} + +// A non-empty interface. +type I interface { + Method0() string +} + +var iVal I = tVal + +// Helpers for creation. +func newInt(n int) *int { + return &n +} + +func newString(s string) *string { + return &s +} + +func newIntSlice(n ...int) *[]int { + p := new([]int) + *p = make([]int, len(n)) + copy(*p, n) + return p +} + +// Simple methods with and without arguments. +func (t *T) Method0() string { + return "M0" +} + +func (t *T) Method1(a int) int { + return a +} + +func (t *T) Method2(a uint16, b string) string { + return fmt.Sprintf("Method2: %d %s", a, b) +} + +func (t *T) Method3(v interface{}) string { + return fmt.Sprintf("Method3: %v", v) +} + +func (t *T) Copy() *T { + n := new(T) + *n = *t + return n +} + +func (t *T) MAdd(a int, b []int) []int { + v := make([]int, len(b)) + for i, x := range b { + v[i] = x + a + } + return v +} + +var myError = errors.New("my error") + +// MyError returns a value and an error according to its argument. +func (t *T) MyError(error bool) (bool, error) { + if error { + return true, myError + } + return false, nil +} + +// A few methods to test chaining. +func (t *T) GetU() *U { + return t.U +} + +func (u *U) TrueFalse(b bool) string { + if b { + return "true" + } + return "" +} + +func typeOf(arg interface{}) string { + return fmt.Sprintf("%T", arg) +} + +type execTest struct { + name string + input string + output string + data interface{} + ok bool +} + +// bigInt and bigUint are hex string representing numbers either side +// of the max int boundary. +// We do it this way so the test doesn't depend on ints being 32 bits. +var ( + bigInt = fmt.Sprintf("0x%x", int(1<", tVal, true}, + {"map .one interface", "{{.MXI.one}}", "1", tVal, true}, + {"map .WRONG args", "{{.MSI.one 1}}", "", tVal, false}, + {"map .WRONG type", "{{.MII.one}}", "", tVal, false}, + + // Dots of all kinds to test basic evaluation. 
+ {"dot int", "<{{.}}>", "<13>", 13, true}, + {"dot uint", "<{{.}}>", "<14>", uint(14), true}, + {"dot float", "<{{.}}>", "<15.1>", 15.1, true}, + {"dot bool", "<{{.}}>", "", true, true}, + {"dot complex", "<{{.}}>", "<(16.2-17i)>", 16.2 - 17i, true}, + {"dot string", "<{{.}}>", "", "hello", true}, + {"dot slice", "<{{.}}>", "<[-1 -2 -3]>", []int{-1, -2, -3}, true}, + {"dot map", "<{{.}}>", "", map[string]int{"two": 22}, true}, + {"dot struct", "<{{.}}>", "<{7 seven}>", struct { + a int + b string + }{7, "seven"}, true}, + + // Variables. + {"$ int", "{{$}}", "123", 123, true}, + {"$.I", "{{$.I}}", "17", tVal, true}, + {"$.U.V", "{{$.U.V}}", "v", tVal, true}, + {"declare in action", "{{$x := $.U.V}}{{$x}}", "v", tVal, true}, + + // Type with String method. + {"V{6666}.String()", "-{{.V0}}-", "-<6666>-", tVal, true}, + {"&V{7777}.String()", "-{{.V1}}-", "-<7777>-", tVal, true}, + {"(*V)(nil).String()", "-{{.V2}}-", "-nilV-", tVal, true}, + + // Type with Error method. + {"W{888}.Error()", "-{{.W0}}-", "-[888]-", tVal, true}, + {"&W{999}.Error()", "-{{.W1}}-", "-[999]-", tVal, true}, + {"(*W)(nil).Error()", "-{{.W2}}-", "-nilW-", tVal, true}, + + // Pointers. + {"*int", "{{.PI}}", "23", tVal, true}, + {"*string", "{{.PS}}", "a string", tVal, true}, + {"*[]int", "{{.PSI}}", "[21 22 23]", tVal, true}, + {"*[]int[1]", "{{index .PSI 1}}", "22", tVal, true}, + {"NIL", "{{.NIL}}", "", tVal, true}, + + // Empty interfaces holding values. + {"empty nil", "{{.Empty0}}", "", tVal, true}, + {"empty with int", "{{.Empty1}}", "3", tVal, true}, + {"empty with string", "{{.Empty2}}", "empty2", tVal, true}, + {"empty with slice", "{{.Empty3}}", "[7 8]", tVal, true}, + {"empty with struct", "{{.Empty4}}", "{UinEmpty}", tVal, true}, + {"empty with struct, field", "{{.Empty4.V}}", "UinEmpty", tVal, true}, + + // Method calls. + {".Method0", "-{{.Method0}}-", "-M0-", tVal, true}, + {".Method1(1234)", "-{{.Method1 1234}}-", "-1234-", tVal, true}, + {".Method1(.I)", "-{{.Method1 .I}}-", "-17-", tVal, true}, + {".Method2(3, .X)", "-{{.Method2 3 .X}}-", "-Method2: 3 x-", tVal, true}, + {".Method2(.U16, `str`)", "-{{.Method2 .U16 `str`}}-", "-Method2: 16 str-", tVal, true}, + {".Method2(.U16, $x)", "{{if $x := .X}}-{{.Method2 .U16 $x}}{{end}}-", "-Method2: 16 x-", tVal, true}, + {".Method3(nil constant)", "-{{.Method3 nil}}-", "-Method3: -", tVal, true}, + {".Method3(nil value)", "-{{.Method3 .MXI.unset}}-", "-Method3: -", tVal, true}, + {"method on var", "{{if $x := .}}-{{$x.Method2 .U16 $x.X}}{{end}}-", "-Method2: 16 x-", tVal, true}, + {"method on chained var", + "{{range .MSIone}}{{if $.U.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}", + "true", tVal, true}, + {"chained method", + "{{range .MSIone}}{{if $.GetU.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}", + "true", tVal, true}, + {"chained method on variable", + "{{with $x := .}}{{with .SI}}{{$.GetU.TrueFalse $.True}}{{end}}{{end}}", + "true", tVal, true}, + {".NilOKFunc not nil", "{{call .NilOKFunc .PI}}", "false", tVal, true}, + {".NilOKFunc nil", "{{call .NilOKFunc nil}}", "true", tVal, true}, + + // Function call builtin. 
+ {".BinaryFunc", "{{call .BinaryFunc `1` `2`}}", "[1=2]", tVal, true}, + {".VariadicFunc0", "{{call .VariadicFunc}}", "<>", tVal, true}, + {".VariadicFunc2", "{{call .VariadicFunc `he` `llo`}}", "", tVal, true}, + {".VariadicFuncInt", "{{call .VariadicFuncInt 33 `he` `llo`}}", "33=", tVal, true}, + {"if .BinaryFunc call", "{{ if .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{end}}", "[1=2]", tVal, true}, + {"if not .BinaryFunc call", "{{ if not .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{else}}No{{end}}", "No", tVal, true}, + {"Interface Call", `{{stringer .S}}`, "foozle", map[string]interface{}{"S": bytes.NewBufferString("foozle")}, true}, + {".ErrFunc", "{{call .ErrFunc}}", "bla", tVal, true}, + + // Erroneous function calls (check args). + {".BinaryFuncTooFew", "{{call .BinaryFunc `1`}}", "", tVal, false}, + {".BinaryFuncTooMany", "{{call .BinaryFunc `1` `2` `3`}}", "", tVal, false}, + {".BinaryFuncBad0", "{{call .BinaryFunc 1 3}}", "", tVal, false}, + {".BinaryFuncBad1", "{{call .BinaryFunc `1` 3}}", "", tVal, false}, + {".VariadicFuncBad0", "{{call .VariadicFunc 3}}", "", tVal, false}, + {".VariadicFuncIntBad0", "{{call .VariadicFuncInt}}", "", tVal, false}, + {".VariadicFuncIntBad`", "{{call .VariadicFuncInt `x`}}", "", tVal, false}, + {".VariadicFuncNilBad", "{{call .VariadicFunc nil}}", "", tVal, false}, + + // Pipelines. + {"pipeline", "-{{.Method0 | .Method2 .U16}}-", "-Method2: 16 M0-", tVal, true}, + {"pipeline func", "-{{call .VariadicFunc `llo` | call .VariadicFunc `he` }}-", "->-", tVal, true}, + + // Parenthesized expressions + {"parens in pipeline", "{{printf `%d %d %d` (1) (2 | add 3) (add 4 (add 5 6))}}", "1 5 15", tVal, true}, + + // Parenthesized expressions with field accesses + {"parens: $ in paren", "{{($).X}}", "x", tVal, true}, + {"parens: $.GetU in paren", "{{($.GetU).V}}", "v", tVal, true}, + {"parens: $ in paren in pipe", "{{($ | echo).X}}", "x", tVal, true}, + {"parens: spaces and args", `{{(makemap "up" "down" "left" "right").left}}`, "right", tVal, true}, + + // If. 
+ {"if true", "{{if true}}TRUE{{end}}", "TRUE", tVal, true}, + {"if false", "{{if false}}TRUE{{else}}FALSE{{end}}", "FALSE", tVal, true}, + {"if nil", "{{if nil}}TRUE{{end}}", "", tVal, false}, + {"if 1", "{{if 1}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true}, + {"if 0", "{{if 0}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"if 1.5", "{{if 1.5}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true}, + {"if 0.0", "{{if .FloatZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"if 1.5i", "{{if 1.5i}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true}, + {"if 0.0i", "{{if .ComplexZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"if emptystring", "{{if ``}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"if string", "{{if `notempty`}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true}, + {"if emptyslice", "{{if .SIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"if slice", "{{if .SI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true}, + {"if emptymap", "{{if .MSIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"if map", "{{if .MSI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true}, + {"if map unset", "{{if .MXI.none}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"if map not unset", "{{if not .MXI.none}}ZERO{{else}}NON-ZERO{{end}}", "ZERO", tVal, true}, + {"if $x with $y int", "{{if $x := true}}{{with $y := .I}}{{$x}},{{$y}}{{end}}{{end}}", "true,17", tVal, true}, + {"if $x with $x int", "{{if $x := true}}{{with $x := .I}}{{$x}},{{end}}{{$x}}{{end}}", "17,true", tVal, true}, + {"if else if", "{{if false}}FALSE{{else if true}}TRUE{{end}}", "TRUE", tVal, true}, + {"if else chain", "{{if eq 1 3}}1{{else if eq 2 3}}2{{else if eq 3 3}}3{{end}}", "3", tVal, true}, + + // Print etc. + {"print", `{{print "hello, print"}}`, "hello, print", tVal, true}, + {"print 123", `{{print 1 2 3}}`, "1 2 3", tVal, true}, + {"print nil", `{{print nil}}`, "", tVal, true}, + {"println", `{{println 1 2 3}}`, "1 2 3\n", tVal, true}, + {"printf int", `{{printf "%04x" 127}}`, "007f", tVal, true}, + {"printf float", `{{printf "%g" 3.5}}`, "3.5", tVal, true}, + {"printf complex", `{{printf "%g" 1+7i}}`, "(1+7i)", tVal, true}, + {"printf string", `{{printf "%s" "hello"}}`, "hello", tVal, true}, + {"printf function", `{{printf "%#q" zeroArgs}}`, "`zeroArgs`", tVal, true}, + {"printf field", `{{printf "%s" .U.V}}`, "v", tVal, true}, + {"printf method", `{{printf "%s" .Method0}}`, "M0", tVal, true}, + {"printf dot", `{{with .I}}{{printf "%d" .}}{{end}}`, "17", tVal, true}, + {"printf var", `{{with $x := .I}}{{printf "%d" $x}}{{end}}`, "17", tVal, true}, + {"printf lots", `{{printf "%d %s %g %s" 127 "hello" 7-3i .Method0}}`, "127 hello (7-3i) M0", tVal, true}, + + // HTML. + {"html", `{{html ""}}`, + "<script>alert("XSS");</script>", nil, true}, + {"html pipeline", `{{printf "" | html}}`, + "<script>alert("XSS");</script>", nil, true}, + {"html", `{{html .PS}}`, "a string", tVal, true}, + + // JavaScript. + {"js", `{{js .}}`, `It\'d be nice.`, `It'd be nice.`, true}, + + // URL query. 
+ {"urlquery", `{{"http://www.example.org/"|urlquery}}`, "http%3A%2F%2Fwww.example.org%2F", nil, true}, + + // Booleans + {"not", "{{not true}} {{not false}}", "false true", nil, true}, + {"and", "{{and false 0}} {{and 1 0}} {{and 0 true}} {{and 1 1}}", "false 0 0 1", nil, true}, + {"or", "{{or 0 0}} {{or 1 0}} {{or 0 true}} {{or 1 1}}", "0 1 true 1", nil, true}, + {"boolean if", "{{if and true 1 `hi`}}TRUE{{else}}FALSE{{end}}", "TRUE", tVal, true}, + {"boolean if not", "{{if and true 1 `hi` | not}}TRUE{{else}}FALSE{{end}}", "FALSE", nil, true}, + + // Indexing. + {"slice[0]", "{{index .SI 0}}", "3", tVal, true}, + {"slice[1]", "{{index .SI 1}}", "4", tVal, true}, + {"slice[HUGE]", "{{index .SI 10}}", "", tVal, false}, + {"slice[WRONG]", "{{index .SI `hello`}}", "", tVal, false}, + {"map[one]", "{{index .MSI `one`}}", "1", tVal, true}, + {"map[two]", "{{index .MSI `two`}}", "2", tVal, true}, + {"map[NO]", "{{index .MSI `XXX`}}", "0", tVal, true}, + {"map[nil]", "{{index .MSI nil}}", "0", tVal, true}, + {"map[WRONG]", "{{index .MSI 10}}", "", tVal, false}, + {"double index", "{{index .SMSI 1 `eleven`}}", "11", tVal, true}, + + // Len. + {"slice", "{{len .SI}}", "3", tVal, true}, + {"map", "{{len .MSI }}", "3", tVal, true}, + {"len of int", "{{len 3}}", "", tVal, false}, + {"len of nothing", "{{len .Empty0}}", "", tVal, false}, + + // With. + {"with true", "{{with true}}{{.}}{{end}}", "true", tVal, true}, + {"with false", "{{with false}}{{.}}{{else}}FALSE{{end}}", "FALSE", tVal, true}, + {"with 1", "{{with 1}}{{.}}{{else}}ZERO{{end}}", "1", tVal, true}, + {"with 0", "{{with 0}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"with 1.5", "{{with 1.5}}{{.}}{{else}}ZERO{{end}}", "1.5", tVal, true}, + {"with 0.0", "{{with .FloatZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"with 1.5i", "{{with 1.5i}}{{.}}{{else}}ZERO{{end}}", "(0+1.5i)", tVal, true}, + {"with 0.0i", "{{with .ComplexZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true}, + {"with emptystring", "{{with ``}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"with string", "{{with `notempty`}}{{.}}{{else}}EMPTY{{end}}", "notempty", tVal, true}, + {"with emptyslice", "{{with .SIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"with slice", "{{with .SI}}{{.}}{{else}}EMPTY{{end}}", "[3 4 5]", tVal, true}, + {"with emptymap", "{{with .MSIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"with map", "{{with .MSIone}}{{.}}{{else}}EMPTY{{end}}", "map[one:1]", tVal, true}, + {"with empty interface, struct field", "{{with .Empty4}}{{.V}}{{end}}", "UinEmpty", tVal, true}, + {"with $x int", "{{with $x := .I}}{{$x}}{{end}}", "17", tVal, true}, + {"with $x struct.U.V", "{{with $x := $}}{{$x.U.V}}{{end}}", "v", tVal, true}, + {"with variable and action", "{{with $x := $}}{{$y := $.U.V}}{{$y}}{{end}}", "v", tVal, true}, + + // Range. 
+ {"range []int", "{{range .SI}}-{{.}}-{{end}}", "-3--4--5-", tVal, true}, + {"range empty no else", "{{range .SIEmpty}}-{{.}}-{{end}}", "", tVal, true}, + {"range []int else", "{{range .SI}}-{{.}}-{{else}}EMPTY{{end}}", "-3--4--5-", tVal, true}, + {"range empty else", "{{range .SIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"range []bool", "{{range .SB}}-{{.}}-{{end}}", "-true--false-", tVal, true}, + {"range []int method", "{{range .SI | .MAdd .I}}-{{.}}-{{end}}", "-20--21--22-", tVal, true}, + {"range map", "{{range .MSI}}-{{.}}-{{end}}", "-1--3--2-", tVal, true}, + {"range empty map no else", "{{range .MSIEmpty}}-{{.}}-{{end}}", "", tVal, true}, + {"range map else", "{{range .MSI}}-{{.}}-{{else}}EMPTY{{end}}", "-1--3--2-", tVal, true}, + {"range empty map else", "{{range .MSIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true}, + {"range empty interface", "{{range .Empty3}}-{{.}}-{{else}}EMPTY{{end}}", "-7--8-", tVal, true}, + {"range empty nil", "{{range .Empty0}}-{{.}}-{{end}}", "", tVal, true}, + {"range $x SI", "{{range $x := .SI}}<{{$x}}>{{end}}", "<3><4><5>", tVal, true}, + {"range $x $y SI", "{{range $x, $y := .SI}}<{{$x}}={{$y}}>{{end}}", "<0=3><1=4><2=5>", tVal, true}, + {"range $x MSIone", "{{range $x := .MSIone}}<{{$x}}>{{end}}", "<1>", tVal, true}, + {"range $x $y MSIone", "{{range $x, $y := .MSIone}}<{{$x}}={{$y}}>{{end}}", "", tVal, true}, + {"range $x PSI", "{{range $x := .PSI}}<{{$x}}>{{end}}", "<21><22><23>", tVal, true}, + {"declare in range", "{{range $x := .PSI}}<{{$foo:=$x}}{{$x}}>{{end}}", "<21><22><23>", tVal, true}, + {"range count", `{{range $i, $x := count 5}}[{{$i}}]{{$x}}{{end}}`, "[0]a[1]b[2]c[3]d[4]e", tVal, true}, + {"range nil count", `{{range $i, $x := count 0}}{{else}}empty{{end}}`, "empty", tVal, true}, + + // Cute examples. + {"or as if true", `{{or .SI "slice is empty"}}`, "[3 4 5]", tVal, true}, + {"or as if false", `{{or .SIEmpty "slice is empty"}}`, "slice is empty", tVal, true}, + + // Error handling. + {"error method, error", "{{.MyError true}}", "", tVal, false}, + {"error method, no error", "{{.MyError false}}", "false", tVal, true}, + + // Fixed bugs. + // Must separate dot and receiver; otherwise args are evaluated with dot set to variable. + {"bug0", "{{range .MSIone}}{{if $.Method1 .}}X{{end}}{{end}}", "X", tVal, true}, + // Do not loop endlessly in indirect for non-empty interfaces. + // The bug appears with *interface only; looped forever. + {"bug1", "{{.Method0}}", "M0", &iVal, true}, + // Was taking address of interface field, so method set was empty. + {"bug2", "{{$.NonEmptyInterface.Method0}}", "M0", tVal, true}, + // Struct values were not legal in with - mere oversight. + {"bug3", "{{with $}}{{.Method0}}{{end}}", "M0", tVal, true}, + // Nil interface values in if. + {"bug4", "{{if .Empty0}}non-nil{{else}}nil{{end}}", "nil", tVal, true}, + // Stringer. + {"bug5", "{{.Str}}", "foozle", tVal, true}, + {"bug5a", "{{.Err}}", "erroozle", tVal, true}, + // Args need to be indirected and dereferenced sometimes. + {"bug6a", "{{vfunc .V0 .V1}}", "vfunc", tVal, true}, + {"bug6b", "{{vfunc .V0 .V0}}", "vfunc", tVal, true}, + {"bug6c", "{{vfunc .V1 .V0}}", "vfunc", tVal, true}, + {"bug6d", "{{vfunc .V1 .V1}}", "vfunc", tVal, true}, + // Legal parse but illegal execution: non-function should have no arguments. + {"bug7a", "{{3 2}}", "", tVal, false}, + {"bug7b", "{{$x := 1}}{{$x 2}}", "", tVal, false}, + {"bug7c", "{{$x := 1}}{{3 | $x}}", "", tVal, false}, + // Pipelined arg was not being type-checked. 
+ {"bug8a", "{{3|oneArg}}", "", tVal, false}, + {"bug8b", "{{4|dddArg 3}}", "", tVal, false}, + // A bug was introduced that broke map lookups for lower-case names. + {"bug9", "{{.cause}}", "neglect", map[string]string{"cause": "neglect"}, true}, + // Field chain starting with function did not work. + {"bug10", "{{mapOfThree.three}}-{{(mapOfThree).three}}", "3-3", 0, true}, + // Dereferencing nil pointer while evaluating function arguments should not panic. Issue 7333. + {"bug11", "{{valueString .PS}}", "", T{}, false}, + // 0xef gave constant type float64. Issue 8622. + {"bug12xe", "{{printf `%T` 0xef}}", "int", T{}, true}, + {"bug12xE", "{{printf `%T` 0xEE}}", "int", T{}, true}, + {"bug12Xe", "{{printf `%T` 0Xef}}", "int", T{}, true}, + {"bug12XE", "{{printf `%T` 0XEE}}", "int", T{}, true}, + // Chained nodes did not work as arguments. Issue 8473. + {"bug13", "{{print (.Copy).I}}", "17", tVal, true}, +} + +func zeroArgs() string { + return "zeroArgs" +} + +func oneArg(a string) string { + return "oneArg=" + a +} + +func dddArg(a int, b ...string) string { + return fmt.Sprintln(a, b) +} + +// count returns a channel that will deliver n sequential 1-letter strings starting at "a" +func count(n int) chan string { + if n == 0 { + return nil + } + c := make(chan string) + go func() { + for i := 0; i < n; i++ { + c <- "abcdefghijklmnop"[i : i+1] + } + close(c) + }() + return c +} + +// vfunc takes a *V and a V +func vfunc(V, *V) string { + return "vfunc" +} + +// valueString takes a string, not a pointer. +func valueString(v string) string { + return "value is ignored" +} + +func add(args ...int) int { + sum := 0 + for _, x := range args { + sum += x + } + return sum +} + +func echo(arg interface{}) interface{} { + return arg +} + +func makemap(arg ...string) map[string]string { + if len(arg)%2 != 0 { + panic("bad makemap") + } + m := make(map[string]string) + for i := 0; i < len(arg); i += 2 { + m[arg[i]] = arg[i+1] + } + return m +} + +func stringer(s fmt.Stringer) string { + return s.String() +} + +func mapOfThree() interface{} { + return map[string]int{"three": 3} +} + +func testExecute(execTests []execTest, template *Template, t *testing.T) { + b := new(bytes.Buffer) + funcs := FuncMap{ + "add": add, + "count": count, + "dddArg": dddArg, + "echo": echo, + "makemap": makemap, + "mapOfThree": mapOfThree, + "oneArg": oneArg, + "stringer": stringer, + "typeOf": typeOf, + "valueString": valueString, + "vfunc": vfunc, + "zeroArgs": zeroArgs, + } + for _, test := range execTests { + var tmpl *Template + var err error + if template == nil { + tmpl, err = New(test.name).Funcs(funcs).Parse(test.input) + } else { + tmpl, err = template.New(test.name).Funcs(funcs).Parse(test.input) + } + if err != nil { + t.Errorf("%s: parse error: %s", test.name, err) + continue + } + b.Reset() + err = tmpl.Execute(b, test.data) + switch { + case !test.ok && err == nil: + t.Errorf("%s: expected error; got none", test.name) + continue + case test.ok && err != nil: + t.Errorf("%s: unexpected execute error: %s", test.name, err) + continue + case !test.ok && err != nil: + // expected error, got one + if *debug { + fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err) + } + } + result := b.String() + if result != test.output { + t.Errorf("%s: expected\n\t%q\ngot\n\t%q", test.name, test.output, result) + } + } +} + +func TestExecute(t *testing.T) { + testExecute(execTests, nil, t) +} + +var delimPairs = []string{ + "", "", // default + "{{", "}}", // same as default + "<<", ">>", // distinct + "|", "|", // same + 
"(日)", "(本)", // peculiar +} + +func TestDelims(t *testing.T) { + const hello = "Hello, world" + var value = struct{ Str string }{hello} + for i := 0; i < len(delimPairs); i += 2 { + text := ".Str" + left := delimPairs[i+0] + trueLeft := left + right := delimPairs[i+1] + trueRight := right + if left == "" { // default case + trueLeft = "{{" + } + if right == "" { // default case + trueRight = "}}" + } + text = trueLeft + text + trueRight + // Now add a comment + text += trueLeft + "/*comment*/" + trueRight + // Now add an action containing a string. + text += trueLeft + `"` + trueLeft + `"` + trueRight + // At this point text looks like `{{.Str}}{{/*comment*/}}{{"{{"}}`. + tmpl, err := New("delims").Delims(left, right).Parse(text) + if err != nil { + t.Fatalf("delim %q text %q parse err %s", left, text, err) + } + var b = new(bytes.Buffer) + err = tmpl.Execute(b, value) + if err != nil { + t.Fatalf("delim %q exec err %s", left, err) + } + if b.String() != hello+trueLeft { + t.Errorf("expected %q got %q", hello+trueLeft, b.String()) + } + } +} + +// Check that an error from a method flows back to the top. +func TestExecuteError(t *testing.T) { + b := new(bytes.Buffer) + tmpl := New("error") + _, err := tmpl.Parse("{{.MyError true}}") + if err != nil { + t.Fatalf("parse error: %s", err) + } + err = tmpl.Execute(b, tVal) + if err == nil { + t.Errorf("expected error; got none") + } else if !strings.Contains(err.Error(), myError.Error()) { + if *debug { + fmt.Printf("test execute error: %s\n", err) + } + t.Errorf("expected myError; got %s", err) + } +} + +const execErrorText = `line 1 +line 2 +line 3 +{{template "one" .}} +{{define "one"}}{{template "two" .}}{{end}} +{{define "two"}}{{template "three" .}}{{end}} +{{define "three"}}{{index "hi" $}}{{end}}` + +// Check that an error from a nested template contains all the relevant information. +func TestExecError(t *testing.T) { + tmpl, err := New("top").Parse(execErrorText) + if err != nil { + t.Fatal("parse error:", err) + } + var b bytes.Buffer + err = tmpl.Execute(&b, 5) // 5 is out of range indexing "hi" + if err == nil { + t.Fatal("expected error") + } + const want = `template: top:7:20: executing "three" at : error calling index: index out of range: 5` + got := err.Error() + if got != want { + t.Errorf("expected\n%q\ngot\n%q", want, got) + } +} + +func TestJSEscaping(t *testing.T) { + testCases := []struct { + in, exp string + }{ + {`a`, `a`}, + {`'foo`, `\'foo`}, + {`Go "jump" \`, `Go \"jump\" \\`}, + {`Yukihiro says "今日は世界"`, `Yukihiro says \"今日は世界\"`}, + {"unprintable \uFDFF", `unprintable \uFDFF`}, + {``, `\x3Chtml\x3E`}, + } + for _, tc := range testCases { + s := JSEscapeString(tc.in) + if s != tc.exp { + t.Errorf("JS escaping [%s] got [%s] want [%s]", tc.in, s, tc.exp) + } + } +} + +// A nice example: walk a binary tree. + +type Tree struct { + Val int + Left, Right *Tree +} + +// Use different delimiters to test Set.Delims. +const treeTemplate = ` + (define "tree") + [ + (.Val) + (with .Left) + (template "tree" .) + (end) + (with .Right) + (template "tree" .) 
+ (end) + ] + (end) +` + +func TestTree(t *testing.T) { + var tree = &Tree{ + 1, + &Tree{ + 2, &Tree{ + 3, + &Tree{ + 4, nil, nil, + }, + nil, + }, + &Tree{ + 5, + &Tree{ + 6, nil, nil, + }, + nil, + }, + }, + &Tree{ + 7, + &Tree{ + 8, + &Tree{ + 9, nil, nil, + }, + nil, + }, + &Tree{ + 10, + &Tree{ + 11, nil, nil, + }, + nil, + }, + }, + } + tmpl, err := New("root").Delims("(", ")").Parse(treeTemplate) + if err != nil { + t.Fatal("parse error:", err) + } + var b bytes.Buffer + stripSpace := func(r rune) rune { + if r == '\t' || r == '\n' { + return -1 + } + return r + } + const expect = "[1[2[3[4]][5[6]]][7[8[9]][10[11]]]]" + // First by looking up the template. + err = tmpl.Lookup("tree").Execute(&b, tree) + if err != nil { + t.Fatal("exec error:", err) + } + result := strings.Map(stripSpace, b.String()) + if result != expect { + t.Errorf("expected %q got %q", expect, result) + } + // Then direct to execution. + b.Reset() + err = tmpl.ExecuteTemplate(&b, "tree", tree) + if err != nil { + t.Fatal("exec error:", err) + } + result = strings.Map(stripSpace, b.String()) + if result != expect { + t.Errorf("expected %q got %q", expect, result) + } +} + +func TestExecuteOnNewTemplate(t *testing.T) { + // This is issue 3872. + _ = New("Name").Templates() +} + +const testTemplates = `{{define "one"}}one{{end}}{{define "two"}}two{{end}}` + +func TestMessageForExecuteEmpty(t *testing.T) { + // Test a truly empty template. + tmpl := New("empty") + var b bytes.Buffer + err := tmpl.Execute(&b, 0) + if err == nil { + t.Fatal("expected initial error") + } + got := err.Error() + want := `template: empty: "empty" is an incomplete or empty template` + if got != want { + t.Errorf("expected error %s got %s", want, got) + } + // Add a non-empty template to check that the error is helpful. + tests, err := New("").Parse(testTemplates) + if err != nil { + t.Fatal(err) + } + tmpl.AddParseTree("secondary", tests.Tree) + err = tmpl.Execute(&b, 0) + if err == nil { + t.Fatal("expected second error") + } + got = err.Error() + want = `template: empty: "empty" is an incomplete or empty template; defined templates are: "secondary"` + if got != want { + t.Errorf("expected error %s got %s", want, got) + } + // Make sure we can execute the secondary. 
+ err = tmpl.ExecuteTemplate(&b, "secondary", 0) + if err != nil { + t.Fatal(err) + } +} + +func TestFinalForPrintf(t *testing.T) { + tmpl, err := New("").Parse(`{{"x" | printf}}`) + if err != nil { + t.Fatal(err) + } + var b bytes.Buffer + err = tmpl.Execute(&b, 0) + if err != nil { + t.Fatal(err) + } +} + +type cmpTest struct { + expr string + truth string + ok bool +} + +var cmpTests = []cmpTest{ + {"eq true true", "true", true}, + {"eq true false", "false", true}, + {"eq 1+2i 1+2i", "true", true}, + {"eq 1+2i 1+3i", "false", true}, + {"eq 1.5 1.5", "true", true}, + {"eq 1.5 2.5", "false", true}, + {"eq 1 1", "true", true}, + {"eq 1 2", "false", true}, + {"eq `xy` `xy`", "true", true}, + {"eq `xy` `xyz`", "false", true}, + {"eq .Uthree .Uthree", "true", true}, + {"eq .Uthree .Ufour", "false", true}, + {"eq 3 4 5 6 3", "true", true}, + {"eq 3 4 5 6 7", "false", true}, + {"ne true true", "false", true}, + {"ne true false", "true", true}, + {"ne 1+2i 1+2i", "false", true}, + {"ne 1+2i 1+3i", "true", true}, + {"ne 1.5 1.5", "false", true}, + {"ne 1.5 2.5", "true", true}, + {"ne 1 1", "false", true}, + {"ne 1 2", "true", true}, + {"ne `xy` `xy`", "false", true}, + {"ne `xy` `xyz`", "true", true}, + {"ne .Uthree .Uthree", "false", true}, + {"ne .Uthree .Ufour", "true", true}, + {"lt 1.5 1.5", "false", true}, + {"lt 1.5 2.5", "true", true}, + {"lt 1 1", "false", true}, + {"lt 1 2", "true", true}, + {"lt `xy` `xy`", "false", true}, + {"lt `xy` `xyz`", "true", true}, + {"lt .Uthree .Uthree", "false", true}, + {"lt .Uthree .Ufour", "true", true}, + {"le 1.5 1.5", "true", true}, + {"le 1.5 2.5", "true", true}, + {"le 2.5 1.5", "false", true}, + {"le 1 1", "true", true}, + {"le 1 2", "true", true}, + {"le 2 1", "false", true}, + {"le `xy` `xy`", "true", true}, + {"le `xy` `xyz`", "true", true}, + {"le `xyz` `xy`", "false", true}, + {"le .Uthree .Uthree", "true", true}, + {"le .Uthree .Ufour", "true", true}, + {"le .Ufour .Uthree", "false", true}, + {"gt 1.5 1.5", "false", true}, + {"gt 1.5 2.5", "false", true}, + {"gt 1 1", "false", true}, + {"gt 2 1", "true", true}, + {"gt 1 2", "false", true}, + {"gt `xy` `xy`", "false", true}, + {"gt `xy` `xyz`", "false", true}, + {"gt .Uthree .Uthree", "false", true}, + {"gt .Uthree .Ufour", "false", true}, + {"gt .Ufour .Uthree", "true", true}, + {"ge 1.5 1.5", "true", true}, + {"ge 1.5 2.5", "false", true}, + {"ge 2.5 1.5", "true", true}, + {"ge 1 1", "true", true}, + {"ge 1 2", "false", true}, + {"ge 2 1", "true", true}, + {"ge `xy` `xy`", "true", true}, + {"ge `xy` `xyz`", "false", true}, + {"ge `xyz` `xy`", "true", true}, + {"ge .Uthree .Uthree", "true", true}, + {"ge .Uthree .Ufour", "false", true}, + {"ge .Ufour .Uthree", "true", true}, + // Mixing signed and unsigned integers. 
+ {"eq .Uthree .Three", "true", true}, + {"eq .Three .Uthree", "true", true}, + {"le .Uthree .Three", "true", true}, + {"le .Three .Uthree", "true", true}, + {"ge .Uthree .Three", "true", true}, + {"ge .Three .Uthree", "true", true}, + {"lt .Uthree .Three", "false", true}, + {"lt .Three .Uthree", "false", true}, + {"gt .Uthree .Three", "false", true}, + {"gt .Three .Uthree", "false", true}, + {"eq .Ufour .Three", "false", true}, + {"lt .Ufour .Three", "false", true}, + {"gt .Ufour .Three", "true", true}, + {"eq .NegOne .Uthree", "false", true}, + {"eq .Uthree .NegOne", "false", true}, + {"ne .NegOne .Uthree", "true", true}, + {"ne .Uthree .NegOne", "true", true}, + {"lt .NegOne .Uthree", "true", true}, + {"lt .Uthree .NegOne", "false", true}, + {"le .NegOne .Uthree", "true", true}, + {"le .Uthree .NegOne", "false", true}, + {"gt .NegOne .Uthree", "false", true}, + {"gt .Uthree .NegOne", "true", true}, + {"ge .NegOne .Uthree", "false", true}, + {"ge .Uthree .NegOne", "true", true}, + {"eq (index `x` 0) 'x'", "true", true}, // The example that triggered this rule. + {"eq (index `x` 0) 'y'", "false", true}, + // Errors + {"eq `xy` 1", "", false}, // Different types. + {"eq 2 2.0", "", false}, // Different types. + {"lt true true", "", false}, // Unordered types. + {"lt 1+0i 1+0i", "", false}, // Unordered types. +} + +func TestComparison(t *testing.T) { + b := new(bytes.Buffer) + var cmpStruct = struct { + Uthree, Ufour uint + NegOne, Three int + }{3, 4, -1, 3} + for _, test := range cmpTests { + text := fmt.Sprintf("{{if %s}}true{{else}}false{{end}}", test.expr) + tmpl, err := New("empty").Parse(text) + if err != nil { + t.Fatalf("%q: %s", test.expr, err) + } + b.Reset() + err = tmpl.Execute(b, &cmpStruct) + if test.ok && err != nil { + t.Errorf("%s errored incorrectly: %s", test.expr, err) + continue + } + if !test.ok && err == nil { + t.Errorf("%s did not error", test.expr) + continue + } + if b.String() != test.truth { + t.Errorf("%s: want %s; got %s", test.expr, test.truth, b.String()) + } + } +} diff --git a/vendor/github.com/alecthomas/template/funcs.go b/vendor/github.com/alecthomas/template/funcs.go new file mode 100644 index 0000000..39ee5ed --- /dev/null +++ b/vendor/github.com/alecthomas/template/funcs.go @@ -0,0 +1,598 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "bytes" + "errors" + "fmt" + "io" + "net/url" + "reflect" + "strings" + "unicode" + "unicode/utf8" +) + +// FuncMap is the type of the map defining the mapping from names to functions. +// Each function must have either a single return value, or two return values of +// which the second has type error. In that case, if the second (error) +// return value evaluates to non-nil during execution, execution terminates and +// Execute returns that error. 
+type FuncMap map[string]interface{} + +var builtins = FuncMap{ + "and": and, + "call": call, + "html": HTMLEscaper, + "index": index, + "js": JSEscaper, + "len": length, + "not": not, + "or": or, + "print": fmt.Sprint, + "printf": fmt.Sprintf, + "println": fmt.Sprintln, + "urlquery": URLQueryEscaper, + + // Comparisons + "eq": eq, // == + "ge": ge, // >= + "gt": gt, // > + "le": le, // <= + "lt": lt, // < + "ne": ne, // != +} + +var builtinFuncs = createValueFuncs(builtins) + +// createValueFuncs turns a FuncMap into a map[string]reflect.Value +func createValueFuncs(funcMap FuncMap) map[string]reflect.Value { + m := make(map[string]reflect.Value) + addValueFuncs(m, funcMap) + return m +} + +// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values. +func addValueFuncs(out map[string]reflect.Value, in FuncMap) { + for name, fn := range in { + v := reflect.ValueOf(fn) + if v.Kind() != reflect.Func { + panic("value for " + name + " not a function") + } + if !goodFunc(v.Type()) { + panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut())) + } + out[name] = v + } +} + +// addFuncs adds to values the functions in funcs. It does no checking of the input - +// call addValueFuncs first. +func addFuncs(out, in FuncMap) { + for name, fn := range in { + out[name] = fn + } +} + +// goodFunc checks that the function or method has the right result signature. +func goodFunc(typ reflect.Type) bool { + // We allow functions with 1 result or 2 results where the second is an error. + switch { + case typ.NumOut() == 1: + return true + case typ.NumOut() == 2 && typ.Out(1) == errorType: + return true + } + return false +} + +// findFunction looks for a function in the template, and global map. +func findFunction(name string, tmpl *Template) (reflect.Value, bool) { + if tmpl != nil && tmpl.common != nil { + if fn := tmpl.execFuncs[name]; fn.IsValid() { + return fn, true + } + } + if fn := builtinFuncs[name]; fn.IsValid() { + return fn, true + } + return reflect.Value{}, false +} + +// Indexing. + +// index returns the result of indexing its first argument by the following +// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each +// indexed item must be a map, slice, or array. 
+func index(item interface{}, indices ...interface{}) (interface{}, error) { + v := reflect.ValueOf(item) + for _, i := range indices { + index := reflect.ValueOf(i) + var isNil bool + if v, isNil = indirect(v); isNil { + return nil, fmt.Errorf("index of nil pointer") + } + switch v.Kind() { + case reflect.Array, reflect.Slice, reflect.String: + var x int64 + switch index.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + x = index.Int() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + x = int64(index.Uint()) + default: + return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type()) + } + if x < 0 || x >= int64(v.Len()) { + return nil, fmt.Errorf("index out of range: %d", x) + } + v = v.Index(int(x)) + case reflect.Map: + if !index.IsValid() { + index = reflect.Zero(v.Type().Key()) + } + if !index.Type().AssignableTo(v.Type().Key()) { + return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type()) + } + if x := v.MapIndex(index); x.IsValid() { + v = x + } else { + v = reflect.Zero(v.Type().Elem()) + } + default: + return nil, fmt.Errorf("can't index item of type %s", v.Type()) + } + } + return v.Interface(), nil +} + +// Length + +// length returns the length of the item, with an error if it has no defined length. +func length(item interface{}) (int, error) { + v, isNil := indirect(reflect.ValueOf(item)) + if isNil { + return 0, fmt.Errorf("len of nil pointer") + } + switch v.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String: + return v.Len(), nil + } + return 0, fmt.Errorf("len of type %s", v.Type()) +} + +// Function invocation + +// call returns the result of evaluating the first argument as a function. +// The function must return 1 result, or 2 results, the second of which is an error. +func call(fn interface{}, args ...interface{}) (interface{}, error) { + v := reflect.ValueOf(fn) + typ := v.Type() + if typ.Kind() != reflect.Func { + return nil, fmt.Errorf("non-function of type %s", typ) + } + if !goodFunc(typ) { + return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut()) + } + numIn := typ.NumIn() + var dddType reflect.Type + if typ.IsVariadic() { + if len(args) < numIn-1 { + return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1) + } + dddType = typ.In(numIn - 1).Elem() + } else { + if len(args) != numIn { + return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn) + } + } + argv := make([]reflect.Value, len(args)) + for i, arg := range args { + value := reflect.ValueOf(arg) + // Compute the expected type. Clumsy because of variadics. + var argType reflect.Type + if !typ.IsVariadic() || i < numIn-1 { + argType = typ.In(i) + } else { + argType = dddType + } + if !value.IsValid() && canBeNil(argType) { + value = reflect.Zero(argType) + } + if !value.Type().AssignableTo(argType) { + return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType) + } + argv[i] = value + } + result := v.Call(argv) + if len(result) == 2 && !result[1].IsNil() { + return result[0].Interface(), result[1].Interface().(error) + } + return result[0].Interface(), nil +} + +// Boolean logic. + +func truth(a interface{}) bool { + t, _ := isTrue(reflect.ValueOf(a)) + return t +} + +// and computes the Boolean AND of its arguments, returning +// the first false argument it encounters, or the last argument. 
+func and(arg0 interface{}, args ...interface{}) interface{} { + if !truth(arg0) { + return arg0 + } + for i := range args { + arg0 = args[i] + if !truth(arg0) { + break + } + } + return arg0 +} + +// or computes the Boolean OR of its arguments, returning +// the first true argument it encounters, or the last argument. +func or(arg0 interface{}, args ...interface{}) interface{} { + if truth(arg0) { + return arg0 + } + for i := range args { + arg0 = args[i] + if truth(arg0) { + break + } + } + return arg0 +} + +// not returns the Boolean negation of its argument. +func not(arg interface{}) (truth bool) { + truth, _ = isTrue(reflect.ValueOf(arg)) + return !truth +} + +// Comparison. + +// TODO: Perhaps allow comparison between signed and unsigned integers. + +var ( + errBadComparisonType = errors.New("invalid type for comparison") + errBadComparison = errors.New("incompatible types for comparison") + errNoComparison = errors.New("missing argument for comparison") +) + +type kind int + +const ( + invalidKind kind = iota + boolKind + complexKind + intKind + floatKind + integerKind + stringKind + uintKind +) + +func basicKind(v reflect.Value) (kind, error) { + switch v.Kind() { + case reflect.Bool: + return boolKind, nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return intKind, nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return uintKind, nil + case reflect.Float32, reflect.Float64: + return floatKind, nil + case reflect.Complex64, reflect.Complex128: + return complexKind, nil + case reflect.String: + return stringKind, nil + } + return invalidKind, errBadComparisonType +} + +// eq evaluates the comparison a == b || a == c || ... +func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) { + v1 := reflect.ValueOf(arg1) + k1, err := basicKind(v1) + if err != nil { + return false, err + } + if len(arg2) == 0 { + return false, errNoComparison + } + for _, arg := range arg2 { + v2 := reflect.ValueOf(arg) + k2, err := basicKind(v2) + if err != nil { + return false, err + } + truth := false + if k1 != k2 { + // Special case: Can compare integer values regardless of type's sign. + switch { + case k1 == intKind && k2 == uintKind: + truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint() + case k1 == uintKind && k2 == intKind: + truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int()) + default: + return false, errBadComparison + } + } else { + switch k1 { + case boolKind: + truth = v1.Bool() == v2.Bool() + case complexKind: + truth = v1.Complex() == v2.Complex() + case floatKind: + truth = v1.Float() == v2.Float() + case intKind: + truth = v1.Int() == v2.Int() + case stringKind: + truth = v1.String() == v2.String() + case uintKind: + truth = v1.Uint() == v2.Uint() + default: + panic("invalid kind") + } + } + if truth { + return true, nil + } + } + return false, nil +} + +// ne evaluates the comparison a != b. +func ne(arg1, arg2 interface{}) (bool, error) { + // != is the inverse of ==. + equal, err := eq(arg1, arg2) + return !equal, err +} + +// lt evaluates the comparison a < b. +func lt(arg1, arg2 interface{}) (bool, error) { + v1 := reflect.ValueOf(arg1) + k1, err := basicKind(v1) + if err != nil { + return false, err + } + v2 := reflect.ValueOf(arg2) + k2, err := basicKind(v2) + if err != nil { + return false, err + } + truth := false + if k1 != k2 { + // Special case: Can compare integer values regardless of type's sign. 
+ switch { + case k1 == intKind && k2 == uintKind: + truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() + case k1 == uintKind && k2 == intKind: + truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int()) + default: + return false, errBadComparison + } + } else { + switch k1 { + case boolKind, complexKind: + return false, errBadComparisonType + case floatKind: + truth = v1.Float() < v2.Float() + case intKind: + truth = v1.Int() < v2.Int() + case stringKind: + truth = v1.String() < v2.String() + case uintKind: + truth = v1.Uint() < v2.Uint() + default: + panic("invalid kind") + } + } + return truth, nil +} + +// le evaluates the comparison <= b. +func le(arg1, arg2 interface{}) (bool, error) { + // <= is < or ==. + lessThan, err := lt(arg1, arg2) + if lessThan || err != nil { + return lessThan, err + } + return eq(arg1, arg2) +} + +// gt evaluates the comparison a > b. +func gt(arg1, arg2 interface{}) (bool, error) { + // > is the inverse of <=. + lessOrEqual, err := le(arg1, arg2) + if err != nil { + return false, err + } + return !lessOrEqual, nil +} + +// ge evaluates the comparison a >= b. +func ge(arg1, arg2 interface{}) (bool, error) { + // >= is the inverse of <. + lessThan, err := lt(arg1, arg2) + if err != nil { + return false, err + } + return !lessThan, nil +} + +// HTML escaping. + +var ( + htmlQuot = []byte(""") // shorter than """ + htmlApos = []byte("'") // shorter than "'" and apos was not in HTML until HTML5 + htmlAmp = []byte("&") + htmlLt = []byte("<") + htmlGt = []byte(">") +) + +// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b. +func HTMLEscape(w io.Writer, b []byte) { + last := 0 + for i, c := range b { + var html []byte + switch c { + case '"': + html = htmlQuot + case '\'': + html = htmlApos + case '&': + html = htmlAmp + case '<': + html = htmlLt + case '>': + html = htmlGt + default: + continue + } + w.Write(b[last:i]) + w.Write(html) + last = i + 1 + } + w.Write(b[last:]) +} + +// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s. +func HTMLEscapeString(s string) string { + // Avoid allocation if we can. + if strings.IndexAny(s, `'"&<>`) < 0 { + return s + } + var b bytes.Buffer + HTMLEscape(&b, []byte(s)) + return b.String() +} + +// HTMLEscaper returns the escaped HTML equivalent of the textual +// representation of its arguments. +func HTMLEscaper(args ...interface{}) string { + return HTMLEscapeString(evalArgs(args)) +} + +// JavaScript escaping. + +var ( + jsLowUni = []byte(`\u00`) + hex = []byte("0123456789ABCDEF") + + jsBackslash = []byte(`\\`) + jsApos = []byte(`\'`) + jsQuot = []byte(`\"`) + jsLt = []byte(`\x3C`) + jsGt = []byte(`\x3E`) +) + +// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b. +func JSEscape(w io.Writer, b []byte) { + last := 0 + for i := 0; i < len(b); i++ { + c := b[i] + + if !jsIsSpecial(rune(c)) { + // fast path: nothing to do + continue + } + w.Write(b[last:i]) + + if c < utf8.RuneSelf { + // Quotes, slashes and angle brackets get quoted. + // Control characters get written as \u00XX. + switch c { + case '\\': + w.Write(jsBackslash) + case '\'': + w.Write(jsApos) + case '"': + w.Write(jsQuot) + case '<': + w.Write(jsLt) + case '>': + w.Write(jsGt) + default: + w.Write(jsLowUni) + t, b := c>>4, c&0x0f + w.Write(hex[t : t+1]) + w.Write(hex[b : b+1]) + } + } else { + // Unicode rune. 
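+ // Printable runes pass through unchanged; everything else becomes a
+ // \uXXXX escape. For illustration, JSEscapeString("<日\x01>") yields
+ // `\x3C日\u0001\x3E`.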
+ r, size := utf8.DecodeRune(b[i:])
+ if unicode.IsPrint(r) {
+ w.Write(b[i : i+size])
+ } else {
+ fmt.Fprintf(w, "\\u%04X", r)
+ }
+ i += size - 1
+ }
+ last = i + 1
+ }
+ w.Write(b[last:])
+}
+
+// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
+func JSEscapeString(s string) string {
+ // Avoid allocation if we can.
+ if strings.IndexFunc(s, jsIsSpecial) < 0 {
+ return s
+ }
+ var b bytes.Buffer
+ JSEscape(&b, []byte(s))
+ return b.String()
+}
+
+func jsIsSpecial(r rune) bool {
+ switch r {
+ case '\\', '\'', '"', '<', '>':
+ return true
+ }
+ return r < ' ' || utf8.RuneSelf <= r
+}
+
+// JSEscaper returns the escaped JavaScript equivalent of the textual
+// representation of its arguments.
+func JSEscaper(args ...interface{}) string {
+ return JSEscapeString(evalArgs(args))
+}
+
+// URLQueryEscaper returns the escaped value of the textual representation of
+// its arguments in a form suitable for embedding in a URL query.
+func URLQueryEscaper(args ...interface{}) string {
+ return url.QueryEscape(evalArgs(args))
+}
+
+// evalArgs formats the list of arguments into a string. It is therefore equivalent to
+// fmt.Sprint(args...)
+// except that each argument is indirected (if a pointer), as required,
+// using the same rules as the default string evaluation during template
+// execution.
+func evalArgs(args []interface{}) string {
+ ok := false
+ var s string
+ // Fast path for simple common case.
+ if len(args) == 1 {
+ s, ok = args[0].(string)
+ }
+ if !ok {
+ for i, arg := range args {
+ a, ok := printableValue(reflect.ValueOf(arg))
+ if ok {
+ args[i] = a
+ } // else let fmt do its thing
+ }
+ s = fmt.Sprint(args...)
+ }
+ return s
+}
diff --git a/vendor/github.com/alecthomas/template/helper.go b/vendor/github.com/alecthomas/template/helper.go
new file mode 100644
index 0000000..3636fb5
--- /dev/null
+++ b/vendor/github.com/alecthomas/template/helper.go
@@ -0,0 +1,108 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Helper functions to make constructing templates easier.
+
+package template
+
+import (
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+)
+
+// Functions and methods to parse templates.
+
+// Must is a helper that wraps a call to a function returning (*Template, error)
+// and panics if the error is non-nil. It is intended for use in variable
+// initializations such as
+// var t = template.Must(template.New("name").Parse("text"))
+func Must(t *Template, err error) *Template {
+ if err != nil {
+ panic(err)
+ }
+ return t
+}
+
+// ParseFiles creates a new Template and parses the template definitions from
+// the named files. The returned template's name will have the (base) name and
+// (parsed) contents of the first file. There must be at least one file.
+// If an error occurs, parsing stops and the returned *Template is nil.
+func ParseFiles(filenames ...string) (*Template, error) {
+ return parseFiles(nil, filenames...)
+}
+
+// ParseFiles parses the named files and associates the resulting templates with
+// t. If an error occurs, parsing stops and the returned template is nil;
+// otherwise it is t. There must be at least one file.
+func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
+ return parseFiles(t, filenames...)
+}
+
+// parseFiles is the helper for the method and function. If the argument
+// template is nil, it is created from the first file.
+func parseFiles(t *Template, filenames ...string) (*Template, error) {
+ if len(filenames) == 0 {
+ // Not really a problem, but be consistent.
+ return nil, fmt.Errorf("template: no files named in call to ParseFiles")
+ }
+ for _, filename := range filenames {
+ b, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ s := string(b)
+ name := filepath.Base(filename)
+ // First template becomes return value if not already defined,
+ // and we use that one for subsequent New calls to associate
+ // all the templates together. Also, if this file has the same name
+ // as t, this file becomes the contents of t, so
+ // t, err := New(name).Funcs(xxx).ParseFiles(name)
+ // works. Otherwise we create a new template associated with t.
+ var tmpl *Template
+ if t == nil {
+ t = New(name)
+ }
+ if name == t.Name() {
+ tmpl = t
+ } else {
+ tmpl = t.New(name)
+ }
+ _, err = tmpl.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return t, nil
+}
+
+// ParseGlob creates a new Template and parses the template definitions from the
+// files identified by the pattern, which must match at least one file. The
+// returned template will have the (base) name and (parsed) contents of the
+// first file matched by the pattern. ParseGlob is equivalent to calling
+// ParseFiles with the list of files matched by the pattern.
+func ParseGlob(pattern string) (*Template, error) {
+ return parseGlob(nil, pattern)
+}
+
+// ParseGlob parses the template definitions in the files identified by the
+// pattern and associates the resulting templates with t. The pattern is
+// processed by filepath.Glob and must match at least one file. ParseGlob is
+// equivalent to calling t.ParseFiles with the list of files matched by the
+// pattern.
+func (t *Template) ParseGlob(pattern string) (*Template, error) {
+ return parseGlob(t, pattern)
+}
+
+// parseGlob is the implementation of the function and method ParseGlob.
+func parseGlob(t *Template, pattern string) (*Template, error) {
+ filenames, err := filepath.Glob(pattern)
+ if err != nil {
+ return nil, err
+ }
+ if len(filenames) == 0 {
+ return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
+ }
+ return parseFiles(t, filenames...)
+}
diff --git a/vendor/github.com/alecthomas/template/multi_test.go b/vendor/github.com/alecthomas/template/multi_test.go
new file mode 100644
index 0000000..8d10362
--- /dev/null
+++ b/vendor/github.com/alecthomas/template/multi_test.go
@@ -0,0 +1,293 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+// Tests for multiple-template parsing and execution.
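+//
+// As a minimal illustrative sketch of the API under test (the template
+// text and names here are arbitrary, not taken from the tests below):
+//
+// t := Must(New("root").Parse(`{{define "T"}}Hi {{.}}{{end}}{{template "T" "you"}}`))
+// t.Execute(os.Stdout, nil) // writes "Hi you" (os assumed imported)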
+ +import ( + "bytes" + "fmt" + "strings" + "testing" + + "github.com/alecthomas/template/parse" +) + +const ( + noError = true + hasError = false +) + +type multiParseTest struct { + name string + input string + ok bool + names []string + results []string +} + +var multiParseTests = []multiParseTest{ + {"empty", "", noError, + nil, + nil}, + {"one", `{{define "foo"}} FOO {{end}}`, noError, + []string{"foo"}, + []string{" FOO "}}, + {"two", `{{define "foo"}} FOO {{end}}{{define "bar"}} BAR {{end}}`, noError, + []string{"foo", "bar"}, + []string{" FOO ", " BAR "}}, + // errors + {"missing end", `{{define "foo"}} FOO `, hasError, + nil, + nil}, + {"malformed name", `{{define "foo}} FOO `, hasError, + nil, + nil}, +} + +func TestMultiParse(t *testing.T) { + for _, test := range multiParseTests { + template, err := New("root").Parse(test.input) + switch { + case err == nil && !test.ok: + t.Errorf("%q: expected error; got none", test.name) + continue + case err != nil && test.ok: + t.Errorf("%q: unexpected error: %v", test.name, err) + continue + case err != nil && !test.ok: + // expected error, got one + if *debug { + fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err) + } + continue + } + if template == nil { + continue + } + if len(template.tmpl) != len(test.names)+1 { // +1 for root + t.Errorf("%s: wrong number of templates; wanted %d got %d", test.name, len(test.names), len(template.tmpl)) + continue + } + for i, name := range test.names { + tmpl, ok := template.tmpl[name] + if !ok { + t.Errorf("%s: can't find template %q", test.name, name) + continue + } + result := tmpl.Root.String() + if result != test.results[i] { + t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.results[i]) + } + } + } +} + +var multiExecTests = []execTest{ + {"empty", "", "", nil, true}, + {"text", "some text", "some text", nil, true}, + {"invoke x", `{{template "x" .SI}}`, "TEXT", tVal, true}, + {"invoke x no args", `{{template "x"}}`, "TEXT", tVal, true}, + {"invoke dot int", `{{template "dot" .I}}`, "17", tVal, true}, + {"invoke dot []int", `{{template "dot" .SI}}`, "[3 4 5]", tVal, true}, + {"invoke dotV", `{{template "dotV" .U}}`, "v", tVal, true}, + {"invoke nested int", `{{template "nested" .I}}`, "17", tVal, true}, + {"variable declared by template", `{{template "nested" $x:=.SI}},{{index $x 1}}`, "[3 4 5],4", tVal, true}, + + // User-defined function: test argument evaluator. + {"testFunc literal", `{{oneArg "joe"}}`, "oneArg=joe", tVal, true}, + {"testFunc .", `{{oneArg .}}`, "oneArg=joe", "joe", true}, +} + +// These strings are also in testdata/*. +const multiText1 = ` + {{define "x"}}TEXT{{end}} + {{define "dotV"}}{{.V}}{{end}} +` + +const multiText2 = ` + {{define "dot"}}{{.}}{{end}} + {{define "nested"}}{{template "dot" .}}{{end}} +` + +func TestMultiExecute(t *testing.T) { + // Declare a couple of templates first. 
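+ // Parsing multiText2 into the same *Template associates its "dot" and
+ // "nested" definitions with the set already holding "x" and "dotV", so
+ // the invocations in multiExecTests can resolve across both constants.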
+ template, err := New("root").Parse(multiText1) + if err != nil { + t.Fatalf("parse error for 1: %s", err) + } + _, err = template.Parse(multiText2) + if err != nil { + t.Fatalf("parse error for 2: %s", err) + } + testExecute(multiExecTests, template, t) +} + +func TestParseFiles(t *testing.T) { + _, err := ParseFiles("DOES NOT EXIST") + if err == nil { + t.Error("expected error for non-existent file; got none") + } + template := New("root") + _, err = template.ParseFiles("testdata/file1.tmpl", "testdata/file2.tmpl") + if err != nil { + t.Fatalf("error parsing files: %v", err) + } + testExecute(multiExecTests, template, t) +} + +func TestParseGlob(t *testing.T) { + _, err := ParseGlob("DOES NOT EXIST") + if err == nil { + t.Error("expected error for non-existent file; got none") + } + _, err = New("error").ParseGlob("[x") + if err == nil { + t.Error("expected error for bad pattern; got none") + } + template := New("root") + _, err = template.ParseGlob("testdata/file*.tmpl") + if err != nil { + t.Fatalf("error parsing files: %v", err) + } + testExecute(multiExecTests, template, t) +} + +// In these tests, actual content (not just template definitions) comes from the parsed files. + +var templateFileExecTests = []execTest{ + {"test", `{{template "tmpl1.tmpl"}}{{template "tmpl2.tmpl"}}`, "template1\n\ny\ntemplate2\n\nx\n", 0, true}, +} + +func TestParseFilesWithData(t *testing.T) { + template, err := New("root").ParseFiles("testdata/tmpl1.tmpl", "testdata/tmpl2.tmpl") + if err != nil { + t.Fatalf("error parsing files: %v", err) + } + testExecute(templateFileExecTests, template, t) +} + +func TestParseGlobWithData(t *testing.T) { + template, err := New("root").ParseGlob("testdata/tmpl*.tmpl") + if err != nil { + t.Fatalf("error parsing files: %v", err) + } + testExecute(templateFileExecTests, template, t) +} + +const ( + cloneText1 = `{{define "a"}}{{template "b"}}{{template "c"}}{{end}}` + cloneText2 = `{{define "b"}}b{{end}}` + cloneText3 = `{{define "c"}}root{{end}}` + cloneText4 = `{{define "c"}}clone{{end}}` +) + +func TestClone(t *testing.T) { + // Create some templates and clone the root. + root, err := New("root").Parse(cloneText1) + if err != nil { + t.Fatal(err) + } + _, err = root.Parse(cloneText2) + if err != nil { + t.Fatal(err) + } + clone := Must(root.Clone()) + // Add variants to both. + _, err = root.Parse(cloneText3) + if err != nil { + t.Fatal(err) + } + _, err = clone.Parse(cloneText4) + if err != nil { + t.Fatal(err) + } + // Verify that the clone is self-consistent. + for k, v := range clone.tmpl { + if k == clone.name && v.tmpl[k] != clone { + t.Error("clone does not contain root") + } + if v != v.tmpl[v.name] { + t.Errorf("clone does not contain self for %q", k) + } + } + // Execute root. + var b bytes.Buffer + err = root.ExecuteTemplate(&b, "a", 0) + if err != nil { + t.Fatal(err) + } + if b.String() != "broot" { + t.Errorf("expected %q got %q", "broot", b.String()) + } + // Execute copy. + b.Reset() + err = clone.ExecuteTemplate(&b, "a", 0) + if err != nil { + t.Fatal(err) + } + if b.String() != "bclone" { + t.Errorf("expected %q got %q", "bclone", b.String()) + } +} + +func TestAddParseTree(t *testing.T) { + // Create some templates. + root, err := New("root").Parse(cloneText1) + if err != nil { + t.Fatal(err) + } + _, err = root.Parse(cloneText2) + if err != nil { + t.Fatal(err) + } + // Add a new parse tree. 
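+ // parse.Parse is used directly (with the package's builtins as the
+ // function map) so the resulting tree can be grafted onto the existing
+ // template set through AddParseTree rather than through Parse.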
+ tree, err := parse.Parse("cloneText3", cloneText3, "", "", nil, builtins)
+ if err != nil {
+ t.Fatal(err)
+ }
+ added, err := root.AddParseTree("c", tree["c"])
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Execute.
+ var b bytes.Buffer
+ err = added.ExecuteTemplate(&b, "a", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if b.String() != "broot" {
+ t.Errorf("expected %q got %q", "broot", b.String())
+ }
+}
+
+// Issue 7032
+func TestAddParseTreeToUnparsedTemplate(t *testing.T) {
+ master := "{{define \"master\"}}{{end}}"
+ tmpl := New("master")
+ tree, err := parse.Parse("master", master, "", "", nil)
+ if err != nil {
+ t.Fatalf("unexpected parse err: %v", err)
+ }
+ masterTree := tree["master"]
+ tmpl.AddParseTree("master", masterTree) // used to panic
+}
+
+func TestRedefinition(t *testing.T) {
+ var tmpl *Template
+ var err error
+ if tmpl, err = New("tmpl1").Parse(`{{define "test"}}foo{{end}}`); err != nil {
+ t.Fatalf("parse 1: %v", err)
+ }
+ if _, err = tmpl.Parse(`{{define "test"}}bar{{end}}`); err == nil {
+ t.Fatal("expected error")
+ }
+ if !strings.Contains(err.Error(), "redefinition") {
+ t.Fatalf("expected redefinition error; got %v", err)
+ }
+ if _, err = tmpl.New("tmpl2").Parse(`{{define "test"}}bar{{end}}`); err == nil {
+ t.Fatal("expected error")
+ }
+ if !strings.Contains(err.Error(), "redefinition") {
+ t.Fatalf("expected redefinition error; got %v", err)
+ }
+}
diff --git a/vendor/github.com/alecthomas/template/parse/lex.go b/vendor/github.com/alecthomas/template/parse/lex.go
new file mode 100644
index 0000000..55f1c05
--- /dev/null
+++ b/vendor/github.com/alecthomas/template/parse/lex.go
@@ -0,0 +1,556 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parse
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// item represents a token or text string returned from the scanner.
+type item struct {
+ typ itemType // The type of this item.
+ pos Pos // The starting position, in bytes, of this item in the input string.
+ val string // The value of this item.
+}
+
+func (i item) String() string {
+ switch {
+ case i.typ == itemEOF:
+ return "EOF"
+ case i.typ == itemError:
+ return i.val
+ case i.typ > itemKeyword:
+ return fmt.Sprintf("<%s>", i.val)
+ case len(i.val) > 10:
+ return fmt.Sprintf("%.10q...", i.val)
+ }
+ return fmt.Sprintf("%q", i.val)
+}
+
+// itemType identifies the type of lex items.
+type itemType int
+
+const (
+ itemError itemType = iota // error occurred; value is text of error
+ itemBool // boolean constant
+ itemChar // printable ASCII character; grab bag for comma etc.
+ itemCharConstant // character constant
+ itemComplex // complex constant (1+2i); imaginary is just a number
+ itemColonEquals // colon-equals (':=') introducing a declaration
+ itemEOF
+ itemField // alphanumeric identifier starting with '.'
+ itemIdentifier // alphanumeric identifier not starting with '.'
+ itemLeftDelim // left action delimiter
+ itemLeftParen // '(' inside action
+ itemNumber // simple number, including imaginary
+ itemPipe // pipe symbol
+ itemRawString // raw quoted string (includes quotes)
+ itemRightDelim // right action delimiter
+ itemElideNewline // elide newline after right delim
+ itemRightParen // ')' inside action
+ itemSpace // run of spaces separating arguments
+ itemString // quoted string (includes quotes)
+ itemText // plain text
+ itemVariable // variable starting with '$', such as '$' or '$1' or '$hello'
+ // Keywords appear after all the rest.
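+ // The ordering is load-bearing: item.String above and lexIdentifier both
+ // compare against itemKeyword, so every keyword constant must sort after
+ // this sentinel. For illustration, item{itemRange, 0, "range"} prints
+ // as "<range>".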
+ itemKeyword // used only to delimit the keywords + itemDot // the cursor, spelled '.' + itemDefine // define keyword + itemElse // else keyword + itemEnd // end keyword + itemIf // if keyword + itemNil // the untyped nil constant, easiest to treat as a keyword + itemRange // range keyword + itemTemplate // template keyword + itemWith // with keyword +) + +var key = map[string]itemType{ + ".": itemDot, + "define": itemDefine, + "else": itemElse, + "end": itemEnd, + "if": itemIf, + "range": itemRange, + "nil": itemNil, + "template": itemTemplate, + "with": itemWith, +} + +const eof = -1 + +// stateFn represents the state of the scanner as a function that returns the next state. +type stateFn func(*lexer) stateFn + +// lexer holds the state of the scanner. +type lexer struct { + name string // the name of the input; used only for error reports + input string // the string being scanned + leftDelim string // start of action + rightDelim string // end of action + state stateFn // the next lexing function to enter + pos Pos // current position in the input + start Pos // start position of this item + width Pos // width of last rune read from input + lastPos Pos // position of most recent item returned by nextItem + items chan item // channel of scanned items + parenDepth int // nesting depth of ( ) exprs +} + +// next returns the next rune in the input. +func (l *lexer) next() rune { + if int(l.pos) >= len(l.input) { + l.width = 0 + return eof + } + r, w := utf8.DecodeRuneInString(l.input[l.pos:]) + l.width = Pos(w) + l.pos += l.width + return r +} + +// peek returns but does not consume the next rune in the input. +func (l *lexer) peek() rune { + r := l.next() + l.backup() + return r +} + +// backup steps back one rune. Can only be called once per call of next. +func (l *lexer) backup() { + l.pos -= l.width +} + +// emit passes an item back to the client. +func (l *lexer) emit(t itemType) { + l.items <- item{t, l.start, l.input[l.start:l.pos]} + l.start = l.pos +} + +// ignore skips over the pending input before this point. +func (l *lexer) ignore() { + l.start = l.pos +} + +// accept consumes the next rune if it's from the valid set. +func (l *lexer) accept(valid string) bool { + if strings.IndexRune(valid, l.next()) >= 0 { + return true + } + l.backup() + return false +} + +// acceptRun consumes a run of runes from the valid set. +func (l *lexer) acceptRun(valid string) { + for strings.IndexRune(valid, l.next()) >= 0 { + } + l.backup() +} + +// lineNumber reports which line we're on, based on the position of +// the previous item returned by nextItem. Doing it this way +// means we don't have to worry about peek double counting. +func (l *lexer) lineNumber() int { + return 1 + strings.Count(l.input[:l.lastPos], "\n") +} + +// errorf returns an error token and terminates the scan by passing +// back a nil pointer that will be the next state, terminating l.nextItem. +func (l *lexer) errorf(format string, args ...interface{}) stateFn { + l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} + return nil +} + +// nextItem returns the next item from the input. +func (l *lexer) nextItem() item { + item := <-l.items + l.lastPos = item.pos + return item +} + +// lex creates a new scanner for the input string. 
+func lex(name, input, left, right string) *lexer { + if left == "" { + left = leftDelim + } + if right == "" { + right = rightDelim + } + l := &lexer{ + name: name, + input: input, + leftDelim: left, + rightDelim: right, + items: make(chan item), + } + go l.run() + return l +} + +// run runs the state machine for the lexer. +func (l *lexer) run() { + for l.state = lexText; l.state != nil; { + l.state = l.state(l) + } +} + +// state functions + +const ( + leftDelim = "{{" + rightDelim = "}}" + leftComment = "/*" + rightComment = "*/" +) + +// lexText scans until an opening action delimiter, "{{". +func lexText(l *lexer) stateFn { + for { + if strings.HasPrefix(l.input[l.pos:], l.leftDelim) { + if l.pos > l.start { + l.emit(itemText) + } + return lexLeftDelim + } + if l.next() == eof { + break + } + } + // Correctly reached EOF. + if l.pos > l.start { + l.emit(itemText) + } + l.emit(itemEOF) + return nil +} + +// lexLeftDelim scans the left delimiter, which is known to be present. +func lexLeftDelim(l *lexer) stateFn { + l.pos += Pos(len(l.leftDelim)) + if strings.HasPrefix(l.input[l.pos:], leftComment) { + return lexComment + } + l.emit(itemLeftDelim) + l.parenDepth = 0 + return lexInsideAction +} + +// lexComment scans a comment. The left comment marker is known to be present. +func lexComment(l *lexer) stateFn { + l.pos += Pos(len(leftComment)) + i := strings.Index(l.input[l.pos:], rightComment) + if i < 0 { + return l.errorf("unclosed comment") + } + l.pos += Pos(i + len(rightComment)) + if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) { + return l.errorf("comment ends before closing delimiter") + + } + l.pos += Pos(len(l.rightDelim)) + l.ignore() + return lexText +} + +// lexRightDelim scans the right delimiter, which is known to be present. +func lexRightDelim(l *lexer) stateFn { + l.pos += Pos(len(l.rightDelim)) + l.emit(itemRightDelim) + if l.peek() == '\\' { + l.pos++ + l.emit(itemElideNewline) + } + return lexText +} + +// lexInsideAction scans the elements inside action delimiters. +func lexInsideAction(l *lexer) stateFn { + // Either number, quoted string, or identifier. + // Spaces separate arguments; runs of spaces turn into itemSpace. + // Pipe symbols separate and are emitted. + if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) { + if l.parenDepth == 0 { + return lexRightDelim + } + return l.errorf("unclosed left paren") + } + switch r := l.next(); { + case r == eof || isEndOfLine(r): + return l.errorf("unclosed action") + case isSpace(r): + return lexSpace + case r == ':': + if l.next() != '=' { + return l.errorf("expected :=") + } + l.emit(itemColonEquals) + case r == '|': + l.emit(itemPipe) + case r == '"': + return lexQuote + case r == '`': + return lexRawQuote + case r == '$': + return lexVariable + case r == '\'': + return lexChar + case r == '.': + // special look-ahead for ".field" so we don't break l.backup(). + if l.pos < Pos(len(l.input)) { + r := l.input[l.pos] + if r < '0' || '9' < r { + return lexField + } + } + fallthrough // '.' can start a number. 
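+ // The sign/digit case below backs up before handing off, so lexNumber
+ // (and lexIdentifier further down) rescan the token from its first
+ // character.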
+ case r == '+' || r == '-' || ('0' <= r && r <= '9'): + l.backup() + return lexNumber + case isAlphaNumeric(r): + l.backup() + return lexIdentifier + case r == '(': + l.emit(itemLeftParen) + l.parenDepth++ + return lexInsideAction + case r == ')': + l.emit(itemRightParen) + l.parenDepth-- + if l.parenDepth < 0 { + return l.errorf("unexpected right paren %#U", r) + } + return lexInsideAction + case r <= unicode.MaxASCII && unicode.IsPrint(r): + l.emit(itemChar) + return lexInsideAction + default: + return l.errorf("unrecognized character in action: %#U", r) + } + return lexInsideAction +} + +// lexSpace scans a run of space characters. +// One space has already been seen. +func lexSpace(l *lexer) stateFn { + for isSpace(l.peek()) { + l.next() + } + l.emit(itemSpace) + return lexInsideAction +} + +// lexIdentifier scans an alphanumeric. +func lexIdentifier(l *lexer) stateFn { +Loop: + for { + switch r := l.next(); { + case isAlphaNumeric(r): + // absorb. + default: + l.backup() + word := l.input[l.start:l.pos] + if !l.atTerminator() { + return l.errorf("bad character %#U", r) + } + switch { + case key[word] > itemKeyword: + l.emit(key[word]) + case word[0] == '.': + l.emit(itemField) + case word == "true", word == "false": + l.emit(itemBool) + default: + l.emit(itemIdentifier) + } + break Loop + } + } + return lexInsideAction +} + +// lexField scans a field: .Alphanumeric. +// The . has been scanned. +func lexField(l *lexer) stateFn { + return lexFieldOrVariable(l, itemField) +} + +// lexVariable scans a Variable: $Alphanumeric. +// The $ has been scanned. +func lexVariable(l *lexer) stateFn { + if l.atTerminator() { // Nothing interesting follows -> "$". + l.emit(itemVariable) + return lexInsideAction + } + return lexFieldOrVariable(l, itemVariable) +} + +// lexVariable scans a field or variable: [.$]Alphanumeric. +// The . or $ has been scanned. +func lexFieldOrVariable(l *lexer, typ itemType) stateFn { + if l.atTerminator() { // Nothing interesting follows -> "." or "$". + if typ == itemVariable { + l.emit(itemVariable) + } else { + l.emit(itemDot) + } + return lexInsideAction + } + var r rune + for { + r = l.next() + if !isAlphaNumeric(r) { + l.backup() + break + } + } + if !l.atTerminator() { + return l.errorf("bad character %#U", r) + } + l.emit(typ) + return lexInsideAction +} + +// atTerminator reports whether the input is at valid termination character to +// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases +// like "$x+2" not being acceptable without a space, in case we decide one +// day to implement arithmetic. +func (l *lexer) atTerminator() bool { + r := l.peek() + if isSpace(r) || isEndOfLine(r) { + return true + } + switch r { + case eof, '.', ',', '|', ':', ')', '(': + return true + } + // Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will + // succeed but should fail) but only in extremely rare cases caused by willfully + // bad choice of delimiter. + if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r { + return true + } + return false +} + +// lexChar scans a character constant. The initial quote is already +// scanned. Syntax checking is done by the parser. 
+func lexChar(l *lexer) stateFn { +Loop: + for { + switch l.next() { + case '\\': + if r := l.next(); r != eof && r != '\n' { + break + } + fallthrough + case eof, '\n': + return l.errorf("unterminated character constant") + case '\'': + break Loop + } + } + l.emit(itemCharConstant) + return lexInsideAction +} + +// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This +// isn't a perfect number scanner - for instance it accepts "." and "0x0.2" +// and "089" - but when it's wrong the input is invalid and the parser (via +// strconv) will notice. +func lexNumber(l *lexer) stateFn { + if !l.scanNumber() { + return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) + } + if sign := l.peek(); sign == '+' || sign == '-' { + // Complex: 1+2i. No spaces, must end in 'i'. + if !l.scanNumber() || l.input[l.pos-1] != 'i' { + return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) + } + l.emit(itemComplex) + } else { + l.emit(itemNumber) + } + return lexInsideAction +} + +func (l *lexer) scanNumber() bool { + // Optional leading sign. + l.accept("+-") + // Is it hex? + digits := "0123456789" + if l.accept("0") && l.accept("xX") { + digits = "0123456789abcdefABCDEF" + } + l.acceptRun(digits) + if l.accept(".") { + l.acceptRun(digits) + } + if l.accept("eE") { + l.accept("+-") + l.acceptRun("0123456789") + } + // Is it imaginary? + l.accept("i") + // Next thing mustn't be alphanumeric. + if isAlphaNumeric(l.peek()) { + l.next() + return false + } + return true +} + +// lexQuote scans a quoted string. +func lexQuote(l *lexer) stateFn { +Loop: + for { + switch l.next() { + case '\\': + if r := l.next(); r != eof && r != '\n' { + break + } + fallthrough + case eof, '\n': + return l.errorf("unterminated quoted string") + case '"': + break Loop + } + } + l.emit(itemString) + return lexInsideAction +} + +// lexRawQuote scans a raw quoted string. +func lexRawQuote(l *lexer) stateFn { +Loop: + for { + switch l.next() { + case eof, '\n': + return l.errorf("unterminated raw quoted string") + case '`': + break Loop + } + } + l.emit(itemRawString) + return lexInsideAction +} + +// isSpace reports whether r is a space character. +func isSpace(r rune) bool { + return r == ' ' || r == '\t' +} + +// isEndOfLine reports whether r is an end-of-line character. +func isEndOfLine(r rune) bool { + return r == '\r' || r == '\n' +} + +// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore. +func isAlphaNumeric(r rune) bool { + return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) +} diff --git a/vendor/github.com/alecthomas/template/parse/lex_test.go b/vendor/github.com/alecthomas/template/parse/lex_test.go new file mode 100644 index 0000000..3b92107 --- /dev/null +++ b/vendor/github.com/alecthomas/template/parse/lex_test.go @@ -0,0 +1,468 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package parse + +import ( + "fmt" + "testing" +) + +// Make the types prettyprint. 
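+// Types missing from the map fall back to "item%d" formatting: for
+// example, itemColonEquals prints as ":=", while an unmapped value such
+// as itemType(99) would print as "item99".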
+var itemName = map[itemType]string{
+ itemError: "error",
+ itemBool: "bool",
+ itemChar: "char",
+ itemCharConstant: "charconst",
+ itemComplex: "complex",
+ itemColonEquals: ":=",
+ itemEOF: "EOF",
+ itemField: "field",
+ itemIdentifier: "identifier",
+ itemLeftDelim: "left delim",
+ itemLeftParen: "(",
+ itemNumber: "number",
+ itemPipe: "pipe",
+ itemRawString: "raw string",
+ itemRightDelim: "right delim",
+ itemElideNewline: "elide newline",
+ itemRightParen: ")",
+ itemSpace: "space",
+ itemString: "string",
+ itemVariable: "variable",
+
+ // keywords
+ itemDot: ".",
+ itemDefine: "define",
+ itemElse: "else",
+ itemIf: "if",
+ itemEnd: "end",
+ itemNil: "nil",
+ itemRange: "range",
+ itemTemplate: "template",
+ itemWith: "with",
+}
+
+func (i itemType) String() string {
+ s := itemName[i]
+ if s == "" {
+ return fmt.Sprintf("item%d", int(i))
+ }
+ return s
+}
+
+type lexTest struct {
+ name string
+ input string
+ items []item
+}
+
+var (
+ tEOF = item{itemEOF, 0, ""}
+ tFor = item{itemIdentifier, 0, "for"}
+ tLeft = item{itemLeftDelim, 0, "{{"}
+ tLpar = item{itemLeftParen, 0, "("}
+ tPipe = item{itemPipe, 0, "|"}
+ tQuote = item{itemString, 0, `"abc \n\t\" "`}
+ tRange = item{itemRange, 0, "range"}
+ tRight = item{itemRightDelim, 0, "}}"}
+ tElideNewline = item{itemElideNewline, 0, "\\"}
+ tRpar = item{itemRightParen, 0, ")"}
+ tSpace = item{itemSpace, 0, " "}
+ raw = "`" + `abc\n\t\" ` + "`"
+ tRawQuote = item{itemRawString, 0, raw}
+)
+
+var lexTests = []lexTest{
+ {"empty", "", []item{tEOF}},
+ {"spaces", " \t\n", []item{{itemText, 0, " \t\n"}, tEOF}},
+ {"text", `now is the time`, []item{{itemText, 0, "now is the time"}, tEOF}},
+ {"elide newline", "{{}}\\", []item{tLeft, tRight, tElideNewline, tEOF}},
+ {"text with comment", "hello-{{/* this is a comment */}}-world", []item{
+ {itemText, 0, "hello-"},
+ {itemText, 0, "-world"},
+ tEOF,
+ }},
+ {"punctuation", "{{,@% }}", []item{
+ tLeft,
+ {itemChar, 0, ","},
+ {itemChar, 0, "@"},
+ {itemChar, 0, "%"},
+ tSpace,
+ tRight,
+ tEOF,
+ }},
+ {"parens", "{{((3))}}", []item{
+ tLeft,
+ tLpar,
+ tLpar,
+ {itemNumber, 0, "3"},
+ tRpar,
+ tRpar,
+ tRight,
+ tEOF,
+ }},
+ {"empty action", `{{}}`, []item{tLeft, tRight, tEOF}},
+ {"for", `{{for}}`, []item{tLeft, tFor, tRight, tEOF}},
+ {"quote", `{{"abc \n\t\" "}}`, []item{tLeft, tQuote, tRight, tEOF}},
+ {"raw quote", "{{" + raw + "}}", []item{tLeft, tRawQuote, tRight, tEOF}},
+ {"numbers", "{{1 02 0x14 -7.2i 1e3 +1.2e-4 4.2i 1+2i}}", []item{
+ tLeft,
+ {itemNumber, 0, "1"},
+ tSpace,
+ {itemNumber, 0, "02"},
+ tSpace,
+ {itemNumber, 0, "0x14"},
+ tSpace,
+ {itemNumber, 0, "-7.2i"},
+ tSpace,
+ {itemNumber, 0, "1e3"},
+ tSpace,
+ {itemNumber, 0, "+1.2e-4"},
+ tSpace,
+ {itemNumber, 0, "4.2i"},
+ tSpace,
+ {itemComplex, 0, "1+2i"},
+ tRight,
+ tEOF,
+ }},
+ {"characters", `{{'a' '\n' '\'' '\\' '\u00FF' '\xFF' '本'}}`, []item{
+ tLeft,
+ {itemCharConstant, 0, `'a'`},
+ tSpace,
+ {itemCharConstant, 0, `'\n'`},
+ tSpace,
+ {itemCharConstant, 0, `'\''`},
+ tSpace,
+ {itemCharConstant, 0, `'\\'`},
+ tSpace,
+ {itemCharConstant, 0, `'\u00FF'`},
+ tSpace,
+ {itemCharConstant, 0, `'\xFF'`},
+ tSpace,
+ {itemCharConstant, 0, `'本'`},
+ tRight,
+ tEOF,
+ }},
+ {"bools", "{{true false}}", []item{
+ tLeft,
+ {itemBool, 0, "true"},
+ tSpace,
+ {itemBool, 0, "false"},
+ tRight,
+ tEOF,
+ }},
+ {"dot", "{{.}}", []item{
+ tLeft,
+ {itemDot, 0, "."},
+ tRight,
+ tEOF,
+ }},
+ {"nil", "{{nil}}", []item{
+ tLeft,
+ {itemNil, 0, "nil"},
+ tRight,
+ tEOF,
+ }},
+ {"dots", "{{.x . .2 .x.y.z}}", []item{
+ tLeft,
+ {itemField, 0, ".x"},
+ tSpace,
+ {itemDot, 0, "."},
+ tSpace,
+ {itemNumber, 0, ".2"},
+ tSpace,
+ {itemField, 0, ".x"},
+ {itemField, 0, ".y"},
+ {itemField, 0, ".z"},
+ tRight,
+ tEOF,
+ }},
+ {"keywords", "{{range if else end with}}", []item{
+ tLeft,
+ {itemRange, 0, "range"},
+ tSpace,
+ {itemIf, 0, "if"},
+ tSpace,
+ {itemElse, 0, "else"},
+ tSpace,
+ {itemEnd, 0, "end"},
+ tSpace,
+ {itemWith, 0, "with"},
+ tRight,
+ tEOF,
+ }},
+ {"variables", "{{$c := printf $ $hello $23 $ $var.Field .Method}}", []item{
+ tLeft,
+ {itemVariable, 0, "$c"},
+ tSpace,
+ {itemColonEquals, 0, ":="},
+ tSpace,
+ {itemIdentifier, 0, "printf"},
+ tSpace,
+ {itemVariable, 0, "$"},
+ tSpace,
+ {itemVariable, 0, "$hello"},
+ tSpace,
+ {itemVariable, 0, "$23"},
+ tSpace,
+ {itemVariable, 0, "$"},
+ tSpace,
+ {itemVariable, 0, "$var"},
+ {itemField, 0, ".Field"},
+ tSpace,
+ {itemField, 0, ".Method"},
+ tRight,
+ tEOF,
+ }},
+ {"variable invocation", "{{$x 23}}", []item{
+ tLeft,
+ {itemVariable, 0, "$x"},
+ tSpace,
+ {itemNumber, 0, "23"},
+ tRight,
+ tEOF,
+ }},
+ {"pipeline", `intro {{echo hi 1.2 |noargs|args 1 "hi"}} outro`, []item{
+ {itemText, 0, "intro "},
+ tLeft,
+ {itemIdentifier, 0, "echo"},
+ tSpace,
+ {itemIdentifier, 0, "hi"},
+ tSpace,
+ {itemNumber, 0, "1.2"},
+ tSpace,
+ tPipe,
+ {itemIdentifier, 0, "noargs"},
+ tPipe,
+ {itemIdentifier, 0, "args"},
+ tSpace,
+ {itemNumber, 0, "1"},
+ tSpace,
+ {itemString, 0, `"hi"`},
+ tRight,
+ {itemText, 0, " outro"},
+ tEOF,
+ }},
+ {"declaration", "{{$v := 3}}", []item{
+ tLeft,
+ {itemVariable, 0, "$v"},
+ tSpace,
+ {itemColonEquals, 0, ":="},
+ tSpace,
+ {itemNumber, 0, "3"},
+ tRight,
+ tEOF,
+ }},
+ {"2 declarations", "{{$v , $w := 3}}", []item{
+ tLeft,
+ {itemVariable, 0, "$v"},
+ tSpace,
+ {itemChar, 0, ","},
+ tSpace,
+ {itemVariable, 0, "$w"},
+ tSpace,
+ {itemColonEquals, 0, ":="},
+ tSpace,
+ {itemNumber, 0, "3"},
+ tRight,
+ tEOF,
+ }},
+ {"field of parenthesized expression", "{{(.X).Y}}", []item{
+ tLeft,
+ tLpar,
+ {itemField, 0, ".X"},
+ tRpar,
+ {itemField, 0, ".Y"},
+ tRight,
+ tEOF,
+ }},
+ // errors
+ {"badchar", "#{{\x01}}", []item{
+ {itemText, 0, "#"},
+ tLeft,
+ {itemError, 0, "unrecognized character in action: U+0001"},
+ }},
+ {"unclosed action", "{{\n}}", []item{
+ tLeft,
+ {itemError, 0, "unclosed action"},
+ }},
+ {"EOF in action", "{{range", []item{
+ tLeft,
+ tRange,
+ {itemError, 0, "unclosed action"},
+ }},
+ {"unclosed quote", "{{\"\n\"}}", []item{
+ tLeft,
+ {itemError, 0, "unterminated quoted string"},
+ }},
+ {"unclosed raw quote", "{{`xx\n`}}", []item{
+ tLeft,
+ {itemError, 0, "unterminated raw quoted string"},
+ }},
+ {"unclosed char constant", "{{'\n}}", []item{
+ tLeft,
+ {itemError, 0, "unterminated character constant"},
+ }},
+ {"bad number", "{{3k}}", []item{
+ tLeft,
+ {itemError, 0, `bad number syntax: "3k"`},
+ }},
+ {"unclosed paren", "{{(3}}", []item{
+ tLeft,
+ tLpar,
+ {itemNumber, 0, "3"},
+ {itemError, 0, `unclosed left paren`},
+ }},
+ {"extra right paren", "{{3)}}", []item{
+ tLeft,
+ {itemNumber, 0, "3"},
+ tRpar,
+ {itemError, 0, `unexpected right paren U+0029 ')'`},
+ }},
+
+ // Fixed bugs
+ // Many elements in an action blew the lookahead until
+ // we made lexInsideAction not loop.
+ {"long pipeline deadlock", "{{|||||}}", []item{ + tLeft, + tPipe, + tPipe, + tPipe, + tPipe, + tPipe, + tRight, + tEOF, + }}, + {"text with bad comment", "hello-{{/*/}}-world", []item{ + {itemText, 0, "hello-"}, + {itemError, 0, `unclosed comment`}, + }}, + {"text with comment close separted from delim", "hello-{{/* */ }}-world", []item{ + {itemText, 0, "hello-"}, + {itemError, 0, `comment ends before closing delimiter`}, + }}, + // This one is an error that we can't catch because it breaks templates with + // minimized JavaScript. Should have fixed it before Go 1.1. + {"unmatched right delimiter", "hello-{.}}-world", []item{ + {itemText, 0, "hello-{.}}-world"}, + tEOF, + }}, +} + +// collect gathers the emitted items into a slice. +func collect(t *lexTest, left, right string) (items []item) { + l := lex(t.name, t.input, left, right) + for { + item := l.nextItem() + items = append(items, item) + if item.typ == itemEOF || item.typ == itemError { + break + } + } + return +} + +func equal(i1, i2 []item, checkPos bool) bool { + if len(i1) != len(i2) { + return false + } + for k := range i1 { + if i1[k].typ != i2[k].typ { + return false + } + if i1[k].val != i2[k].val { + return false + } + if checkPos && i1[k].pos != i2[k].pos { + return false + } + } + return true +} + +func TestLex(t *testing.T) { + for _, test := range lexTests { + items := collect(&test, "", "") + if !equal(items, test.items, false) { + t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", test.name, items, test.items) + } + } +} + +// Some easy cases from above, but with delimiters $$ and @@ +var lexDelimTests = []lexTest{ + {"punctuation", "$$,@%{{}}@@", []item{ + tLeftDelim, + {itemChar, 0, ","}, + {itemChar, 0, "@"}, + {itemChar, 0, "%"}, + {itemChar, 0, "{"}, + {itemChar, 0, "{"}, + {itemChar, 0, "}"}, + {itemChar, 0, "}"}, + tRightDelim, + tEOF, + }}, + {"empty action", `$$@@`, []item{tLeftDelim, tRightDelim, tEOF}}, + {"for", `$$for@@`, []item{tLeftDelim, tFor, tRightDelim, tEOF}}, + {"quote", `$$"abc \n\t\" "@@`, []item{tLeftDelim, tQuote, tRightDelim, tEOF}}, + {"raw quote", "$$" + raw + "@@", []item{tLeftDelim, tRawQuote, tRightDelim, tEOF}}, +} + +var ( + tLeftDelim = item{itemLeftDelim, 0, "$$"} + tRightDelim = item{itemRightDelim, 0, "@@"} +) + +func TestDelims(t *testing.T) { + for _, test := range lexDelimTests { + items := collect(&test, "$$", "@@") + if !equal(items, test.items, false) { + t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items) + } + } +} + +var lexPosTests = []lexTest{ + {"empty", "", []item{tEOF}}, + {"punctuation", "{{,@%#}}", []item{ + {itemLeftDelim, 0, "{{"}, + {itemChar, 2, ","}, + {itemChar, 3, "@"}, + {itemChar, 4, "%"}, + {itemChar, 5, "#"}, + {itemRightDelim, 6, "}}"}, + {itemEOF, 8, ""}, + }}, + {"sample", "0123{{hello}}xyz", []item{ + {itemText, 0, "0123"}, + {itemLeftDelim, 4, "{{"}, + {itemIdentifier, 6, "hello"}, + {itemRightDelim, 11, "}}"}, + {itemText, 13, "xyz"}, + {itemEOF, 16, ""}, + }}, +} + +// The other tests don't check position, to make the test cases easier to construct. +// This one does. +func TestPos(t *testing.T) { + for _, test := range lexPosTests { + items := collect(&test, "", "") + if !equal(items, test.items, true) { + t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items) + if len(items) == len(test.items) { + // Detailed print; avoid item.String() to expose the position value. 
+ for i := range items { + if !equal(items[i:i+1], test.items[i:i+1], true) { + i1 := items[i] + i2 := test.items[i] + t.Errorf("\t#%d: got {%v %d %q} expected {%v %d %q}", i, i1.typ, i1.pos, i1.val, i2.typ, i2.pos, i2.val) + } + } + } + } + } +} diff --git a/vendor/github.com/alecthomas/template/parse/node.go b/vendor/github.com/alecthomas/template/parse/node.go new file mode 100644 index 0000000..55c37f6 --- /dev/null +++ b/vendor/github.com/alecthomas/template/parse/node.go @@ -0,0 +1,834 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Parse nodes. + +package parse + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +var textFormat = "%s" // Changed to "%q" in tests for better error messages. + +// A Node is an element in the parse tree. The interface is trivial. +// The interface contains an unexported method so that only +// types local to this package can satisfy it. +type Node interface { + Type() NodeType + String() string + // Copy does a deep copy of the Node and all its components. + // To avoid type assertions, some XxxNodes also have specialized + // CopyXxx methods that return *XxxNode. + Copy() Node + Position() Pos // byte position of start of node in full original input string + // tree returns the containing *Tree. + // It is unexported so all implementations of Node are in this package. + tree() *Tree +} + +// NodeType identifies the type of a parse tree node. +type NodeType int + +// Pos represents a byte position in the original input text from which +// this template was parsed. +type Pos int + +func (p Pos) Position() Pos { + return p +} + +// Type returns itself and provides an easy default implementation +// for embedding in a Node. Embedded in all non-trivial Nodes. +func (t NodeType) Type() NodeType { + return t +} + +const ( + NodeText NodeType = iota // Plain text. + NodeAction // A non-control action such as a field evaluation. + NodeBool // A boolean constant. + NodeChain // A sequence of field accesses. + NodeCommand // An element of a pipeline. + NodeDot // The cursor, dot. + nodeElse // An else action. Not added to tree. + nodeEnd // An end action. Not added to tree. + NodeField // A field or method name. + NodeIdentifier // An identifier; always a function name. + NodeIf // An if action. + NodeList // A list of Nodes. + NodeNil // An untyped nil constant. + NodeNumber // A numerical constant. + NodePipe // A pipeline of commands. + NodeRange // A range action. + NodeString // A string constant. + NodeTemplate // A template invocation action. + NodeVariable // A $ variable. + NodeWith // A with action. +) + +// Nodes. + +// ListNode holds a sequence of nodes. +type ListNode struct { + NodeType + Pos + tr *Tree + Nodes []Node // The element nodes in lexical order. +} + +func (t *Tree) newList(pos Pos) *ListNode { + return &ListNode{tr: t, NodeType: NodeList, Pos: pos} +} + +func (l *ListNode) append(n Node) { + l.Nodes = append(l.Nodes, n) +} + +func (l *ListNode) tree() *Tree { + return l.tr +} + +func (l *ListNode) String() string { + b := new(bytes.Buffer) + for _, n := range l.Nodes { + fmt.Fprint(b, n) + } + return b.String() +} + +func (l *ListNode) CopyList() *ListNode { + if l == nil { + return l + } + n := l.tr.newList(l.Pos) + for _, elem := range l.Nodes { + n.append(elem.Copy()) + } + return n +} + +func (l *ListNode) Copy() Node { + return l.CopyList() +} + +// TextNode holds plain text. 
+type TextNode struct { + NodeType + Pos + tr *Tree + Text []byte // The text; may span newlines. +} + +func (t *Tree) newText(pos Pos, text string) *TextNode { + return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)} +} + +func (t *TextNode) String() string { + return fmt.Sprintf(textFormat, t.Text) +} + +func (t *TextNode) tree() *Tree { + return t.tr +} + +func (t *TextNode) Copy() Node { + return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)} +} + +// PipeNode holds a pipeline with optional declaration +type PipeNode struct { + NodeType + Pos + tr *Tree + Line int // The line number in the input (deprecated; kept for compatibility) + Decl []*VariableNode // Variable declarations in lexical order. + Cmds []*CommandNode // The commands in lexical order. +} + +func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode { + return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl} +} + +func (p *PipeNode) append(command *CommandNode) { + p.Cmds = append(p.Cmds, command) +} + +func (p *PipeNode) String() string { + s := "" + if len(p.Decl) > 0 { + for i, v := range p.Decl { + if i > 0 { + s += ", " + } + s += v.String() + } + s += " := " + } + for i, c := range p.Cmds { + if i > 0 { + s += " | " + } + s += c.String() + } + return s +} + +func (p *PipeNode) tree() *Tree { + return p.tr +} + +func (p *PipeNode) CopyPipe() *PipeNode { + if p == nil { + return p + } + var decl []*VariableNode + for _, d := range p.Decl { + decl = append(decl, d.Copy().(*VariableNode)) + } + n := p.tr.newPipeline(p.Pos, p.Line, decl) + for _, c := range p.Cmds { + n.append(c.Copy().(*CommandNode)) + } + return n +} + +func (p *PipeNode) Copy() Node { + return p.CopyPipe() +} + +// ActionNode holds an action (something bounded by delimiters). +// Control actions have their own nodes; ActionNode represents simple +// ones such as field evaluations and parenthesized pipelines. +type ActionNode struct { + NodeType + Pos + tr *Tree + Line int // The line number in the input (deprecated; kept for compatibility) + Pipe *PipeNode // The pipeline in the action. +} + +func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode { + return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe} +} + +func (a *ActionNode) String() string { + return fmt.Sprintf("{{%s}}", a.Pipe) + +} + +func (a *ActionNode) tree() *Tree { + return a.tr +} + +func (a *ActionNode) Copy() Node { + return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe()) + +} + +// CommandNode holds a command (a pipeline inside an evaluating action). +type CommandNode struct { + NodeType + Pos + tr *Tree + Args []Node // Arguments in lexical order: Identifier, field, or constant. +} + +func (t *Tree) newCommand(pos Pos) *CommandNode { + return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos} +} + +func (c *CommandNode) append(arg Node) { + c.Args = append(c.Args, arg) +} + +func (c *CommandNode) String() string { + s := "" + for i, arg := range c.Args { + if i > 0 { + s += " " + } + if arg, ok := arg.(*PipeNode); ok { + s += "(" + arg.String() + ")" + continue + } + s += arg.String() + } + return s +} + +func (c *CommandNode) tree() *Tree { + return c.tr +} + +func (c *CommandNode) Copy() Node { + if c == nil { + return c + } + n := c.tr.newCommand(c.Pos) + for _, c := range c.Args { + n.append(c.Copy()) + } + return n +} + +// IdentifierNode holds an identifier. 
+type IdentifierNode struct { + NodeType + Pos + tr *Tree + Ident string // The identifier's name. +} + +// NewIdentifier returns a new IdentifierNode with the given identifier name. +func NewIdentifier(ident string) *IdentifierNode { + return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident} +} + +// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature. +// Chained for convenience. +// TODO: fix one day? +func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode { + i.Pos = pos + return i +} + +// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature. +// Chained for convenience. +// TODO: fix one day? +func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode { + i.tr = t + return i +} + +func (i *IdentifierNode) String() string { + return i.Ident +} + +func (i *IdentifierNode) tree() *Tree { + return i.tr +} + +func (i *IdentifierNode) Copy() Node { + return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos) +} + +// VariableNode holds a list of variable names, possibly with chained field +// accesses. The dollar sign is part of the (first) name. +type VariableNode struct { + NodeType + Pos + tr *Tree + Ident []string // Variable name and fields in lexical order. +} + +func (t *Tree) newVariable(pos Pos, ident string) *VariableNode { + return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")} +} + +func (v *VariableNode) String() string { + s := "" + for i, id := range v.Ident { + if i > 0 { + s += "." + } + s += id + } + return s +} + +func (v *VariableNode) tree() *Tree { + return v.tr +} + +func (v *VariableNode) Copy() Node { + return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)} +} + +// DotNode holds the special identifier '.'. +type DotNode struct { + NodeType + Pos + tr *Tree +} + +func (t *Tree) newDot(pos Pos) *DotNode { + return &DotNode{tr: t, NodeType: NodeDot, Pos: pos} +} + +func (d *DotNode) Type() NodeType { + // Override method on embedded NodeType for API compatibility. + // TODO: Not really a problem; could change API without effect but + // api tool complains. + return NodeDot +} + +func (d *DotNode) String() string { + return "." +} + +func (d *DotNode) tree() *Tree { + return d.tr +} + +func (d *DotNode) Copy() Node { + return d.tr.newDot(d.Pos) +} + +// NilNode holds the special identifier 'nil' representing an untyped nil constant. +type NilNode struct { + NodeType + Pos + tr *Tree +} + +func (t *Tree) newNil(pos Pos) *NilNode { + return &NilNode{tr: t, NodeType: NodeNil, Pos: pos} +} + +func (n *NilNode) Type() NodeType { + // Override method on embedded NodeType for API compatibility. + // TODO: Not really a problem; could change API without effect but + // api tool complains. + return NodeNil +} + +func (n *NilNode) String() string { + return "nil" +} + +func (n *NilNode) tree() *Tree { + return n.tr +} + +func (n *NilNode) Copy() Node { + return n.tr.newNil(n.Pos) +} + +// FieldNode holds a field (identifier starting with '.'). +// The names may be chained ('.x.y'). +// The period is dropped from each ident. +type FieldNode struct { + NodeType + Pos + tr *Tree + Ident []string // The identifiers in lexical order. 
+} + +func (t *Tree) newField(pos Pos, ident string) *FieldNode { + return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period +} + +func (f *FieldNode) String() string { + s := "" + for _, id := range f.Ident { + s += "." + id + } + return s +} + +func (f *FieldNode) tree() *Tree { + return f.tr +} + +func (f *FieldNode) Copy() Node { + return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)} +} + +// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.'). +// The names may be chained ('.x.y'). +// The periods are dropped from each ident. +type ChainNode struct { + NodeType + Pos + tr *Tree + Node Node + Field []string // The identifiers in lexical order. +} + +func (t *Tree) newChain(pos Pos, node Node) *ChainNode { + return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node} +} + +// Add adds the named field (which should start with a period) to the end of the chain. +func (c *ChainNode) Add(field string) { + if len(field) == 0 || field[0] != '.' { + panic("no dot in field") + } + field = field[1:] // Remove leading dot. + if field == "" { + panic("empty field") + } + c.Field = append(c.Field, field) +} + +func (c *ChainNode) String() string { + s := c.Node.String() + if _, ok := c.Node.(*PipeNode); ok { + s = "(" + s + ")" + } + for _, field := range c.Field { + s += "." + field + } + return s +} + +func (c *ChainNode) tree() *Tree { + return c.tr +} + +func (c *ChainNode) Copy() Node { + return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)} +} + +// BoolNode holds a boolean constant. +type BoolNode struct { + NodeType + Pos + tr *Tree + True bool // The value of the boolean constant. +} + +func (t *Tree) newBool(pos Pos, true bool) *BoolNode { + return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true} +} + +func (b *BoolNode) String() string { + if b.True { + return "true" + } + return "false" +} + +func (b *BoolNode) tree() *Tree { + return b.tr +} + +func (b *BoolNode) Copy() Node { + return b.tr.newBool(b.Pos, b.True) +} + +// NumberNode holds a number: signed or unsigned integer, float, or complex. +// The value is parsed and stored under all the types that can represent the value. +// This simulates in a small amount of code the behavior of Go's ideal constants. +type NumberNode struct { + NodeType + Pos + tr *Tree + IsInt bool // Number has an integral value. + IsUint bool // Number has an unsigned integral value. + IsFloat bool // Number has a floating-point value. + IsComplex bool // Number is complex. + Int64 int64 // The signed integer value. + Uint64 uint64 // The unsigned integer value. + Float64 float64 // The floating-point value. + Complex128 complex128 // The complex value. + Text string // The original textual representation from the input. +} + +func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) { + n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text} + switch typ { + case itemCharConstant: + rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0]) + if err != nil { + return nil, err + } + if tail != "'" { + return nil, fmt.Errorf("malformed character constant: %s", text) + } + n.Int64 = int64(rune) + n.IsInt = true + n.Uint64 = uint64(rune) + n.IsUint = true + n.Float64 = float64(rune) // odd but those are the rules. 
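+ // A character constant therefore satisfies every numeric context: for
+ // example, '\u0061' carries Int64 97, Uint64 97, and Float64 97 at once.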
+ n.IsFloat = true + return n, nil + case itemComplex: + // fmt.Sscan can parse the pair, so let it do the work. + if _, err := fmt.Sscan(text, &n.Complex128); err != nil { + return nil, err + } + n.IsComplex = true + n.simplifyComplex() + return n, nil + } + // Imaginary constants can only be complex unless they are zero. + if len(text) > 0 && text[len(text)-1] == 'i' { + f, err := strconv.ParseFloat(text[:len(text)-1], 64) + if err == nil { + n.IsComplex = true + n.Complex128 = complex(0, f) + n.simplifyComplex() + return n, nil + } + } + // Do integer test first so we get 0x123 etc. + u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below. + if err == nil { + n.IsUint = true + n.Uint64 = u + } + i, err := strconv.ParseInt(text, 0, 64) + if err == nil { + n.IsInt = true + n.Int64 = i + if i == 0 { + n.IsUint = true // in case of -0. + n.Uint64 = u + } + } + // If an integer extraction succeeded, promote the float. + if n.IsInt { + n.IsFloat = true + n.Float64 = float64(n.Int64) + } else if n.IsUint { + n.IsFloat = true + n.Float64 = float64(n.Uint64) + } else { + f, err := strconv.ParseFloat(text, 64) + if err == nil { + n.IsFloat = true + n.Float64 = f + // If a floating-point extraction succeeded, extract the int if needed. + if !n.IsInt && float64(int64(f)) == f { + n.IsInt = true + n.Int64 = int64(f) + } + if !n.IsUint && float64(uint64(f)) == f { + n.IsUint = true + n.Uint64 = uint64(f) + } + } + } + if !n.IsInt && !n.IsUint && !n.IsFloat { + return nil, fmt.Errorf("illegal number syntax: %q", text) + } + return n, nil +} + +// simplifyComplex pulls out any other types that are represented by the complex number. +// These all require that the imaginary part be zero. +func (n *NumberNode) simplifyComplex() { + n.IsFloat = imag(n.Complex128) == 0 + if n.IsFloat { + n.Float64 = real(n.Complex128) + n.IsInt = float64(int64(n.Float64)) == n.Float64 + if n.IsInt { + n.Int64 = int64(n.Float64) + } + n.IsUint = float64(uint64(n.Float64)) == n.Float64 + if n.IsUint { + n.Uint64 = uint64(n.Float64) + } + } +} + +func (n *NumberNode) String() string { + return n.Text +} + +func (n *NumberNode) tree() *Tree { + return n.tr +} + +func (n *NumberNode) Copy() Node { + nn := new(NumberNode) + *nn = *n // Easy, fast, correct. + return nn +} + +// StringNode holds a string constant. The value has been "unquoted". +type StringNode struct { + NodeType + Pos + tr *Tree + Quoted string // The original text of the string, with quotes. + Text string // The string, after quote processing. +} + +func (t *Tree) newString(pos Pos, orig, text string) *StringNode { + return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text} +} + +func (s *StringNode) String() string { + return s.Quoted +} + +func (s *StringNode) tree() *Tree { + return s.tr +} + +func (s *StringNode) Copy() Node { + return s.tr.newString(s.Pos, s.Quoted, s.Text) +} + +// endNode represents an {{end}} action. +// It does not appear in the final parse tree. +type endNode struct { + NodeType + Pos + tr *Tree +} + +func (t *Tree) newEnd(pos Pos) *endNode { + return &endNode{tr: t, NodeType: nodeEnd, Pos: pos} +} + +func (e *endNode) String() string { + return "{{end}}" +} + +func (e *endNode) tree() *Tree { + return e.tr +} + +func (e *endNode) Copy() Node { + return e.tr.newEnd(e.Pos) +} + +// elseNode represents an {{else}} action. Does not appear in the final tree. 
+type elseNode struct { + NodeType + Pos + tr *Tree + Line int // The line number in the input (deprecated; kept for compatibility) +} + +func (t *Tree) newElse(pos Pos, line int) *elseNode { + return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line} +} + +func (e *elseNode) Type() NodeType { + return nodeElse +} + +func (e *elseNode) String() string { + return "{{else}}" +} + +func (e *elseNode) tree() *Tree { + return e.tr +} + +func (e *elseNode) Copy() Node { + return e.tr.newElse(e.Pos, e.Line) +} + +// BranchNode is the common representation of if, range, and with. +type BranchNode struct { + NodeType + Pos + tr *Tree + Line int // The line number in the input (deprecated; kept for compatibility) + Pipe *PipeNode // The pipeline to be evaluated. + List *ListNode // What to execute if the value is non-empty. + ElseList *ListNode // What to execute if the value is empty (nil if absent). +} + +func (b *BranchNode) String() string { + name := "" + switch b.NodeType { + case NodeIf: + name = "if" + case NodeRange: + name = "range" + case NodeWith: + name = "with" + default: + panic("unknown branch type") + } + if b.ElseList != nil { + return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList) + } + return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List) +} + +func (b *BranchNode) tree() *Tree { + return b.tr +} + +func (b *BranchNode) Copy() Node { + switch b.NodeType { + case NodeIf: + return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) + case NodeRange: + return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) + case NodeWith: + return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) + default: + panic("unknown branch type") + } +} + +// IfNode represents an {{if}} action and its commands. +type IfNode struct { + BranchNode +} + +func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode { + return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} +} + +func (i *IfNode) Copy() Node { + return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList()) +} + +// RangeNode represents a {{range}} action and its commands. +type RangeNode struct { + BranchNode +} + +func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode { + return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} +} + +func (r *RangeNode) Copy() Node { + return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList()) +} + +// WithNode represents a {{with}} action and its commands. +type WithNode struct { + BranchNode +} + +func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode { + return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} +} + +func (w *WithNode) Copy() Node { + return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList()) +} + +// TemplateNode represents a {{template}} action. +type TemplateNode struct { + NodeType + Pos + tr *Tree + Line int // The line number in the input (deprecated; kept for compatibility) + Name string // The name of the template (unquoted). + Pipe *PipeNode // The command to evaluate as dot for the template. 
+} + +func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode { + return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe} +} + +func (t *TemplateNode) String() string { + if t.Pipe == nil { + return fmt.Sprintf("{{template %q}}", t.Name) + } + return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe) +} + +func (t *TemplateNode) tree() *Tree { + return t.tr +} + +func (t *TemplateNode) Copy() Node { + return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe()) +} diff --git a/vendor/github.com/alecthomas/template/parse/parse.go b/vendor/github.com/alecthomas/template/parse/parse.go new file mode 100644 index 0000000..0d77ade --- /dev/null +++ b/vendor/github.com/alecthomas/template/parse/parse.go @@ -0,0 +1,700 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package parse builds parse trees for templates as defined by text/template +// and html/template. Clients should use those packages to construct templates +// rather than this one, which provides shared internal data structures not +// intended for general use. +package parse + +import ( + "bytes" + "fmt" + "runtime" + "strconv" + "strings" +) + +// Tree is the representation of a single parsed template. +type Tree struct { + Name string // name of the template represented by the tree. + ParseName string // name of the top-level template during parsing, for error messages. + Root *ListNode // top-level root of the tree. + text string // text parsed to create the template (or its parent) + // Parsing only; cleared after parse. + funcs []map[string]interface{} + lex *lexer + token [3]item // three-token lookahead for parser. + peekCount int + vars []string // variables defined at the moment. +} + +// Copy returns a copy of the Tree. Any parsing state is discarded. +func (t *Tree) Copy() *Tree { + if t == nil { + return nil + } + return &Tree{ + Name: t.Name, + ParseName: t.ParseName, + Root: t.Root.CopyList(), + text: t.text, + } +} + +// Parse returns a map from template name to parse.Tree, created by parsing the +// templates described in the argument string. The top-level template will be +// given the specified name. If an error is encountered, parsing stops and an +// empty map is returned with the error. +func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) { + treeSet = make(map[string]*Tree) + t := New(name) + t.text = text + _, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...) + return +} + +// next returns the next token. +func (t *Tree) next() item { + if t.peekCount > 0 { + t.peekCount-- + } else { + t.token[0] = t.lex.nextItem() + } + return t.token[t.peekCount] +} + +// backup backs the input stream up one token. +func (t *Tree) backup() { + t.peekCount++ +} + +// backup2 backs the input stream up two tokens. +// The zeroth token is already there. +func (t *Tree) backup2(t1 item) { + t.token[1] = t1 + t.peekCount = 2 +} + +// backup3 backs the input stream up three tokens +// The zeroth token is already there. +func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back. + t.token[1] = t1 + t.token[2] = t2 + t.peekCount = 3 +} + +// peek returns but does not consume the next token. 
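+// The token is buffered, so the following call to next returns the same item.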
+func (t *Tree) peek() item { + if t.peekCount > 0 { + return t.token[t.peekCount-1] + } + t.peekCount = 1 + t.token[0] = t.lex.nextItem() + return t.token[0] +} + +// nextNonSpace returns the next non-space token. +func (t *Tree) nextNonSpace() (token item) { + for { + token = t.next() + if token.typ != itemSpace { + break + } + } + return token +} + +// peekNonSpace returns but does not consume the next non-space token. +func (t *Tree) peekNonSpace() (token item) { + for { + token = t.next() + if token.typ != itemSpace { + break + } + } + t.backup() + return token +} + +// Parsing. + +// New allocates a new parse tree with the given name. +func New(name string, funcs ...map[string]interface{}) *Tree { + return &Tree{ + Name: name, + funcs: funcs, + } +} + +// ErrorContext returns a textual representation of the location of the node in the input text. +// The receiver is only used when the node does not have a pointer to the tree inside, +// which can occur in old code. +func (t *Tree) ErrorContext(n Node) (location, context string) { + pos := int(n.Position()) + tree := n.tree() + if tree == nil { + tree = t + } + text := tree.text[:pos] + byteNum := strings.LastIndex(text, "\n") + if byteNum == -1 { + byteNum = pos // On first line. + } else { + byteNum++ // After the newline. + byteNum = pos - byteNum + } + lineNum := 1 + strings.Count(text, "\n") + context = n.String() + if len(context) > 20 { + context = fmt.Sprintf("%.20s...", context) + } + return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context +} + +// errorf formats the error and terminates processing. +func (t *Tree) errorf(format string, args ...interface{}) { + t.Root = nil + format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format) + panic(fmt.Errorf(format, args...)) +} + +// error terminates processing. +func (t *Tree) error(err error) { + t.errorf("%s", err) +} + +// expect consumes the next token and guarantees it has the required type. +func (t *Tree) expect(expected itemType, context string) item { + token := t.nextNonSpace() + if token.typ != expected { + t.unexpected(token, context) + } + return token +} + +// expectOneOf consumes the next token and guarantees it has one of the required types. +func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item { + token := t.nextNonSpace() + if token.typ != expected1 && token.typ != expected2 { + t.unexpected(token, context) + } + return token +} + +// unexpected complains about the token and terminates processing. +func (t *Tree) unexpected(token item, context string) { + t.errorf("unexpected %s in %s", token, context) +} + +// recover is the handler that turns panics into returns from the top level of Parse. +func (t *Tree) recover(errp *error) { + e := recover() + if e != nil { + if _, ok := e.(runtime.Error); ok { + panic(e) + } + if t != nil { + t.stopParse() + } + *errp = e.(error) + } + return +} + +// startParse initializes the parser, using the lexer. +func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) { + t.Root = nil + t.lex = lex + t.vars = []string{"$"} + t.funcs = funcs +} + +// stopParse terminates parsing. +func (t *Tree) stopParse() { + t.lex = nil + t.vars = nil + t.funcs = nil +} + +// Parse parses the template definition string to construct a representation of +// the template for execution. If either action delimiter string is empty, the +// default ("{{" or "}}") is used. Embedded template definitions are added to +// the treeSet map. 
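+// For example, parsing `a{{define "x"}}b{{end}}` produces this tree plus a
+// second tree named "x" in treeSet.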
+func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) { + defer t.recover(&err) + t.ParseName = t.Name + t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim)) + t.text = text + t.parse(treeSet) + t.add(treeSet) + t.stopParse() + return t, nil +} + +// add adds tree to the treeSet. +func (t *Tree) add(treeSet map[string]*Tree) { + tree := treeSet[t.Name] + if tree == nil || IsEmptyTree(tree.Root) { + treeSet[t.Name] = t + return + } + if !IsEmptyTree(t.Root) { + t.errorf("template: multiple definition of template %q", t.Name) + } +} + +// IsEmptyTree reports whether this tree (node) is empty of everything but space. +func IsEmptyTree(n Node) bool { + switch n := n.(type) { + case nil: + return true + case *ActionNode: + case *IfNode: + case *ListNode: + for _, node := range n.Nodes { + if !IsEmptyTree(node) { + return false + } + } + return true + case *RangeNode: + case *TemplateNode: + case *TextNode: + return len(bytes.TrimSpace(n.Text)) == 0 + case *WithNode: + default: + panic("unknown node: " + n.String()) + } + return false +} + +// parse is the top-level parser for a template, essentially the same +// as itemList except it also parses {{define}} actions. +// It runs to EOF. +func (t *Tree) parse(treeSet map[string]*Tree) (next Node) { + t.Root = t.newList(t.peek().pos) + for t.peek().typ != itemEOF { + if t.peek().typ == itemLeftDelim { + delim := t.next() + if t.nextNonSpace().typ == itemDefine { + newT := New("definition") // name will be updated once we know it. + newT.text = t.text + newT.ParseName = t.ParseName + newT.startParse(t.funcs, t.lex) + newT.parseDefinition(treeSet) + continue + } + t.backup2(delim) + } + n := t.textOrAction() + if n.Type() == nodeEnd { + t.errorf("unexpected %s", n) + } + t.Root.append(n) + } + return nil +} + +// parseDefinition parses a {{define}} ... {{end}} template definition and +// installs the definition in the treeSet map. The "define" keyword has already +// been scanned. +func (t *Tree) parseDefinition(treeSet map[string]*Tree) { + const context = "define clause" + name := t.expectOneOf(itemString, itemRawString, context) + var err error + t.Name, err = strconv.Unquote(name.val) + if err != nil { + t.error(err) + } + t.expect(itemRightDelim, context) + var end Node + t.Root, end = t.itemList() + if end.Type() != nodeEnd { + t.errorf("unexpected %s in %s", end, context) + } + t.add(treeSet) + t.stopParse() +} + +// itemList: +// textOrAction* +// Terminates at {{end}} or {{else}}, returned separately. +func (t *Tree) itemList() (list *ListNode, next Node) { + list = t.newList(t.peekNonSpace().pos) + for t.peekNonSpace().typ != itemEOF { + n := t.textOrAction() + switch n.Type() { + case nodeEnd, nodeElse: + return list, n + } + list.append(n) + } + t.errorf("unexpected EOF") + return +} + +// textOrAction: +// text | action +func (t *Tree) textOrAction() Node { + switch token := t.nextNonSpace(); token.typ { + case itemElideNewline: + return t.elideNewline() + case itemText: + return t.newText(token.pos, token.val) + case itemLeftDelim: + return t.action() + default: + t.unexpected(token, "input") + } + return nil +} + +// elideNewline: +// Remove newlines trailing rightDelim if \\ is present. 
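+// For example, "{{true}}\" followed by a newline and a space parses as the
+// action plus the text " "; the newline is dropped, the space kept.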
+func (t *Tree) elideNewline() Node { + token := t.peek() + if token.typ != itemText { + t.unexpected(token, "input") + return nil + } + + t.next() + stripped := strings.TrimLeft(token.val, "\n\r") + diff := len(token.val) - len(stripped) + if diff > 0 { + // This is a bit nasty. We mutate the token in-place to remove + // preceding newlines. + token.pos += Pos(diff) + token.val = stripped + } + return t.newText(token.pos, token.val) +} + +// Action: +// control +// command ("|" command)* +// Left delim is past. Now get actions. +// First word could be a keyword such as range. +func (t *Tree) action() (n Node) { + switch token := t.nextNonSpace(); token.typ { + case itemElse: + return t.elseControl() + case itemEnd: + return t.endControl() + case itemIf: + return t.ifControl() + case itemRange: + return t.rangeControl() + case itemTemplate: + return t.templateControl() + case itemWith: + return t.withControl() + } + t.backup() + // Do not pop variables; they persist until "end". + return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command")) +} + +// Pipeline: +// declarations? command ('|' command)* +func (t *Tree) pipeline(context string) (pipe *PipeNode) { + var decl []*VariableNode + pos := t.peekNonSpace().pos + // Are there declarations? + for { + if v := t.peekNonSpace(); v.typ == itemVariable { + t.next() + // Since space is a token, we need 3-token look-ahead here in the worst case: + // in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an + // argument variable rather than a declaration. So remember the token + // adjacent to the variable so we can push it back if necessary. + tokenAfterVariable := t.peek() + if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") { + t.nextNonSpace() + variable := t.newVariable(v.pos, v.val) + decl = append(decl, variable) + t.vars = append(t.vars, v.val) + if next.typ == itemChar && next.val == "," { + if context == "range" && len(decl) < 2 { + continue + } + t.errorf("too many declarations in %s", context) + } + } else if tokenAfterVariable.typ == itemSpace { + t.backup3(v, tokenAfterVariable) + } else { + t.backup2(v) + } + } + break + } + pipe = t.newPipeline(pos, t.lex.lineNumber(), decl) + for { + switch token := t.nextNonSpace(); token.typ { + case itemRightDelim, itemRightParen: + if len(pipe.Cmds) == 0 { + t.errorf("missing value for %s", context) + } + if token.typ == itemRightParen { + t.backup() + } + return + case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier, + itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen: + t.backup() + pipe.append(t.command()) + default: + t.unexpected(token, context) + } + } +} + +func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) { + defer t.popVars(len(t.vars)) + line = t.lex.lineNumber() + pipe = t.pipeline(context) + var next Node + list, next = t.itemList() + switch next.Type() { + case nodeEnd: //done + case nodeElse: + if allowElseIf { + // Special case for "else if". If the "else" is followed immediately by an "if", + // the elseControl will have left the "if" token pending. Treat + // {{if a}}_{{else if b}}_{{end}} + // as + // {{if a}}_{{else}}{{if b}}_{{end}}{{end}}. + // To do this, parse the if as usual and stop at it {{end}}; the subsequent{{end}} + // is assumed. This technique works even for long if-else-if chains. + // TODO: Should we allow else-if in with and range? 
+ if t.peek().typ == itemIf { + t.next() // Consume the "if" token. + elseList = t.newList(next.Position()) + elseList.append(t.ifControl()) + // Do not consume the next item - only one {{end}} required. + break + } + } + elseList, next = t.itemList() + if next.Type() != nodeEnd { + t.errorf("expected end; found %s", next) + } + } + return pipe.Position(), line, pipe, list, elseList +} + +// If: +// {{if pipeline}} itemList {{end}} +// {{if pipeline}} itemList {{else}} itemList {{end}} +// If keyword is past. +func (t *Tree) ifControl() Node { + return t.newIf(t.parseControl(true, "if")) +} + +// Range: +// {{range pipeline}} itemList {{end}} +// {{range pipeline}} itemList {{else}} itemList {{end}} +// Range keyword is past. +func (t *Tree) rangeControl() Node { + return t.newRange(t.parseControl(false, "range")) +} + +// With: +// {{with pipeline}} itemList {{end}} +// {{with pipeline}} itemList {{else}} itemList {{end}} +// If keyword is past. +func (t *Tree) withControl() Node { + return t.newWith(t.parseControl(false, "with")) +} + +// End: +// {{end}} +// End keyword is past. +func (t *Tree) endControl() Node { + return t.newEnd(t.expect(itemRightDelim, "end").pos) +} + +// Else: +// {{else}} +// Else keyword is past. +func (t *Tree) elseControl() Node { + // Special case for "else if". + peek := t.peekNonSpace() + if peek.typ == itemIf { + // We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ". + return t.newElse(peek.pos, t.lex.lineNumber()) + } + return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber()) +} + +// Template: +// {{template stringValue pipeline}} +// Template keyword is past. The name must be something that can evaluate +// to a string. +func (t *Tree) templateControl() Node { + var name string + token := t.nextNonSpace() + switch token.typ { + case itemString, itemRawString: + s, err := strconv.Unquote(token.val) + if err != nil { + t.error(err) + } + name = s + default: + t.unexpected(token, "template invocation") + } + var pipe *PipeNode + if t.nextNonSpace().typ != itemRightDelim { + t.backup() + // Do not pop variables; they persist until "end". + pipe = t.pipeline("template") + } + return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe) +} + +// command: +// operand (space operand)* +// space-separated arguments up to a pipeline character or right delimiter. +// we consume the pipe character but leave the right delim to terminate the action. +func (t *Tree) command() *CommandNode { + cmd := t.newCommand(t.peekNonSpace().pos) + for { + t.peekNonSpace() // skip leading spaces. + operand := t.operand() + if operand != nil { + cmd.append(operand) + } + switch token := t.next(); token.typ { + case itemSpace: + continue + case itemError: + t.errorf("%s", token.val) + case itemRightDelim, itemRightParen: + t.backup() + case itemPipe: + default: + t.errorf("unexpected %s in operand; missing space?", token) + } + break + } + if len(cmd.Args) == 0 { + t.errorf("empty command") + } + return cmd +} + +// operand: +// term .Field* +// An operand is a space-separated component of a command, +// a term possibly followed by field accesses. +// A nil return means the next item is not an operand. 
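+// For example, (.Y .Z).Field parses as a ChainNode: a parenthesized pipeline
+// term followed by a field access.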
+func (t *Tree) operand() Node { + node := t.term() + if node == nil { + return nil + } + if t.peek().typ == itemField { + chain := t.newChain(t.peek().pos, node) + for t.peek().typ == itemField { + chain.Add(t.next().val) + } + // Compatibility with original API: If the term is of type NodeField + // or NodeVariable, just put more fields on the original. + // Otherwise, keep the Chain node. + // TODO: Switch to Chains always when we can. + switch node.Type() { + case NodeField: + node = t.newField(chain.Position(), chain.String()) + case NodeVariable: + node = t.newVariable(chain.Position(), chain.String()) + default: + node = chain + } + } + return node +} + +// term: +// literal (number, string, nil, boolean) +// function (identifier) +// . +// .Field +// $ +// '(' pipeline ')' +// A term is a simple "expression". +// A nil return means the next item is not a term. +func (t *Tree) term() Node { + switch token := t.nextNonSpace(); token.typ { + case itemError: + t.errorf("%s", token.val) + case itemIdentifier: + if !t.hasFunction(token.val) { + t.errorf("function %q not defined", token.val) + } + return NewIdentifier(token.val).SetTree(t).SetPos(token.pos) + case itemDot: + return t.newDot(token.pos) + case itemNil: + return t.newNil(token.pos) + case itemVariable: + return t.useVar(token.pos, token.val) + case itemField: + return t.newField(token.pos, token.val) + case itemBool: + return t.newBool(token.pos, token.val == "true") + case itemCharConstant, itemComplex, itemNumber: + number, err := t.newNumber(token.pos, token.val, token.typ) + if err != nil { + t.error(err) + } + return number + case itemLeftParen: + pipe := t.pipeline("parenthesized pipeline") + if token := t.next(); token.typ != itemRightParen { + t.errorf("unclosed right paren: unexpected %s", token) + } + return pipe + case itemString, itemRawString: + s, err := strconv.Unquote(token.val) + if err != nil { + t.error(err) + } + return t.newString(token.pos, token.val, s) + } + t.backup() + return nil +} + +// hasFunction reports if a function name exists in the Tree's maps. +func (t *Tree) hasFunction(name string) bool { + for _, funcMap := range t.funcs { + if funcMap == nil { + continue + } + if funcMap[name] != nil { + return true + } + } + return false +} + +// popVars trims the variable list to the specified length +func (t *Tree) popVars(n int) { + t.vars = t.vars[:n] +} + +// useVar returns a node for a variable reference. It errors if the +// variable is not defined. +func (t *Tree) useVar(pos Pos, name string) Node { + v := t.newVariable(pos, name) + for _, varName := range t.vars { + if varName == v.Ident[0] { + return v + } + } + t.errorf("undefined variable %q", v.Ident[0]) + return nil +} diff --git a/vendor/github.com/alecthomas/template/parse/parse_test.go b/vendor/github.com/alecthomas/template/parse/parse_test.go new file mode 100644 index 0000000..c73640f --- /dev/null +++ b/vendor/github.com/alecthomas/template/parse/parse_test.go @@ -0,0 +1,426 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package parse + +import ( + "flag" + "fmt" + "strings" + "testing" +) + +var debug = flag.Bool("debug", false, "show the errors produced by the main tests") + +type numberTest struct { + text string + isInt bool + isUint bool + isFloat bool + isComplex bool + int64 + uint64 + float64 + complex128 +} + +var numberTests = []numberTest{ + // basics + {"0", true, true, true, false, 0, 0, 0, 0}, + {"-0", true, true, true, false, 0, 0, 0, 0}, // check that -0 is a uint. + {"73", true, true, true, false, 73, 73, 73, 0}, + {"073", true, true, true, false, 073, 073, 073, 0}, + {"0x73", true, true, true, false, 0x73, 0x73, 0x73, 0}, + {"-73", true, false, true, false, -73, 0, -73, 0}, + {"+73", true, false, true, false, 73, 0, 73, 0}, + {"100", true, true, true, false, 100, 100, 100, 0}, + {"1e9", true, true, true, false, 1e9, 1e9, 1e9, 0}, + {"-1e9", true, false, true, false, -1e9, 0, -1e9, 0}, + {"-1.2", false, false, true, false, 0, 0, -1.2, 0}, + {"1e19", false, true, true, false, 0, 1e19, 1e19, 0}, + {"-1e19", false, false, true, false, 0, 0, -1e19, 0}, + {"4i", false, false, false, true, 0, 0, 0, 4i}, + {"-1.2+4.2i", false, false, false, true, 0, 0, 0, -1.2 + 4.2i}, + {"073i", false, false, false, true, 0, 0, 0, 73i}, // not octal! + // complex with 0 imaginary are float (and maybe integer) + {"0i", true, true, true, true, 0, 0, 0, 0}, + {"-1.2+0i", false, false, true, true, 0, 0, -1.2, -1.2}, + {"-12+0i", true, false, true, true, -12, 0, -12, -12}, + {"13+0i", true, true, true, true, 13, 13, 13, 13}, + // funny bases + {"0123", true, true, true, false, 0123, 0123, 0123, 0}, + {"-0x0", true, true, true, false, 0, 0, 0, 0}, + {"0xdeadbeef", true, true, true, false, 0xdeadbeef, 0xdeadbeef, 0xdeadbeef, 0}, + // character constants + {`'a'`, true, true, true, false, 'a', 'a', 'a', 0}, + {`'\n'`, true, true, true, false, '\n', '\n', '\n', 0}, + {`'\\'`, true, true, true, false, '\\', '\\', '\\', 0}, + {`'\''`, true, true, true, false, '\'', '\'', '\'', 0}, + {`'\xFF'`, true, true, true, false, 0xFF, 0xFF, 0xFF, 0}, + {`'パ'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0}, + {`'\u30d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0}, + {`'\U000030d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0}, + // some broken syntax + {text: "+-2"}, + {text: "0x123."}, + {text: "1e."}, + {text: "0xi."}, + {text: "1+2."}, + {text: "'x"}, + {text: "'xx'"}, + // Issue 8622 - 0xe parsed as floating point. Very embarrassing. + {"0xef", true, true, true, false, 0xef, 0xef, 0xef, 0}, +} + +func TestNumberParse(t *testing.T) { + for _, test := range numberTests { + // If fmt.Sscan thinks it's complex, it's complex. We can't trust the output + // because imaginary comes out as a number. 
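+		// (Only inputs Sscan accepts as complex, such as "4i" or "-1.2+4.2i",
+		// reach newNumber as itemComplex.)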
+ var c complex128 + typ := itemNumber + var tree *Tree + if test.text[0] == '\'' { + typ = itemCharConstant + } else { + _, err := fmt.Sscan(test.text, &c) + if err == nil { + typ = itemComplex + } + } + n, err := tree.newNumber(0, test.text, typ) + ok := test.isInt || test.isUint || test.isFloat || test.isComplex + if ok && err != nil { + t.Errorf("unexpected error for %q: %s", test.text, err) + continue + } + if !ok && err == nil { + t.Errorf("expected error for %q", test.text) + continue + } + if !ok { + if *debug { + fmt.Printf("%s\n\t%s\n", test.text, err) + } + continue + } + if n.IsComplex != test.isComplex { + t.Errorf("complex incorrect for %q; should be %t", test.text, test.isComplex) + } + if test.isInt { + if !n.IsInt { + t.Errorf("expected integer for %q", test.text) + } + if n.Int64 != test.int64 { + t.Errorf("int64 for %q should be %d Is %d", test.text, test.int64, n.Int64) + } + } else if n.IsInt { + t.Errorf("did not expect integer for %q", test.text) + } + if test.isUint { + if !n.IsUint { + t.Errorf("expected unsigned integer for %q", test.text) + } + if n.Uint64 != test.uint64 { + t.Errorf("uint64 for %q should be %d Is %d", test.text, test.uint64, n.Uint64) + } + } else if n.IsUint { + t.Errorf("did not expect unsigned integer for %q", test.text) + } + if test.isFloat { + if !n.IsFloat { + t.Errorf("expected float for %q", test.text) + } + if n.Float64 != test.float64 { + t.Errorf("float64 for %q should be %g Is %g", test.text, test.float64, n.Float64) + } + } else if n.IsFloat { + t.Errorf("did not expect float for %q", test.text) + } + if test.isComplex { + if !n.IsComplex { + t.Errorf("expected complex for %q", test.text) + } + if n.Complex128 != test.complex128 { + t.Errorf("complex128 for %q should be %g Is %g", test.text, test.complex128, n.Complex128) + } + } else if n.IsComplex { + t.Errorf("did not expect complex for %q", test.text) + } + } +} + +type parseTest struct { + name string + input string + ok bool + result string // what the user would see in an error message. 
+} + +const ( + noError = true + hasError = false +) + +var parseTests = []parseTest{ + {"empty", "", noError, + ``}, + {"comment", "{{/*\n\n\n*/}}", noError, + ``}, + {"spaces", " \t\n", noError, + `" \t\n"`}, + {"text", "some text", noError, + `"some text"`}, + {"emptyAction", "{{}}", hasError, + `{{}}`}, + {"field", "{{.X}}", noError, + `{{.X}}`}, + {"simple command", "{{printf}}", noError, + `{{printf}}`}, + {"$ invocation", "{{$}}", noError, + "{{$}}"}, + {"variable invocation", "{{with $x := 3}}{{$x 23}}{{end}}", noError, + "{{with $x := 3}}{{$x 23}}{{end}}"}, + {"variable with fields", "{{$.I}}", noError, + "{{$.I}}"}, + {"multi-word command", "{{printf `%d` 23}}", noError, + "{{printf `%d` 23}}"}, + {"pipeline", "{{.X|.Y}}", noError, + `{{.X | .Y}}`}, + {"pipeline with decl", "{{$x := .X|.Y}}", noError, + `{{$x := .X | .Y}}`}, + {"nested pipeline", "{{.X (.Y .Z) (.A | .B .C) (.E)}}", noError, + `{{.X (.Y .Z) (.A | .B .C) (.E)}}`}, + {"field applied to parentheses", "{{(.Y .Z).Field}}", noError, + `{{(.Y .Z).Field}}`}, + {"simple if", "{{if .X}}hello{{end}}", noError, + `{{if .X}}"hello"{{end}}`}, + {"if with else", "{{if .X}}true{{else}}false{{end}}", noError, + `{{if .X}}"true"{{else}}"false"{{end}}`}, + {"if with else if", "{{if .X}}true{{else if .Y}}false{{end}}", noError, + `{{if .X}}"true"{{else}}{{if .Y}}"false"{{end}}{{end}}`}, + {"if else chain", "+{{if .X}}X{{else if .Y}}Y{{else if .Z}}Z{{end}}+", noError, + `"+"{{if .X}}"X"{{else}}{{if .Y}}"Y"{{else}}{{if .Z}}"Z"{{end}}{{end}}{{end}}"+"`}, + {"simple range", "{{range .X}}hello{{end}}", noError, + `{{range .X}}"hello"{{end}}`}, + {"chained field range", "{{range .X.Y.Z}}hello{{end}}", noError, + `{{range .X.Y.Z}}"hello"{{end}}`}, + {"nested range", "{{range .X}}hello{{range .Y}}goodbye{{end}}{{end}}", noError, + `{{range .X}}"hello"{{range .Y}}"goodbye"{{end}}{{end}}`}, + {"range with else", "{{range .X}}true{{else}}false{{end}}", noError, + `{{range .X}}"true"{{else}}"false"{{end}}`}, + {"range over pipeline", "{{range .X|.M}}true{{else}}false{{end}}", noError, + `{{range .X | .M}}"true"{{else}}"false"{{end}}`}, + {"range []int", "{{range .SI}}{{.}}{{end}}", noError, + `{{range .SI}}{{.}}{{end}}`}, + {"range 1 var", "{{range $x := .SI}}{{.}}{{end}}", noError, + `{{range $x := .SI}}{{.}}{{end}}`}, + {"range 2 vars", "{{range $x, $y := .SI}}{{.}}{{end}}", noError, + `{{range $x, $y := .SI}}{{.}}{{end}}`}, + {"constants", "{{range .SI 1 -3.2i true false 'a' nil}}{{end}}", noError, + `{{range .SI 1 -3.2i true false 'a' nil}}{{end}}`}, + {"template", "{{template `x`}}", noError, + `{{template "x"}}`}, + {"template with arg", "{{template `x` .Y}}", noError, + `{{template "x" .Y}}`}, + {"with", "{{with .X}}hello{{end}}", noError, + `{{with .X}}"hello"{{end}}`}, + {"with with else", "{{with .X}}hello{{else}}goodbye{{end}}", noError, + `{{with .X}}"hello"{{else}}"goodbye"{{end}}`}, + {"elide newline", "{{true}}\\\n ", noError, + `{{true}}" "`}, + // Errors. 
+ {"unclosed action", "hello{{range", hasError, ""}, + {"unmatched end", "{{end}}", hasError, ""}, + {"missing end", "hello{{range .x}}", hasError, ""}, + {"missing end after else", "hello{{range .x}}{{else}}", hasError, ""}, + {"undefined function", "hello{{undefined}}", hasError, ""}, + {"undefined variable", "{{$x}}", hasError, ""}, + {"variable undefined after end", "{{with $x := 4}}{{end}}{{$x}}", hasError, ""}, + {"variable undefined in template", "{{template $v}}", hasError, ""}, + {"declare with field", "{{with $x.Y := 4}}{{end}}", hasError, ""}, + {"template with field ref", "{{template .X}}", hasError, ""}, + {"template with var", "{{template $v}}", hasError, ""}, + {"invalid punctuation", "{{printf 3, 4}}", hasError, ""}, + {"multidecl outside range", "{{with $v, $u := 3}}{{end}}", hasError, ""}, + {"too many decls in range", "{{range $u, $v, $w := 3}}{{end}}", hasError, ""}, + {"dot applied to parentheses", "{{printf (printf .).}}", hasError, ""}, + {"adjacent args", "{{printf 3`x`}}", hasError, ""}, + {"adjacent args with .", "{{printf `x`.}}", hasError, ""}, + {"extra end after if", "{{if .X}}a{{else if .Y}}b{{end}}{{end}}", hasError, ""}, + {"invalid newline elision", "{{true}}\\{{true}}", hasError, ""}, + // Equals (and other chars) do not assignments make (yet). + {"bug0a", "{{$x := 0}}{{$x}}", noError, "{{$x := 0}}{{$x}}"}, + {"bug0b", "{{$x = 1}}{{$x}}", hasError, ""}, + {"bug0c", "{{$x ! 2}}{{$x}}", hasError, ""}, + {"bug0d", "{{$x % 3}}{{$x}}", hasError, ""}, + // Check the parse fails for := rather than comma. + {"bug0e", "{{range $x := $y := 3}}{{end}}", hasError, ""}, + // Another bug: variable read must ignore following punctuation. + {"bug1a", "{{$x:=.}}{{$x!2}}", hasError, ""}, // ! is just illegal here. + {"bug1b", "{{$x:=.}}{{$x+2}}", hasError, ""}, // $x+2 should not parse as ($x) (+2). + {"bug1c", "{{$x:=.}}{{$x +2}}", noError, "{{$x := .}}{{$x +2}}"}, // It's OK with a space. 
+} + +var builtins = map[string]interface{}{ + "printf": fmt.Sprintf, +} + +func testParse(doCopy bool, t *testing.T) { + textFormat = "%q" + defer func() { textFormat = "%s" }() + for _, test := range parseTests { + tmpl, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree), builtins) + switch { + case err == nil && !test.ok: + t.Errorf("%q: expected error; got none", test.name) + continue + case err != nil && test.ok: + t.Errorf("%q: unexpected error: %v", test.name, err) + continue + case err != nil && !test.ok: + // expected error, got one + if *debug { + fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err) + } + continue + } + var result string + if doCopy { + result = tmpl.Root.Copy().String() + } else { + result = tmpl.Root.String() + } + if result != test.result { + t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result) + } + } +} + +func TestParse(t *testing.T) { + testParse(false, t) +} + +// Same as TestParse, but we copy the node first +func TestParseCopy(t *testing.T) { + testParse(true, t) +} + +type isEmptyTest struct { + name string + input string + empty bool +} + +var isEmptyTests = []isEmptyTest{ + {"empty", ``, true}, + {"nonempty", `hello`, false}, + {"spaces only", " \t\n \t\n", true}, + {"definition", `{{define "x"}}something{{end}}`, true}, + {"definitions and space", "{{define `x`}}something{{end}}\n\n{{define `y`}}something{{end}}\n\n", true}, + {"definitions and text", "{{define `x`}}something{{end}}\nx\n{{define `y`}}something{{end}}\ny\n", false}, + {"definition and action", "{{define `x`}}something{{end}}{{if 3}}foo{{end}}", false}, +} + +func TestIsEmpty(t *testing.T) { + if !IsEmptyTree(nil) { + t.Errorf("nil tree is not empty") + } + for _, test := range isEmptyTests { + tree, err := New("root").Parse(test.input, "", "", make(map[string]*Tree), nil) + if err != nil { + t.Errorf("%q: unexpected error: %v", test.name, err) + continue + } + if empty := IsEmptyTree(tree.Root); empty != test.empty { + t.Errorf("%q: expected %t got %t", test.name, test.empty, empty) + } + } +} + +func TestErrorContextWithTreeCopy(t *testing.T) { + tree, err := New("root").Parse("{{if true}}{{end}}", "", "", make(map[string]*Tree), nil) + if err != nil { + t.Fatalf("unexpected tree parse failure: %v", err) + } + treeCopy := tree.Copy() + wantLocation, wantContext := tree.ErrorContext(tree.Root.Nodes[0]) + gotLocation, gotContext := treeCopy.ErrorContext(treeCopy.Root.Nodes[0]) + if wantLocation != gotLocation { + t.Errorf("wrong error location want %q got %q", wantLocation, gotLocation) + } + if wantContext != gotContext { + t.Errorf("wrong error location want %q got %q", wantContext, gotContext) + } +} + +// All failures, and the result is a string that must appear in the error message. +var errorTests = []parseTest{ + // Check line numbers are accurate. + {"unclosed1", + "line1\n{{", + hasError, `unclosed1:2: unexpected unclosed action in command`}, + {"unclosed2", + "line1\n{{define `x`}}line2\n{{", + hasError, `unclosed2:3: unexpected unclosed action in command`}, + // Specific errors. 
+ {"function", + "{{foo}}", + hasError, `function "foo" not defined`}, + {"comment", + "{{/*}}", + hasError, `unclosed comment`}, + {"lparen", + "{{.X (1 2 3}}", + hasError, `unclosed left paren`}, + {"rparen", + "{{.X 1 2 3)}}", + hasError, `unexpected ")"`}, + {"space", + "{{`x`3}}", + hasError, `missing space?`}, + {"idchar", + "{{a#}}", + hasError, `'#'`}, + {"charconst", + "{{'a}}", + hasError, `unterminated character constant`}, + {"stringconst", + `{{"a}}`, + hasError, `unterminated quoted string`}, + {"rawstringconst", + "{{`a}}", + hasError, `unterminated raw quoted string`}, + {"number", + "{{0xi}}", + hasError, `number syntax`}, + {"multidefine", + "{{define `a`}}a{{end}}{{define `a`}}b{{end}}", + hasError, `multiple definition of template`}, + {"eof", + "{{range .X}}", + hasError, `unexpected EOF`}, + {"variable", + // Declare $x so it's defined, to avoid that error, and then check we don't parse a declaration. + "{{$x := 23}}{{with $x.y := 3}}{{$x 23}}{{end}}", + hasError, `unexpected ":="`}, + {"multidecl", + "{{$a,$b,$c := 23}}", + hasError, `too many declarations`}, + {"undefvar", + "{{$a}}", + hasError, `undefined variable`}, +} + +func TestErrors(t *testing.T) { + for _, test := range errorTests { + _, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree)) + if err == nil { + t.Errorf("%q: expected error", test.name) + continue + } + if !strings.Contains(err.Error(), test.result) { + t.Errorf("%q: error %q does not contain %q", test.name, err, test.result) + } + } +} diff --git a/vendor/github.com/alecthomas/template/template.go b/vendor/github.com/alecthomas/template/template.go new file mode 100644 index 0000000..447ed2a --- /dev/null +++ b/vendor/github.com/alecthomas/template/template.go @@ -0,0 +1,218 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package template + +import ( + "fmt" + "reflect" + + "github.com/alecthomas/template/parse" +) + +// common holds the information shared by related templates. +type common struct { + tmpl map[string]*Template + // We use two maps, one for parsing and one for execution. + // This separation makes the API cleaner since it doesn't + // expose reflection to the client. + parseFuncs FuncMap + execFuncs map[string]reflect.Value +} + +// Template is the representation of a parsed template. The *parse.Tree +// field is exported only for use by html/template and should be treated +// as unexported by all other clients. +type Template struct { + name string + *parse.Tree + *common + leftDelim string + rightDelim string +} + +// New allocates a new template with the given name. +func New(name string) *Template { + return &Template{ + name: name, + } +} + +// Name returns the name of the template. +func (t *Template) Name() string { + return t.name +} + +// New allocates a new template associated with the given one and with the same +// delimiters. The association, which is transitive, allows one template to +// invoke another with a {{template}} action. 
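+// For example, a template created with t.New("footer") can be invoked from t
+// via {{template "footer"}}.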
+func (t *Template) New(name string) *Template { + t.init() + return &Template{ + name: name, + common: t.common, + leftDelim: t.leftDelim, + rightDelim: t.rightDelim, + } +} + +func (t *Template) init() { + if t.common == nil { + t.common = new(common) + t.tmpl = make(map[string]*Template) + t.parseFuncs = make(FuncMap) + t.execFuncs = make(map[string]reflect.Value) + } +} + +// Clone returns a duplicate of the template, including all associated +// templates. The actual representation is not copied, but the name space of +// associated templates is, so further calls to Parse in the copy will add +// templates to the copy but not to the original. Clone can be used to prepare +// common templates and use them with variant definitions for other templates +// by adding the variants after the clone is made. +func (t *Template) Clone() (*Template, error) { + nt := t.copy(nil) + nt.init() + nt.tmpl[t.name] = nt + for k, v := range t.tmpl { + if k == t.name { // Already installed. + continue + } + // The associated templates share nt's common structure. + tmpl := v.copy(nt.common) + nt.tmpl[k] = tmpl + } + for k, v := range t.parseFuncs { + nt.parseFuncs[k] = v + } + for k, v := range t.execFuncs { + nt.execFuncs[k] = v + } + return nt, nil +} + +// copy returns a shallow copy of t, with common set to the argument. +func (t *Template) copy(c *common) *Template { + nt := New(t.name) + nt.Tree = t.Tree + nt.common = c + nt.leftDelim = t.leftDelim + nt.rightDelim = t.rightDelim + return nt +} + +// AddParseTree creates a new template with the name and parse tree +// and associates it with t. +func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) { + if t.common != nil && t.tmpl[name] != nil { + return nil, fmt.Errorf("template: redefinition of template %q", name) + } + nt := t.New(name) + nt.Tree = tree + t.tmpl[name] = nt + return nt, nil +} + +// Templates returns a slice of the templates associated with t, including t +// itself. +func (t *Template) Templates() []*Template { + if t.common == nil { + return nil + } + // Return a slice so we don't expose the map. + m := make([]*Template, 0, len(t.tmpl)) + for _, v := range t.tmpl { + m = append(m, v) + } + return m +} + +// Delims sets the action delimiters to the specified strings, to be used in +// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template +// definitions will inherit the settings. An empty delimiter stands for the +// corresponding default: {{ or }}. +// The return value is the template, so calls can be chained. +func (t *Template) Delims(left, right string) *Template { + t.leftDelim = left + t.rightDelim = right + return t +} + +// Funcs adds the elements of the argument map to the template's function map. +// It panics if a value in the map is not a function with appropriate return +// type. However, it is legal to overwrite elements of the map. The return +// value is the template, so calls can be chained. +func (t *Template) Funcs(funcMap FuncMap) *Template { + t.init() + addValueFuncs(t.execFuncs, funcMap) + addFuncs(t.parseFuncs, funcMap) + return t +} + +// Lookup returns the template with the given name that is associated with t, +// or nil if there is no such template. +func (t *Template) Lookup(name string) *Template { + if t.common == nil { + return nil + } + return t.tmpl[name] +} + +// Parse parses a string into a template. Nested template definitions will be +// associated with the top-level template t. 
Parse may be called multiple times
+// to parse definitions of templates to associate with t. It is an error if a
+// resulting template is non-empty (contains content other than template
+// definitions) and would replace a non-empty template with the same name.
+// (In multiple calls to Parse with the same receiver template, only one call
+// can contain text other than space, comments, and template definitions.)
+func (t *Template) Parse(text string) (*Template, error) {
+	t.init()
+	trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins)
+	if err != nil {
+		return nil, err
+	}
+	// Add the newly parsed trees, including the one for t, into our common structure.
+	for name, tree := range trees {
+		// If the name we parsed is the name of this template, overwrite this template.
+		// The associate method checks it's not a redefinition.
+		tmpl := t
+		if name != t.name {
+			tmpl = t.New(name)
+		}
+		// Even if t == tmpl, we need to install it in the common.tmpl map.
+		if replace, err := t.associate(tmpl, tree); err != nil {
+			return nil, err
+		} else if replace {
+			tmpl.Tree = tree
+		}
+		tmpl.leftDelim = t.leftDelim
+		tmpl.rightDelim = t.rightDelim
+	}
+	return t, nil
+}
+
+// associate installs the new template into the group of templates associated
+// with t. It is an error to reuse a name except to overwrite an empty
+// template. The two are already known to share the common structure.
+// The boolean return value reports whether to store this tree as t.Tree.
+func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) {
+	if new.common != t.common {
+		panic("internal error: associate not common")
+	}
+	name := new.name
+	if old := t.tmpl[name]; old != nil {
+		oldIsEmpty := parse.IsEmptyTree(old.Root)
+		newIsEmpty := parse.IsEmptyTree(tree.Root)
+		if newIsEmpty {
+			// Whether old is empty or not, new is empty; no reason to replace old.
+ return false, nil + } + if !oldIsEmpty { + return false, fmt.Errorf("template: redefinition of template %q", name) + } + } + t.tmpl[name] = new + return true, nil +} diff --git a/vendor/github.com/alecthomas/template/testdata/file1.tmpl b/vendor/github.com/alecthomas/template/testdata/file1.tmpl new file mode 100644 index 0000000..febf9d9 --- /dev/null +++ b/vendor/github.com/alecthomas/template/testdata/file1.tmpl @@ -0,0 +1,2 @@ +{{define "x"}}TEXT{{end}} +{{define "dotV"}}{{.V}}{{end}} diff --git a/vendor/github.com/alecthomas/template/testdata/file2.tmpl b/vendor/github.com/alecthomas/template/testdata/file2.tmpl new file mode 100644 index 0000000..39bf6fb --- /dev/null +++ b/vendor/github.com/alecthomas/template/testdata/file2.tmpl @@ -0,0 +1,2 @@ +{{define "dot"}}{{.}}{{end}} +{{define "nested"}}{{template "dot" .}}{{end}} diff --git a/vendor/github.com/alecthomas/template/testdata/tmpl1.tmpl b/vendor/github.com/alecthomas/template/testdata/tmpl1.tmpl new file mode 100644 index 0000000..b72b3a3 --- /dev/null +++ b/vendor/github.com/alecthomas/template/testdata/tmpl1.tmpl @@ -0,0 +1,3 @@ +template1 +{{define "x"}}x{{end}} +{{template "y"}} diff --git a/vendor/github.com/alecthomas/template/testdata/tmpl2.tmpl b/vendor/github.com/alecthomas/template/testdata/tmpl2.tmpl new file mode 100644 index 0000000..16beba6 --- /dev/null +++ b/vendor/github.com/alecthomas/template/testdata/tmpl2.tmpl @@ -0,0 +1,3 @@ +template2 +{{define "y"}}y{{end}} +{{template "x"}} diff --git a/vendor/github.com/alecthomas/units/COPYING b/vendor/github.com/alecthomas/units/COPYING new file mode 100644 index 0000000..2993ec0 --- /dev/null +++ b/vendor/github.com/alecthomas/units/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2014 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/alecthomas/units/README.md b/vendor/github.com/alecthomas/units/README.md new file mode 100644 index 0000000..bee884e --- /dev/null +++ b/vendor/github.com/alecthomas/units/README.md @@ -0,0 +1,11 @@ +# Units - Helpful unit multipliers and functions for Go + +The goal of this package is to have functionality similar to the [time](http://golang.org/pkg/time/) package. 
+ +It allows for code like this: + +```go +n, err := ParseBase2Bytes("1KB") +// n == 1024 +n = units.Mebibyte * 512 +``` diff --git a/vendor/github.com/alecthomas/units/bytes.go b/vendor/github.com/alecthomas/units/bytes.go new file mode 100644 index 0000000..eaadeb8 --- /dev/null +++ b/vendor/github.com/alecthomas/units/bytes.go @@ -0,0 +1,83 @@ +package units + +// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte, +// etc.). +type Base2Bytes int64 + +// Base-2 byte units. +const ( + Kibibyte Base2Bytes = 1024 + KiB = Kibibyte + Mebibyte = Kibibyte * 1024 + MiB = Mebibyte + Gibibyte = Mebibyte * 1024 + GiB = Gibibyte + Tebibyte = Gibibyte * 1024 + TiB = Tebibyte + Pebibyte = Tebibyte * 1024 + PiB = Pebibyte + Exbibyte = Pebibyte * 1024 + EiB = Exbibyte +) + +var ( + bytesUnitMap = MakeUnitMap("iB", "B", 1024) + oldBytesUnitMap = MakeUnitMap("B", "B", 1024) +) + +// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB +// and KiB are both 1024. +func ParseBase2Bytes(s string) (Base2Bytes, error) { + n, err := ParseUnit(s, bytesUnitMap) + if err != nil { + n, err = ParseUnit(s, oldBytesUnitMap) + } + return Base2Bytes(n), err +} + +func (b Base2Bytes) String() string { + return ToString(int64(b), 1024, "iB", "B") +} + +var ( + metricBytesUnitMap = MakeUnitMap("B", "B", 1000) +) + +// MetricBytes are SI byte units (1000 bytes in a kilobyte). +type MetricBytes SI + +// SI base-10 byte units. +const ( + Kilobyte MetricBytes = 1000 + KB = Kilobyte + Megabyte = Kilobyte * 1000 + MB = Megabyte + Gigabyte = Megabyte * 1000 + GB = Gigabyte + Terabyte = Gigabyte * 1000 + TB = Terabyte + Petabyte = Terabyte * 1000 + PB = Petabyte + Exabyte = Petabyte * 1000 + EB = Exabyte +) + +// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes. +func ParseMetricBytes(s string) (MetricBytes, error) { + n, err := ParseUnit(s, metricBytesUnitMap) + return MetricBytes(n), err +} + +func (m MetricBytes) String() string { + return ToString(int64(m), 1000, "B", "B") +} + +// ParseStrictBytes supports both iB and B suffixes for base 2 and metric, +// respectively. That is, KiB represents 1024 and KB represents 1000. 
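+// For example, ParseStrictBytes("1.5KiB") returns 1536, while
+// ParseStrictBytes("1.5KB") returns 1500.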
+func ParseStrictBytes(s string) (int64, error) { + n, err := ParseUnit(s, bytesUnitMap) + if err != nil { + n, err = ParseUnit(s, metricBytesUnitMap) + } + return int64(n), err +} diff --git a/vendor/github.com/alecthomas/units/bytes_test.go b/vendor/github.com/alecthomas/units/bytes_test.go new file mode 100644 index 0000000..6cbc79d --- /dev/null +++ b/vendor/github.com/alecthomas/units/bytes_test.go @@ -0,0 +1,49 @@ +package units + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBase2BytesString(t *testing.T) { + assert.Equal(t, Base2Bytes(0).String(), "0B") + assert.Equal(t, Base2Bytes(1025).String(), "1KiB1B") + assert.Equal(t, Base2Bytes(1048577).String(), "1MiB1B") +} + +func TestParseBase2Bytes(t *testing.T) { + n, err := ParseBase2Bytes("0B") + assert.NoError(t, err) + assert.Equal(t, 0, int(n)) + n, err = ParseBase2Bytes("1KB") + assert.NoError(t, err) + assert.Equal(t, 1024, int(n)) + n, err = ParseBase2Bytes("1MB1KB25B") + assert.NoError(t, err) + assert.Equal(t, 1049625, int(n)) + n, err = ParseBase2Bytes("1.5MB") + assert.NoError(t, err) + assert.Equal(t, 1572864, int(n)) +} + +func TestMetricBytesString(t *testing.T) { + assert.Equal(t, MetricBytes(0).String(), "0B") + assert.Equal(t, MetricBytes(1001).String(), "1KB1B") + assert.Equal(t, MetricBytes(1001025).String(), "1MB1KB25B") +} + +func TestParseMetricBytes(t *testing.T) { + n, err := ParseMetricBytes("0B") + assert.NoError(t, err) + assert.Equal(t, 0, int(n)) + n, err = ParseMetricBytes("1KB1B") + assert.NoError(t, err) + assert.Equal(t, 1001, int(n)) + n, err = ParseMetricBytes("1MB1KB25B") + assert.NoError(t, err) + assert.Equal(t, 1001025, int(n)) + n, err = ParseMetricBytes("1.5MB") + assert.NoError(t, err) + assert.Equal(t, 1500000, int(n)) +} diff --git a/vendor/github.com/alecthomas/units/doc.go b/vendor/github.com/alecthomas/units/doc.go new file mode 100644 index 0000000..156ae38 --- /dev/null +++ b/vendor/github.com/alecthomas/units/doc.go @@ -0,0 +1,13 @@ +// Package units provides helpful unit multipliers and functions for Go. +// +// The goal of this package is to have functionality similar to the time [1] package. +// +// +// [1] http://golang.org/pkg/time/ +// +// It allows for code like this: +// +// n, err := ParseBase2Bytes("1KB") +// // n == 1024 +// n = units.Mebibyte * 512 +package units diff --git a/vendor/github.com/alecthomas/units/si.go b/vendor/github.com/alecthomas/units/si.go new file mode 100644 index 0000000..8234a9d --- /dev/null +++ b/vendor/github.com/alecthomas/units/si.go @@ -0,0 +1,26 @@ +package units + +// SI units. +type SI int64 + +// SI unit multiples. 
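+// For example, 3*Mega is 3,000,000 and Giga/Mega is 1000.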
+const ( + Kilo SI = 1000 + Mega = Kilo * 1000 + Giga = Mega * 1000 + Tera = Giga * 1000 + Peta = Tera * 1000 + Exa = Peta * 1000 +) + +func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 { + return map[string]float64{ + shortSuffix: 1, + "K" + suffix: float64(scale), + "M" + suffix: float64(scale * scale), + "G" + suffix: float64(scale * scale * scale), + "T" + suffix: float64(scale * scale * scale * scale), + "P" + suffix: float64(scale * scale * scale * scale * scale), + "E" + suffix: float64(scale * scale * scale * scale * scale * scale), + } +} diff --git a/vendor/github.com/alecthomas/units/util.go b/vendor/github.com/alecthomas/units/util.go new file mode 100644 index 0000000..6527e92 --- /dev/null +++ b/vendor/github.com/alecthomas/units/util.go @@ -0,0 +1,138 @@ +package units + +import ( + "errors" + "fmt" + "strings" +) + +var ( + siUnits = []string{"", "K", "M", "G", "T", "P", "E"} +) + +func ToString(n int64, scale int64, suffix, baseSuffix string) string { + mn := len(siUnits) + out := make([]string, mn) + for i, m := range siUnits { + if n%scale != 0 || i == 0 && n == 0 { + s := suffix + if i == 0 { + s = baseSuffix + } + out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s) + } + n /= scale + if n == 0 { + break + } + } + return strings.Join(out, "") +} + +// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123 +var errLeadingInt = errors.New("units: bad [0-9]*") // never printed + +// leadingInt consumes the leading [0-9]* from s. +func leadingInt(s string) (x int64, rem string, err error) { + i := 0 + for ; i < len(s); i++ { + c := s[i] + if c < '0' || c > '9' { + break + } + if x >= (1<<63-10)/10 { + // overflow + return 0, "", errLeadingInt + } + x = x*10 + int64(c) - '0' + } + return x, s[i:], nil +} + +func ParseUnit(s string, unitMap map[string]float64) (int64, error) { + // [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+ + orig := s + f := float64(0) + neg := false + + // Consume [-+]? + if s != "" { + c := s[0] + if c == '-' || c == '+' { + neg = c == '-' + s = s[1:] + } + } + // Special case: if all that is left is "0", this is zero. + if s == "0" { + return 0, nil + } + if s == "" { + return 0, errors.New("units: invalid " + orig) + } + for s != "" { + g := float64(0) // this element of the sequence + + var x int64 + var err error + + // The next character must be [0-9.] + if !(s[0] == '.' || ('0' <= s[0] && s[0] <= '9')) { + return 0, errors.New("units: invalid " + orig) + } + // Consume [0-9]* + pl := len(s) + x, s, err = leadingInt(s) + if err != nil { + return 0, errors.New("units: invalid " + orig) + } + g = float64(x) + pre := pl != len(s) // whether we consumed anything before a period + + // Consume (\.[0-9]*)? + post := false + if s != "" && s[0] == '.' { + s = s[1:] + pl := len(s) + x, s, err = leadingInt(s) + if err != nil { + return 0, errors.New("units: invalid " + orig) + } + scale := 1.0 + for n := pl - len(s); n > 0; n-- { + scale *= 10 + } + g += float64(x) / scale + post = pl != len(s) + } + if !pre && !post { + // no digits (e.g. ".s" or "-.s") + return 0, errors.New("units: invalid " + orig) + } + + // Consume unit. + i := 0 + for ; i < len(s); i++ { + c := s[i] + if c == '.' 
|| ('0' <= c && c <= '9') { + break + } + } + u := s[:i] + s = s[i:] + unit, ok := unitMap[u] + if !ok { + return 0, errors.New("units: unknown unit " + u + " in " + orig) + } + + f += g * unit + } + + if neg { + f = -f + } + if f < float64(-1<<63) || f > float64(1<<63-1) { + return 0, errors.New("units: overflow parsing unit") + } + return int64(f), nil +} diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml new file mode 100644 index 0000000..5c9c2a3 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/.travis.yml @@ -0,0 +1,8 @@ +language: go +go: + - tip +before_install: + - go get github.com/mattn/goveralls + - go get golang.org/x/tools/cmd/cover +script: + - $HOME/gopath/bin/goveralls -repotoken lAKAWPzcGsD3A8yBX3BGGtRUdJ6CaGERL diff --git a/vendor/github.com/mattn/go-runewidth/README.mkd b/vendor/github.com/mattn/go-runewidth/README.mkd new file mode 100644 index 0000000..ffb0edd --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/README.mkd @@ -0,0 +1,26 @@ +go-runewidth +============ + +[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth) +[![Coverage Status](https://coveralls.io/repos/mattn/go-runewidth/badge.png?branch=HEAD)](https://coveralls.io/r/mattn/go-runewidth?branch=HEAD) +[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth) + +Provides functions to get fixed width of the character or string. + +Usage +----- + +```go +runewidth.StringWidth("つのだ☆HIRO") == 12 +``` + + +Author +------ + +Yasuhiro Matsumoto + +License +------- + +under the MIT License: http://mattn.mit-license.org/2013 diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go new file mode 100644 index 0000000..3fbf33d --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth.go @@ -0,0 +1,464 @@ +package runewidth + +var EastAsianWidth = IsEastAsian() +var DefaultCondition = &Condition{EastAsianWidth} + +type interval struct { + first rune + last rune +} + +var combining = []interval{ + {0x0300, 0x036F}, {0x0483, 0x0486}, {0x0488, 0x0489}, + {0x0591, 0x05BD}, {0x05BF, 0x05BF}, {0x05C1, 0x05C2}, + {0x05C4, 0x05C5}, {0x05C7, 0x05C7}, {0x0600, 0x0603}, + {0x0610, 0x0615}, {0x064B, 0x065E}, {0x0670, 0x0670}, + {0x06D6, 0x06E4}, {0x06E7, 0x06E8}, {0x06EA, 0x06ED}, + {0x070F, 0x070F}, {0x0711, 0x0711}, {0x0730, 0x074A}, + {0x07A6, 0x07B0}, {0x07EB, 0x07F3}, {0x0901, 0x0902}, + {0x093C, 0x093C}, {0x0941, 0x0948}, {0x094D, 0x094D}, + {0x0951, 0x0954}, {0x0962, 0x0963}, {0x0981, 0x0981}, + {0x09BC, 0x09BC}, {0x09C1, 0x09C4}, {0x09CD, 0x09CD}, + {0x09E2, 0x09E3}, {0x0A01, 0x0A02}, {0x0A3C, 0x0A3C}, + {0x0A41, 0x0A42}, {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, + {0x0A70, 0x0A71}, {0x0A81, 0x0A82}, {0x0ABC, 0x0ABC}, + {0x0AC1, 0x0AC5}, {0x0AC7, 0x0AC8}, {0x0ACD, 0x0ACD}, + {0x0AE2, 0x0AE3}, {0x0B01, 0x0B01}, {0x0B3C, 0x0B3C}, + {0x0B3F, 0x0B3F}, {0x0B41, 0x0B43}, {0x0B4D, 0x0B4D}, + {0x0B56, 0x0B56}, {0x0B82, 0x0B82}, {0x0BC0, 0x0BC0}, + {0x0BCD, 0x0BCD}, {0x0C3E, 0x0C40}, {0x0C46, 0x0C48}, + {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56}, {0x0CBC, 0x0CBC}, + {0x0CBF, 0x0CBF}, {0x0CC6, 0x0CC6}, {0x0CCC, 0x0CCD}, + {0x0CE2, 0x0CE3}, {0x0D41, 0x0D43}, {0x0D4D, 0x0D4D}, + {0x0DCA, 0x0DCA}, {0x0DD2, 0x0DD4}, {0x0DD6, 0x0DD6}, + {0x0E31, 0x0E31}, {0x0E34, 0x0E3A}, {0x0E47, 0x0E4E}, + {0x0EB1, 0x0EB1}, {0x0EB4, 0x0EB9}, {0x0EBB, 0x0EBC}, + {0x0EC8, 0x0ECD}, {0x0F18, 0x0F19}, 
{0x0F35, 0x0F35}, + {0x0F37, 0x0F37}, {0x0F39, 0x0F39}, {0x0F71, 0x0F7E}, + {0x0F80, 0x0F84}, {0x0F86, 0x0F87}, {0x0F90, 0x0F97}, + {0x0F99, 0x0FBC}, {0x0FC6, 0x0FC6}, {0x102D, 0x1030}, + {0x1032, 0x1032}, {0x1036, 0x1037}, {0x1039, 0x1039}, + {0x1058, 0x1059}, {0x1160, 0x11FF}, {0x135F, 0x135F}, + {0x1712, 0x1714}, {0x1732, 0x1734}, {0x1752, 0x1753}, + {0x1772, 0x1773}, {0x17B4, 0x17B5}, {0x17B7, 0x17BD}, + {0x17C6, 0x17C6}, {0x17C9, 0x17D3}, {0x17DD, 0x17DD}, + {0x180B, 0x180D}, {0x18A9, 0x18A9}, {0x1920, 0x1922}, + {0x1927, 0x1928}, {0x1932, 0x1932}, {0x1939, 0x193B}, + {0x1A17, 0x1A18}, {0x1B00, 0x1B03}, {0x1B34, 0x1B34}, + {0x1B36, 0x1B3A}, {0x1B3C, 0x1B3C}, {0x1B42, 0x1B42}, + {0x1B6B, 0x1B73}, {0x1DC0, 0x1DCA}, {0x1DFE, 0x1DFF}, + {0x200B, 0x200F}, {0x202A, 0x202E}, {0x2060, 0x2063}, + {0x206A, 0x206F}, {0x20D0, 0x20EF}, {0x302A, 0x302F}, + {0x3099, 0x309A}, {0xA806, 0xA806}, {0xA80B, 0xA80B}, + {0xA825, 0xA826}, {0xFB1E, 0xFB1E}, {0xFE00, 0xFE0F}, + {0xFE20, 0xFE23}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, + {0x10A01, 0x10A03}, {0x10A05, 0x10A06}, {0x10A0C, 0x10A0F}, + {0x10A38, 0x10A3A}, {0x10A3F, 0x10A3F}, {0x1D167, 0x1D169}, + {0x1D173, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, + {0x1D242, 0x1D244}, {0xE0001, 0xE0001}, {0xE0020, 0xE007F}, + {0xE0100, 0xE01EF}, +} + +type ctype int + +const ( + narrow ctype = iota + ambiguous + wide + halfwidth + fullwidth + neutral +) + +type intervalType struct { + first rune + last rune + ctype ctype +} + +var ctypes = []intervalType{ + {0x0020, 0x007E, narrow}, + {0x00A1, 0x00A1, ambiguous}, + {0x00A2, 0x00A3, narrow}, + {0x00A4, 0x00A4, ambiguous}, + {0x00A5, 0x00A6, narrow}, + {0x00A7, 0x00A8, ambiguous}, + {0x00AA, 0x00AA, ambiguous}, + {0x00AC, 0x00AC, narrow}, + {0x00AD, 0x00AE, ambiguous}, + {0x00AF, 0x00AF, narrow}, + {0x00B0, 0x00B4, ambiguous}, + {0x00B6, 0x00BA, ambiguous}, + {0x00BC, 0x00BF, ambiguous}, + {0x00C6, 0x00C6, ambiguous}, + {0x00D0, 0x00D0, ambiguous}, + {0x00D7, 0x00D8, ambiguous}, + {0x00DE, 0x00E1, ambiguous}, + {0x00E6, 0x00E6, ambiguous}, + {0x00E8, 0x00EA, ambiguous}, + {0x00EC, 0x00ED, ambiguous}, + {0x00F0, 0x00F0, ambiguous}, + {0x00F2, 0x00F3, ambiguous}, + {0x00F7, 0x00FA, ambiguous}, + {0x00FC, 0x00FC, ambiguous}, + {0x00FE, 0x00FE, ambiguous}, + {0x0101, 0x0101, ambiguous}, + {0x0111, 0x0111, ambiguous}, + {0x0113, 0x0113, ambiguous}, + {0x011B, 0x011B, ambiguous}, + {0x0126, 0x0127, ambiguous}, + {0x012B, 0x012B, ambiguous}, + {0x0131, 0x0133, ambiguous}, + {0x0138, 0x0138, ambiguous}, + {0x013F, 0x0142, ambiguous}, + {0x0144, 0x0144, ambiguous}, + {0x0148, 0x014B, ambiguous}, + {0x014D, 0x014D, ambiguous}, + {0x0152, 0x0153, ambiguous}, + {0x0166, 0x0167, ambiguous}, + {0x016B, 0x016B, ambiguous}, + {0x01CE, 0x01CE, ambiguous}, + {0x01D0, 0x01D0, ambiguous}, + {0x01D2, 0x01D2, ambiguous}, + {0x01D4, 0x01D4, ambiguous}, + {0x01D6, 0x01D6, ambiguous}, + {0x01D8, 0x01D8, ambiguous}, + {0x01DA, 0x01DA, ambiguous}, + {0x01DC, 0x01DC, ambiguous}, + {0x0251, 0x0251, ambiguous}, + {0x0261, 0x0261, ambiguous}, + {0x02C4, 0x02C4, ambiguous}, + {0x02C7, 0x02C7, ambiguous}, + {0x02C9, 0x02CB, ambiguous}, + {0x02CD, 0x02CD, ambiguous}, + {0x02D0, 0x02D0, ambiguous}, + {0x02D8, 0x02DB, ambiguous}, + {0x02DD, 0x02DD, ambiguous}, + {0x02DF, 0x02DF, ambiguous}, + {0x0300, 0x036F, ambiguous}, + {0x0391, 0x03A2, ambiguous}, + {0x03A3, 0x03A9, ambiguous}, + {0x03B1, 0x03C1, ambiguous}, + {0x03C3, 0x03C9, ambiguous}, + {0x0401, 0x0401, ambiguous}, + {0x0410, 0x044F, ambiguous}, + {0x0451, 0x0451, ambiguous}, + {0x1100, 
0x115F, wide}, + {0x2010, 0x2010, ambiguous}, + {0x2013, 0x2016, ambiguous}, + {0x2018, 0x2019, ambiguous}, + {0x201C, 0x201D, ambiguous}, + {0x2020, 0x2022, ambiguous}, + {0x2024, 0x2027, ambiguous}, + {0x2030, 0x2030, ambiguous}, + {0x2032, 0x2033, ambiguous}, + {0x2035, 0x2035, ambiguous}, + {0x203B, 0x203B, ambiguous}, + {0x203E, 0x203E, ambiguous}, + {0x2074, 0x2074, ambiguous}, + {0x207F, 0x207F, ambiguous}, + {0x2081, 0x2084, ambiguous}, + {0x20A9, 0x20A9, halfwidth}, + {0x20AC, 0x20AC, ambiguous}, + {0x2103, 0x2103, ambiguous}, + {0x2105, 0x2105, ambiguous}, + {0x2109, 0x2109, ambiguous}, + {0x2113, 0x2113, ambiguous}, + {0x2116, 0x2116, ambiguous}, + {0x2121, 0x2122, ambiguous}, + {0x2126, 0x2126, ambiguous}, + {0x212B, 0x212B, ambiguous}, + {0x2153, 0x2154, ambiguous}, + {0x215B, 0x215E, ambiguous}, + {0x2160, 0x216B, ambiguous}, + {0x2170, 0x2179, ambiguous}, + {0x2189, 0x218A, ambiguous}, + {0x2190, 0x2199, ambiguous}, + {0x21B8, 0x21B9, ambiguous}, + {0x21D2, 0x21D2, ambiguous}, + {0x21D4, 0x21D4, ambiguous}, + {0x21E7, 0x21E7, ambiguous}, + {0x2200, 0x2200, ambiguous}, + {0x2202, 0x2203, ambiguous}, + {0x2207, 0x2208, ambiguous}, + {0x220B, 0x220B, ambiguous}, + {0x220F, 0x220F, ambiguous}, + {0x2211, 0x2211, ambiguous}, + {0x2215, 0x2215, ambiguous}, + {0x221A, 0x221A, ambiguous}, + {0x221D, 0x2220, ambiguous}, + {0x2223, 0x2223, ambiguous}, + {0x2225, 0x2225, ambiguous}, + {0x2227, 0x222C, ambiguous}, + {0x222E, 0x222E, ambiguous}, + {0x2234, 0x2237, ambiguous}, + {0x223C, 0x223D, ambiguous}, + {0x2248, 0x2248, ambiguous}, + {0x224C, 0x224C, ambiguous}, + {0x2252, 0x2252, ambiguous}, + {0x2260, 0x2261, ambiguous}, + {0x2264, 0x2267, ambiguous}, + {0x226A, 0x226B, ambiguous}, + {0x226E, 0x226F, ambiguous}, + {0x2282, 0x2283, ambiguous}, + {0x2286, 0x2287, ambiguous}, + {0x2295, 0x2295, ambiguous}, + {0x2299, 0x2299, ambiguous}, + {0x22A5, 0x22A5, ambiguous}, + {0x22BF, 0x22BF, ambiguous}, + {0x2312, 0x2312, ambiguous}, + {0x2329, 0x232A, wide}, + {0x2460, 0x24E9, ambiguous}, + {0x24EB, 0x254B, ambiguous}, + {0x2550, 0x2573, ambiguous}, + {0x2580, 0x258F, ambiguous}, + {0x2592, 0x2595, ambiguous}, + {0x25A0, 0x25A1, ambiguous}, + {0x25A3, 0x25A9, ambiguous}, + {0x25B2, 0x25B3, ambiguous}, + {0x25B6, 0x25B7, ambiguous}, + {0x25BC, 0x25BD, ambiguous}, + {0x25C0, 0x25C1, ambiguous}, + {0x25C6, 0x25C8, ambiguous}, + {0x25CB, 0x25CB, ambiguous}, + {0x25CE, 0x25D1, ambiguous}, + {0x25E2, 0x25E5, ambiguous}, + {0x25EF, 0x25EF, ambiguous}, + {0x2605, 0x2606, ambiguous}, + {0x2609, 0x2609, ambiguous}, + {0x260E, 0x260F, ambiguous}, + {0x2614, 0x2615, ambiguous}, + {0x261C, 0x261C, ambiguous}, + {0x261E, 0x261E, ambiguous}, + {0x2640, 0x2640, ambiguous}, + {0x2642, 0x2642, ambiguous}, + {0x2660, 0x2661, ambiguous}, + {0x2663, 0x2665, ambiguous}, + {0x2667, 0x266A, ambiguous}, + {0x266C, 0x266D, ambiguous}, + {0x266F, 0x266F, ambiguous}, + {0x269E, 0x269F, ambiguous}, + {0x26BE, 0x26BF, ambiguous}, + {0x26C4, 0x26CD, ambiguous}, + {0x26CF, 0x26E1, ambiguous}, + {0x26E3, 0x26E3, ambiguous}, + {0x26E8, 0x26FF, ambiguous}, + {0x273D, 0x273D, ambiguous}, + {0x2757, 0x2757, ambiguous}, + {0x2776, 0x277F, ambiguous}, + {0x27E6, 0x27ED, narrow}, + {0x2985, 0x2986, narrow}, + {0x2B55, 0x2B59, ambiguous}, + {0x2E80, 0x2E9A, wide}, + {0x2E9B, 0x2EF4, wide}, + {0x2F00, 0x2FD6, wide}, + {0x2FF0, 0x2FFC, wide}, + {0x3000, 0x3000, fullwidth}, + {0x3001, 0x303E, wide}, + {0x3041, 0x3097, wide}, + {0x3099, 0x3100, wide}, + {0x3105, 0x312E, wide}, + {0x3131, 0x318F, wide}, + {0x3190, 0x31BB, wide}, + 
{0x31C0, 0x31E4, wide}, + {0x31F0, 0x321F, wide}, + {0x3220, 0x3247, wide}, + {0x3248, 0x324F, ambiguous}, + {0x3250, 0x32FF, wide}, + {0x3300, 0x4DBF, wide}, + {0x4E00, 0xA48D, wide}, + {0xA490, 0xA4C7, wide}, + {0xA960, 0xA97D, wide}, + {0xAC00, 0xD7A4, wide}, + {0xE000, 0xF8FF, ambiguous}, + {0xF900, 0xFAFF, wide}, + {0xFE00, 0xFE0F, ambiguous}, + {0xFE10, 0xFE1A, wide}, + {0xFE30, 0xFE53, wide}, + {0xFE54, 0xFE67, wide}, + {0xFE68, 0xFE6C, wide}, + {0xFF01, 0xFF60, fullwidth}, + {0xFF61, 0xFFBF, halfwidth}, + {0xFFC2, 0xFFC8, halfwidth}, + {0xFFCA, 0xFFD0, halfwidth}, + {0xFFD2, 0xFFD8, halfwidth}, + {0xFFDA, 0xFFDD, halfwidth}, + {0xFFE0, 0xFFE7, fullwidth}, + {0xFFE8, 0xFFEF, halfwidth}, + {0xFFFD, 0xFFFE, ambiguous}, + {0x1B000, 0x1B002, wide}, + {0x1F100, 0x1F10A, ambiguous}, + {0x1F110, 0x1F12D, ambiguous}, + {0x1F130, 0x1F169, ambiguous}, + {0x1F170, 0x1F19B, ambiguous}, + {0x1F200, 0x1F203, wide}, + {0x1F210, 0x1F23B, wide}, + {0x1F240, 0x1F249, wide}, + {0x1F250, 0x1F252, wide}, + {0x20000, 0x2FFFE, wide}, + {0x30000, 0x3FFFE, wide}, + {0xE0100, 0xE01F0, ambiguous}, + {0xF0000, 0xFFFFD, ambiguous}, + {0x100000, 0x10FFFE, ambiguous}, +} + +type Condition struct { + EastAsianWidth bool +} + +func NewCondition() *Condition { + return &Condition{EastAsianWidth} +} + +// RuneWidth returns the number of cells in r. +// See http://www.unicode.org/reports/tr11/ +func (c *Condition) RuneWidth(r rune) int { + if r == 0 { + return 0 + } + if r < 32 || (r >= 0x7f && r < 0xa0) { + return 1 + } + for _, iv := range combining { + if iv.first <= r && r <= iv.last { + return 0 + } + } + + if c.EastAsianWidth && IsAmbiguousWidth(r) { + return 2 + } + + if r >= 0x1100 && + (r <= 0x115f || r == 0x2329 || r == 0x232a || + (r >= 0x2e80 && r <= 0xa4cf && r != 0x303f) || + (r >= 0xac00 && r <= 0xd7a3) || + (r >= 0xf900 && r <= 0xfaff) || + (r >= 0xfe30 && r <= 0xfe6f) || + (r >= 0xff00 && r <= 0xff60) || + (r >= 0xffe0 && r <= 0xffe6) || + (r >= 0x20000 && r <= 0x2fffd) || + (r >= 0x30000 && r <= 0x3fffd)) { + return 2 + } + return 1 +} + +func (c *Condition) StringWidth(s string) (width int) { + for _, r := range []rune(s) { + width += c.RuneWidth(r) + } + return width +} + +func (c *Condition) Truncate(s string, w int, tail string) string { + if c.StringWidth(s) <= w { + return s + } + r := []rune(s) + tw := c.StringWidth(tail) + w -= tw + width := 0 + i := 0 + for ; i < len(r); i++ { + cw := c.RuneWidth(r[i]) + if width+cw > w { + break + } + width += cw + } + return string(r[0:i]) + tail +} + +func (c *Condition) Wrap(s string, w int) string { + width := 0 + out := "" + for _, r := range []rune(s) { + cw := RuneWidth(r) + if r == '\n' { + out += string(r) + width = 0 + continue + } else if width+cw > w { + out += "\n" + width = 0 + out += string(r) + width += cw + continue + } + out += string(r) + width += cw + } + return out +} + +func (c *Condition) FillLeft(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return string(b) + s + } + return s +} + +func (c *Condition) FillRight(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return s + string(b) + } + return s +} + +// RuneWidth returns the number of cells in r. 
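+// For example, RuneWidth('世') is 2 and RuneWidth('a') is 1 under the default
+// condition, while combining marks such as U+0300 report 0.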
+// See http://www.unicode.org/reports/tr11/ +func RuneWidth(r rune) int { + return DefaultCondition.RuneWidth(r) +} + +func ct(r rune) ctype { + for _, iv := range ctypes { + if iv.first <= r && r <= iv.last { + return iv.ctype + } + } + return neutral +} + +// IsAmbiguousWidth returns whether is ambiguous width or not. +func IsAmbiguousWidth(r rune) bool { + return ct(r) == ambiguous +} + +// IsAmbiguousWidth returns whether is ambiguous width or not. +func IsNeutralWidth(r rune) bool { + return ct(r) == neutral +} + +func StringWidth(s string) (width int) { + return DefaultCondition.StringWidth(s) +} + +func Truncate(s string, w int, tail string) string { + return DefaultCondition.Truncate(s, w, tail) +} + +func Wrap(s string, w int) string { + return DefaultCondition.Wrap(s, w) +} + +func FillLeft(s string, w int) string { + return DefaultCondition.FillLeft(s, w) +} + +func FillRight(s string, w int) string { + return DefaultCondition.FillRight(s, w) +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go new file mode 100644 index 0000000..0ce32c5 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go @@ -0,0 +1,8 @@ +// +build js + +package runewidth + +func IsEastAsian() bool { + // TODO: Implement this for the web. Detect east asian in a compatible way, and return true. + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go new file mode 100644 index 0000000..a449590 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go @@ -0,0 +1,69 @@ +// +build !windows,!js + +package runewidth + +import ( + "os" + "regexp" + "strings" +) + +var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`) + +func IsEastAsian() bool { + locale := os.Getenv("LC_CTYPE") + if locale == "" { + locale = os.Getenv("LANG") + } + + // ignore C locale + if locale == "POSIX" || locale == "C" { + return false + } + if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' 
|| locale[1] == '-') { + return false + } + + charset := strings.ToLower(locale) + r := reLoc.FindStringSubmatch(locale) + if len(r) == 2 { + charset = strings.ToLower(r[1]) + } + + if strings.HasSuffix(charset, "@cjk_narrow") { + return false + } + + for pos, b := range []byte(charset) { + if b == '@' { + charset = charset[:pos] + break + } + } + + mbc_max := 1 + switch charset { + case "utf-8", "utf8": + mbc_max = 6 + case "jis": + mbc_max = 8 + case "eucjp": + mbc_max = 3 + case "euckr", "euccn": + mbc_max = 2 + case "sjis", "cp932", "cp51932", "cp936", "cp949", "cp950": + mbc_max = 2 + case "big5": + mbc_max = 2 + case "gbk", "gb2312": + mbc_max = 2 + } + + if mbc_max > 1 && (charset[0] != 'u' || + strings.HasPrefix(locale, "ja") || + strings.HasPrefix(locale, "ko") || + strings.HasPrefix(locale, "zh")) { + return true + } + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_test.go b/vendor/github.com/mattn/go-runewidth/runewidth_test.go new file mode 100644 index 0000000..f943128 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_test.go @@ -0,0 +1,229 @@ +package runewidth + +import ( + "testing" +) + +var runewidthtests = []struct { + in rune + out int +}{ + {'世', 2}, + {'界', 2}, + {'セ', 1}, + {'カ', 1}, + {'イ', 1}, + {'☆', 2}, // double width in ambiguous + {'\x00', 0}, + {'\x01', 1}, + {'\u0300', 0}, +} + +func TestRuneWidth(t *testing.T) { + c := NewCondition() + c.EastAsianWidth = true + for _, tt := range runewidthtests { + if out := c.RuneWidth(tt.in); out != tt.out { + t.Errorf("Width(%q) = %q, want %q", tt.in, out, tt.out) + } + } +} + +var isambiguouswidthtests = []struct { + in rune + out bool +}{ + {'世', false}, + {'■', true}, + {'界', false}, + {'○', true}, + {'㈱', false}, + {'①', true}, + {'②', true}, + {'③', true}, + {'④', true}, + {'⑤', true}, + {'⑥', true}, + {'⑦', true}, + {'⑧', true}, + {'⑨', true}, + {'⑩', true}, + {'⑪', true}, + {'⑫', true}, + {'⑬', true}, + {'⑭', true}, + {'⑮', true}, + {'⑯', true}, + {'⑰', true}, + {'⑱', true}, + {'⑲', true}, + {'⑳', true}, + {'☆', true}, +} + +func TestIsAmbiguousWidth(t *testing.T) { + for _, tt := range isambiguouswidthtests { + if out := IsAmbiguousWidth(tt.in); out != tt.out { + t.Errorf("IsAmbiguousWidth(%q) = %q, want %q", tt.in, out, tt.out) + } + } +} + +var stringwidthtests = []struct { + in string + out int +}{ + {"■㈱の世界①", 12}, + {"スター☆", 8}, +} + +func TestStringWidth(t *testing.T) { + c := NewCondition() + c.EastAsianWidth = true + for _, tt := range stringwidthtests { + if out := c.StringWidth(tt.in); out != tt.out { + t.Errorf("StringWidth(%q) = %q, want %q", tt.in, out, tt.out) + } + } +} + +func TestStringWidthInvalid(t *testing.T) { + s := "こんにちわ\x00世界" + if out := StringWidth(s); out != 14 { + t.Errorf("StringWidth(%q) = %q, want %q", s, out, 14) + } +} + +func TestTruncateSmaller(t *testing.T) { + s := "あいうえお" + expected := "あいうえお" + + if out := Truncate(s, 10, "..."); out != expected { + t.Errorf("Truncate(%q) = %q, want %q", s, out, expected) + } +} + +func TestTruncate(t *testing.T) { + s := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおおお" + expected := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおお..." 
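+ // Truncate reserves room for the tail: with w=80 and tail "..." (width 3)
+ // at most 77 cells of s survive; every rune here is 2 cells wide, so the
+ // body stops at 76 and the final width checked below is 79.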
+ + out := Truncate(s, 80, "...") + if out != expected { + t.Errorf("Truncate(%q) = %q, want %q", s, out, expected) + } + width := StringWidth(out) + if width != 79 { + t.Errorf("width of Truncate(%q) should be %d, but %d", s, 79, width) + } +} + +func TestTruncateFit(t *testing.T) { + s := "aあいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおおお" + expected := "aあいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおお..." + + out := Truncate(s, 80, "...") + if out != expected { + t.Errorf("Truncate(%q) = %q, want %q", s, out, expected) + } + width := StringWidth(out) + if width != 80 { + t.Errorf("width of Truncate(%q) should be %d, but %d", s, 80, width) + } +} + +func TestTruncateJustFit(t *testing.T) { + s := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおお" + expected := "あいうえおあいうえおえおおおおおおおおおおおおおおおおおおおおおおおおおおおおお" + + out := Truncate(s, 80, "...") + if out != expected { + t.Errorf("Truncate(%q) = %q, want %q", s, out, expected) + } + width := StringWidth(out) + if width != 80 { + t.Errorf("width of Truncate(%q) should be %d, but %d", s, 80, width) + } +} + +func TestWrap(t *testing.T) { + s := `東京特許許可局局長はよく柿喰う客だ/東京特許許可局局長はよく柿喰う客だ +123456789012345678901234567890 + +END` + expected := `東京特許許可局局長はよく柿喰う +客だ/東京特許許可局局長はよく +柿喰う客だ +123456789012345678901234567890 + +END` + + if out := Wrap(s, 30); out != expected { + t.Errorf("Wrap(%q) = %q, want %q", s, out, expected) + } +} + +func TestTruncateNoNeeded(t *testing.T) { + s := "あいうえおあい" + expected := "あいうえおあい" + + if out := Truncate(s, 80, "..."); out != expected { + t.Errorf("Truncate(%q) = %q, want %q", s, out, expected) + } +} + +var isneutralwidthtests = []struct { + in rune + out bool +}{ + {'→', false}, + {'┊', false}, + {'┈', false}, + {'~', false}, + {'└', false}, + {'⣀', true}, + {'⣀', true}, +} + +func TestIsNeutralWidth(t *testing.T) { + for _, tt := range isneutralwidthtests { + if out := IsNeutralWidth(tt.in); out != tt.out { + t.Errorf("IsNeutralWidth(%q) = %q, want %q", tt.in, out, tt.out) + } + } +} + +func TestFillLeft(t *testing.T) { + s := "あxいうえお" + expected := " あxいうえお" + + if out := FillLeft(s, 15); out != expected { + t.Errorf("FillLeft(%q) = %q, want %q", s, out, expected) + } +} + +func TestFillLeftFit(t *testing.T) { + s := "あいうえお" + expected := "あいうえお" + + if out := FillLeft(s, 10); out != expected { + t.Errorf("FillLeft(%q) = %q, want %q", s, out, expected) + } +} + +func TestFillRight(t *testing.T) { + s := "あxいうえお" + expected := "あxいうえお " + + if out := FillRight(s, 15); out != expected { + t.Errorf("FillRight(%q) = %q, want %q", s, out, expected) + } +} + +func TestFillRightFit(t *testing.T) { + s := "あいうえお" + expected := "あいうえお" + + if out := FillRight(s, 10); out != expected { + t.Errorf("FillRight(%q) = %q, want %q", s, out, expected) + } +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go new file mode 100644 index 0000000..bdd8445 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go @@ -0,0 +1,24 @@ +package runewidth + +import ( + "syscall" +) + +var ( + kernel32 = syscall.NewLazyDLL("kernel32") + procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP") +) + +func IsEastAsian() bool { + r1, _, _ := procGetConsoleOutputCP.Call() + if r1 == 0 { + return false + } + + switch int(r1) { + case 932, 51932, 936, 949, 950: + return true + } + + return false +} diff --git a/vendor/github.com/najeira/ltsv/LICENSE b/vendor/github.com/najeira/ltsv/LICENSE new file mode 100644 index 0000000..d01fcf5 --- /dev/null +++ 
b/vendor/github.com/najeira/ltsv/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013 Tetsuhiro Ueda (najeira), All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of najeira. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/najeira/ltsv/README.rst b/vendor/github.com/najeira/ltsv/README.rst new file mode 100644 index 0000000..27d84e4 --- /dev/null +++ b/vendor/github.com/najeira/ltsv/README.rst @@ -0,0 +1,98 @@ +ltsv +==== + +LTSV (Labeled Tab-separated Values) reader/writer for Go language. + +About LTSV: http://ltsv.org/ + + Labeled Tab-separated Values (LTSV) format is a variant of + Tab-separated Values (TSV).Each record in a LTSV file is represented + as a single line. Each field is separated by TAB and has a label and + a value. The label and the value have been separated by ':'. With + the LTSV format, you can parse each line by spliting with TAB (like + original TSV format) easily, and extend any fields with unique labels + in no particular order. 
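+
+For example, this single line is one LTSV record with three labeled
+fields, separated by TAB characters (the values echo this package's
+test data)::
+
+    host:127.0.0.1	status:200	size:2326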
+ + +Example +======= + +Reader +------ + +:: + + package main + + import ( + "bytes" + "fmt" + "github.com/najeira/ltsv" + ) + + func main() { + data := ` + time:05/Feb/2013:15:34:47 +0000 host:192.168.50.1 req:GET / HTTP/1.1 status:200 + time:05/Feb/2013:15:35:15 +0000 host:192.168.50.1 req:GET /foo HTTP/1.1 status:200 + time:05/Feb/2013:15:35:54 +0000 host:192.168.50.1 req:GET /bar HTTP/1.1 status:404 + ` + b := bytes.NewBufferString(data) + + // Read LTSV file into map[string]string + reader := ltsv.NewReader(b) + records, err := reader.ReadAll() + if err != nil { + panic(err) + } + + // dump + for i, record := range records { + fmt.Printf("===== Data %d\n", i) + for k, v := range record { + fmt.Printf("\t%s --> %s\n", k, v) + } + } + } + + +Writer +------ + +:: + + package main + + import ( + "fmt" + "bytes" + "github.com/najeira/ltsv" + ) + + func main() { + data := []map[string]string { + {"time": "05/Feb/2013:15:34:47 +0000", "host": "192.168.50.1", "req": "GET / HTTP/1.1", "status": "200"}, + {"time": "05/Feb/2013:15:35:15 +0000", "host": "192.168.50.1", "req": "GET /foo HTTP/1.1", "status": "200"}, + {"time": "05/Feb/2013:15:35:54 +0000", "host": "192.168.50.1", "req": "GET /bar HTTP/1.1", "status": "404"}, + } + + b := &bytes.Buffer{} + writer := ltsv.NewWriter(b) + err := writer.WriteAll(data) + if err != nil { + panic(err) + } + fmt.Printf("%v", b.String()) + } + + +License +======= + +New BSD License. + + +Links +===== + +- http://ltsv.org/ +- https://github.com/ymotongpoo/goltsv LTSV package by ymotongpoo diff --git a/vendor/github.com/najeira/ltsv/ltsv_test.go b/vendor/github.com/najeira/ltsv/ltsv_test.go new file mode 100644 index 0000000..1c5e47e --- /dev/null +++ b/vendor/github.com/najeira/ltsv/ltsv_test.go @@ -0,0 +1,318 @@ +package ltsv + +import ( + "bytes" + "fmt" + "io" + "testing" +) + +type readerTest struct { + value string + records []map[string]string +} + +var readerTests = []readerTest{ + { + `host:127.0.0.1 ident:- user:frank time:[10/Oct/2000:13:55:36 -0700] req:GET /apache_pb.gif + +HTTP/1.0 status:200 size:2326 referer:http://www.example.com/start.html ua:Mozilla/4.08 [en] (Win98; I ;Nav) +`, + []map[string]string{ + {"host": "127.0.0.1", "ident": "-", "user": "frank", "time": "[10/Oct/2000:13:55:36 -0700]", "req": "GET /apache_pb.gif"}, + {"status": "200", "size": "2326", "referer": "http://www.example.com/start.html", "ua": "Mozilla/4.08 [en] (Win98; I ;Nav)"}, + }, + }, + { + ` trimspace :こんにちは + trim space :こんばんは +日本語:ラベル +nolabelnofield +ha,s.p-un_ct: おはよう `, + []map[string]string{ + {"trimspace": "こんにちは"}, + {"trim space": "こんばんは"}, + {"日本語": "ラベル"}, + {"ha,s.p-un_ct": " おはよう "}, + }, + }, + { + `label:こんにちは こんばんは +label2:こんばんは +こんにちは`, + []map[string]string{ + {"label": "こんにちは"}, + {"label2": "こんばんは"}, + }, + }, + { + ` +hoge:foo bar:baz +perl:5.17.8 ruby:2.0 python:2.6 +sushi:寿司 tennpura:天ぷら ramen:ラーメン gyoza:餃子 + `, + []map[string]string{ + {"hoge": "foo", "bar": "baz"}, + {"perl": "5.17.8", "ruby": "2.0", "python": "2.6"}, + {"sushi": "寿司", "tennpura": "天ぷら", "ramen": "ラーメン", "gyoza": "餃子"}, + }, + }, + { + ` +hoge: bar: +perl: ruby: + `, + []map[string]string{ + {"hoge": "", "bar": ""}, + {"perl": "", "ruby": ""}, + }, + }, +} + +type structRecordA struct { + host string + ident string + user string + time string + req string +} + +type structRecordB struct { + status int + size int + referer string + ua string +} + +type structTest struct { + value string + records []interface{} +} + +var structTests = []structTest{ + { + `host:127.0.0.1 
ident:- user:frank time:[10/Oct/2000:13:55:36 -0700] req:GET /apache_pb.gif +status:200 size:2326 referer:http://www.example.com/start.html ua:Mozilla/4.08 [en] (Win98; I ;Nav) +`, + []interface{}{ + &structRecordA{host: "127.0.0.1", ident: "-", user: "frank", time: "[10/Oct/2000:13:55:36 -0700]", req: "GET /apache_pb.gif"}, + &structRecordB{status: 200, size: 2326, referer: "http://www.example.com/start.html", ua: "Mozilla/4.08 [en] (Win98; I ;Nav)"}, + }, + }, +} + +type structRecordC struct { + Host string `ltsv:"host"` + Ident string `ltsv:"ident"` + User string `ltsv:"user"` + Time string `ltsv:"time"` + Req string `ltsv:"req"` +} + +func (s *structRecordC) String() string { + return fmt.Sprintf("host:%v ident:%v user:%v time:%v req:%v", + s.Host, s.Ident, s.User, s.Time, s.Req) +} + +type structLoadTest struct { + value string + records []structRecordC +} + +var structLoadTests = []structLoadTest{ + { + `host:127.0.0.1 ident:- user:frank time:[10/Oct/2000:13:55:36 -0700] req:GET /apache_pb.gif +`, + []structRecordC{ + structRecordC{Host: "127.0.0.1", Ident: "-", User: "frank", Time: "[10/Oct/2000:13:55:36 -0700]", Req: "GET /apache_pb.gif"}, + }, + }, +} + +func TestReaderRead(t *testing.T) { + for n, test := range readerTests { + reader := NewReader(bytes.NewBufferString(test.value)) + for i, result := range test.records { + record, err := reader.Read() + if err != nil { + t.Errorf("error %v at test %d, line %d", err, n, i) + } + for label, field := range result { + if record[label] != field { + t.Errorf("wrong field %v at test %d, line %d, label %s, field %s", record[label], n, i, label, field) + } + } + if len(result) != len(record) { + t.Errorf("wrong size of record %d at test %d, line %d", len(record), n, i) + } + } + _, err := reader.Read() + if err == nil || err != io.EOF { + t.Errorf("expected EOF got %v at test %d", err, n) + } + } +} + +func TestReaderLoad(t *testing.T) { + for n, test := range structLoadTests { + reader := NewReader(bytes.NewBufferString(test.value)) + for i, result := range test.records { + st := structRecordC{} + err := reader.Load(&st) + if err != nil { + t.Errorf("error %v at test %d, line %d", err, n, i) + } + if st.String() != result.String() { + t.Errorf("got: %s", st.String()) + } + } + st := structRecordC{} + err := reader.Load(&st) + if err == nil || err != io.EOF { + t.Errorf("expected EOF got %v at test %d", err, n) + } + } +} + +func TestReaderReadAll(t *testing.T) { + for n, test := range readerTests { + reader := NewReader(bytes.NewBufferString(test.value)) + records, err := reader.ReadAll() + if err != nil { + t.Errorf("error %v at test %d", err, n) + } + if len(test.records) != len(records) { + t.Errorf("wrong size of records %d at test %d", len(records), n) + } else { + for i, result := range test.records { + record := records[i] + for label, field := range result { + if record[label] != field { + t.Errorf("wrong field %v at test %d, line %d, label %s, field %s", record[label], n, i, label, field) + } + } + if len(result) != len(record) { + t.Errorf("wrong size of record %d at test %d, line %d", len(record), n, i) + } + } + } + } +} + +func TestWriterWrite(t *testing.T) { + var buf bytes.Buffer + for n, test := range readerTests { + buf.Reset() + writer := NewWriter(&buf) + for i, record := range test.records { + err := writer.Write(record) + if err != nil { + t.Errorf("error %v at test %d, line %d", err, n, i) + } + } + writer.Flush() + + reader := NewReader(&buf) + records, err := reader.ReadAll() + if err != nil { + t.Errorf("error %v 
at test %d", err, n) + continue + } + if len(records) != len(test.records) { + t.Errorf("wrong size of records %d at test %d", len(records), n) + } else { + for i := 0; i < len(test.records); i++ { + record := records[i] + result := test.records[i] + for label, field := range result { + if field != record[label] { + t.Errorf("wrong field %s at test %d, line %d, label %s, field %s", record[label], n, i, label, field) + } + } + } + } + } +} + +func TestWriterWriteStruct(t *testing.T) { + var buf bytes.Buffer + for n, test := range structTests { + buf.Reset() + writer := NewWriter(&buf) + for i, record := range test.records { + err := writer.Write(record) + if err != nil { + t.Errorf("error %v at test %d, line %d", err, n, i) + } + } + writer.Flush() + if buf.String() != test.value { + t.Errorf("expect:\n%s\ngot:\n%v\n", test.value, buf.String()) + } + } +} + +func TestWriterWriteAll(t *testing.T) { + var buf bytes.Buffer + for n, test := range readerTests { + buf.Reset() + writer := NewWriter(&buf) + writer.WriteAll(test.records) + + reader := NewReader(&buf) + records, err := reader.ReadAll() + if err != nil { + t.Errorf("error %v at test %d", err, n) + continue + } + if len(records) != len(test.records) { + t.Errorf("wrong size of records %d at test %d", len(records), n) + } else { + for i := 0; i < len(test.records); i++ { + record := records[i] + result := test.records[i] + for label, field := range result { + if field != record[label] { + t.Errorf("wrong field %s at test %d, line %d, label %s, field %s", record[label], n, i, label, field) + } + } + } + } + } +} + +func TestWriterWriteAllStruct(t *testing.T) { + var buf bytes.Buffer + for _, test := range structTests { + buf.Reset() + writer := NewWriter(&buf) + writer.WriteAll(test.records) + if buf.String() != test.value { + t.Errorf("expect:\n%s\ngot:\n%v\n", test.value, buf.String()) + } + } +} + +func BenchmarkReaderRead(b *testing.B) { + for i := 0; i < 10000; i++ { + reader := NewReader(bytes.NewBufferString(readerTests[3].value)) + _, e := reader.Read() + if e != nil { + break + } + } +} + +func BenchmarkWriterWrite(b *testing.B) { + var buf bytes.Buffer + for i := 0; i < 10000; i++ { + for _, test := range readerTests { + buf.Reset() + writer := NewWriter(&buf) + for _, record := range test.records { + writer.Write(record) + } + writer.Flush() + } + } +} diff --git a/vendor/github.com/najeira/ltsv/reader.go b/vendor/github.com/najeira/ltsv/reader.go new file mode 100644 index 0000000..a00aa04 --- /dev/null +++ b/vendor/github.com/najeira/ltsv/reader.go @@ -0,0 +1,240 @@ +// based on encoding/csv +package ltsv + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "reflect" + "strings" + "unicode" +) + +// A Reader reads records from a LTSV-encoded file. +// +// As returned by NewReader, a Reader expects input LTSV-encoded file. +// The exported fields can be changed to customize the details before the +// first call to Read or ReadAll. +// +// Delimiter is the field delimiter. It defaults to '\t'. +// +// Comment, if not 0, is the comment character. Lines beginning with the +// Comment character are ignored. +type Reader struct { + Delimiter rune // Field delimiter (set to '\t' by NewReader) + Comment rune // Comment character for start of line + line int + r *bufio.Reader + label bytes.Buffer + field bytes.Buffer +} + +type structLabelIndexMap map[string]int + +// NewReader returns a new Reader that reads from r. 
+func NewReader(r io.Reader) *Reader { + return &Reader{ + Delimiter: '\t', + r: bufio.NewReader(r), + } +} + +type recorder interface { + record(label, field string) error + len() int +} + +type mapRecorder struct { + records map[string]string +} + +func (r *mapRecorder) record(label, field string) error { + r.records[label] = field + return nil +} + +func (r *mapRecorder) len() int { + return len(r.records) +} + +type structRecorder struct { + count int + value reflect.Value + labels structLabelIndexMap +} + +func (r *structRecorder) record(label, field string) error { + idx, ok := r.labels[label] + if !ok { + return nil + } + r.value.Field(idx).SetString(field) + r.count++ + return nil +} + +func (r *structRecorder) len() int { + return r.count +} + +// Read reads one record from r. The record is a slice of strings with each +// string representing one field. +func (r *Reader) Read() (map[string]string, error) { + rec := mapRecorder{records: make(map[string]string)} + err := r.readRecord(&rec) + if err != nil { + return nil, err + } + return rec.records, nil +} + +func (r *Reader) readRecord(rec recorder) error { + r.line++ + for { + cm, err := r.readComment() + if err != nil { + return err + } else if cm { + continue + } + + label, end, err := r.parseLabel() + if err != nil { + return err + } + if label == "" { + if end { + if rec.len() >= 1 { + return nil + } + } + continue // skip a empty line + } + + field, end, err := r.parseField() + if err != nil { + return err + } + + err = rec.record(label, field) + if err != nil { + return err + } + if end { + return nil + } + } + panic("unreachable") +} + +func (r *Reader) readComment() (bool, error) { + // If we are support comments and it is the comment character + // then skip to the end of line. + r1, err := r.readRune() + if r.Comment != 0 && r1 == r.Comment { + for { + r1, err = r.readRune() + if err != nil { + return false, err + } else if r1 == '\n' { + return true, nil + } + } + } + r.r.UnreadRune() + return false, nil +} + +func (r *Reader) Load(record interface{}) error { + v := reflect.ValueOf(record) + k := v.Kind() + if (k != reflect.Ptr && k != reflect.Interface) || v.IsNil() { + return ErrUnsupportedType + } + e := v.Elem() + rec := structRecorder{value: e, labels: structLabelIndex(e)} + err := r.readRecord(&rec) + if err != nil { + return err + } + return nil +} + +func (r *Reader) parseLabel() (string, bool, error) { + r.label.Reset() + for { + r1, err := r.readRune() + if err != nil { + return "", false, err + } else if r1 == ':' { + return strings.TrimSpace(r.label.String()), false, nil + } else if r1 == '\n' { + return "", true, nil + } else if r1 == '\t' { + return "", false, nil // no label + } else if unicode.IsControl(r1) || !unicode.IsPrint(r1) { + return "", false, errors.New(fmt.Sprintf("line %d: invalid rune at label", r.line)) + } + r.label.WriteRune(r1) + } + panic("unreachable") +} + +func (r *Reader) parseField() (string, bool, error) { + r.field.Reset() + for { + r1, err := r.readRune() + if err != nil { + if err == io.EOF { + return r.field.String(), true, nil + } + return "", false, err + } else if r1 == '\t' { + return r.field.String(), false, nil + } else if r1 == '\n' { + return r.field.String(), true, nil + } + r.field.WriteRune(r1) + } + panic("unreachable") +} + +func (r *Reader) readRune() (rune, error) { + r1, _, err := r.r.ReadRune() + if r1 == '\r' { + r1, _, err = r.r.ReadRune() + if err == nil { + if r1 != '\n' { + r.r.UnreadRune() + r1 = '\r' + } + } + } + return r1, err +} + +func (r *Reader) 
ReadAll() ([]map[string]string, error) { + records := make([]map[string]string, 0) + for { + record, err := r.Read() + if err == io.EOF { + return records, nil + } + if err != nil { + return nil, err + } + records = append(records, record) + } + panic("unreachable") +} + +func structLabelIndex(v reflect.Value) structLabelIndexMap { + sil := structIndexLabel(v) + sli := make(structLabelIndexMap) + for i, l := range sil { + sli[l] = i + } + return sli +} diff --git a/vendor/github.com/najeira/ltsv/writer.go b/vendor/github.com/najeira/ltsv/writer.go new file mode 100644 index 0000000..9a13008 --- /dev/null +++ b/vendor/github.com/najeira/ltsv/writer.go @@ -0,0 +1,351 @@ +// based on encoding/csv +package ltsv + +import ( + "bufio" + "errors" + "io" + "reflect" + "strconv" + "strings" + "sync" + "unicode" +) + +// A Writer writes records to a LTSV encoded file. +// +// As returned by NewWriter, a Writer writes records terminated by a +// newline and uses '\t' as the field delimiter. The exported fields can be +// changed to customize the details before the first call to Write or WriteAll. +// +// Delimiter is the field delimiter. +// +// If UseCRLF is true, the Writer ends each record with \r\n instead of \n. +type Writer struct { + Delimiter rune // Label delimiter (set to to '\t' by NewWriter) + UseCRLF bool // True to use \r\n as the line terminator + w *bufio.Writer +} + +type structIndexLabelMap map[int]string + +type formatter func(reflect.Value) (string, error) + +var ( + ErrUnsupportedType = errors.New("unsupported type") + ErrLabelInvalid = errors.New("label is invalid") + ErrFieldInvalid = errors.New("field is invalid") + structIndexLabelCache = make(map[reflect.Type]structIndexLabelMap) + structIndexLabelCacheLock sync.RWMutex +) + +// NewWriter returns a new Writer that writes to w. +func NewWriter(w io.Writer) *Writer { + return &Writer{ + Delimiter: '\t', + UseCRLF: false, + w: bufio.NewWriter(w), + } +} + +// Writer writes a single CSV record to w along with any necessary quoting. +// A record is a slice of strings with each string being one field. +func (w *Writer) Write(record interface{}) error { + m, ok := record.(map[string]string) + if ok { + return w.writeMapString(m) + } + return w.writeAny(reflect.ValueOf(record)) +} + +func (w *Writer) writeMapString(record map[string]string) error { + var err error + cnt := 0 + for label, field := range record { + if cnt >= 1 { + if _, err = w.w.WriteRune(w.Delimiter); err != nil { + return err + } + } + if err = w.writeLabelAndField(label, field); err != nil { + return err + } + cnt++ + } + return w.writeLineEnd() +} + +func (w *Writer) writeAny(v reflect.Value) error { + var err error + switch v.Kind() { + case reflect.Struct: + err = w.writeStruct(v) + case reflect.Map: + err = w.writeMap(v) + case reflect.Interface, reflect.Ptr: + if !v.IsNil() { + return w.writeAny(v.Elem()) + } + default: + return ErrUnsupportedType + } + if err != nil { + return err + } + return w.writeLineEnd() +} + +// Flush writes any buffered data to the underlying io.Writer. +func (w *Writer) Flush() { + w.w.Flush() +} + +// WriteAll writes multiple LTSV records to w using Write and then calls Flush. 
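+// records may be a []map[string]string, or any slice or array whose elements
+// are maps, structs, or pointers to them, as accepted by Write.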
+func (w *Writer) WriteAll(records interface{}) error { + m, ok := records.([]map[string]string) + if ok { + return w.writeMapStringAll(m) + } + return w.writeAnyAll(reflect.ValueOf(records)) +} + +func (w *Writer) writeMapStringAll(records []map[string]string) error { + var err error + for _, record := range records { + if err = w.writeMapString(record); err != nil { + break + } + } + w.Flush() + return err +} + +func (w *Writer) writeAnyAll(v reflect.Value) error { + k := v.Kind() + if k == reflect.Slice { + if v.IsNil() { + return nil + } + } else if k != reflect.Array { + return ErrUnsupportedType + } + var err error + n := v.Len() + for i := 0; i < n; i++ { + if err = w.writeAny(v.Index(i)); err != nil { + break + } + } + w.Flush() + return err +} + +func (w *Writer) writeLabelAndField(label, field string) error { + var err error + if err = w.writeLabel(label); err != nil { + return err + } + if _, err = w.w.WriteRune(':'); err != nil { + return err + } + if err = w.writeField(field); err != nil { + return err + } + return nil +} + +func (w *Writer) writeLineEnd() error { + var line_end string + if w.UseCRLF { + line_end = "\r\n" + } else { + line_end = "\n" + } + _, err := w.w.WriteString(line_end) + return err +} + +func (w *Writer) writeStruct(v reflect.Value) error { + labels := structIndexLabel(v) + n := v.NumField() + cnt := 0 + for i := 0; i < n; i++ { + label, ok := labels[i] + if !ok { + continue + } + field, err := reflectValue(v.Field(i)) + if err != nil { + return err + } + if cnt >= 1 { + if _, err = w.w.WriteRune(w.Delimiter); err != nil { + return err + } + } + if err = w.writeLabelAndField(label, field); err != nil { + return err + } + cnt++ + } + return nil +} + +func (w *Writer) writeMap(v reflect.Value) error { + fm := labelFormatter(v.Type().Key()) + if fm == nil { + return ErrUnsupportedType + } + if !v.IsNil() { + var err error + for i, key := range v.MapKeys() { + if i >= 1 { + if _, err = w.w.WriteRune(w.Delimiter); err != nil { + return err + } + } + label, err := fm(key) + if err != nil { + return err + } + field, err := reflectValue(v.MapIndex(key)) + if err != nil { + return err + } + if err = w.writeLabelAndField(label, field); err != nil { + return err + } + } + } + return nil +} + +func formatInt(v reflect.Value) (string, error) { + return strconv.FormatInt(v.Int(), 10), nil +} + +func formatUint(v reflect.Value) (string, error) { + return strconv.FormatUint(v.Uint(), 10), nil +} + +func formatFloat(v reflect.Value) (string, error) { + return strconv.FormatFloat(v.Float(), 'g', -1, v.Type().Bits()), nil +} + +func formatString(v reflect.Value) (string, error) { + return v.String(), nil +} + +func labelFormatter(t reflect.Type) formatter { + switch t.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return formatInt + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return formatUint + case reflect.Float32, reflect.Float64: + return formatFloat + case reflect.String: + return formatString + } + return nil +} + +func reflectValue(v reflect.Value) (string, error) { + switch v.Kind() { + case reflect.Bool: + if v.Bool() { + return "true", nil + } else { + return "false", nil + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return formatInt(v) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return formatUint(v) + case reflect.Float32, reflect.Float64: + return formatFloat(v) + 
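+ // Strings pass through as-is here; delimiter and newline validation
+ // happens later in writeField.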
case reflect.String: + return formatString(v) + case reflect.Interface, reflect.Ptr: + if v.IsNil() { + return "", nil + } + return reflectValue(v.Elem()) + } + return "", ErrUnsupportedType +} + +func structIndexLabel(v reflect.Value) structIndexLabelMap { + t := v.Type() + structIndexLabelCacheLock.RLock() + fs, ok := structIndexLabelCache[t] + structIndexLabelCacheLock.RUnlock() + if ok { + return fs + } + structIndexLabelCacheLock.Lock() + defer structIndexLabelCacheLock.Unlock() + fs, ok = structIndexLabelCache[t] + if ok { + return fs + } + labels := make(structIndexLabelMap) + n := t.NumField() + for i := 0; i < n; i++ { + f := t.Field(i) + if f.Anonymous { + continue + } + label := f.Name + tv := f.Tag.Get("ltsv") + if tv != "" { + if tv == "-" { + continue + } + label, _ = parseTag(tv) + } + labels[i] = label + } + structIndexLabelCache[t] = labels + return labels +} + +func parseTag(tag string) (string, []string) { + ss := strings.Split(tag, ",") + for i, s := range ss { + ss[i] = strings.TrimSpace(s) + } + if len(ss) >= 2 { + return ss[0], ss[1:] + } + return ss[0], []string{} +} + +func (w *Writer) writeLabel(s string) error { + if s == "" { + return ErrLabelInvalid + } + var err error + for _, c := range s { + if c == ':' || c == w.Delimiter || c == '\n' || unicode.IsControl(c) || !unicode.IsPrint(c) { + return ErrLabelInvalid + } + if _, err = w.w.WriteRune(c); err != nil { + return err + } + } + return nil +} + +func (w *Writer) writeField(s string) error { + var err error + for _, c := range s { + if c == w.Delimiter || c == '\n' { + return ErrFieldInvalid + } + if _, err = w.w.WriteRune(c); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/olekukonko/tablewriter/.travis.yml b/vendor/github.com/olekukonko/tablewriter/.travis.yml new file mode 100644 index 0000000..354b7f8 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/.travis.yml @@ -0,0 +1,8 @@ +language: go + +go: + - 1.1 + - 1.2 + - 1.3 + - 1.4 + - tip diff --git a/vendor/github.com/olekukonko/tablewriter/LICENCE.md b/vendor/github.com/olekukonko/tablewriter/LICENCE.md new file mode 100644 index 0000000..1fd8484 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/LICENCE.md @@ -0,0 +1,19 @@ +Copyright (C) 2014 by Oleku Konko + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/olekukonko/tablewriter/README.md b/vendor/github.com/olekukonko/tablewriter/README.md new file mode 100644 index 0000000..4ee4435 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/README.md @@ -0,0 +1,141 @@ +ASCII Table Writer +========= + +[![Build Status](https://travis-ci.org/olekukonko/tablewriter.png?branch=master)](https://travis-ci.org/olekukonko/tablewriter) [![Total views](https://sourcegraph.com/api/repos/github.com/olekukonko/tablewriter/counters/views.png)](https://sourcegraph.com/github.com/olekukonko/tablewriter) + +Generate ASCII table on the fly ... Installation is simple as + + go get github.com/olekukonko/tablewriter + + +#### Features +- Automatic Padding +- Support Multiple Lines +- Supports Alignment +- Support Custom Separators +- Automatic Alignment of numbers & percentage +- Write directly to http , file etc via `io.Writer` +- Read directly from CSV file +- Optional row line via `SetRowLine` +- Normalise table header +- Make CSV Headers optional +- Enable or disable table border +- Set custom footer support + + +#### Example 1 - Basic +```go +data := [][]string{ + []string{"A", "The Good", "500"}, + []string{"B", "The Very very Bad Man", "288"}, + []string{"C", "The Ugly", "120"}, + []string{"D", "The Gopher", "800"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Name", "Sign", "Rating"}) + +for _, v := range data { + table.Append(v) +} +table.Render() // Send output +``` + +##### Output 1 +``` ++------+-----------------------+--------+ +| NAME | SIGN | RATING | ++------+-----------------------+--------+ +| A | The Good | 500 | +| B | The Very very Bad Man | 288 | +| C | The Ugly | 120 | +| D | The Gopher | 800 | ++------+-----------------------+--------+ +``` + +#### Example 2 - Without Border / Footer / Bulk Append +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "2233", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, + []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer +table.SetBorder(false) // Set Border to false +table.AppendBulk(data) // Add Bulk Data +table.Render() +``` + +##### Output 2 +``` + + DATE | DESCRIPTION | CV2 | AMOUNT ++----------+--------------------------+-------+---------+ + 1/1/2014 | Domain name | 2233 | $10.98 + 1/1/2014 | January Hosting | 2233 | $54.95 + 1/4/2014 | February Hosting | 2233 | $51.00 + 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 ++----------+--------------------------+-------+---------+ + TOTAL | $146 93 + +-------+---------+ + +``` + + +#### Example 3 - CSV +```go +table, _ := tablewriter.NewCSV(os.Stdout, "test_info.csv", true) +table.SetAlignment(tablewriter.ALIGN_LEFT) // Set Alignment +table.Render() +``` + +##### Output 3 +``` ++----------+--------------+------+-----+---------+----------------+ +| FIELD | TYPE | NULL | KEY | DEFAULT | EXTRA | ++----------+--------------+------+-----+---------+----------------+ +| user_id | smallint(5) | NO | PRI | NULL | auto_increment | +| username | varchar(10) | NO | | NULL | | +| password | varchar(100) | NO | | NULL | | ++----------+--------------+------+-----+---------+----------------+ +``` + +#### Example 4 - Custom Separator +```go +table, _ := 
tablewriter.NewCSV(os.Stdout, "test.csv", true) +table.SetRowLine(true) // Enable row line + +// Change table lines +table.SetCenterSeparator("*") +table.SetColumnSeparator("‡") +table.SetRowSeparator("-") + +table.SetAlignment(tablewriter.ALIGN_LEFT) +table.Render() +``` + +##### Output 4 +``` +*------------*-----------*---------* +╪ FIRST NAME ╪ LAST NAME ╪ SSN ╪ +*------------*-----------*---------* +╪ John ╪ Barry ╪ 123456 ╪ +*------------*-----------*---------* +╪ Kathy ╪ Smith ╪ 687987 ╪ +*------------*-----------*---------* +╪ Bob ╪ McCornick ╪ 3979870 ╪ +*------------*-----------*---------* +``` + +#### TODO +- ~~Import Directly from CSV~~ - `done` +- ~~Support for `SetFooter`~~ - `done` +- ~~Support for `SetBorder`~~ - `done` +- ~~Support table with uneven rows~~ - `done` +- Support custom alignment +- General Improvement & Optimisation +- `NewHTML` Parse table from HTML diff --git a/vendor/github.com/olekukonko/tablewriter/csv.go b/vendor/github.com/olekukonko/tablewriter/csv.go new file mode 100644 index 0000000..9887830 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/csv.go @@ -0,0 +1,52 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "encoding/csv" + "io" + "os" +) + +// Start A new table by importing from a CSV file +// Takes io.Writer and csv File name +func NewCSV(writer io.Writer, fileName string, hasHeader bool) (*Table, error) { + file, err := os.Open(fileName) + if err != nil { + return &Table{}, err + } + defer file.Close() + csvReader := csv.NewReader(file) + t, err := NewCSVReader(writer, csvReader, hasHeader) + return t, err +} + +// Start a New Table Writer with csv.Reader +// This enables customisation such as reader.Comma = ';' +// See http://golang.org/src/pkg/encoding/csv/reader.go?s=3213:3671#L94 +func NewCSVReader(writer io.Writer, csvReader *csv.Reader, hasHeader bool) (*Table, error) { + t := NewWriter(writer) + if hasHeader { + // Read the first row + headers, err := csvReader.Read() + if err != nil { + return &Table{}, err + } + t.SetHeader(headers) + } + for { + record, err := csvReader.Read() + if err == io.EOF { + break + } else if err != nil { + return &Table{}, err + } + t.Append(record) + } + return t, nil +} diff --git a/vendor/github.com/olekukonko/tablewriter/csv2table/README.md b/vendor/github.com/olekukonko/tablewriter/csv2table/README.md new file mode 100644 index 0000000..6cf5628 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/csv2table/README.md @@ -0,0 +1,43 @@ +ASCII Table Writer Tool +========= + +Generate ASCII table on the fly via command line ... 
Installation is simple as + +#### Get Tool + + go get github.com/olekukonko/tablewriter/csv2table + +#### Install Tool + + go install github.com/olekukonko/tablewriter/csv2table + + +#### Usage + + csv2table -f test.csv + +#### Support for Piping + + cat test.csv | csv2table -p=true + +#### Output + +``` ++------------+-----------+---------+ +| FIRST NAME | LAST NAME | SSN | ++------------+-----------+---------+ +| John | Barry | 123456 | +| Kathy | Smith | 687987 | +| Bob | McCornick | 3979870 | ++------------+-----------+---------+ +``` + +#### Another Piping with Header set to `false` + + echo dance,with,me | csv2table -p=true -h=false + +#### Output + + +-------+------+-----+ + | dance | with | me | + +-------+------+-----+ diff --git a/vendor/github.com/olekukonko/tablewriter/csv2table/csv2table.go b/vendor/github.com/olekukonko/tablewriter/csv2table/csv2table.go new file mode 100644 index 0000000..a4e9b13 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/csv2table/csv2table.go @@ -0,0 +1,84 @@ +package main + +import ( + "encoding/csv" + "flag" + "fmt" + "github.com/olekukonko/tablewriter" + "io" + "os" + "unicode/utf8" +) + +var ( + fileName = flag.String("f", "", "Set file with eg. sample.csv") + delimiter = flag.String("d", ",", "Set CSV File delimiter eg. ,|;|\t ") + header = flag.Bool("h", true, "Set header options eg. true|false ") + align = flag.String("a", "none", "Set aligmement with eg. none|left|right|center") + pipe = flag.Bool("p", false, "Suport for Piping from STDIN") + border = flag.Bool("b", true, "Enable / disable table border") +) + +func main() { + flag.Parse() + fmt.Println() + if *pipe || hasArg("-p") { + process(os.Stdin) + } else { + if *fileName == "" { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) + flag.PrintDefaults() + fmt.Println() + os.Exit(1) + } + processFile() + } + fmt.Println() +} + +func hasArg(name string) bool { + for _ , v := range os.Args { + if name == v { + return true + } + } + return false +} +func processFile() { + r, err := os.Open(*fileName) + if err != nil { + exit(err) + } + defer r.Close() + process(r) +} +func process(r io.Reader) { + csvReader := csv.NewReader(r) + rune, size := utf8.DecodeRuneInString(*delimiter) + if size == 0 { + rune = ',' + } + csvReader.Comma = rune + + table, err := tablewriter.NewCSVReader(os.Stdout, csvReader, *header) + + if err != nil { + exit(err) + } + + switch *align { + case "left": + table.SetAlignment(tablewriter.ALIGN_LEFT) + case "right": + table.SetAlignment(tablewriter.ALIGN_RIGHT) + case "center": + table.SetAlignment(tablewriter.ALIGN_CENTER) + } + table.SetBorder(*border) + table.Render() +} + +func exit(err error) { + fmt.Fprintf(os.Stderr, "#Error : %s", err) + os.Exit(1) +} diff --git a/vendor/github.com/olekukonko/tablewriter/table.go b/vendor/github.com/olekukonko/tablewriter/table.go new file mode 100644 index 0000000..dd46ef6 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/table.go @@ -0,0 +1,510 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. 
+// The protocols were written in pure Go and works on windows and unix systems + +// Create & Generate text based table +package tablewriter + +import ( + "fmt" + "io" + "regexp" + "strings" +) + +const ( + MAX_ROW_WIDTH = 30 +) + +const ( + CENTER = "+" + ROW = "-" + COLUMN = "|" + SPACE = " " +) + +const ( + ALIGN_DEFAULT = iota + ALIGN_CENTER + ALIGN_RIGHT + ALIGN_LEFT +) + +var ( + decimal = regexp.MustCompile(`^\d*\.?\d*$`) + percent = regexp.MustCompile(`^\d*\.?\d*$%$`) +) + +type Table struct { + out io.Writer + rows [][]string + lines [][][]string + cs map[int]int + rs map[int]int + headers []string + footers []string + autoFmt bool + autoWrap bool + mW int + pCenter string + pRow string + pColumn string + tColumn int + tRow int + hAlign int + fAlign int + align int + rowLine bool + hdrLine bool + border bool + colSize int +} + +// Start New Table +// Take io.Writer Directly +func NewWriter(writer io.Writer) *Table { + t := &Table{ + out: writer, + rows: [][]string{}, + lines: [][][]string{}, + cs: make(map[int]int), + rs: make(map[int]int), + headers: []string{}, + footers: []string{}, + autoFmt: true, + autoWrap: true, + mW: MAX_ROW_WIDTH, + pCenter: CENTER, + pRow: ROW, + pColumn: COLUMN, + tColumn: -1, + tRow: -1, + hAlign: ALIGN_DEFAULT, + fAlign: ALIGN_DEFAULT, + align: ALIGN_DEFAULT, + rowLine: false, + hdrLine: true, + border: true, + colSize: -1} + return t +} + +// Render table output +func (t Table) Render() { + if t.border { + t.printLine(true) + } + t.printHeading() + t.printRows() + + if !t.rowLine && t.border { + t.printLine(true) + } + t.printFooter() + +} + +// Set table header +func (t *Table) SetHeader(keys []string) { + t.colSize = len(keys) + for i, v := range keys { + t.parseDimension(v, i, -1) + t.headers = append(t.headers, v) + } +} + +// Set table Footer +func (t *Table) SetFooter(keys []string) { + //t.colSize = len(keys) + for i, v := range keys { + t.parseDimension(v, i, -1) + t.footers = append(t.footers, v) + } +} + +// Turn header autoformatting on/off. Default is on (true). +func (t *Table) SetAutoFormatHeaders(auto bool) { + t.autoFmt = auto +} + +// Turn automatic multiline text adjustment on/off. Default is on (true). 
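+// When on, long cell text is wrapped to fit the column width (see SetColWidth).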
+func (t *Table) SetAutoWrapText(auto bool) { + t.autoWrap = auto +} + +// Set the Default column width +func (t *Table) SetColWidth(width int) { + t.mW = width +} + +// Set the Column Separator +func (t *Table) SetColumnSeparator(sep string) { + t.pColumn = sep +} + +// Set the Row Separator +func (t *Table) SetRowSeparator(sep string) { + t.pRow = sep +} + +// Set the center Separator +func (t *Table) SetCenterSeparator(sep string) { + t.pCenter = sep +} + +// Set Header Alignment +func (t *Table) SetHeaderAlignment(hAlign int) { + t.hAlign = hAlign +} + +// Set Footer Alignment +func (t *Table) SetFooterAlignment(fAlign int) { + t.fAlign = fAlign +} + +// Set Table Alignment +func (t *Table) SetAlignment(align int) { + t.align = align +} + +// Set Header Line +// This would enable / disable a line after the header +func (t *Table) SetHeaderLine(line bool) { + t.hdrLine = line +} + +// Set Row Line +// This would enable / disable a line on each row of the table +func (t *Table) SetRowLine(line bool) { + t.rowLine = line +} + +// Set Table Border +// This would enable / disable line around the table +func (t *Table) SetBorder(border bool) { + t.border = border +} + +// Append row to table +func (t *Table) Append(row []string) { + rowSize := len(t.headers) + if rowSize > t.colSize { + t.colSize = rowSize + } + + n := len(t.lines) + line := [][]string{} + for i, v := range row { + + // Detect string width + // Detect String height + // Break strings into words + out := t.parseDimension(v, i, n) + + // Append broken words + line = append(line, out) + } + t.lines = append(t.lines, line) +} + +// Allow Support for Bulk Append +// Eliminates repeated for loops +func (t *Table) AppendBulk(rows [][]string) { + for _, row := range rows { + t.Append(row) + } +} + +// Print line based on row width +func (t Table) printLine(nl bool) { + fmt.Fprint(t.out, t.pCenter) + for i := 0; i < len(t.cs); i++ { + v := t.cs[i] + fmt.Fprintf(t.out, "%s%s%s%s", + t.pRow, + strings.Repeat(string(t.pRow), v), + t.pRow, + t.pCenter) + } + if nl { + fmt.Fprintln(t.out) + } +} + +// Return the PadRight function if align is left, PadLeft if align is right, +// and Pad by default +func pad(align int) func(string, string, int) string { + padFunc := Pad + switch align { + case ALIGN_LEFT: + padFunc = PadRight + case ALIGN_RIGHT: + padFunc = PadLeft + } + return padFunc +} + +// Print heading information +func (t Table) printHeading() { + // Check if headers is available + if len(t.headers) < 1 { + return + } + + // Check if border is set + // Replace with space if not set + fmt.Fprint(t.out, ConditionString(t.border, t.pColumn, SPACE)) + + // Identify last column + end := len(t.cs) - 1 + + // Get pad function + padFunc := pad(t.hAlign) + + // Print Heading column + for i := 0; i <= end; i++ { + v := t.cs[i] + h := t.headers[i] + if t.autoFmt { + h = Title(h) + } + pad := ConditionString((i == end && !t.border), SPACE, t.pColumn) + fmt.Fprintf(t.out, " %s %s", + padFunc(h, SPACE, v), + pad) + } + // Next line + fmt.Fprintln(t.out) + if t.hdrLine { + t.printLine(true) + } +} + +// Print heading information +func (t Table) printFooter() { + // Check if headers is available + if len(t.footers) < 1 { + return + } + + // Only print line if border is not set + if !t.border { + t.printLine(true) + } + // Check if border is set + // Replace with space if not set + fmt.Fprint(t.out, ConditionString(t.border, t.pColumn, SPACE)) + + // Identify last column + end := len(t.cs) - 1 + + // Get pad function + padFunc := pad(t.fAlign) + + // 
Print footer columns
+	for i := 0; i <= end; i++ {
+		v := t.cs[i]
+		f := t.footers[i]
+		if t.autoFmt {
+			f = Title(f)
+		}
+		pad := ConditionString((i == end && !t.border), SPACE, t.pColumn)
+
+		if len(t.footers[i]) == 0 {
+			pad = SPACE
+		}
+		fmt.Fprintf(t.out, " %s %s",
+			padFunc(f, SPACE, v),
+			pad)
+	}
+	// Next line
+	fmt.Fprintln(t.out)
+	//t.printLine(true)
+
+	hasPrinted := false
+
+	for i := 0; i <= end; i++ {
+		v := t.cs[i]
+		pad := t.pRow
+		center := t.pCenter
+		length := len(t.footers[i])
+
+		if length > 0 {
+			hasPrinted = true
+		}
+
+		// Set center to be space if length is 0
+		if length == 0 && !t.border {
+			center = SPACE
+		}
+
+		// Print first junction
+		if i == 0 {
+			fmt.Fprint(t.out, center)
+		}
+
+		// Pad with space if length is 0
+		if length == 0 {
+			pad = SPACE
+		}
+		// Ignore left space if it has printed before
+		if hasPrinted || t.border {
+			pad = t.pRow
+			center = t.pCenter
+		}
+
+		// Change center start position
+		if center == SPACE {
+			if i < end && len(t.footers[i+1]) != 0 {
+				center = t.pCenter
+			}
+		}
+
+		// Print the footer
+		fmt.Fprintf(t.out, "%s%s%s%s",
+			pad,
+			strings.Repeat(string(pad), v),
+			pad,
+			center)
+
+	}
+
+	fmt.Fprintln(t.out)
+
+}
+
+func (t Table) printRows() {
+	for i, lines := range t.lines {
+		t.printRow(lines, i)
+	}
+
+}
+
+// Print a single row's information
+// Adjust column alignment based on cell type
+
+func (t Table) printRow(columns [][]string, colKey int) {
+	// Get Maximum Height
+	max := t.rs[colKey]
+	total := len(columns)
+
+	// TODO Fix uneven col size
+	// if total < t.colSize {
+	//	for n := t.colSize - total; n < t.colSize ; n++ {
+	//		columns = append(columns, []string{SPACE})
+	//		t.cs[n] = t.mW
+	//	}
+	//}
+
+	// Pad Each Height
+	// pads := []int{}
+	pads := []int{}
+
+	for i, line := range columns {
+		length := len(line)
+		pad := max - length
+		pads = append(pads, pad)
+		for n := 0; n < pad; n++ {
+			columns[i] = append(columns[i], " ")
+		}
+	}
+	//fmt.Println(max, "\n")
+	for x := 0; x < max; x++ {
+		for y := 0; y < total; y++ {
+
+			// Check if border is set
+			fmt.Fprint(t.out, ConditionString((!t.border && y == 0), SPACE, t.pColumn))
+
+			fmt.Fprint(t.out, SPACE)
+			str := columns[y][x]
+
+			// Print with alignment; the default alignment is chosen per
+			// cell: numbers and percentages right, everything else left
+			switch t.align {
+			case ALIGN_CENTER:
+				fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y]))
+			case ALIGN_RIGHT:
+				fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y]))
+			case ALIGN_LEFT:
+				fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+			default:
+				if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) {
+					fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y]))
+				} else {
+					fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+
+					// TODO Custom alignment per column
+					//if max == 1 || pads[y] > 0 {
+					//	fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y]))
+					//} else {
+					//	fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+					//}
+
+				}
+			}
+			fmt.Fprint(t.out, SPACE)
+		}
+		// Check if border is set
+		// Replace with space if not set
+		fmt.Fprint(t.out, ConditionString(t.border, t.pColumn, SPACE))
+		fmt.Fprintln(t.out)
+	}
+
+	if t.rowLine {
+		t.printLine(true)
+	}
+
+}
+
+func (t *Table) parseDimension(str string, colKey, rowKey int) []string {
+	var (
+		raw []string
+		max int
+	)
+	w := DisplayWidth(str)
+	// Calculate Width
+	// Check if width is greater than the maximum width
+	if w > t.mW {
+		w = t.mW
+	}
+
+	// Check if width exists
+	v, ok := t.cs[colKey]
+	if !ok || v < w || v == 0 {
+		t.cs[colKey] = w
+	}
+
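+	// Note: t.cs records the widest cell seen so far for each column and
+	// t.rs the tallest (multi-line) cell for each row; the printLine,
+	// printHeading and printRow methods size their padding and
+	// separators from these two maps.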
+ if rowKey == -1 { + return raw + } + // Calculate Height + if t.autoWrap { + raw, _ = WrapString(str, t.cs[colKey]) + } else { + raw = getLines(str) + } + + for _, line := range raw { + if w := DisplayWidth(line); w > max { + max = w + } + } + + // Make sure the with is the same length as maximum word + // Important for cases where the width is smaller than maxu word + if max > t.cs[colKey] { + t.cs[colKey] = max + } + + h := len(raw) + v, ok = t.rs[rowKey] + + if !ok || v < h || v == 0 { + t.rs[rowKey] = h + } + //fmt.Printf("Raw %+v %d\n", raw, len(raw)) + return raw +} diff --git a/vendor/github.com/olekukonko/tablewriter/table_test.go b/vendor/github.com/olekukonko/tablewriter/table_test.go new file mode 100644 index 0000000..bcdadd7 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/table_test.go @@ -0,0 +1,292 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" + "testing" +) + +func ExampleShort() { + + data := [][]string{ + []string{"A", "The Good", "500"}, + []string{"B", "The Very very Bad Man", "288"}, + []string{"C", "The Ugly", "120"}, + []string{"D", "The Gopher", "800"}, + } + + table := NewWriter(os.Stdout) + table.SetHeader([]string{"Name", "Sign", "Rating"}) + + for _, v := range data { + table.Append(v) + } + table.Render() + +} + +func ExampleLong() { + + data := [][]string{ + []string{"Learn East has computers with adapted keyboards with enlarged print etc", " Some Data ", " Another Data"}, + []string{"Instead of lining up the letters all ", "the way across, he splits the keyboard in two", "Like most ergonomic keyboards", "See Data"}, + } + + table := NewWriter(os.Stdout) + table.SetHeader([]string{"Name", "Sign", "Rating"}) + table.SetCenterSeparator("*") + table.SetRowSeparator("=") + + for _, v := range data { + table.Append(v) + } + table.Render() + +} + +func ExampleCSV() { + table, _ := NewCSV(os.Stdout, "test.csv", true) + table.SetCenterSeparator("*") + table.SetRowSeparator("=") + + table.Render() +} + +func TestCSVInfo(t *testing.T) { + table, err := NewCSV(os.Stdout, "test_info.csv", true) + if err != nil { + t.Error(err) + return + } + table.SetAlignment(ALIGN_LEFT) + table.SetBorder(false) + table.Render() +} + +func TestCSVSeparator(t *testing.T) { + table, err := NewCSV(os.Stdout, "test.csv", true) + if err != nil { + t.Error(err) + return + } + table.SetRowLine(true) + table.SetCenterSeparator("*") + table.SetColumnSeparator("‡") + table.SetRowSeparator("-") + table.SetAlignment(ALIGN_LEFT) + table.Render() +} + +func TestBorder(t *testing.T) { + data := [][]string{ + []string{"1/1/2014", "Domain name", "2233", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, + []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, + } + + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) + table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer + table.SetBorder(false) // Set Border to false + table.AppendBulk(data) // Add Bulk Data + table.Render() + + want := ` DATE | DESCRIPTION | CV2 | AMOUNT ++----------+--------------------------+-------+---------+ + 
1/1/2014 | Domain name | 2233 | $10.98 + 1/1/2014 | January Hosting | 2233 | $54.95 + 1/4/2014 | February Hosting | 2233 | $51.00 + 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 ++----------+--------------------------+-------+---------+ + TOTAL | $146 93 + +-------+---------+ +` + got := buf.String() + if got != want { + t.Errorf("border table rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintHeading(t *testing.T) { + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.printHeading() + want := `| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | A | B | C | ++---+---+---+---+---+---+---+---+---+---+---+---+ +` + got := buf.String() + if got != want { + t.Errorf("header rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintHeadingWithoutAutoFormat(t *testing.T) { + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.SetAutoFormatHeaders(false) + table.printHeading() + want := `| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | a | b | c | ++---+---+---+---+---+---+---+---+---+---+---+---+ +` + got := buf.String() + if got != want { + t.Errorf("header rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintFooter(t *testing.T) { + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.SetFooter([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.printFooter() + want := `| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | A | B | C | ++---+---+---+---+---+---+---+---+---+---+---+---+ +` + got := buf.String() + if got != want { + t.Errorf("footer rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintFooterWithoutAutoFormat(t *testing.T) { + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetAutoFormatHeaders(false) + table.SetHeader([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.SetFooter([]string{"1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c"}) + table.printFooter() + want := `| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | a | b | c | ++---+---+---+---+---+---+---+---+---+---+---+---+ +` + got := buf.String() + if got != want { + t.Errorf("footer rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintTableWithAndWithoutAutoWrap(t *testing.T) { + var buf bytes.Buffer + var multiline = `A multiline +string with some lines being really long.` + + with := NewWriter(&buf) + with.Append([]string{multiline}) + with.Render() + want := `+--------------------------------+ +| A multiline string with some | +| lines being really long. | ++--------------------------------+ +` + got := buf.String() + if got != want { + t.Errorf("multiline text rendering with wrapping failed\ngot:\n%s\nwant:\n%s\n", got, want) + } + + buf.Truncate(0) + without := NewWriter(&buf) + without.SetAutoWrapText(false) + without.Append([]string{multiline}) + without.Render() + want = `+-------------------------------------------+ +| A multiline | +| string with some lines being really long. 
| ++-------------------------------------------+ +` + got = buf.String() + if got != want { + t.Errorf("multiline text rendering without wrapping rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestPrintLine(t *testing.T) { + header := make([]string, 12) + val := " " + want := "" + for i := range header { + header[i] = val + want = fmt.Sprintf("%s+-%s-", want, strings.Replace(val, " ", "-", -1)) + val = val + " " + } + want = want + "+" + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader(header) + table.printLine(false) + got := buf.String() + if got != want { + t.Errorf("line rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func TestAnsiStrip(t *testing.T) { + header := make([]string, 12) + val := " " + want := "" + for i := range header { + header[i] = "\033[43;30m" + val + "\033[00m" + want = fmt.Sprintf("%s+-%s-", want, strings.Replace(val, " ", "-", -1)) + val = val + " " + } + want = want + "+" + var buf bytes.Buffer + table := NewWriter(&buf) + table.SetHeader(header) + table.printLine(false) + got := buf.String() + if got != want { + t.Errorf("line rendering failed\ngot:\n%s\nwant:\n%s\n", got, want) + } +} + +func NewCustomizedTable(out io.Writer) *Table { + table := NewWriter(out) + table.SetCenterSeparator("") + table.SetColumnSeparator("") + table.SetRowSeparator("") + table.SetBorder(false) + table.SetAlignment(ALIGN_LEFT) + table.SetHeader([]string{}) + return table +} + +func TestSubclass(t *testing.T) { + buf := new(bytes.Buffer) + table := NewCustomizedTable(buf) + + data := [][]string{ + []string{"A", "The Good", "500"}, + []string{"B", "The Very very Bad Man", "288"}, + []string{"C", "The Ugly", "120"}, + []string{"D", "The Gopher", "800"}, + } + + for _, v := range data { + table.Append(v) + } + table.Render() + + output := string(buf.Bytes()) + want := ` A The Good 500 + B The Very very Bad Man 288 + C The Ugly 120 + D The Gopher 800 +` + if output != want { + t.Error(fmt.Sprintf("Unexpected output '%v' != '%v'", output, want)) + } +} diff --git a/vendor/github.com/olekukonko/tablewriter/test.csv b/vendor/github.com/olekukonko/tablewriter/test.csv new file mode 100644 index 0000000..1609327 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/test.csv @@ -0,0 +1,4 @@ +first_name,last_name,ssn +John,Barry,123456 +Kathy,Smith,687987 +Bob,McCornick,3979870 \ No newline at end of file diff --git a/vendor/github.com/olekukonko/tablewriter/test_info.csv b/vendor/github.com/olekukonko/tablewriter/test_info.csv new file mode 100644 index 0000000..e4c40e9 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/test_info.csv @@ -0,0 +1,4 @@ +Field,Type,Null,Key,Default,Extra +user_id,smallint(5),NO,PRI,NULL,auto_increment +username,varchar(10),NO,,NULL, +password,varchar(100),NO,,NULL, \ No newline at end of file diff --git a/vendor/github.com/olekukonko/tablewriter/util.go b/vendor/github.com/olekukonko/tablewriter/util.go new file mode 100644 index 0000000..2deefbc --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/util.go @@ -0,0 +1,72 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. 
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+package tablewriter
+
+import (
+	"math"
+	"regexp"
+	"strings"
+
+	"github.com/mattn/go-runewidth"
+)
+
+var ansi = regexp.MustCompile("\033\\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[mK]")
+
+func DisplayWidth(str string) int {
+	return runewidth.StringWidth(ansi.ReplaceAllLiteralString(str, ""))
+}
+
+// ConditionString is a simple ternary for strings:
+// returns valid if cond is true, otherwise inValid.
+func ConditionString(cond bool, valid, inValid string) string {
+	if cond {
+		return valid
+	}
+	return inValid
+}
+
+// Title formats a table header:
+// replaces "_" and "." with spaces and upper-cases the result.
+func Title(name string) string {
+	name = strings.Replace(name, "_", " ", -1)
+	name = strings.Replace(name, ".", " ", -1)
+	name = strings.TrimSpace(name)
+	return strings.ToUpper(name)
+}
+
+// Pad pads a string,
+// attempting to place it in the center of the field.
+func Pad(s, pad string, width int) string {
+	gap := width - DisplayWidth(s)
+	if gap > 0 {
+		gapLeft := int(math.Ceil(float64(gap / 2)))
+		gapRight := gap - gapLeft
+		return strings.Repeat(string(pad), gapLeft) + s + strings.Repeat(string(pad), gapRight)
+	}
+	return s
+}
+
+// PadRight pads a string on the right,
+// placing it at the left side of the field.
+func PadRight(s, pad string, width int) string {
+	gap := width - DisplayWidth(s)
+	if gap > 0 {
+		return s + strings.Repeat(string(pad), gap)
+	}
+	return s
+}
+
+// PadLeft pads a string on the left,
+// placing it at the right side of the field.
+func PadLeft(s, pad string, width int) string {
+	gap := width - DisplayWidth(s)
+	if gap > 0 {
+		return strings.Repeat(string(pad), gap) + s
+	}
+	return s
+}
diff --git a/vendor/github.com/olekukonko/tablewriter/wrap.go b/vendor/github.com/olekukonko/tablewriter/wrap.go
new file mode 100644
index 0000000..f3747d9
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/wrap.go
@@ -0,0 +1,109 @@
+// Copyright 2014 Oleku Konko All rights reserved.
+// Use of this source code is governed by a MIT
+// license that can be found in the LICENSE file.
+
+// This module is a Table Writer API for the Go Programming Language.
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+package tablewriter
+
+import (
+	"math"
+	"strings"
+	"unicode/utf8"
+)
+
+var (
+	nl = "\n"
+	sp = " "
+)
+
+const defaultPenalty = 1e5
+
+// WrapString wraps s into a paragraph of lines of length lim, with minimal
+// raggedness.
+func WrapString(s string, lim int) ([]string, int) {
+	words := strings.Split(strings.Replace(strings.TrimSpace(s), nl, sp, -1), sp)
+	var lines []string
+	max := 0
+	for _, v := range words {
+		max = utf8.RuneCountInString(v)
+		if max > lim {
+			lim = max
+		}
+	}
+	for _, line := range WrapWords(words, 1, lim, defaultPenalty) {
+		lines = append(lines, strings.Join(line, sp))
+	}
+	return lines, lim
+}
+
+// WrapWords is the low-level line-breaking algorithm, useful if you need more
+// control over the details of the text wrapping process. For most uses,
+// WrapString will be sufficient and more convenient.
+//
+// WrapWords splits a list of words into lines with minimal "raggedness",
+// treating each rune as one unit, accounting for spc units between adjacent
+// words on each line, and attempting to limit lines to lim units. Raggedness
+// is the total error over all lines, where error is the square of the
+// difference of the length of the line and lim. Too-long lines (which only
+// happen when a single word is longer than lim units) have pen penalty units
+// added to the error.
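+//
+// For example, with spc=1 and lim=5 the words ["a" "bb" "ccc"] are split
+// as "a bb" / "ccc": the first line costs (5-4)^2 = 1 and a final line
+// that fits is free, which beats breaking after "a" ("a" / "bb" / "ccc"
+// at (5-1)^2 + (5-2)^2 = 25). pen is only added when a single word is
+// wider than lim on its own.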
+func WrapWords(words []string, spc, lim, pen int) [][]string { + n := len(words) + + length := make([][]int, n) + for i := 0; i < n; i++ { + length[i] = make([]int, n) + length[i][i] = utf8.RuneCountInString(words[i]) + for j := i + 1; j < n; j++ { + length[i][j] = length[i][j-1] + spc + utf8.RuneCountInString(words[j]) + } + } + nbrk := make([]int, n) + cost := make([]int, n) + for i := range cost { + cost[i] = math.MaxInt32 + } + for i := n - 1; i >= 0; i-- { + if length[i][n-1] <= lim { + cost[i] = 0 + nbrk[i] = n + } else { + for j := i + 1; j < n; j++ { + d := lim - length[i][j-1] + c := d*d + cost[j] + if length[i][j-1] > lim { + c += pen // too-long lines get a worse penalty + } + if c < cost[i] { + cost[i] = c + nbrk[i] = j + } + } + } + } + var lines [][]string + i := 0 + for i < n { + lines = append(lines, words[i:nbrk[i]]) + i = nbrk[i] + } + return lines +} + +// getLines decomposes a multiline string into a slice of strings. +func getLines(s string) []string { + var lines []string + + for _, line := range strings.Split(strings.TrimSpace(s), nl) { + lines = append(lines, line) + } + return lines +} diff --git a/vendor/github.com/olekukonko/tablewriter/wrap_test.go b/vendor/github.com/olekukonko/tablewriter/wrap_test.go new file mode 100644 index 0000000..2d510d1 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/wrap_test.go @@ -0,0 +1,55 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "strings" + "testing" +) + +var text = "The quick brown fox jumps over the lazy dog." + +func TestWrap(t *testing.T) { + exp := []string{ + "The", "quick", "brown", "fox", + "jumps", "over", "the", "lazy", "dog."} + + got, _ := WrapString(text, 6) + if len(exp) != len(got) { + t.Fail() + } +} + +func TestWrapOneLine(t *testing.T) { + exp := "The quick brown fox jumps over the lazy dog." + words, _ := WrapString(text, 500) + got := strings.Join(words, string(sp)) + if exp != got { + t.Fail() + } +} + +func TestUnicode(t *testing.T) { + input := "Česká řeřicha" + wordsUnicode, _ := WrapString(input, 13) + // input contains 13 runes, so it fits on one line. 
+ if len(wordsUnicode) != 1 { + t.Fail() + } +} + +func TestDisplayWidth(t *testing.T) { + input := "Česká řeřicha" + if n := DisplayWidth(input); n != 13 { + t.Errorf("Wants: %d Got: %d", 13, n) + } + input = "\033[43;30m" + input + "\033[00m" + if n := DisplayWidth(input); n != 13 { + t.Errorf("Wants: %d Got: %d", 13, n) + } +} diff --git a/vendor/github.com/tkuchiki/go-timezone/LICENSE b/vendor/github.com/tkuchiki/go-timezone/LICENSE new file mode 100644 index 0000000..ceb1c65 --- /dev/null +++ b/vendor/github.com/tkuchiki/go-timezone/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 tkuchiki + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/tkuchiki/go-timezone/README.md b/vendor/github.com/tkuchiki/go-timezone/README.md new file mode 100644 index 0000000..9f4cae6 --- /dev/null +++ b/vendor/github.com/tkuchiki/go-timezone/README.md @@ -0,0 +1,69 @@ +# go-timezone +Timezone utility for Golang + +## Example + +### Code + +``` +package main + +import ( + "fmt" + "github.com/tkuchiki/go-timezone" + "time" +) + +func main() { + offset, err := timezone.GetOffset("JST") + fmt.Println(offset, err) + + offset, err = timezone.GetOffset("hogehoge") + fmt.Println(offset, err) + + var zones []string + zones, err = timezone.GetTimezones("UTC") + fmt.Println(zones, err) + + zones, err = timezone.GetTimezones("foobar") + fmt.Println(zones, err) + + now := time.Now() + + fmt.Println("## current timezone") + fmt.Println(now) + + var jst time.Time + loc, _ := time.LoadLocation("UTC") + utc := now.In(loc) + + jst, _ = timezone.FixedTimezone(utc, "") + + fmt.Println("## UTC") + fmt.Println(utc) + fmt.Println("## UTC -> JST (current timezone)") + fmt.Println(jst) + + var est time.Time + est, _ = timezone.FixedTimezone(now, "America/New_York") + fmt.Println("## JST -> EST") + fmt.Println(est) +} +``` + +### Result + +``` +32400 +0 Invalid short timezone: hogehoge +[Antarctica/Troll Etc/UTC Etc/Universal Etc/Zulu UTC Universal Zulu] +[] Invalid short timezone: foobar +## current timezone +2016-03-02 14:33:49.078798783 +0900 JST +## UTC +2016-03-02 05:33:49.078798783 +0000 UTC +## UTC -> JST (current timezone) +2016-03-02 14:33:49.078798783 +0900 JST +## JST -> EST +2016-03-02 00:33:49.078798783 -0500 EST +``` diff --git a/vendor/github.com/tkuchiki/go-timezone/timezone.go b/vendor/github.com/tkuchiki/go-timezone/timezone.go new file mode 100644 index 0000000..151cb48 --- /dev/null +++ b/vendor/github.com/tkuchiki/go-timezone/timezone.go @@ -0,0 +1,1155 @@ +package timezone + +import ( + 
"errors" + "fmt" + "time" +) + +var offsets = map[string]int{ + "GMT+12": -43200, + "GMT+11": -39600, + "NUT": -39600, + "SST": -39600, + "CKT": -36000, + "GMT+10": -36000, + "HST": -36000, + "TAHT": -36000, + "MART": -34200, + "AKST": -32400, + "GAMT": -32400, + "GMT+9": -32400, + "GMT+8": -28800, + "PST": -28800, + "GMT+7": -25200, + "MST": -25200, + "CST": -21600, + "GALT": -21600, + "GMT+6": -21600, + "ACT": -18000, + "COT": -18000, + "EAST": -18000, + "ECT": -18000, + "EST": -18000, + "GMT+5": -18000, + "PET": -18000, + "VET": -16200, + "AMT": -14400, + "AST": -14400, + "BOT": -14400, + "GMT+4": -14400, + "GYT": -14400, + "NST": -12600, + "ART": -10800, + "BRT": -10800, + "CLT": -10800, + "FKST": -10800, + "GFT": -10800, + "GMT+3": -10800, + "PMST": -10800, + "PYST": -10800, + "ROTT": -10800, + "SRT": -10800, + "WGT": -10800, + "FNT": -7200, + "GMT+2": -7200, + "UYST": -7200, + "AZOT": -3600, + "CVT": -3600, + "EGT": -3600, + "GMT+1": -3600, + "GMT": 0, + "UCT": 0, + "UTC": 0, + "WET": 0, + "CET": 3600, + "GMT-1": 3600, + "MET": 3600, + "WAT": 3600, + "CAT": 7200, + "EET": 7200, + "GMT-2": 7200, + "SAST": 7200, + "WAST": 7200, + "EAT": 10800, + "GMT-3": 10800, + "MSK": 10800, + "SYOT": 10800, + "IRST": 12600, + "AZT": 14400, + "GET": 14400, + "GMT-4": 14400, + "GST": 14400, + "MUT": 14400, + "RET": 14400, + "SAMT": 14400, + "SCT": 14400, + "AFT": 16200, + "AQTT": 18000, + "GMT-5": 18000, + "MAWT": 18000, + "MVT": 18000, + "ORAT": 18000, + "PKT": 18000, + "TFT": 18000, + "TJT": 18000, + "TMT": 18000, + "UZT": 18000, + "YEKT": 18000, + "IST": 19800, + "NPT": 20700, + "ALMT": 21600, + "BDT": 21600, + "BTT": 21600, + "GMT-6": 21600, + "IOT": 21600, + "KGT": 21600, + "NOVT": 21600, + "OMST": 21600, + "QYZT": 21600, + "VOST": 21600, + "XJT": 21600, + "CCT": 23400, + "MMT": 23400, + "CXT": 25200, + "DAVT": 25200, + "GMT-7": 25200, + "HOVT": 25200, + "ICT": 25200, + "KRAT": 25200, + "WIB": 25200, + "AWST": 28800, + "BNT": 28800, + "CHOT": 28800, + "GMT-8": 28800, + "HKT": 28800, + "IRKT": 28800, + "MYT": 28800, + "PHT": 28800, + "SGT": 28800, + "ULAT": 28800, + "WITA": 28800, + "ACWST": 31500, + "GMT-9": 32400, + "JST": 32400, + "KST": 32400, + "PWT": 32400, + "TLT": 32400, + "WIT": 32400, + "YAKT": 32400, + "ACST": 34200, + "AEST": 36000, + "CHUT": 36000, + "ChST": 36000, + "DDUT": 36000, + "GMT-10": 36000, + "MAGT": 36000, + "PGT": 36000, + "SAKT": 36000, + "VLAT": 36000, + "ACDT": 37800, + "AEDT": 39600, + "BST": 39600, + "GMT-11": 39600, + "KOST": 39600, + "LHDT": 39600, + "MIST": 39600, + "NCT": 39600, + "PONT": 39600, + "SBT": 39600, + "SRET": 39600, + "VUT": 39600, + "NFT": 41400, + "ANAT": 43200, + "FJT": 43200, + "GILT": 43200, + "GMT-12": 43200, + "MHT": 43200, + "NRT": 43200, + "PETT": 43200, + "TVT": 43200, + "WAKT": 43200, + "WFT": 43200, + "GMT-13": 46800, + "NZDT": 46800, + "PHOT": 46800, + "TKT": 46800, + "TOT": 46800, + "CHADT": 49500, + "GMT-14": 50400, + "LINT": 50400, + "WSDT": 50400, +} + +var timezones = map[string][]string{ + "ACDT": []string{ + "Australia/Adelaide", + "Australia/Broken_Hill", + "Australia/South", + "Australia/Yancowinna", + }, + "ACST": []string{ + "Australia/Darwin", + "Australia/North", + }, + "ACT": []string{ + "America/Eirunepe", + "America/Porto_Acre", + "America/Rio_Branco", + "Brazil/Acre", + }, + "ACWST": []string{ + "Australia/Eucla", + }, + "AEDT": []string{ + "Australia/ACT", + "Australia/Canberra", + "Australia/Currie", + "Australia/Hobart", + "Australia/Melbourne", + "Australia/NSW", + "Australia/Sydney", + "Australia/Tasmania", + 
"Australia/Victoria", + }, + "AEST": []string{ + "Australia/Brisbane", + "Australia/Lindeman", + "Australia/Queensland", + }, + "AFT": []string{ + "Asia/Kabul", + }, + "AKST": []string{ + "America/Anchorage", + "America/Juneau", + "America/Nome", + "America/Sitka", + "America/Yakutat", + "US/Alaska", + }, + + "ALMT": []string{ + "Asia/Almaty", + }, + "AMT": []string{ + "America/Boa_Vista", + "America/Campo_Grande", + "America/Cuiaba", + "America/Manaus", + "America/Porto_Velho", + "Asia/Yerevan", + "Brazil/West", + }, + + "ANAT": []string{ + "Asia/Anadyr", + }, + "AQTT": []string{ + "Asia/Aqtau", + "Asia/Aqtobe", + }, + "ART": []string{ + "America/Argentina/Buenos_Aires", + "America/Argentina/Catamarca", + "America/Argentina/ComodRivadavia", + "America/Argentina/Cordoba", + "America/Argentina/Jujuy", + "America/Argentina/La_Rioja", + "America/Argentina/Mendoza", + "America/Argentina/Rio_Gallegos", + "America/Argentina/Salta", + "America/Argentina/San_Juan", + "America/Argentina/San_Luis", + "America/Argentina/Tucuman", + "America/Argentina/Ushuaia", + "America/Buenos_Aires", + "America/Catamarca", + "America/Cordoba", + "America/Jujuy", + "America/Mendoza", + "America/Rosario", + }, + "AST": []string{ + "America/Anguilla", + "America/Antigua", + "America/Aruba", + "America/Barbados", + "America/Blanc-Sablon", + "America/Curacao", + "America/Dominica", + "America/Glace_Bay", + "America/Goose_Bay", + "America/Grand_Turk", + "America/Grenada", + "America/Guadeloupe", + "America/Halifax", + "America/Kralendijk", + "America/Lower_Princes", + "America/Marigot", + "America/Martinique", + "America/Moncton", + "America/Montserrat", + "America/Port_of_Spain", + "America/Puerto_Rico", + "America/Santo_Domingo", + "America/St_Barthelemy", + "America/St_Kitts", + "America/St_Lucia", + "America/St_Thomas", + "America/St_Vincent", + "America/Thule", + "America/Tortola", + "America/Virgin", + "Asia/Aden", + "Asia/Baghdad", + "Asia/Bahrain", + "Asia/Kuwait", + "Asia/Qatar", + "Asia/Riyadh", + "Atlantic/Bermuda", + "Canada/Atlantic", + }, + "AWST": []string{ + "Antarctica/Casey", + "Australia/Perth", + "Australia/West", + }, + "AZOT": []string{ + "Atlantic/Azores", + }, + "AZT": []string{ + "Asia/Baku", + }, + "BDT": []string{ + "Asia/Dacca", + "Asia/Dhaka", + }, + + "BNT": []string{ + "Asia/Brunei", + }, + "BOT": []string{ + "America/La_Paz", + }, + "BRT": []string{ + "America/Araguaina", + "America/Bahia", + "America/Belem", + "America/Fortaleza", + "America/Maceio", + "America/Recife", + "America/Santarem", + "America/Sao_Paulo", + "Brazil/East", + }, + "BST": []string{ + "Pacific/Bougainville", + }, + + "BTT": []string{ + "Asia/Thimbu", + "Asia/Thimphu", + }, + "CAT": []string{ + "Africa/Blantyre", + "Africa/Bujumbura", + "Africa/Gaborone", + "Africa/Harare", + "Africa/Kigali", + "Africa/Lubumbashi", + "Africa/Lusaka", + "Africa/Maputo", + }, + "CCT": []string{ + "Indian/Cocos", + }, + "CET": []string{ + "Africa/Algiers", + "Africa/Ceuta", + "Africa/Tunis", + "Arctic/Longyearbyen", + "Atlantic/Jan_Mayen", + "CET", + "Europe/Amsterdam", + "Europe/Andorra", + "Europe/Belgrade", + "Europe/Berlin", + "Europe/Bratislava", + "Europe/Brussels", + "Europe/Budapest", + "Europe/Busingen", + "Europe/Copenhagen", + "Europe/Gibraltar", + "Europe/Ljubljana", + "Europe/Luxembourg", + "Europe/Madrid", + "Europe/Malta", + "Europe/Monaco", + "Europe/Oslo", + "Europe/Paris", + "Europe/Podgorica", + "Europe/Prague", + "Europe/Rome", + "Europe/San_Marino", + "Europe/Sarajevo", + "Europe/Skopje", + "Europe/Stockholm", + 
"Europe/Tirane", + "Europe/Vaduz", + "Europe/Vatican", + "Europe/Vienna", + "Europe/Warsaw", + "Europe/Zagreb", + "Europe/Zurich", + "Poland", + }, + "CHADT": []string{ + "NZ-CHAT", + "Pacific/Chatham", + }, + "CHOT": []string{ + "Asia/Choibalsan", + }, + "CHUT": []string{ + "Pacific/Chuuk", + "Pacific/Truk", + "Pacific/Yap", + }, + "CKT": []string{ + "Pacific/Rarotonga", + }, + "CLT": []string{ + "America/Santiago", + "Antarctica/Palmer", + "Chile/Continental", + }, + "COT": []string{ + "America/Bogota", + }, + "CST": []string{ + "America/Bahia_Banderas", + "America/Belize", + "America/Chicago", + "America/Costa_Rica", + "America/El_Salvador", + "America/Guatemala", + "America/Havana", + "America/Indiana/Knox", + "America/Indiana/Tell_City", + "America/Knox_IN", + "America/Managua", + "America/Matamoros", + "America/Menominee", + "America/Merida", + "America/Mexico_City", + "America/Monterrey", + "America/North_Dakota/Beulah", + "America/North_Dakota/Center", + "America/North_Dakota/New_Salem", + "America/Rainy_River", + "America/Rankin_Inlet", + "America/Regina", + "America/Resolute", + "America/Swift_Current", + "America/Tegucigalpa", + "America/Winnipeg", + "Asia/Chongqing", + "Asia/Chungking", + "Asia/Harbin", + "Asia/Macao", + "Asia/Macau", + "Asia/Shanghai", + "Asia/Taipei", + "CST6CDT", + "Canada/Central", + "Canada/East-Saskatchewan", + "Canada/Saskatchewan", + "Cuba", + "Mexico/General", + "PRC", + "ROC", + "US/Central", + "US/Indiana-Starke", + }, + + "CVT": []string{ + "Atlantic/Cape_Verde", + }, + "CXT": []string{ + "Indian/Christmas", + }, + "ChST": []string{ + "Pacific/Guam", + "Pacific/Saipan", + }, + "DAVT": []string{ + "Antarctica/Davis", + }, + "DDUT": []string{ + "Antarctica/DumontDUrville", + }, + "EAST": []string{ + "Chile/EasterIsland", + "Pacific/Easter", + }, + "EAT": []string{"Africa/Addis_Ababa", + "Africa/Asmara", + "Africa/Asmera", + "Africa/Dar_es_Salaam", + "Africa/Djibouti", + "Africa/Juba", + "Africa/Kampala", + "Africa/Khartoum", + "Africa/Mogadishu", + "Africa/Nairobi", + "Indian/Antananarivo", + "Indian/Comoro", + "Indian/Mayotte", + }, + "ECT": []string{ + "America/Guayaquil", + }, + "EET": []string{ + "Africa/Cairo", + "Africa/Tripoli", + "Asia/Amman", + "Asia/Beirut", + "Asia/Damascus", + "Asia/Gaza", + "Asia/Hebron", + "Asia/Istanbul", + "Asia/Nicosia", + "EET", + "Egypt", + "Europe/Athens", + "Europe/Bucharest", + "Europe/Chisinau", + "Europe/Helsinki", + "Europe/Istanbul", + "Europe/Kaliningrad", + "Europe/Kiev", + "Europe/Mariehamn", + "Europe/Nicosia", + "Europe/Riga", + "Europe/Sofia", + "Europe/Tallinn", + "Europe/Tiraspol", + "Europe/Uzhgorod", + "Europe/Vilnius", + "Europe/Zaporozhye", + "Libya", + "Turkey", + }, + "EGT": []string{ + "America/Scoresbysund", + }, + "EST": []string{ + "America/Atikokan", + "America/Cancun", + "America/Cayman", + "America/Coral_Harbour", + "America/Detroit", + "America/Fort_Wayne", + "America/Indiana/Indianapolis", + "America/Indiana/Marengo", + "America/Indiana/Petersburg", + "America/Indiana/Vevay", + "America/Indiana/Vincennes", + "America/Indiana/Winamac", + "America/Indianapolis", + "America/Iqaluit", + "America/Jamaica", + "America/Kentucky/Louisville", + "America/Kentucky/Monticello", + "America/Louisville", + "America/Montreal", + "America/Nassau", + "America/New_York", + "America/Nipigon", + "America/Panama", + "America/Pangnirtung", + "America/Port-au-Prince", + "America/Thunder_Bay", + "America/Toronto", + "Canada/Eastern", + "EST", + "EST5EDT", + "Jamaica", + "US/East-Indiana", + "US/Eastern", + 
"US/Michigan", + }, + "FJT": []string{ + "Pacific/Fiji", + }, + "FKST": []string{ + "Atlantic/Stanley", + }, + "FNT": []string{ + "America/Noronha", + "Brazil/DeNoronha", + }, + "GALT": []string{ + "Pacific/Galapagos", + }, + "GAMT": []string{ + "Pacific/Gambier", + }, + "GET": []string{ + "Asia/Tbilisi", + }, + "GFT": []string{ + "America/Cayenne", + }, + "GILT": []string{ + "Pacific/Tarawa", + }, + "GMT": []string{ + "Africa/Abidjan", + "Africa/Accra", + "Africa/Bamako", + "Africa/Banjul", + "Africa/Bissau", + "Africa/Conakry", + "Africa/Dakar", + "Africa/Freetown", + "Africa/Lome", + "Africa/Monrovia", + "Africa/Nouakchott", + "Africa/Ouagadougou", + "Africa/Sao_Tome", + "Africa/Timbuktu", + "America/Danmarkshavn", + "Atlantic/Reykjavik", + "Atlantic/St_Helena", + "Eire", + "Etc/GMT", + "Etc/GMT+0", + "Etc/GMT-0", + "Etc/GMT0", + "Etc/Greenwich", + "Europe/Belfast", + "Europe/Dublin", + "Europe/Guernsey", + "Europe/Isle_of_Man", + "Europe/Jersey", + "Europe/London", + "GB", + "GB-Eire", + "GMT", + "GMT+0", + "GMT-0", + "GMT0", + "Greenwich", + "Iceland", + }, + "GMT+1": []string{ + "Etc/GMT+1", + }, + "GMT+10": []string{ + "Etc/GMT+10", + }, + "GMT+11": []string{ + "Etc/GMT+11", + }, + "GMT+12": []string{ + "Etc/GMT+12", + }, + "GMT+2": []string{ + "Etc/GMT+2", + }, + "GMT+3": []string{ + "Etc/GMT+3", + }, + "GMT+4": []string{ + "Etc/GMT+4", + }, + "GMT+5": []string{ + "Etc/GMT+5", + }, + "GMT+6": []string{ + "Etc/GMT+6", + }, + "GMT+7": []string{ + "Etc/GMT+7", + }, + "GMT+8": []string{ + "Etc/GMT+8", + }, + "GMT+9": []string{ + "Etc/GMT+9", + }, + "GMT-1": []string{ + "Etc/GMT-1", + }, + "GMT-10": []string{ + "Etc/GMT-10", + }, + "GMT-11": []string{ + "Etc/GMT-11", + }, + "GMT-12": []string{ + "Etc/GMT-12", + }, + "GMT-13": []string{ + "Etc/GMT-13", + }, + "GMT-14": []string{ + "Etc/GMT-14", + }, + "GMT-2": []string{ + "Etc/GMT-2", + }, + "GMT-3": []string{ + "Etc/GMT-3", + }, + "GMT-4": []string{ + "Etc/GMT-4", + }, + "GMT-5": []string{ + "Etc/GMT-5", + }, + "GMT-6": []string{ + "Etc/GMT-6", + }, + "GMT-7": []string{ + "Etc/GMT-7", + }, + "GMT-8": []string{ + "Etc/GMT-8", + }, + "GMT-9": []string{ + "Etc/GMT-9", + }, + "GST": []string{ + "Asia/Dubai", + "Asia/Muscat", + "Atlantic/South_Georgia", + }, + "GYT": []string{ + "America/Guyana", + }, + "HKT": []string{ + "Asia/Hong_Kong", + "Hongkong", + }, + "HOVT": []string{ + "Asia/Hovd", + }, + "HST": []string{ + "America/Adak", + "America/Atka", + "HST", + "Pacific/Honolulu", + "Pacific/Johnston", + "US/Aleutian", + "US/Hawaii", + }, + "ICT": []string{ + "Asia/Bangkok", + "Asia/Ho_Chi_Minh", + "Asia/Phnom_Penh", + "Asia/Saigon", + "Asia/Vientiane", + }, + "IOT": []string{ + "Indian/Chagos", + }, + "IRKT": []string{ + "Asia/Chita", + "Asia/Irkutsk", + }, + "IRST": []string{ + "Asia/Tehran", + "Iran", + }, + "IST": []string{ + "Asia/Calcutta", + "Asia/Colombo", + "Asia/Jerusalem", + "Asia/Kolkata", + "Asia/Tel_Aviv", + "Israel", + }, + "JST": []string{ + "Asia/Tokyo", + "Japan", + }, + "KGT": []string{ + "Asia/Bishkek", + }, + "KOST": []string{ + "Pacific/Kosrae", + }, + "KRAT": []string{ + "Asia/Krasnoyarsk", + "Asia/Novokuznetsk", + }, + "KST": []string{ + "Asia/Pyongyang", + "Asia/Seoul", + "ROK", + }, + "LHDT": []string{ + "Australia/LHI", + "Australia/Lord_Howe", + }, + "LINT": []string{ + "Pacific/Kiritimati", + }, + "MAGT": []string{ + "Asia/Magadan", + }, + "MART": []string{ + "Pacific/Marquesas", + }, + "MAWT": []string{ + "Antarctica/Mawson", + }, + "MET": []string{ + "MET", + }, + "MHT": []string{ + "Kwajalein", + 
"Pacific/Kwajalein", + "Pacific/Majuro", + }, + "MIST": []string{ + "Antarctica/Macquarie", + }, + "MMT": []string{ + "Asia/Rangoon", + }, + "MSK": []string{ + "Europe/Minsk", + "Europe/Moscow", + "Europe/Simferopol", + "Europe/Volgograd", + "W-SU", + }, + "MST": []string{ + "America/Boise", + "America/Cambridge_Bay", + "America/Chihuahua", + "America/Creston", + "America/Dawson_Creek", + "America/Denver", + "America/Edmonton", + "America/Fort_Nelson", + "America/Hermosillo", + "America/Inuvik", + "America/Mazatlan", + "America/Ojinaga", + "America/Phoenix", + "America/Shiprock", + "America/Yellowknife", + "Canada/Mountain", + "MST", + "MST7MDT", + "Mexico/BajaSur", + "Navajo", + "US/Arizona", + "US/Mountain", + }, + "MUT": []string{ + "Indian/Mauritius", + }, + "MVT": []string{ + "Indian/Maldives", + "Asia/Kuala_Lumpur", + "Asia/Kuching", + }, + "NCT": []string{ + "Pacific/Noumea", + }, + "NFT": []string{ + "Pacific/Norfolk", + }, + "NOVT": []string{ + "Asia/Novosibirsk", + }, + "NPT": []string{ + "Asia/Kathmandu", + "Asia/Katmandu", + }, + "NRT": []string{ + "Pacific/Nauru", + }, + "NST": []string{ + "America/St_Johns", + "Canada/Newfoundland", + }, + "NUT": []string{ + "Pacific/Niue", + }, + "NZDT": []string{ + "Antarctica/McMurdo", + "Antarctica/South_Pole", + "NZ", + "Pacific/Auckland", + }, + "OMST": []string{ + "Asia/Omsk", + }, + "ORAT": []string{ + "Asia/Oral", + }, + "PET": []string{ + "America/Lima", + }, + "PETT": []string{ + "Asia/Kamchatka", + }, + "PGT": []string{ + "Pacific/Port_Moresby", + }, + "PHOT": []string{ + "Pacific/Enderbury", + }, + "PHT": []string{ + "Asia/Manila", + }, + "PKT": []string{ + "Asia/Karachi", + }, + "PMST": []string{ + "America/Miquelon", + }, + "PONT": []string{ + "Pacific/Pohnpei", + "Pacific/Ponape", + }, + "PST": []string{ + "America/Dawson", + "America/Ensenada", + "America/Los_Angeles", + "America/Metlakatla", + "America/Santa_Isabel", + "America/Tijuana", + "America/Vancouver", + "America/Whitehorse", + "Canada/Pacific", + "Canada/Yukon", + "Mexico/BajaNorte", + "PST8PDT", + "Pacific/Pitcairn", + "US/Pacific", + "US/Pacific-New", + }, + "PWT": []string{ + "Pacific/Palau", + }, + "PYST": []string{ + "America/Asuncion", + }, + "QYZT": []string{ + "Asia/Qyzylorda", + }, + "RET": []string{ + "Indian/Reunion", + }, + "ROTT": []string{ + "Antarctica/Rothera", + }, + "SAKT": []string{ + "Asia/Sakhalin", + }, + "SAMT": []string{ + "Europe/Samara", + }, + "SAST": []string{ + "Africa/Johannesburg", + "Africa/Maseru", + "Africa/Mbabane", + }, + "SBT": []string{ + "Pacific/Guadalcanal", + }, + "SCT": []string{ + "Indian/Mahe", + }, + "SGT": []string{ + "Asia/Singapore", + "Singapore", + }, + "SRET": []string{ + "Asia/Srednekolymsk", + }, + "SRT": []string{ + "America/Paramaribo", + }, + "SST": []string{ + "Pacific/Midway", + "Pacific/Pago_Pago", + "Pacific/Samoa", + "US/Samoa", + }, + "SYOT": []string{ + "Antarctica/Syowa", + }, + "TAHT": []string{ + "Pacific/Tahiti", + }, + "TFT": []string{ + "Indian/Kerguelen", + }, + "TJT": []string{ + "Asia/Dushanbe", + }, + "TKT": []string{ + "Pacific/Fakaofo", + }, + "TLT": []string{ + "Asia/Dili", + }, + "TMT": []string{ + "Asia/Ashgabat", + "Asia/Ashkhabad", + }, + "TOT": []string{ + "Pacific/Tongatapu", + }, + "TVT": []string{ + "Pacific/Funafuti", + }, + "UCT": []string{ + "Etc/UCT", + "UCT", + }, + "ULAT": []string{ + "Asia/Ulaanbaatar", + "Asia/Ulan_Bator", + }, + "UTC": []string{ + "Antarctica/Troll", + "Etc/UTC", + "Etc/Universal", + "Etc/Zulu", + "UTC", + "Universal", + "Zulu", + }, + "UYST": []string{ + 
"America/Montevideo", + }, + "UZT": []string{ + "Asia/Samarkand", + "Asia/Tashkent", + }, + "VET": []string{ + "America/Caracas", + }, + "VLAT": []string{ + "Asia/Ust-Nera", + "Asia/Vladivostok", + }, + "VOST": []string{ + "Antarctica/Vostok", + }, + "VUT": []string{ + "Pacific/Efate", + }, + "WAKT": []string{ + "Pacific/Wake", + }, + "WAST": []string{ + "Africa/Windhoek", + }, + "WAT": []string{ + "Africa/Bangui", + "Africa/Brazzaville", + "Africa/Douala", + "Africa/Kinshasa", + "Africa/Lagos", + "Africa/Libreville", + "Africa/Luanda", + "Africa/Malabo", + "Africa/Ndjamena", + "Africa/Niamey", + "Africa/Porto-Novo", + }, + "WET": []string{ + "Africa/Casablanca", + "Africa/El_Aaiun", + "Atlantic/Canary", + "Atlantic/Faeroe", + "Atlantic/Faroe", + "Atlantic/Madeira", + "Europe/Lisbon", + "Portugal", + "WET", + }, + "WFT": []string{ + "Pacific/Wallis", + }, + "WGT": []string{ + "America/Godthab", + }, + "WIB": []string{ + "Asia/Jakarta", + "Asia/Pontianak", + }, + "WIT": []string{ + "Asia/Jayapura", + }, + "WITA": []string{ + "Asia/Makassar", + "Asia/Ujung_Pandang", + }, + "WSDT": []string{ + "Pacific/Apia", + }, + "XJT": []string{ + "Asia/Kashgar", + "Asia/Urumqi", + }, + "YAKT": []string{ + "Asia/Khandyga", + "Asia/Yakutsk", + }, + "YEKT": []string{ + "Asia/Yekaterinburg", + }, + "Local time zone must be set--see zic manual page": []string{ + "Factory", + }, +} + +func GetOffset(shortZone string) (int, error) { + if _, ok := offsets[shortZone]; !ok { + return 0, errors.New(fmt.Sprintf("Invalid short timezone: %s", shortZone)) + } + + return offsets[shortZone], nil +} + +func GetTimezones(shortZone string) ([]string, error) { + if _, ok := timezones[shortZone]; !ok { + return []string{}, errors.New(fmt.Sprintf("Invalid short timezone: %s", shortZone)) + } + + return timezones[shortZone], nil +} + +func FixedTimezone(t time.Time, timezone string) (time.Time, error) { + var err error + var loc *time.Location + zone, offset := time.Now().In(time.Local).Zone() + + if zone == "UTC" { + return t, err + } + + if timezone != "" { + loc, err = time.LoadLocation(timezone) + if err != nil { + return t, err + } + + return t.In(loc), err + } + + loc = time.FixedZone(zone, offset) + return t.In(loc), err +} diff --git a/vendor/github.com/tkuchiki/parsetime/.gitignore b/vendor/github.com/tkuchiki/parsetime/.gitignore new file mode 100644 index 0000000..d1d302c --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/.gitignore @@ -0,0 +1,2 @@ +pkg/ +src/ \ No newline at end of file diff --git a/vendor/github.com/tkuchiki/parsetime/.travis.yml b/vendor/github.com/tkuchiki/parsetime/.travis.yml new file mode 100644 index 0000000..b608ed9 --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/.travis.yml @@ -0,0 +1,17 @@ +language: go + +go: + - 1.5 + - 1.5.1 + - 1.5.2 + - 1.5.3 + - 1.6 + - tip + +script: + - make test + +after_script: + - make cover + +sodo: false diff --git a/vendor/github.com/tkuchiki/parsetime/LICENSE b/vendor/github.com/tkuchiki/parsetime/LICENSE new file mode 100644 index 0000000..e34b51f --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 tkuchiki + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom 
the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/tkuchiki/parsetime/Makefile b/vendor/github.com/tkuchiki/parsetime/Makefile new file mode 100644 index 0000000..53fdc11 --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/Makefile @@ -0,0 +1,22 @@ +BUILD_OS_TARGETS = "linux darwin freebsd windows" + +test: deps + go test -v -covermode=count -coverprofile=coverage.out + +deps: + go get -d -v -t ./... + go get github.com/golang/lint/golint + go get golang.org/x/tools/cmd/cover + go get github.com/mattn/goveralls + +LINT_RET = .golint.txt +lint: deps + rm -f $(LINT_RET) + golint ./... | tee $(LINT_RET) + test ! -s $(LINT_RET) + +cover: deps + go get github.com/axw/gocov/gocov + goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $(COVERALLS_TOKEN) + +.PHONY: test deps lint cover diff --git a/vendor/github.com/tkuchiki/parsetime/README.md b/vendor/github.com/tkuchiki/parsetime/README.md new file mode 100644 index 0000000..c76e31f --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/README.md @@ -0,0 +1,636 @@ +# parsetime + +[![Build Status](https://travis-ci.org/tkuchiki/parsetime.svg?branch=master)](https://travis-ci.org/tkuchiki/parsetime) +[![Coverage Status](https://coveralls.io/repos/tkuchiki/parsetime/badge.svg?branch=master&service=github)](https://coveralls.io/github/tkuchiki/parsetime?branch=master) + +date/time parser for golang + +## Usage + +### `parsetime.NewParseTime` + +#### `parsetime.NewParseTime()` + +```go +package main + +import ( + "fmt" + "github.com/tkuchiki/parsetime" + "log" +) + +func main() { + p, err := parsetime.NewParseTime() + if err != nil { + log.Fatal(err) + } + + t, err2 := p.Parse("2016-01-02T03:04:05") + + if err2 != nil { + log.Fatal(err) + } + + + // Local timezone: JST + // 2016-01-02 03:04:05 +0900 JST + fmt.Println(t) +} +``` + +#### `parsetime.NewParseTime("location name")` + +```go +func main() { + p, err := parsetime.NewParseTime("US/Arizona") + if err != nil { + log.Fatal(err) + } + + t, err2 := p.Parse("2016-01-02T03:04:05") + + if err2 != nil { + log.Fatal(err) + } + + + // Local timezone: JST + // 2016-01-02 03:04:05 -0700 MST + fmt.Println(t) +} +``` + +#### `parsetime.NewParseTime("timezone name")` + +```go +func main() { + p, err := parsetime.NewParseTime("MST") + if err != nil { + log.Fatal(err) + } + + t, err2 := p.Parse("2016-01-02T03:04:05") + + if err2 != nil { + log.Fatal(err) + } + + + // Local timezone: JST + // 2016-01-02 03:04:05 -0700 MST + fmt.Println(t) +} +``` + +#### `parsetime.NewParseTime("timezone name", offset)` + +```go +func main() { + p, err := parsetime.NewParseTime("MST", -7*3600) + if err != nil { + log.Fatal(err) + } + + t, err2 := p.Parse("2016-01-02T03:04:05") + + if err2 != nil { + log.Fatal(err) + } + + + // Local timezone: JST + // 2016-01-02 03:04:05 -0700 MST + fmt.Println(t) +} +``` + +### `ParseTime` + +#### `ParseTime.GetLocation` + 
+Returns `*time.Location` + +```go +var loc *time.Location +p, _ := parsetime.NewParseTime() + +loc = p.GetLocation() +``` + +#### `ParseTime.SetLocation` + +Sets `*time.Location` + +```go +var loc *time.Location +var err error +p, _ := parsetime.NewParseTime() + +loc, err = time.LoadLocation("US/Arizona") +p.SetLocation(loc) +``` + +#### `ParseTime.ISO8601` + +Parses ISO8601, RFC3339 date/time string + +```go +var t time.Time +var err error + +p, _ := parsetime.NewParseTime() + +t, err = p.ISO8601("2016-01-02T03:04:05") +``` + +#### `ParseTime.RFC8xx1123` + +Parses RFC822, RFC850, RFC1123 date/time string + +```go +var t time.Time +var err error + +p, _ := parsetime.NewParseTime() + +t, err = p.RFC8xx1123("2016-01-02T03:04:05") +``` + +#### `ParseTime.ANSIC` + +Parses ANSIC date/time string + +```go +var t time.Time +var err error + +p, _ := parsetime.NewParseTime() + +t, err = p.ANSIC("2016-01-02T03:04:05") +``` + +#### `ParseTime.US` + +Parses MM/DD/YYYY format date/time string + +```go +var t time.Time +var err error + +p, _ := parsetime.NewParseTime() + +t, err = p.US("2016-01-02T03:04:05") +``` + +#### `ParseTime.Parse` + +Parses date/time string + +```go +var t time.Time +var err error + +p, _ := parsetime.NewParseTime() + +t, err = p.Parse("2016-01-02T03:04:05") +``` + +## Examples + +#### ISO8601 + +| Input String | _time.Time | +| ---------------------------------------- | ----------------------------------------- | +| 2006-01-02 15:04 | 2006-01-02 15:04:00 +0900 JST | +| 2006-01-02 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02 15:04:05 | 2006-01-02 15:04:05 +0900 JST | +| 2006-01-02 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05-07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05 -07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05.999999999 | 2006-01-02 15:04:05.999999999 +0900 JST | +| 2006-01-02 15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.9-07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 15:04:05.9 -07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 15:04:05.999-07:00 MST | 2006-01-02 15:04:05.000000999 -0700 -0700 | +| 2006-01-02 15:04:05.999 -07:00 MST | 2006-01-02 15:04:05.000000999 -0700 -0700 | +| 2006-01-02 15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999 -07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999999-07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999999 -07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02T15:04 | 2006-01-02 15:04:00 +0900 JST | +| 2006-01-02T15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02T15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02T15:04:05 | 2006-01-02 15:04:05 +0900 JST | +| 2006-01-02T15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05-07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05 -07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05.999999999 | 2006-01-02 15:04:05.999999999 +0900 JST | +| 2006-01-02T15:04:05.999999999-07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02T15:04:05.999999999 -07:00 MST | 2006-01-02 15:04:05.999999999 -0700 
-0700 | +| 2006-01-02T15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02T15:04:05.999999 -07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02T15:04:05.9-07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02T15:04:05.9 -07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 | 2006-01-02 00:00:00 +0900 JST | +| 20060102 | 2006-01-02 00:00:00 +0900 JST | +| 20060102150405 | 2006-01-02 15:04:05 +0900 JST | +| 20060102 150405 | 2006-01-02 15:04:05 +0900 JST | +| 20060102T150405 | 2006-01-02 15:04:05 +0900 JST | +| 15:04:05 | 2016-05-06 15:04:05 +0900 JST | +| 15:04:05-07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 15:04:05 -07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 15:04:05.9-07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 15:04:05.9 -07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 15:04:05.999-07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 15:04:05.999 -07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 15:04:05.999999-07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 15:04:05.999999 -07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 15:04:05.999999999-07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 15:04:05.999999999 -07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 150405-07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 150405 -07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 150405.9-07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 150405.9 -07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 150405.999-07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 150405.999 -07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 150405.999999-07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 150405.999999 -07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 150405.999999999-07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 150405.999999999 -07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 2006-01-02 15:04:05Z | 2006-01-02 15:04:05 +0000 UTC | +| 2006-01-02T15:04:05Z | 2006-01-02 15:04:05 +0000 UTC | +| 2006-01-02 15:04:05.9Z | 2006-01-02 15:04:05.000000009 +0000 UTC | +| 2006-01-02T15:04:05.9Z | 2006-01-02 15:04:05.000000009 +0000 UTC | +| 2006-01-02 15:04:05.999Z | 2006-01-02 15:04:05.000000999 +0000 UTC | +| 2006-01-02T15:04:05.999Z | 2006-01-02 15:04:05.000000999 +0000 UTC | +| 2006-01-02 15:04:05.999999Z | 2006-01-02 15:04:05.000999999 +0000 UTC | +| 2006-01-02T15:04:05.999999Z | 2006-01-02 15:04:05.000999999 +0000 UTC | +| 2006-01-02 15:04:05.999999999Z | 2006-01-02 15:04:05.999999999 +0000 UTC | +| 2006-01-02T15:04:05.999999999Z | 2006-01-02 15:04:05.999999999 +0000 UTC | + +#### RFC8xx1123 + +| Input String | _time.Time | +| ---------------------------------------- | ----------------------------------------- | +| 02-Jan-06 1504 MST | 2006-01-02 15:04:00 -0700 MST | +| 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST | +| 02-Jan-06 150405 MST | 2006-01-02 15:04:05 -0700 MST | +| 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| 02-Jan-06 1504-0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 15:04-0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 150405-0700 | 2006-01-02 15:04:05 -0700 -0700 | +| 02-Jan-06 15:04:05-0700 | 2006-01-02 15:04:05 -0700 -0700 | +| 02-Jan-06 15:04 -0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 | +| Monday, 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST 
| +| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| Mon, 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST | +| Mon, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| Mon, 02-Jan-06 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-06 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-06 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-06 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-2006 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-2006 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-2006 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-2006 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-70 15:04-07:00 | 1970-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-70 15:04:05-07:00 | 1970-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-70 15:04 -07:00 | 1970-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-70 15:04:05 -07:00 | 1970-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-99 15:04-07:00 | 1999-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-99 15:04:05-07:00 | 1999-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-99 15:04:05 -07:00 | 1999-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04-07:00 | 2000-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05-07:00 | 2000-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05 -07:00 | 2000-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.9-07:00 | 2000-01-02 15:04:05.000000009 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.9 -07:00 | 2000-01-02 15:04:05.000000009 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999-07:00 | 2000-01-02 15:04:05.000000999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999 -07:00 | 2000-01-02 15:04:05.000000999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999-07:00 | 2000-01-02 15:04:05.000999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999 -07:00 | 2000-01-02 15:04:05.000999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999999-07:00 | 2000-01-02 15:04:05.999999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999999 -07:00 | 2000-01-02 15:04:05.999999999 -0700 -0700 | + +#### ANSIC + +| Input String | _time.Time | +| ------------------------------- | --------------------------------------- | +| Mon Jan 02 150405 2006 | 2006-01-02 15:04:05 +0900 JST | +| Mon Jan 02 15:04:05 2006 | 2006-01-02 15:04:05 +0900 JST | +| Mon Jan 02 150405 MST 2006 | 2006-01-02 15:04:05 -0700 MST | +| Mon Jan 02 15:04:05 MST 2006 | 2006-01-02 15:04:05 -0700 MST | +| Mon Jan 02 1504-07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 15:04-07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 1504 -07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 15:04 -07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 150405-07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 15:04:05-07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 150405 -07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 15:04:05 -07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Jan 02 150405 | 2016-01-02 15:04:05 +0900 JST | +| Jan 02 15:04:05 | 2016-01-02 15:04:05 +0900 JST | +| Jan 02 150405.9 | 2016-01-02 15:04:05.000000009 +0900 JST | +| Jan 02 15:04:05.9 | 2016-01-02 15:04:05.000000009 +0900 JST | +| Jan 02 150405.999 | 2016-01-02 15:04:05.000000999 +0900 JST | +| Jan 02 15:04:05.999 | 2016-01-02 15:04:05.000000999 +0900 JST | +| Jan 02 150405.999999 | 2016-01-02 15:04:05.000999999 +0900 JST | +| Jan 02 15:04:05.999999 | 2016-01-02 15:04:05.000999999 +0900 JST | +| Jan 02 
150405.999999999 | 2016-01-02 15:04:05.999999999 +0900 JST | +| Jan 02 15:04:05.999999999 | 2016-01-02 15:04:05.999999999 +0900 JST | + +#### US + +| Input String | _time.Time | +| ---------------------------------------- | --------------------------------------- | +| 11:04AM | 2016-05-06 11:04:00 +0900 JST | +| 11:04PM | 2016-05-06 23:04:00 +0900 JST | +| 11:04 AM | 2016-05-06 11:04:00 +0900 JST | +| 11:04 PM | 2016-05-06 23:04:00 +0900 JST | +| 11:04:05 AM | 2016-05-06 11:04:05 +0900 JST | +| 11:04:05 PM | 2016-05-06 23:04:05 +0900 JST | +| 11:04:05.9AM | 2016-05-06 11:04:05.000000009 +0900 JST | +| 11:04:05.9 AM | 2016-05-06 11:04:05.000000009 +0900 JST | +| 11:04:05.9PM | 2016-05-06 23:04:05.000000009 +0900 JST | +| 11:04:05.9 PM | 2016-05-06 23:04:05.000000009 +0900 JST | +| 11:04:05.999AM | 2016-05-06 11:04:05.000000999 +0900 JST | +| 11:04:05.999 AM | 2016-05-06 11:04:05.000000999 +0900 JST | +| 11:04:05.999PM | 2016-05-06 23:04:05.000000999 +0900 JST | +| 11:04:05.999 PM | 2016-05-06 23:04:05.000000999 +0900 JST | +| 11:04:05.999999AM | 2016-05-06 11:04:05.000999999 +0900 JST | +| 11:04:05.999999 AM | 2016-05-06 11:04:05.000999999 +0900 JST | +| 11:04:05.999999PM | 2016-05-06 23:04:05.000999999 +0900 JST | +| 11:04:05.999999 PM | 2016-05-06 23:04:05.000999999 +0900 JST | +| 11:04:05.999999999AM | 2016-05-06 11:04:05.999999999 +0900 JST | +| 11:04:05.999999999 AM | 2016-05-06 11:04:05.999999999 +0900 JST | +| 11:04:05.999999999PM | 2016-05-06 23:04:05.999999999 +0900 JST | +| 11:04:05.999999999 PM | 2016-05-06 23:04:05.999999999 +0900 JST | +| 01-02-06 3:04AM | 2006-01-02 03:04:00 +0900 JST | +| 01-02-06 3:04 AM | 2006-01-02 03:04:00 +0900 JST | +| 01-02-06 3:04PM | 2006-01-02 15:04:00 +0900 JST | +| 01-02-06 3:04 PM | 2006-01-02 15:04:00 +0900 JST | +| 01-02-06 03:04:05AM | 2006-01-02 03:04:05 +0900 JST | +| 01-02-06 03:04:05 AM | 2006-01-02 03:04:05 +0900 JST | +| 01-02-06 03:04:05PM | 2006-01-02 15:04:05 +0900 JST | +| 01-02-06 03:04:05 PM | 2006-01-02 15:04:05 +0900 JST | +| 01-02-06 03:04:05.9AM | 2006-01-02 03:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9 AM | 2006-01-02 03:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9PM | 2006-01-02 15:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9 PM | 2006-01-02 15:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.999AM | 2006-01-02 03:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999 AM | 2006-01-02 03:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999PM | 2006-01-02 15:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999 PM | 2006-01-02 15:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999999AM | 2006-01-02 03:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999 AM | 2006-01-02 03:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999PM | 2006-01-02 15:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999 PM | 2006-01-02 15:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999999AM | 2006-01-02 03:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999 AM | 2006-01-02 03:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999PM | 2006-01-02 15:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999 PM | 2006-01-02 15:04:05.999999999 +0900 JST | +| Jan 2, 2006 | 2006-01-02 00:00:00 +0900 JST | +| Jan 2, 2006 at 3:04am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 03:04am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 03:04pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04 am (MST) | 2006-01-02 03:04:00 
-0700 MST | +| Jan 2, 2006 at 03:04 am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04 pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 03:04 pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04:05am (MST) | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05pm (MST) | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05 am (MST) | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05 pm (MST) | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05.9am (MST) | 2006-01-02 03:04:05.000000009 -0700 MST | +| Jan 2, 2006 at 3:04:05.9pm (MST) | 2006-01-02 15:04:05.000000009 -0700 MST | +| Jan 2, 2006 at 3:04:05.999am (MST) | 2006-01-02 03:04:05.000000999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999pm (MST) | 2006-01-02 15:04:05.000000999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999am (MST) | 2006-01-02 03:04:05.000999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999pm (MST) | 2006-01-02 15:04:05.000999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999999am (MST) | 2006-01-02 03:04:05.999999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999999pm (MST) | 2006-01-02 15:04:05.999999999 -0700 MST | +| Jan 2, 2006 at 3:04am MST | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04pm MST | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04am -07:00 | 2006-01-02 03:04:00 -0700 -0700 | +| Jan 2, 2006 at 3:04pm -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Jan 2, 2006 at 3:04:05am MST | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05pm MST | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05am -07:00 | 2006-01-02 03:04:05 -0700 -0700 | +| Jan 2, 2006 at 3:04:05pm -07:00 | 2006-01-02 15:04:05 -0700 -0700 | + +#### Parse + +| Input String | _time.Time | +| ---------------------------------------- | ----------------------------------------- | +| 2006-01-02 15:04 | 2006-01-02 15:04:00 +0900 JST | +| 2006-01-02 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02 15:04:05 | 2006-01-02 15:04:05 +0900 JST | +| 2006-01-02 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05-07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05 -07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02 15:04:05.999999999 | 2006-01-02 15:04:05.999999999 +0900 JST | +| 2006-01-02 15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.9-07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 15:04:05.9 -07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 15:04:05.999-07:00 MST | 2006-01-02 15:04:05.000000999 -0700 -0700 | +| 2006-01-02 15:04:05.999 -07:00 MST | 2006-01-02 15:04:05.000000999 -0700 -0700 | +| 2006-01-02 15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999 -07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999999-07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02 15:04:05.999999999 -07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02T15:04 | 2006-01-02 15:04:00 +0900 JST | +| 2006-01-02T15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02T15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| 2006-01-02T15:04:05 | 2006-01-02 15:04:05 +0900 JST | +| 2006-01-02T15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| 
2006-01-02T15:04:05-07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05 -07:00 MST | 2006-01-02 15:04:05 -0700 -0700 | +| 2006-01-02T15:04:05.999999999 | 2006-01-02 15:04:05.999999999 +0900 JST | +| 2006-01-02T15:04:05.999999999-07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02T15:04:05.999999999 -07:00 MST | 2006-01-02 15:04:05.999999999 -0700 -0700 | +| 2006-01-02T15:04:05.999999-07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02T15:04:05.999999 -07:00 MST | 2006-01-02 15:04:05.000999999 -0700 -0700 | +| 2006-01-02T15:04:05.9-07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02T15:04:05.9 -07:00 MST | 2006-01-02 15:04:05.000000009 -0700 -0700 | +| 2006-01-02 | 2006-01-02 00:00:00 +0900 JST | +| 20060102 | 2006-01-02 00:00:00 +0900 JST | +| 20060102150405 | 2006-01-02 15:04:05 +0900 JST | +| 20060102 150405 | 2006-01-02 15:04:05 +0900 JST | +| 20060102T150405 | 2006-01-02 15:04:05 +0900 JST | +| 15:04:05 | 2016-05-06 15:04:05 +0900 JST | +| 15:04:05-07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 15:04:05 -07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 15:04:05.9-07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 15:04:05.9 -07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 15:04:05.999-07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 15:04:05.999 -07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 15:04:05.999999-07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 15:04:05.999999 -07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 15:04:05.999999999-07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 15:04:05.999999999 -07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 150405-07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 150405 -07:00 MST | 2016-05-06 15:04:05 -0700 -0700 | +| 150405.9-07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 150405.9 -07:00 MST | 2016-05-06 15:04:05.000000009 -0700 -0700 | +| 150405.999-07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 150405.999 -07:00 MST | 2016-05-06 15:04:05.000000999 -0700 -0700 | +| 150405.999999-07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 150405.999999 -07:00 MST | 2016-05-06 15:04:05.000999999 -0700 -0700 | +| 150405.999999999-07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 150405.999999999 -07:00 MST | 2016-05-06 15:04:05.999999999 -0700 -0700 | +| 2006-01-02 15:04:05Z | 2006-01-02 15:04:05 +0000 UTC | +| 2006-01-02T15:04:05Z | 2006-01-02 15:04:05 +0000 UTC | +| 2006-01-02 15:04:05.9Z | 2006-01-02 15:04:05.000000009 +0000 UTC | +| 2006-01-02T15:04:05.9Z | 2006-01-02 15:04:05.000000009 +0000 UTC | +| 2006-01-02 15:04:05.999Z | 2006-01-02 15:04:05.000000999 +0000 UTC | +| 2006-01-02T15:04:05.999Z | 2006-01-02 15:04:05.000000999 +0000 UTC | +| 2006-01-02 15:04:05.999999Z | 2006-01-02 15:04:05.000999999 +0000 UTC | +| 2006-01-02T15:04:05.999999Z | 2006-01-02 15:04:05.000999999 +0000 UTC | +| 2006-01-02 15:04:05.999999999Z | 2006-01-02 15:04:05.999999999 +0000 UTC | +| 2006-01-02T15:04:05.999999999Z | 2006-01-02 15:04:05.999999999 +0000 UTC | +| 02-Jan-06 1504 MST | 2006-01-02 15:04:00 -0700 MST | +| 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST | +| 02-Jan-06 150405 MST | 2006-01-02 15:04:05 -0700 MST | +| 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| 02-Jan-06 1504-0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 15:04-0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 150405-0700 | 2006-01-02 15:04:05 -0700 -0700 | +| 
02-Jan-06 15:04:05-0700 | 2006-01-02 15:04:05 -0700 -0700 | +| 02-Jan-06 15:04 -0700 | 2006-01-02 15:04:00 -0700 -0700 | +| 02-Jan-06 15:04:05 -0700 | 2006-01-02 15:04:05 -0700 -0700 | +| Monday, 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST | +| Monday, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| Mon, 02-Jan-06 15:04 MST | 2006-01-02 15:04:00 -0700 MST | +| Mon, 02-Jan-06 15:04:05 MST | 2006-01-02 15:04:05 -0700 MST | +| Mon, 02-Jan-06 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-06 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-06 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-06 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-2006 15:04-07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-2006 15:04:05-07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-2006 15:04 -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-2006 15:04:05 -07:00 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-70 15:04-07:00 | 1970-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-70 15:04:05-07:00 | 1970-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-70 15:04 -07:00 | 1970-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-70 15:04:05 -07:00 | 1970-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-99 15:04-07:00 | 1999-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-99 15:04:05-07:00 | 1999-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-99 15:04:05 -07:00 | 1999-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04-07:00 | 2000-01-02 15:04:00 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05-07:00 | 2000-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05 -07:00 | 2000-01-02 15:04:05 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.9-07:00 | 2000-01-02 15:04:05.000000009 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.9 -07:00 | 2000-01-02 15:04:05.000000009 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999-07:00 | 2000-01-02 15:04:05.000000999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999 -07:00 | 2000-01-02 15:04:05.000000999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999-07:00 | 2000-01-02 15:04:05.000999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999 -07:00 | 2000-01-02 15:04:05.000999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999999-07:00 | 2000-01-02 15:04:05.999999999 -0700 -0700 | +| Mon, 02-Jan-00 15:04:05.999999999 -07:00 | 2000-01-02 15:04:05.999999999 -0700 -0700 | +| Mon Jan 02 150405 2006 | 2006-01-02 15:04:05 +0900 JST | +| Mon Jan 02 15:04:05 2006 | 2006-01-02 15:04:05 +0900 JST | +| Mon Jan 02 150405 MST 2006 | 2006-01-02 15:04:05 -0700 MST | +| Mon Jan 02 15:04:05 MST 2006 | 2006-01-02 15:04:05 -0700 MST | +| Mon Jan 02 1504-07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 15:04-07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 1504 -07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 15:04 -07:00 2006 | 2006-01-02 15:04:00 -0700 -0700 | +| Mon Jan 02 150405-07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 15:04:05-07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 150405 -07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Mon Jan 02 15:04:05 -07:00 2006 | 2006-01-02 15:04:05 -0700 -0700 | +| Jan 02 150405 | 2016-01-02 15:04:05 +0900 JST | +| Jan 02 15:04:05 | 2016-01-02 15:04:05 +0900 JST | +| Jan 02 150405.9 | 2016-01-02 15:04:05.000000009 +0900 JST | +| Jan 02 15:04:05.9 | 2016-01-02 15:04:05.000000009 +0900 JST | +| Jan 02 150405.999 | 2016-01-02 15:04:05.000000999 +0900 JST | +| Jan 02 15:04:05.999 | 2016-01-02 15:04:05.000000999 +0900 JST | +| Jan 02 150405.999999 | 
2016-01-02 15:04:05.000999999 +0900 JST | +| Jan 02 15:04:05.999999 | 2016-01-02 15:04:05.000999999 +0900 JST | +| Jan 02 150405.999999999 | 2016-01-02 15:04:05.999999999 +0900 JST | +| Jan 02 15:04:05.999999999 | 2016-01-02 15:04:05.999999999 +0900 JST | +| 11:04AM | 2016-05-06 11:04:00 +0900 JST | +| 11:04PM | 2016-05-06 23:04:00 +0900 JST | +| 11:04 AM | 2016-05-06 11:04:00 +0900 JST | +| 11:04 PM | 2016-05-06 23:04:00 +0900 JST | +| 11:04:05 AM | 2016-05-06 11:04:05 +0900 JST | +| 11:04:05 PM | 2016-05-06 23:04:05 +0900 JST | +| 11:04:05.9AM | 2016-05-06 11:04:05.000000009 +0900 JST | +| 11:04:05.9 AM | 2016-05-06 11:04:05.000000009 +0900 JST | +| 11:04:05.9PM | 2016-05-06 23:04:05.000000009 +0900 JST | +| 11:04:05.9 PM | 2016-05-06 23:04:05.000000009 +0900 JST | +| 11:04:05.999AM | 2016-05-06 11:04:05.000000999 +0900 JST | +| 11:04:05.999 AM | 2016-05-06 11:04:05.000000999 +0900 JST | +| 11:04:05.999PM | 2016-05-06 23:04:05.000000999 +0900 JST | +| 11:04:05.999 PM | 2016-05-06 23:04:05.000000999 +0900 JST | +| 11:04:05.999999AM | 2016-05-06 11:04:05.000999999 +0900 JST | +| 11:04:05.999999 AM | 2016-05-06 11:04:05.000999999 +0900 JST | +| 11:04:05.999999PM | 2016-05-06 23:04:05.000999999 +0900 JST | +| 11:04:05.999999 PM | 2016-05-06 23:04:05.000999999 +0900 JST | +| 11:04:05.999999999AM | 2016-05-06 11:04:05.999999999 +0900 JST | +| 11:04:05.999999999 AM | 2016-05-06 11:04:05.999999999 +0900 JST | +| 11:04:05.999999999PM | 2016-05-06 23:04:05.999999999 +0900 JST | +| 11:04:05.999999999 PM | 2016-05-06 23:04:05.999999999 +0900 JST | +| 01-02-06 3:04AM | 2006-01-02 03:04:00 +0900 JST | +| 01-02-06 3:04 AM | 2006-01-02 03:04:00 +0900 JST | +| 01-02-06 3:04PM | 2006-01-02 15:04:00 +0900 JST | +| 01-02-06 3:04 PM | 2006-01-02 15:04:00 +0900 JST | +| 01-02-06 03:04:05AM | 2006-01-02 03:04:05 +0900 JST | +| 01-02-06 03:04:05 AM | 2006-01-02 03:04:05 +0900 JST | +| 01-02-06 03:04:05PM | 2006-01-02 15:04:05 +0900 JST | +| 01-02-06 03:04:05 PM | 2006-01-02 15:04:05 +0900 JST | +| 01-02-06 03:04:05.9AM | 2006-01-02 03:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9 AM | 2006-01-02 03:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9PM | 2006-01-02 15:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.9 PM | 2006-01-02 15:04:05.000000009 +0900 JST | +| 01-02-06 03:04:05.999AM | 2006-01-02 03:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999 AM | 2006-01-02 03:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999PM | 2006-01-02 15:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999 PM | 2006-01-02 15:04:05.000000999 +0900 JST | +| 01-02-06 03:04:05.999999AM | 2006-01-02 03:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999 AM | 2006-01-02 03:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999PM | 2006-01-02 15:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999 PM | 2006-01-02 15:04:05.000999999 +0900 JST | +| 01-02-06 03:04:05.999999999AM | 2006-01-02 03:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999 AM | 2006-01-02 03:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999PM | 2006-01-02 15:04:05.999999999 +0900 JST | +| 01-02-06 03:04:05.999999999 PM | 2006-01-02 15:04:05.999999999 +0900 JST | +| Jan 2, 2006 | 2006-01-02 00:00:00 +0900 JST | +| Jan 2, 2006 at 3:04am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 03:04am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 03:04pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04 am (MST) | 2006-01-02 03:04:00 -0700 MST 
| +| Jan 2, 2006 at 03:04 am (MST) | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04 pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 03:04 pm (MST) | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04:05am (MST) | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05pm (MST) | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05 am (MST) | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05 pm (MST) | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05.9am (MST) | 2006-01-02 03:04:05.000000009 -0700 MST | +| Jan 2, 2006 at 3:04:05.9pm (MST) | 2006-01-02 15:04:05.000000009 -0700 MST | +| Jan 2, 2006 at 3:04:05.999am (MST) | 2006-01-02 03:04:05.000000999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999pm (MST) | 2006-01-02 15:04:05.000000999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999am (MST) | 2006-01-02 03:04:05.000999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999pm (MST) | 2006-01-02 15:04:05.000999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999999am (MST) | 2006-01-02 03:04:05.999999999 -0700 MST | +| Jan 2, 2006 at 3:04:05.999999999pm (MST) | 2006-01-02 15:04:05.999999999 -0700 MST | +| Jan 2, 2006 at 3:04am MST | 2006-01-02 03:04:00 -0700 MST | +| Jan 2, 2006 at 3:04pm MST | 2006-01-02 15:04:00 -0700 MST | +| Jan 2, 2006 at 3:04am -07:00 | 2006-01-02 03:04:00 -0700 -0700 | +| Jan 2, 2006 at 3:04pm -07:00 | 2006-01-02 15:04:00 -0700 -0700 | +| Jan 2, 2006 at 3:04:05am MST | 2006-01-02 03:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05pm MST | 2006-01-02 15:04:05 -0700 MST | +| Jan 2, 2006 at 3:04:05am -07:00 | 2006-01-02 03:04:05 -0700 -0700 | +| Jan 2, 2006 at 3:04:05pm -07:00 | 2006-01-02 15:04:05 -0700 -0700 | diff --git a/vendor/github.com/tkuchiki/parsetime/const.go b/vendor/github.com/tkuchiki/parsetime/const.go new file mode 100644 index 0000000..9fc6ccd --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/const.go @@ -0,0 +1,83 @@ +package parsetime + +import ( + "strings" +) + +const ( + year = `(2[0-9]{3}|19[7-9][0-9])` + month = `(1[012]|0?[1-9])` + day = `([12][0-9]|3[01]|0?[1-9])` + hour = `(2[0-3]|[01]?[0-9])` + min = `([0-5]?[0-9])` + sec = min + nsec = `(?:[.])?([0-9]{1,9})?` + weekday = `(?:Mon|Monday|Tue|Tuesday|Wed|Wednesday|Thu|Thursday|Fri|Friday|Sat|Saturday|Sun|Sunday)` + monthAbbr = `(Jan|January|Feb|Februray|Mar|March|Apr|April|May|Jun|June|Jul|July|Aug|August|Sep|September|Oct|October|Nov|November|Dec|December|1[012]|0?[1-9])` + offset = `(Z|[+-][01][1-9]:[0-9]{2})?` + zone = `(?:[a-zA-Z0-9+-]{3,6})?` + ymdSep = `[ /.-]?` + hmsSep = `[ :.]?` + t = `(?:t|T|\s*)?` + s = `(?:\s*)?` + ampm = `([aApP][mM])` + ampmHour = `(1[01]|[0]?[0-9])` + shortYear = `(2[0-9]{3}|19[7-9][0-9]|[0-9]{2})` + offsetZone = `([+-][01][1-9]:[0-9]{2}|[a-zA-Z0-9+-]{3,6})?` + usOffsetZone = `(?:[(])?([+-][01][1-9]:[0-9]{2}|[a-zA-Z0-9+-]{3,6})?(?:[)])?` +) + +// Regular expressions +var ( + // ISO8601, RFC3339 + ISO8601 = strings.Join([]string{ + `(?:`, year, ymdSep, month, ymdSep, day, `)?`, t, + `(?:`, hour, hmsSep, min, hmsSep, sec, `?`, nsec, `)?`, + s, offset, s, zone, + }, "") + + // RFC822, RFC850, RFC1123 + RFC8xx1123 = strings.Join([]string{ + `(?:`, weekday, `,?`, s, `)?`, day, ymdSep, monthAbbr, ymdSep, shortYear, + hmsSep, `(?:`, hour, hmsSep, min, hmsSep, sec, `?`, nsec, `)?`, + s, offsetZone, + }, "") + + ANSIC = strings.Join([]string{ + `(?:`, weekday, s, `)?`, monthAbbr, ymdSep, day, ymdSep, + `(?:`, hour, hmsSep, min, hmsSep, sec, `?`, nsec, `)?`, + s, `(?:`, offsetZone, s, year, `)?`, + }, "") + + US = strings.Join([]string{ + 
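+	// How the patterns above compose: each format family is one regexp
+	// built by joining the named fragments (year, month, day, hour, min,
+	// sec, nsec, offset, zone, ...). The date and time sub-groups are each
+	// wrapped in `(?:...)?`, so a bare date ("2006-01-02") or a bare time
+	// ("15:04:05") still matches, which is why both appear in the tables
+	// above. Note that monthAbbr and the Months map below agree on the
+	// spelling "Februray"; the map has no entry for "February", so the
+	// standard long spelling is not recognized by name.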
`(?:`, monthAbbr, ymdSep, day, `(?:,)?`, ymdSep, shortYear, `)?`, s, `(?:at)?`, s, + `(?:`, hour, hmsSep, min, hmsSep, sec, `?`, nsec, `)?`, + s, ampm, `?`, s, usOffsetZone, + }, "") + + Months = map[string]int{ + "Jan": 1, + "January": 1, + "Feb": 2, + "Februray": 2, + "Mar": 3, + "March": 3, + "Apr": 4, + "April": 4, + "May": 5, + "Jun": 6, + "June": 6, + "Jul": 7, + "July": 7, + "Aug": 8, + "August": 8, + "Sep": 9, + "September": 9, + "Oct": 10, + "October": 10, + "Nov": 11, + "November": 11, + "Dec": 12, + "December": 12, + } +) diff --git a/vendor/github.com/tkuchiki/parsetime/example/parse.go b/vendor/github.com/tkuchiki/parsetime/example/parse.go new file mode 100644 index 0000000..172e5d0 --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/example/parse.go @@ -0,0 +1,276 @@ +package main + +import ( + "fmt" + "github.com/crackcomm/go-clitable" + "github.com/tkuchiki/parsetime" + "log" +) + +var iso8601Times = []string{ + "2006-01-02 15:04", + "2006-01-02 15:04-07:00", + "2006-01-02 15:04 -07:00", + "2006-01-02 15:04:05", + "2006-01-02 15:04:05-07:00", + "2006-01-02 15:04:05 -07:00", + "2006-01-02 15:04:05-07:00 MST", + "2006-01-02 15:04:05 -07:00 MST", + "2006-01-02 15:04:05.999999999", + "2006-01-02 15:04:05.999999-07:00 MST", + "2006-01-02 15:04:05.9-07:00 MST", + "2006-01-02 15:04:05.9 -07:00 MST", + "2006-01-02 15:04:05.999-07:00 MST", + "2006-01-02 15:04:05.999 -07:00 MST", + "2006-01-02 15:04:05.999999-07:00 MST", + "2006-01-02 15:04:05.999999 -07:00 MST", + "2006-01-02 15:04:05.999999999-07:00 MST", + "2006-01-02 15:04:05.999999999 -07:00 MST", + "2006-01-02T15:04", + "2006-01-02T15:04-07:00", + "2006-01-02T15:04 -07:00", + "2006-01-02T15:04:05", + "2006-01-02T15:04:05-07:00", + "2006-01-02T15:04:05 -07:00", + "2006-01-02T15:04:05-07:00 MST", + "2006-01-02T15:04:05 -07:00 MST", + "2006-01-02T15:04:05.999999999", + "2006-01-02T15:04:05.999999999-07:00 MST", + "2006-01-02T15:04:05.999999999 -07:00 MST", + "2006-01-02T15:04:05.999999-07:00 MST", + "2006-01-02T15:04:05.999999 -07:00 MST", + "2006-01-02T15:04:05.9-07:00 MST", + "2006-01-02T15:04:05.9 -07:00 MST", + "2006-01-02", + "20060102", + "20060102150405", + "20060102 150405", + "20060102T150405", + "15:04:05", + "15:04:05-07:00 MST", + "15:04:05 -07:00 MST", + "15:04:05.9-07:00 MST", + "15:04:05.9 -07:00 MST", + "15:04:05.999-07:00 MST", + "15:04:05.999 -07:00 MST", + "15:04:05.999999-07:00 MST", + "15:04:05.999999 -07:00 MST", + "15:04:05.999999999-07:00 MST", + "15:04:05.999999999 -07:00 MST", + "150405-07:00 MST", + "150405 -07:00 MST", + "150405.9-07:00 MST", + "150405.9 -07:00 MST", + "150405.999-07:00 MST", + "150405.999 -07:00 MST", + "150405.999999-07:00 MST", + "150405.999999 -07:00 MST", + "150405.999999999-07:00 MST", + "150405.999999999 -07:00 MST", + "2006-01-02 15:04:05Z", + "2006-01-02T15:04:05Z", + "2006-01-02 15:04:05.9Z", + "2006-01-02T15:04:05.9Z", + "2006-01-02 15:04:05.999Z", + "2006-01-02T15:04:05.999Z", + "2006-01-02 15:04:05.999999Z", + "2006-01-02T15:04:05.999999Z", + "2006-01-02 15:04:05.999999999Z", + "2006-01-02T15:04:05.999999999Z", +} + +var rfc8xx1123Times = []string{ + "02-Jan-06 1504 MST", + "02-Jan-06 15:04 MST", + "02-Jan-06 150405 MST", + "02-Jan-06 15:04:05 MST", + "02-Jan-06 1504-0700", + "02-Jan-06 15:04-0700", + "02-Jan-06 150405-0700", + "02-Jan-06 15:04:05-0700", + "02-Jan-06 15:04 -0700", + "02-Jan-06 15:04:05 -0700", + "Monday, 02-Jan-06 15:04 MST", + "Monday, 02-Jan-06 15:04:05 MST", + "Mon, 02-Jan-06 15:04 MST", + "Mon, 02-Jan-06 15:04:05 MST", + "Mon, 02-Jan-06 
15:04-07:00", + "Mon, 02-Jan-06 15:04:05-07:00", + "Mon, 02-Jan-06 15:04 -07:00", + "Mon, 02-Jan-06 15:04:05 -07:00", + "Mon, 02-Jan-2006 15:04-07:00", + "Mon, 02-Jan-2006 15:04:05-07:00", + "Mon, 02-Jan-2006 15:04 -07:00", + "Mon, 02-Jan-2006 15:04:05 -07:00", + "Mon, 02-Jan-70 15:04-07:00", + "Mon, 02-Jan-70 15:04:05-07:00", + "Mon, 02-Jan-70 15:04 -07:00", + "Mon, 02-Jan-70 15:04:05 -07:00", + "Mon, 02-Jan-99 15:04-07:00", + "Mon, 02-Jan-99 15:04:05-07:00", + "Mon, 02-Jan-99 15:04:05 -07:00", + "Mon, 02-Jan-00 15:04-07:00", + "Mon, 02-Jan-00 15:04:05-07:00", + "Mon, 02-Jan-00 15:04:05 -07:00", + "Mon, 02-Jan-00 15:04:05.9-07:00", + "Mon, 02-Jan-00 15:04:05.9 -07:00", + "Mon, 02-Jan-00 15:04:05.999-07:00", + "Mon, 02-Jan-00 15:04:05.999 -07:00", + "Mon, 02-Jan-00 15:04:05.999999-07:00", + "Mon, 02-Jan-00 15:04:05.999999 -07:00", + "Mon, 02-Jan-00 15:04:05.999999999-07:00", + "Mon, 02-Jan-00 15:04:05.999999999 -07:00", +} + +var ansicTimes = []string{ + "Mon Jan 02 150405 2006", + "Mon Jan 02 15:04:05 2006", + "Mon Jan 02 150405 MST 2006", + "Mon Jan 02 15:04:05 MST 2006", + "Mon Jan 02 1504-07:00 2006", + "Mon Jan 02 15:04-07:00 2006", + "Mon Jan 02 1504 -07:00 2006", + "Mon Jan 02 15:04 -07:00 2006", + "Mon Jan 02 150405-07:00 2006", + "Mon Jan 02 15:04:05-07:00 2006", + "Mon Jan 02 150405 -07:00 2006", + "Mon Jan 02 15:04:05 -07:00 2006", + "Jan 02 150405", + "Jan 02 15:04:05", + "Jan 02 150405.9", + "Jan 02 15:04:05.9", + "Jan 02 150405.999", + "Jan 02 15:04:05.999", + "Jan 02 150405.999999", + "Jan 02 15:04:05.999999", + "Jan 02 150405.999999999", + "Jan 02 15:04:05.999999999", +} + +var usTimes = []string{ + "11:04AM", + "11:04PM", + "11:04 AM", + "11:04 PM", + "11:04:05 AM", + "11:04:05 PM", + "11:04:05.9AM", + "11:04:05.9 AM", + "11:04:05.9PM", + "11:04:05.9 PM", + "11:04:05.999AM", + "11:04:05.999 AM", + "11:04:05.999PM", + "11:04:05.999 PM", + "11:04:05.999999AM", + "11:04:05.999999 AM", + "11:04:05.999999PM", + "11:04:05.999999 PM", + "11:04:05.999999999AM", + "11:04:05.999999999 AM", + "11:04:05.999999999PM", + "11:04:05.999999999 PM", + "01-02-06 3:04AM", + "01-02-06 3:04 AM", + "01-02-06 3:04PM", + "01-02-06 3:04 PM", + "01-02-06 03:04:05AM", + "01-02-06 03:04:05 AM", + "01-02-06 03:04:05PM", + "01-02-06 03:04:05 PM", + "01-02-06 03:04:05.9AM", + "01-02-06 03:04:05.9 AM", + "01-02-06 03:04:05.9PM", + "01-02-06 03:04:05.9 PM", + "01-02-06 03:04:05.999AM", + "01-02-06 03:04:05.999 AM", + "01-02-06 03:04:05.999PM", + "01-02-06 03:04:05.999 PM", + "01-02-06 03:04:05.999999AM", + "01-02-06 03:04:05.999999 AM", + "01-02-06 03:04:05.999999PM", + "01-02-06 03:04:05.999999 PM", + "01-02-06 03:04:05.999999999AM", + "01-02-06 03:04:05.999999999 AM", + "01-02-06 03:04:05.999999999PM", + "01-02-06 03:04:05.999999999 PM", + "Jan 2, 2006", + "Jan 2, 2006 at 3:04am (MST)", + "Jan 2, 2006 at 03:04am (MST)", + "Jan 2, 2006 at 3:04pm (MST)", + "Jan 2, 2006 at 03:04pm (MST)", + "Jan 2, 2006 at 3:04 am (MST)", + "Jan 2, 2006 at 03:04 am (MST)", + "Jan 2, 2006 at 3:04 pm (MST)", + "Jan 2, 2006 at 03:04 pm (MST)", + "Jan 2, 2006 at 3:04:05am (MST)", + "Jan 2, 2006 at 3:04:05pm (MST)", + "Jan 2, 2006 at 3:04:05 am (MST)", + "Jan 2, 2006 at 3:04:05 pm (MST)", + "Jan 2, 2006 at 3:04:05.9am (MST)", + "Jan 2, 2006 at 3:04:05.9pm (MST)", + "Jan 2, 2006 at 3:04:05.999am (MST)", + "Jan 2, 2006 at 3:04:05.999pm (MST)", + "Jan 2, 2006 at 3:04:05.999999am (MST)", + "Jan 2, 2006 at 3:04:05.999999pm (MST)", + "Jan 2, 2006 at 3:04:05.999999999am (MST)", + "Jan 2, 2006 at 3:04:05.999999999pm (MST)", + "Jan 2, 
2006 at 3:04am MST", + "Jan 2, 2006 at 3:04pm MST", + "Jan 2, 2006 at 3:04am -07:00", + "Jan 2, 2006 at 3:04pm -07:00", + "Jan 2, 2006 at 3:04:05am MST", + "Jan 2, 2006 at 3:04:05pm MST", + "Jan 2, 2006 at 3:04:05am -07:00", + "Jan 2, 2006 at 3:04:05pm -07:00", +} + +func printTable(p parsetime.ParseTime, header string, times []string) { + fmt.Printf("### %s", header) + fmt.Println("") + fmt.Println("") + + table := clitable.New([]string{"Input string", "_time.Time"}) + + for _, v := range times { + t, err := p.Parse(v) + if err != nil { + log.Printf("%s : %s", err, t.String()) + fmt.Println() + } + + table.AddRow(map[string]interface{}{"Input string": v, "_time.Time": t.String()}) + } + + table.Markdown = true + + table.Print() +} + +func mergeSlice(times ...[]string) []string { + var mergedTimes []string + mergedTimes = make([]string, 0) + + for _, t := range times { + mergedTimes = append(mergedTimes, t...) + } + + return mergedTimes +} + +func main() { + p, _ := parsetime.NewParseTime() + + printTable(p, "ISO8601", iso8601Times) + fmt.Println("") + printTable(p, "RFC8xx1123", rfc8xx1123Times) + fmt.Println("") + printTable(p, "ANSIC", ansicTimes) + fmt.Println("") + printTable(p, "US", usTimes) + fmt.Println("") + + parseTimes := mergeSlice(iso8601Times, rfc8xx1123Times, ansicTimes, usTimes) + printTable(p, "Parse", parseTimes) + fmt.Println("") +} diff --git a/vendor/github.com/tkuchiki/parsetime/parsetime.go b/vendor/github.com/tkuchiki/parsetime/parsetime.go new file mode 100644 index 0000000..6056117 --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/parsetime.go @@ -0,0 +1,539 @@ +package parsetime + +import ( + "errors" + "fmt" + "github.com/tkuchiki/go-timezone" + "regexp" + "sort" + "strconv" + "strings" + "time" + "unicode/utf8" +) + +var ( + errInvalidDateTime = errors.New("Invalid date/time") + errInvalidOffset = errors.New("Invalid offset") + errInvalidArgs = errors.New("Invalid arguments") + errInvalidTimezone = errors.New("Invalid timezone") + reISO8601 = regexp.MustCompile(ISO8601) + reRFC8xx1123 = regexp.MustCompile(RFC8xx1123) + reANSIC = regexp.MustCompile(ANSIC) + reUS = regexp.MustCompile(US) +) + +type sortedTime struct { + time time.Time + priority int +} + +type sortedTimes []sortedTime + +func (st sortedTimes) Len() int { return len(st) } +func (st sortedTimes) Swap(i, j int) { st[i], st[j] = st[j], st[i] } +func (st sortedTimes) Less(i, j int) bool { return st[i].priority < st[j].priority } + +// ParseTime parses the date/time string +type ParseTime struct { + location *time.Location +} + +// NewParseTime returns a new parser +func NewParseTime(location ...interface{}) (ParseTime, error) { + var loc *time.Location + var err error + + switch len(location) { + case 0: + zone, offset := time.Now().In(time.Local).Zone() + loc = time.FixedZone(zone, offset) + case 1: + switch val := location[0].(type) { + case *time.Location: + loc = val + case string: + if val == "" { + zone, offset := time.Now().In(time.Local).Zone() + loc = time.FixedZone(zone, offset) + } else { + loc, err = time.LoadLocation(val) + if err != nil { + var offset int + offset, err = timezone.GetOffset(val) + if err != nil { + return ParseTime{}, errInvalidTimezone + } + loc = time.FixedZone(val, offset) + } + } + default: + return ParseTime{}, fmt.Errorf("Invalid type: %T", val) + } + case 2: + loc = time.FixedZone(location[0].(string), location[1].(int)) + default: + return ParseTime{}, errInvalidArgs + } + + return ParseTime{ + location: loc, + }, err +} + +// GetLocation returns 
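+// A quick sketch of the constructor above, reading NewParseTime as written
+// (the zone names here are only illustrative):
+//
+//	p, _ := parsetime.NewParseTime()               // no args: fixed zone taken from the local clock
+//	p, _ := parsetime.NewParseTime("Asia/Tokyo")   // string: time.LoadLocation, else a go-timezone offset lookup
+//	p, _ := parsetime.NewParseTime("JST", 9*60*60) // name + offset in seconds via time.FixedZone
+//
+// Unknown zone names yield errInvalidTimezone, an unsupported argument type
+// an "Invalid type" error, and more than two arguments errInvalidArgs.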
*time.Location +func (pt *ParseTime) GetLocation() *time.Location { + return pt.location +} + +// SetLocation sets *time.Location +func (pt *ParseTime) SetLocation(loc *time.Location) { + pt.location = loc +} + +func fixedZone(t time.Time) *time.Location { + zone, offset := t.Zone() + return time.FixedZone(zone, offset) +} + +func parseOffset(value string) (*time.Location, error) { + var err error + var t time.Time + var loc *time.Location + + t, err = time.Parse("-07:00", value) + if err == nil { + return fixedZone(t), nil + } + + t, err = time.Parse("-0700", value) + if err == nil { + return fixedZone(t), nil + } + + _, err = time.Parse("MST", value) + if err == nil { + var offset int + offset, err = timezone.GetOffset(value) + if err != nil { + return loc, err + } + + return time.FixedZone(value, offset), nil + } + + return loc, errInvalidOffset +} + +func toLocation(offset string) (*time.Location, error) { + var err error + var loc *time.Location + + if strings.ToUpper(offset) == "Z" { + loc = time.UTC + } else { + loc, err = parseOffset(offset) + } + + return loc, err +} + +func twoDigitTo4DigitYear(year string) (int, error) { + val, err := strconv.Atoi(year) + if err != nil { + return 0, err + } + + if val >= 70 && val <= 99 { + return 1900 + val, err + } + + return 2000 + val, err +} + +func dateToInt(date string, dateType string, loc *time.Location) (int, error) { + var err error + var val int + + if date == "" { + switch dateType { + case "year": + val = time.Now().In(loc).Year() + case "month": + val = int(time.Now().In(loc).Month()) + case "day": + val = time.Now().In(loc).Day() + case "hour": + val = time.Now().In(loc).Hour() + case "min": + val = time.Now().In(loc).Minute() + case "sec": + if date == "" { + val = 0 + } else { + val = time.Now().In(loc).Second() + } + case "nsec": + if date == "" { + val = 0 + } else { + val = time.Now().In(loc).Nanosecond() + } + default: + err = errInvalidDateTime + } + } else { + switch dateType { + case "year": + if stringLen(date) == 2 { + return twoDigitTo4DigitYear(date) + } + case "month": + if _, ok := Months[date]; ok { + return Months[date], nil + } + } + + val, err = strconv.Atoi(date) + return val, err + } + + return val, err +} + +func isOnlyDate(year, month, day, hour, min string) bool { + return year != "" && month != "" && day != "" && hour == "" && min == "" +} + +func stringLen(value string) int { + return utf8.RuneCountInString(strings.Join(strings.Fields(value), "")) +} + +func to24Hour(ampm string, value int) int { + if strings.ToUpper(ampm) == "PM" { + return 12 + value + } + + return value +} + +func parseISO8601(value string, loc *time.Location) (time.Time, int, error) { + var t time.Time + var priority int + var err error + + group := reISO8601.FindStringSubmatch(value) + + if len(group) == 0 { + return t, priority, errInvalidDateTime + } + + priority = stringLen(value) - stringLen(group[0]) + + var year, month, day, hour, min, sec, nsec int + + if group[8] != "" { + loc, err = toLocation(group[8]) + if err != nil { + return t, priority, err + } + } + + year, err = dateToInt(group[1], "year", loc) + if err != nil { + return t, priority, err + } + + month, err = dateToInt(group[2], "month", loc) + if err != nil { + return t, priority, err + } + + day, err = dateToInt(group[3], "day", loc) + if err != nil { + return t, priority, err + } + + // 2006-01-02 -> 2006-01-02T00:00 + if isOnlyDate(group[1], group[2], group[3], group[4], group[5]) { + group[4] = "0" + group[5] = "0" + } + + hour, err = dateToInt(group[4], "hour", 
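+	// (Worked examples for twoDigitTo4DigitYear above, matching the tables:
+	// two-digit years 70..99 pivot to 1970..1999 and 00..69 to 2000..2069,
+	// so "02-Jan-70" yields 1970, "02-Jan-99" 1999, "02-Jan-00" 2000, and
+	// "02-Jan-06" 2006.)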
loc) + if err != nil { + return t, priority, err + } + + min, err = dateToInt(group[5], "min", loc) + if err != nil { + return t, priority, err + } + + sec, err = dateToInt(group[6], "sec", loc) + if err != nil { + return t, priority, err + } + + nsec, err = dateToInt(group[7], "nsec", loc) + if err != nil { + return t, priority, err + } + + return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), priority, err +} + +// ISO8601 parses ISO8601, RFC3339 date/time string +func (pt *ParseTime) ISO8601(value string) (time.Time, error) { + t, _, err := parseISO8601(value, pt.location) + return t, err +} + +// RFC822, RFC850, RFC1123 +func parseRFC8xx1123(value string, loc *time.Location) (time.Time, int, error) { + var t time.Time + var priority int + var err error + + group := reRFC8xx1123.FindStringSubmatch(value) + + if len(group) == 0 { + return t, priority, errInvalidDateTime + } + + priority = stringLen(value) - stringLen(group[0]) + + var year, month, day, hour, min, sec, nsec int + + if group[8] != "" { + loc, err = toLocation(group[8]) + if err != nil { + return t, priority, err + } + } + + day, err = dateToInt(group[1], "day", loc) + if err != nil { + return t, priority, err + } + + month, err = dateToInt(group[2], "month", loc) + if err != nil { + return t, priority, err + } + + year, err = dateToInt(group[3], "year", loc) + if err != nil { + return t, priority, err + } + + // 02-Jan-06 -> 02-Jan-06 00:00 + if isOnlyDate(group[1], group[2], group[3], group[4], group[5]) { + group[4] = "0" + group[5] = "0" + } + + hour, err = dateToInt(group[4], "hour", loc) + if err != nil { + return t, priority, err + } + + min, err = dateToInt(group[5], "min", loc) + if err != nil { + return t, priority, err + } + + sec, err = dateToInt(group[6], "sec", loc) + if err != nil { + return t, priority, err + } + + nsec, err = dateToInt(group[7], "nsec", loc) + if err != nil { + return t, priority, err + } + + return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), priority, err +} + +// RFC8xx1123 parses RFC822, RFC850, RFC1123 date/time string +func (pt *ParseTime) RFC8xx1123(value string) (time.Time, error) { + t, _, err := parseRFC8xx1123(value, pt.location) + return t, err +} + +func parseANSIC(value string, loc *time.Location) (time.Time, int, error) { + var t time.Time + var err error + var priority int + + group := reANSIC.FindStringSubmatch(value) + + if len(group) == 0 { + return t, priority, errInvalidDateTime + } + + priority = stringLen(value) - stringLen(group[0]) + + var year, month, day, hour, min, sec, nsec int + + if group[7] != "" { + loc, err = toLocation(group[7]) + if err != nil { + return t, priority, err + } + } + + month, err = dateToInt(group[1], "month", loc) + if err != nil { + return t, priority, err + } + + day, err = dateToInt(group[2], "day", loc) + if err != nil { + return t, priority, err + } + + hour, err = dateToInt(group[3], "hour", loc) + if err != nil { + return t, priority, err + } + + min, err = dateToInt(group[4], "min", loc) + if err != nil { + return t, priority, err + } + + sec, err = dateToInt(group[5], "sec", loc) + if err != nil { + return t, priority, err + } + + nsec, err = dateToInt(group[6], "nsec", loc) + if err != nil { + return t, priority, err + } + + year, err = dateToInt(group[8], "year", loc) + if err != nil { + return t, priority, err + } + + return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), priority, err +} + +// ANSIC parses ANSIC date/time string +func (pt *ParseTime) ANSIC(value 
string) (time.Time, error) { + t, _, err := parseANSIC(value, pt.location) + return t, err +} + +func parseUS(value string, loc *time.Location) (time.Time, int, error) { + var t time.Time + var priority int + var err error + + group := reUS.FindStringSubmatch(value) + + if len(group) == 0 { + return t, priority, errInvalidDateTime + } + + priority = stringLen(value) - stringLen(group[0]) + + var year, month, day, hour, min, sec, nsec int + + if group[9] != "" { + loc, err = toLocation(group[9]) + if err != nil { + return t, priority, err + } + } + + month, err = dateToInt(group[1], "month", loc) + if err != nil { + return t, priority, err + } + + day, err = dateToInt(group[2], "day", loc) + if err != nil { + return t, priority, err + } + + year, err = dateToInt(group[3], "year", loc) + if err != nil { + return t, priority, err + } + + if isOnlyDate(group[1], group[2], group[3], group[4], group[5]) { + group[4] = "0" + group[5] = "0" + } + + hour, err = dateToInt(group[4], "hour", loc) + if err != nil { + return t, priority, err + } + + min, err = dateToInt(group[5], "min", loc) + if err != nil { + return t, priority, err + } + + sec, err = dateToInt(group[6], "sec", loc) + if err != nil { + return t, priority, err + } + + nsec, err = dateToInt(group[7], "nsec", loc) + if err != nil { + return t, priority, err + } + + ampm := group[8] + + if ampm != "" { + hour = to24Hour(ampm, hour) + } + + return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), priority, err +} + +// US parses MM/DD/YYYY format date/time string +func (pt *ParseTime) US(value string) (time.Time, error) { + t, _, err := parseUS(value, pt.location) + return t, err +} + +// Parse parses date/time string +func (pt *ParseTime) Parse(value string) (time.Time, error) { + times := make(sortedTimes, 0) + t, priority, _ := parseISO8601(value, pt.location) + if !t.IsZero() { + times = append(times, sortedTime{time: t, priority: priority}) + } + + t, priority, _ = parseRFC8xx1123(value, pt.location) + if !t.IsZero() { + times = append(times, sortedTime{time: t, priority: priority}) + } + + t, priority, _ = parseANSIC(value, pt.location) + if !t.IsZero() { + times = append(times, sortedTime{time: t, priority: priority}) + } + + t, priority, _ = parseUS(value, pt.location) + if !t.IsZero() { + times = append(times, sortedTime{time: t, priority: priority}) + } + + if len(times) == 0 { + var tmpT time.Time + return tmpT, errInvalidDateTime + } + + sort.Sort(times) + + return times[0].time, nil +} diff --git a/vendor/github.com/tkuchiki/parsetime/parsetime_test.go b/vendor/github.com/tkuchiki/parsetime/parsetime_test.go new file mode 100644 index 0000000..663a5a9 --- /dev/null +++ b/vendor/github.com/tkuchiki/parsetime/parsetime_test.go @@ -0,0 +1,1021 @@ +package parsetime + +import ( + "github.com/stretchr/testify/assert" + "testing" + "time" +) + +var loc = createLocation("US/Arizona") + +var iso8601Times = []TestTime{ + { + Value: "2006-01-02 15:04", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", time.Local), + }, + { + Value: "2006-01-02 15:04-07:00", + Time: createTime("2006-01-02T15:04-07:00", "2006-01-02T15:04-07:00"), + }, + { + Value: "2006-01-02 15:04 -07:00", + Time: createTime("2006-01-02T15:04-07:00", "2006-01-02T15:04-07:00"), + }, + { + Value: "2006-01-02 15:04:05", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "2006-01-02 15:04:05-07:00", + Time: createTime("2006-01-02T15:04:05-07:00", "2006-01-02T15:04:05-07:00"), + 
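+	// (On how Parse, defined above, chooses among the format families these
+	// cases exercise: each family parser reports priority = stringLen(input)
+	// minus stringLen(match), i.e. how much of the input its regexp left
+	// over. Parse collects every candidate with a non-zero time, sorts by
+	// that priority, and returns the fullest match; e.g. "02-Jan-06 15:04
+	// MST" is claimed by the RFC822/850/1123 pattern, which consumes it
+	// entirely, rather than by a partial ISO8601 match.)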
}, + { + Value: "2006-01-02 15:04:05 -07:00", + Time: createTime("2006-01-02T15:04:05-07:00", "2006-01-02T15:04:05-07:00"), + }, + { + Value: "2006-01-02 15:04:05-07:00 MST", + Time: createTime("2006-01-02T15:04:05-07:00 MST", "2006-01-02T15:04:05-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05 -07:00 MST", + Time: createTime("2006-01-02T15:04:05-07:00 MST", "2006-01-02T15:04:05-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999999999", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T15:04:05.999999999", time.Local), + }, + { + Value: "2006-01-02 15:04:05.999999-07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999-07:00 MST", "2006-01-02T15:04:05.999999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.9-07:00 MST", + Time: createTime("2006-01-02T15:04:05.9-07:00 MST", "2006-01-02T15:04:05.9-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.9 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.9-07:00 MST", "2006-01-02T15:04:05.9-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999-07:00 MST", + Time: createTime("2006-01-02T15:04:05.999-07:00 MST", "2006-01-02T15:04:05.999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.999-07:00 MST", "2006-01-02T15:04:05.999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999999-07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999-07:00 MST", "2006-01-02T15:04:05.999999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999999 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999-07:00 MST", "2006-01-02T15:04:05.999999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999999999-07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999999-07:00 MST", "2006-01-02T15:04:05.999999999-07:00 MST"), + }, + { + Value: "2006-01-02 15:04:05.999999999 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999999-07:00 MST", "2006-01-02T15:04:05.999999999-07:00 MST"), + }, + { + Value: "2006-01-02T15:04", + Time: createTimeInLocation("2006-01-02T15:04:00", "2006-01-02T15:04:00", time.Local), + }, + { + Value: "2006-01-02T15:04-07:00", + Time: createTime("2006-01-02T15:04-07:00", "2006-01-02T15:04-07:00"), + }, + { + Value: "2006-01-02T15:04 -07:00", + Time: createTime("2006-01-02T15:04-07:00", "2006-01-02T15:04-07:00"), + }, + { + Value: "2006-01-02T15:04:05", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "2006-01-02T15:04:05-07:00", + Time: createTime("2006-01-02T15:04:05-07:00", "2006-01-02T15:04:05-07:00"), + }, + { + Value: "2006-01-02T15:04:05 -07:00", + Time: createTime("2006-01-02T15:04:05-07:00", "2006-01-02T15:04:05-07:00"), + }, + { + Value: "2006-01-02T15:04:05-07:00 MST", + Time: createTime("2006-01-02T15:04:05-07:00 MST", "2006-01-02T15:04:05-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05 -07:00 MST", + Time: createTime("2006-01-02T15:04:05-07:00 MST", "2006-01-02T15:04:05-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.999999999", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T15:04:05.999999999", time.Local), + }, + { + Value: "2006-01-02T15:04:05.999999999-07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999999-07:00 MST", "2006-01-02T15:04:05.999999999-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.999999999 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999999-07:00 MST", "2006-01-02T15:04:05.999999999-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.999999-07:00 MST", + Time: 
createTime("2006-01-02T15:04:05.999999-07:00 MST", "2006-01-02T15:04:05.999999-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.999999 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.999999-07:00 MST", "2006-01-02T15:04:05.999999-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.9-07:00 MST", + Time: createTime("2006-01-02T15:04:05.9-07:00 MST", "2006-01-02T15:04:05.9-07:00 MST"), + }, + { + Value: "2006-01-02T15:04:05.9 -07:00 MST", + Time: createTime("2006-01-02T15:04:05.9-07:00 MST", "2006-01-02T15:04:05.9-07:00 MST"), + }, + { + Value: "2006-01-02", + Time: createTimeInLocation("2006-01-02", "2006-01-02", time.Local), + }, + { + Value: "20060102", + Time: createTimeInLocation("20060102", "20060102", time.Local), + }, + { + Value: "20060102150405", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "20060102 150405", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "20060102T150405", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "15:04:05", + Time: createCurrentDateInLocation("15:04:05", "15:04:05", time.Local), + }, + { + Value: "15:04:05-07:00 MST", + Time: createCurrentDateInLocation("15:04:05-07:00 MST", "15:04:05-07:00 MST", loc), + }, + { + Value: "15:04:05 -07:00 MST", + Time: createCurrentDateInLocation("15:04:05-07:00 MST", "15:04:05-07:00 MST", loc), + }, + { + Value: "15:04:05.9-07:00 MST", + Time: createCurrentDateInLocation("15:04:05.9-07:00 MST", "15:04:05.9-07:00 MST", loc), + }, + { + Value: "15:04:05.9 -07:00 MST", + Time: createCurrentDateInLocation("15:04:05.9-07:00 MST", "15:04:05.9-07:00 MST", loc), + }, + { + Value: "15:04:05.999-07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999-07:00 MST", "15:04:05.999-07:00 MST", loc), + }, + { + Value: "15:04:05.999 -07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999-07:00 MST", "15:04:05.999-07:00 MST", loc), + }, + { + Value: "15:04:05.999999-07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999999-07:00 MST", "15:04:05.999999-07:00 MST", loc), + }, + { + Value: "15:04:05.999999 -07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999999-07:00 MST", "15:04:05.999999-07:00 MST", loc), + }, + { + Value: "15:04:05.999999999-07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999999999-07:00 MST", "15:04:05.999999999-07:00 MST", loc), + }, + { + Value: "15:04:05.999999999 -07:00 MST", + Time: createCurrentDateInLocation("15:04:05.999999999-07:00 MST", "15:04:05.999999999-07:00 MST", loc), + }, + { + Value: "150405-07:00 MST", + Time: createCurrentDateInLocation("150405-07:00 MST", "150405-07:00 MST", loc), + }, + { + Value: "150405 -07:00 MST", + Time: createCurrentDateInLocation("150405-07:00 MST", "150405-07:00 MST", loc), + }, + { + Value: "150405.9-07:00 MST", + Time: createCurrentDateInLocation("150405.9-07:00 MST", "150405.9-07:00 MST", loc), + }, + { + Value: "150405.9 -07:00 MST", + Time: createCurrentDateInLocation("150405.9-07:00 MST", "150405.9-07:00 MST", loc), + }, + { + Value: "150405.999-07:00 MST", + Time: createCurrentDateInLocation("150405.999-07:00 MST", "150405.999-07:00 MST", loc), + }, + { + Value: "150405.999 -07:00 MST", + Time: createCurrentDateInLocation("150405.999-07:00 MST", "150405.999-07:00 MST", loc), + }, + { + Value: "150405.999999-07:00 MST", + Time: createCurrentDateInLocation("150405.999999-07:00 MST", "150405.999999-07:00 MST", loc), + }, + { + Value: "150405.999999 
-07:00 MST", + Time: createCurrentDateInLocation("150405.999999-07:00 MST", "150405.999999-07:00 MST", loc), + }, + { + Value: "150405.999999999-07:00 MST", + Time: createCurrentDateInLocation("150405.999999999-07:00 MST", "150405.999999999-07:00 MST", loc), + }, + { + Value: "150405.999999999 -07:00 MST", + Time: createCurrentDateInLocation("150405.999999999-07:00 MST", "150405.999999999-07:00 MST", loc), + }, + { + Value: "2006-01-02 15:04:05Z", + Time: createTime("2006-01-02T15:04:05Z0700", "2006-01-02T15:04:05Z"), + }, + { + Value: "2006-01-02T15:04:05Z", + Time: createTime("2006-01-02T15:04:05Z0700", "2006-01-02T15:04:05Z"), + }, + { + Value: "2006-01-02 15:04:05.9Z", + Time: createTime("2006-01-02T15:04:05.9Z0700", "2006-01-02T15:04:05.9Z"), + }, + { + Value: "2006-01-02T15:04:05.9Z", + Time: createTime("2006-01-02T15:04:05.9Z0700", "2006-01-02T15:04:05.9Z"), + }, + { + Value: "2006-01-02 15:04:05.999Z", + Time: createTime("2006-01-02T15:04:05.999Z0700", "2006-01-02T15:04:05.999Z"), + }, + { + Value: "2006-01-02T15:04:05.999Z", + Time: createTime("2006-01-02T15:04:05.999Z0700", "2006-01-02T15:04:05.999Z"), + }, + { + Value: "2006-01-02 15:04:05.999999Z", + Time: createTime("2006-01-02T15:04:05.999999Z0700", "2006-01-02T15:04:05.999999Z"), + }, + { + Value: "2006-01-02T15:04:05.999999Z", + Time: createTime("2006-01-02T15:04:05.999999Z0700", "2006-01-02T15:04:05.999999Z"), + }, + { + Value: "2006-01-02 15:04:05.999999999Z", + Time: createTime("2006-01-02T15:04:05.999999999Z0700", "2006-01-02T15:04:05.999999999Z"), + }, + { + Value: "2006-01-02T15:04:05.999999999Z", + Time: createTime("2006-01-02T15:04:05.999999999Z0700", "2006-01-02T15:04:05.999999999Z"), + }, +} + +var rfc8xx1123Times = []TestTime{ + { + Value: "02-Jan-06 1504 MST", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "02-Jan-06 15:04 MST", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "02-Jan-06 150405 MST", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "02-Jan-06 15:04:05 MST", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "02-Jan-06 1504-0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "02-Jan-06 15:04-0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "02-Jan-06 150405-0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "02-Jan-06 15:04:05-0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "02-Jan-06 15:04 -0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "02-Jan-06 15:04:05 -0700", + Time: createTimeInLocation("02-Jan-06 15:04:05 MST", "02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "Monday, 02-Jan-06 15:04 MST", + Time: createTimeInLocation("Monday, 02-Jan-06 15:04:05 MST", "Monday, 02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "Monday, 02-Jan-06 15:04:05 MST", + Time: createTimeInLocation("Monday, 02-Jan-06 15:04:05 MST", "Monday, 02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04 MST", + Time: createTimeInLocation("Mon, 02-Jan-06 15:04:05 MST", "Mon, 02-Jan-06 15:04:00 MST", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04:05 MST", + Time: 
createTimeInLocation("Mon, 02-Jan-06 15:04:05 MST", "Mon, 02-Jan-06 15:04:05 MST", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04-07:00", + Time: createTimeInLocation("Mon, 02-Jan-06 15:04:05 -07:00", "Mon, 02-Jan-06 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04:05-07:00", + Time: createTimeInLocation("Mon, 02-Jan-06 15:04:05 -07:00", "Mon, 02-Jan-06 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-06 15:04:05 -07:00", "Mon, 02-Jan-06 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-06 15:04:05 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-06 15:04:05 -07:00", "Mon, 02-Jan-06 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-2006 15:04-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2006 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-2006 15:04:05-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2006 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-2006 15:04 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2006 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-2006 15:04:05 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2006 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-70 15:04-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1970 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-70 15:04:05-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1970 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-70 15:04 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1970 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-70 15:04:05 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1970 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-99 15:04-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1999 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-99 15:04:05-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1999 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-99 15:04:05 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-1999 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2000 15:04:00 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2000 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05 -07:00", "Mon, 02-Jan-2000 15:04:05 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.9-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.9 -07:00", "Mon, 02-Jan-2000 15:04:05.9 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.9 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.9 -07:00", "Mon, 02-Jan-2000 15:04:05.9 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999 -07:00", "Mon, 02-Jan-2000 15:04:05.999 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999 -07:00", "Mon, 02-Jan-2000 
15:04:05.999 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999999-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999999 -07:00", "Mon, 02-Jan-2000 15:04:05.999999 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999999 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999999 -07:00", "Mon, 02-Jan-2000 15:04:05.999999 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999999999-07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999999999 -07:00", "Mon, 02-Jan-2000 15:04:05.999999999 -07:00", loc), + }, + { + Value: "Mon, 02-Jan-00 15:04:05.999999999 -07:00", + Time: createTimeInLocation("Mon, 02-Jan-2006 15:04:05.999999999 -07:00", "Mon, 02-Jan-2000 15:04:05.999999999 -07:00", loc), + }, +} + +var ansicTimes = []TestTime{ + { + Value: "Mon Jan 02 150405 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 2006", "Mon Jan 02 15:04:05 2006", time.Local), + }, + { + Value: "Mon Jan 02 15:04:05 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 2006", "Mon Jan 02 15:04:05 2006", time.Local), + }, + { + Value: "Mon Jan 02 150405 MST 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 MST 2006", "Mon Jan 02 15:04:05 MST 2006", loc), + }, + { + Value: "Mon Jan 02 15:04:05 MST 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 MST 2006", "Mon Jan 02 15:04:05 MST 2006", loc), + }, + { + Value: "Mon Jan 02 1504-07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:00 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 15:04-07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:00 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 1504 -07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:00 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 15:04 -07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:00 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 150405-07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:05 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 15:04:05-07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:05 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 150405 -07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:05 -07:00 2006", loc), + }, + { + Value: "Mon Jan 02 15:04:05 -07:00 2006", + Time: createTimeInLocation("Mon Jan 02 15:04:05 -07:00 2006", "Mon Jan 02 15:04:05 -07:00 2006", loc), + }, + { + Value: "Jan 02 150405", + Time: createCurrentYearInLocation("Jan 02 15:04:05", "Jan 02 15:04:05", time.Local), + }, + { + Value: "Jan 02 15:04:05", + Time: createCurrentYearInLocation("Jan 02 15:04:05", "Jan 02 15:04:05", time.Local), + }, + { + Value: "Jan 02 150405.9", + Time: createCurrentYearInLocation("Jan 02 15:04:05.9", "Jan 02 15:04:05.9", time.Local), + }, + { + Value: "Jan 02 15:04:05.9", + Time: createCurrentYearInLocation("Jan 02 15:04:05.9", "Jan 02 15:04:05.9", time.Local), + }, + { + Value: "Jan 02 150405.999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999", "Jan 02 15:04:05.999", time.Local), + }, + { + Value: "Jan 02 15:04:05.999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999", "Jan 02 15:04:05.999", time.Local), + }, + { + Value: "Jan 02 150405.999999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999999", "Jan 02 15:04:05.999999", time.Local), + }, + { + 
Value: "Jan 02 15:04:05.999999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999999", "Jan 02 15:04:05.999999", time.Local), + }, + { + Value: "Jan 02 150405.999999999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999999999", "Jan 02 15:04:05.999999999", time.Local), + }, + { + Value: "Jan 02 15:04:05.999999999", + Time: createCurrentYearInLocation("Jan 02 15:04:05.999999999", "Jan 02 15:04:05.999999999", time.Local), + }, +} + +var usTimes = []TestTime{ + { + Value: "11:04AM", + Time: createCurrentDateInLocation("15:04:05", "11:04:00", time.Local), + }, + { + Value: "11:04PM", + Time: createCurrentDateInLocation("15:04:05", "23:04:00", time.Local), + }, + { + Value: "11:04 AM", + Time: createCurrentDateInLocation("15:04:05", "11:04:00", time.Local), + }, + { + Value: "11:04 PM", + Time: createCurrentDateInLocation("15:04:05", "23:04:00", time.Local), + }, + { + Value: "11:04:05 AM", + Time: createCurrentDateInLocation("15:04:05", "11:04:05", time.Local), + }, + { + Value: "11:04:05 PM", + Time: createCurrentDateInLocation("15:04:05", "23:04:05", time.Local), + }, + { + Value: "11:04:05.9AM", + Time: createCurrentDateInLocation("15:04:05.9", "11:04:05.9", time.Local), + }, + { + Value: "11:04:05.9 AM", + Time: createCurrentDateInLocation("15:04:05.9", "11:04:05.9", time.Local), + }, + { + Value: "11:04:05.9PM", + Time: createCurrentDateInLocation("15:04:05.9", "23:04:05.9", time.Local), + }, + { + Value: "11:04:05.9 PM", + Time: createCurrentDateInLocation("15:04:05.9", "23:04:05.9", time.Local), + }, + { + Value: "11:04:05.999AM", + Time: createCurrentDateInLocation("15:04:05.999", "11:04:05.999", time.Local), + }, + { + Value: "11:04:05.999 AM", + Time: createCurrentDateInLocation("15:04:05.999", "11:04:05.999", time.Local), + }, + { + Value: "11:04:05.999PM", + Time: createCurrentDateInLocation("15:04:05.999", "23:04:05.999", time.Local), + }, + { + Value: "11:04:05.999 PM", + Time: createCurrentDateInLocation("15:04:05.999", "23:04:05.999", time.Local), + }, + { + Value: "11:04:05.999999AM", + Time: createCurrentDateInLocation("15:04:05.999999", "11:04:05.999999", time.Local), + }, + { + Value: "11:04:05.999999 AM", + Time: createCurrentDateInLocation("15:04:05.999999", "11:04:05.999999", time.Local), + }, + { + Value: "11:04:05.999999PM", + Time: createCurrentDateInLocation("15:04:05.999999", "23:04:05.999999", time.Local), + }, + { + Value: "11:04:05.999999 PM", + Time: createCurrentDateInLocation("15:04:05.999999", "23:04:05.999999", time.Local), + }, + { + Value: "11:04:05.999999999AM", + Time: createCurrentDateInLocation("15:04:05.999999999", "11:04:05.999999999", time.Local), + }, + { + Value: "11:04:05.999999999 AM", + Time: createCurrentDateInLocation("15:04:05.999999999", "11:04:05.999999999", time.Local), + }, + { + Value: "11:04:05.999999999PM", + Time: createCurrentDateInLocation("15:04:05.999999999", "23:04:05.999999999", time.Local), + }, + { + Value: "11:04:05.999999999 PM", + Time: createCurrentDateInLocation("15:04:05.999999999", "23:04:05.999999999", time.Local), + }, + { + Value: "01-02-06 3:04AM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", time.Local), + }, + { + Value: "01-02-06 3:04 AM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", time.Local), + }, + { + Value: "01-02-06 3:04PM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", time.Local), + }, + { + Value: "01-02-06 3:04 PM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", 
time.Local), + }, + { + Value: "01-02-06 03:04:05AM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", time.Local), + }, + { + Value: "01-02-06 03:04:05 AM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", time.Local), + }, + { + Value: "01-02-06 03:04:05PM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "01-02-06 03:04:05 PM", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + { + Value: "01-02-06 03:04:05.9AM", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T03:04:05.9", time.Local), + }, + { + Value: "01-02-06 03:04:05.9 AM", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T03:04:05.9", time.Local), + }, + { + Value: "01-02-06 03:04:05.9PM", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T15:04:05.9", time.Local), + }, + { + Value: "01-02-06 03:04:05.9 PM", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T15:04:05.9", time.Local), + }, + { + Value: "01-02-06 03:04:05.999AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T03:04:05.999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999 AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T03:04:05.999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T15:04:05.999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999 PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T15:04:05.999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T03:04:05.999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999 AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T03:04:05.999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T15:04:05.999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999 PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T15:04:05.999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999999AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T03:04:05.999999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999999 AM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T03:04:05.999999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999999PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T15:04:05.999999999", time.Local), + }, + { + Value: "01-02-06 03:04:05.999999999 PM", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T15:04:05.999999999", time.Local), + }, + { + Value: "Jan 2, 2006", + Time: createTimeInLocation("2006-01-02", "2006-01-02", time.Local), + }, + { + Value: "Jan 2, 2006 at 3:04am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 03:04am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 03:04pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04 am (MST)", 
+ Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 03:04 am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04 pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 03:04 pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05 am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05 pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.9am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T03:04:05.9", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.9pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.9", "2006-01-02T15:04:05.9", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T03:04:05.999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999", "2006-01-02T15:04:05.999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999999am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T03:04:05.999999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999999pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999999", "2006-01-02T15:04:05.999999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999999999am (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T03:04:05.999999999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05.999999999pm (MST)", + Time: createTimeInLocation("2006-01-02T15:04:05.999999999", "2006-01-02T15:04:05.999999999", loc), + }, + { + Value: "Jan 2, 2006 at 3:04am MST", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04pm MST", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04am -07:00", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04pm -07:00", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:00", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05am MST", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05pm MST", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05am -07:00", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T03:04:05", loc), + }, + { + Value: "Jan 2, 2006 at 3:04:05pm -07:00", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", loc), + }, +} + +type TestTime struct { + Value string + Time time.Time +} + +func createLocation(name string) *time.Location { + loc, _ := time.LoadLocation(name) + return loc +} + +func getOffset(t time.Time) int { + _, offset := t.Zone() + return offset +} + +func localTime() time.Time { + t, _ 
:= time.ParseInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local) + return t +} + +func createTime(layout, value string) time.Time { + t, _ := time.Parse(layout, value) + return t +} + +func createTimeInLocation(layout, value string, loc *time.Location) time.Time { + t, _ := time.ParseInLocation(layout, value, loc) + return t +} + +func createCurrentDateInLocation(layout, value string, loc *time.Location) time.Time { + tmpT, _ := time.ParseInLocation(layout, value, loc) + + now := time.Now().In(loc) + year := now.Year() + month := time.Month(now.Month()) + day := now.Day() + + return time.Date(year, month, day, tmpT.Hour(), tmpT.Minute(), tmpT.Second(), tmpT.Nanosecond(), loc) +} + +func createCurrentYearInLocation(layout, value string, loc *time.Location) time.Time { + tmpT, _ := time.ParseInLocation(layout, value, loc) + + now := time.Now().In(loc) + year := now.Year() + + return time.Date(year, time.Month(tmpT.Month()), tmpT.Day(), tmpT.Hour(), tmpT.Minute(), tmpT.Second(), tmpT.Nanosecond(), loc) +} + +func testTimes(times []TestTime, parseType string, test *testing.T) { + var t time.Time + var err error + assert := assert.New(test) + + for _, tt := range times { + p, _ := NewParseTime() + + switch parseType { + case "ISO8601": + t, err = p.ISO8601(tt.Value) + case "RFC8xx1123": + t, err = p.RFC8xx1123(tt.Value) + case "ANSIC": + t, err = p.ANSIC(tt.Value) + case "US": + t, err = p.US(tt.Value) + case "Parse": + t, err = p.Parse(tt.Value) + } + + assert.Equal(nil, err, "Invalid date/time") + + t2 := tt.Time + + assert.Equal(getOffset(t), getOffset(t2), "Incorrect offset") + assert.Equal(t.Unix(), t2.Unix(), "Parse error") + } +} + +func TestNewParseTime(test *testing.T) { + assert := assert.New(test) + + p, _ := NewParseTime() + loc := p.GetLocation() + zone, offset := time.Now().In(time.Local).Zone() + loc2 := time.FixedZone(zone, offset) + + assert.Equal(loc.String(), loc2.String(), "Incorrect location") +} + +func TestNewParseTimeEmptyString(test *testing.T) { + assert := assert.New(test) + + p, _ := NewParseTime("") + + loc := p.GetLocation() + zone, offset := time.Now().In(time.Local).Zone() + loc2 := time.FixedZone(zone, offset) + + assert.Equal(loc.String(), loc2.String(), "Incorrect location") +} + +func TestNewParseTimeLocation(test *testing.T) { + assert := assert.New(test) + + loc, _ := time.LoadLocation("Etc/GMT+12") + p, _ := NewParseTime(loc) + loc2 := p.GetLocation() + + assert.Equal(loc.String(), loc2.String(), "Incorrect location") +} + +func TestNewParseTimeTimezone(test *testing.T) { + assert := assert.New(test) + + p, _ := NewParseTime("GMT+12") + loc2, _ := time.LoadLocation("Etc/GMT+12") + t, _ := p.Parse("2006-01-02T15:04:05") + t2, _ := time.ParseInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", loc2) + + assert.Equal(t.Unix(), t2.Unix(), "Incorrect location") +} + +func TestNewParseTimeLoadLocation(test *testing.T) { + assert := assert.New(test) + + p, _ := NewParseTime("Etc/GMT+12") + loc := p.GetLocation() + loc2, _ := time.LoadLocation("Etc/GMT+12") + + assert.Equal(loc.String(), loc2.String(), "Incorrect location") +} + +func TestNewParseTimeFixedZone(test *testing.T) { + assert := assert.New(test) + + p, _ := NewParseTime("Etc/GMT+12", -43200) + loc := p.GetLocation() + loc2, _ := time.LoadLocation("Etc/GMT+12") + + assert.Equal(loc.String(), loc2.String(), "Incorrect location") +} + +func TestISO8601(test *testing.T) { + testTimes(iso8601Times, "ISO8601", test) +} + +func TestRFC8xx1123(test *testing.T) { + 
testTimes(rfc8xx1123Times, "RFC8xx1123", test) +} + +func TestANSIC(test *testing.T) { + testTimes(ansicTimes, "ANSIC", test) +} + +func TestUS(test *testing.T) { + testTimes(usTimes, "US", test) + + times := []TestTime{ + { + Value: "01/02/2006 15:04:05", + Time: createTimeInLocation("2006-01-02T15:04:05", "2006-01-02T15:04:05", time.Local), + }, + } + + testTimes(times, "US", test) +} + +func TestParse(test *testing.T) { + testTimes(iso8601Times, "Parse", test) + testTimes(rfc8xx1123Times, "Parse", test) + testTimes(ansicTimes, "Parse", test) + testTimes(usTimes, "Parse", test) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/.travis.yml b/vendor/gopkg.in/alecthomas/kingpin.v2/.travis.yml new file mode 100644 index 0000000..e564b74 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/.travis.yml @@ -0,0 +1,4 @@ +sudo: false +language: go +install: go get -t -v ./... +go: 1.2 diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING b/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING new file mode 100644 index 0000000..2993ec0 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2014 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
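For reference, the parsetime test tables above exercise the library's public API end to end. A minimal usage sketch, assuming the vendored package is imported as `github.com/tkuchiki/parsetime` and that `NewParseTime`, `Parse`, and `US` behave as the tests above suggest:

```go
package main

import (
	"fmt"

	"github.com/tkuchiki/parsetime"
)

func main() {
	// With no arguments, NewParseTime defaults to the local time zone
	// (the tests above also pass a timezone name or a *time.Location).
	p, err := parsetime.NewParseTime()
	if err != nil {
		panic(err)
	}

	// Parse tries the supported layout families (ISO8601, RFC/ANSIC
	// variants, US-style times, ...).
	t, err := p.Parse("2006-01-02T15:04:05Z")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.Unix())

	// Format-specific entry points are also available.
	t2, err := p.US("Jan 2, 2006 at 3:04pm (MST)")
	if err != nil {
		panic(err)
	}
	fmt.Println(t2)
}
```

The format-specific methods (`ISO8601`, `RFC8xx1123`, `ANSIC`, `US`) mirror the per-table test functions above.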
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/README.md b/vendor/gopkg.in/alecthomas/kingpin.v2/README.md new file mode 100644 index 0000000..f98363c --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/README.md @@ -0,0 +1,671 @@ +# Kingpin - A Go (golang) command line and flag parser [![](https://godoc.org/github.com/alecthomas/kingpin?status.svg)](http://godoc.org/github.com/alecthomas/kingpin) [![Build Status](https://travis-ci.org/alecthomas/kingpin.png)](https://travis-ci.org/alecthomas/kingpin) + + + +- [Overview](#overview) +- [Features](#features) +- [User-visible changes between v1 and v2](#user-visible-changes-between-v1-and-v2) + - [Flags can be used at any point after their definition.](#flags-can-be-used-at-any-point-after-their-definition) + - [Short flags can be combined with their parameters](#short-flags-can-be-combined-with-their-parameters) +- [API changes between v1 and v2](#api-changes-between-v1-and-v2) +- [Versions](#versions) + - [V2 is the current stable version](#v2-is-the-current-stable-version) + - [V1 is the OLD stable version](#v1-is-the-old-stable-version) +- [Change History](#change-history) +- [Examples](#examples) + - [Simple Example](#simple-example) + - [Complex Example](#complex-example) +- [Reference Documentation](#reference-documentation) + - [Displaying errors and usage information](#displaying-errors-and-usage-information) + - [Sub-commands](#sub-commands) + - [Custom Parsers](#custom-parsers) + - [Repeatable flags](#repeatable-flags) + - [Boolean Values](#boolean-values) + - [Default Values](#default-values) + - [Place-holders in Help](#place-holders-in-help) + - [Consuming all remaining arguments](#consuming-all-remaining-arguments) + - [Bash/ZSH Shell Completion](#bashzsh-shell-completion) + - [Supporting -h for help](#supporting--h-for-help) + - [Custom help](#custom-help) + + + +## Overview + +Kingpin is a [fluent-style](http://en.wikipedia.org/wiki/Fluent_interface), +type-safe command-line parser. It supports flags, nested commands, and +positional arguments. + +Install it with: + + $ go get gopkg.in/alecthomas/kingpin.v2 + +It looks like this: + +```go +var ( + verbose = kingpin.Flag("verbose", "Verbose mode.").Short('v').Bool() + name = kingpin.Arg("name", "Name of user.").Required().String() +) + +func main() { + kingpin.Parse() + fmt.Printf("%v, %s\n", *verbose, *name) +} +``` + +More [examples](https://github.com/alecthomas/kingpin/tree/master/examples) are available. + +Second to parsing, providing the user with useful help is probably the most +important thing a command-line parser does. Kingpin tries to provide detailed +contextual help if `--help` is encountered at any point in the command line +(excluding after `--`). + +## Features + +- Help output that isn't as ugly as sin. +- Fully [customisable help](#custom-help), via Go templates. +- Parsed, type-safe flags (`kingpin.Flag("f", "help").Int()`) +- Parsed, type-safe positional arguments (`kingpin.Arg("a", "help").Int()`). +- Parsed, type-safe, arbitrarily deep commands (`kingpin.Command("c", "help")`). +- Support for required flags and required positional arguments (`kingpin.Flag("f", "").Required().Int()`). +- Support for arbitrarily nested default commands (`command.Default()`). +- Callbacks per command, flag and argument (`kingpin.Command("c", "").Action(myAction)`). +- POSIX-style short flag combining (`-a -b` -> `-ab`). +- Short-flag+parameter combining (`-a parm` -> `-aparm`). +- Read command-line from files (`@`). 
+- Automatically generate man pages (`--help-man`). + +## User-visible changes between v1 and v2 + +### Flags can be used at any point after their definition. + +Flags can be specified at any point after their definition, not just +*immediately after their associated command*. From the chat example below, the +following used to be required: + +``` +$ chat --server=chat.server.com:8080 post --image=~/Downloads/owls.jpg pics +``` + +But the following will now work: + +``` +$ chat post --server=chat.server.com:8080 --image=~/Downloads/owls.jpg pics +``` + +### Short flags can be combined with their parameters + +Previously, if a short flag was used, any argument to that flag would have to +be separated by a space. That is no longer the case. + +## API changes between v1 and v2 + +- `ParseWithFileExpansion()` is gone. The new parser directly supports expanding `@<file>`. +- Added `FatalUsage()` and `FatalUsageContext()` for displaying an error + usage and terminating. +- `Dispatch()` renamed to `Action()`. +- Added `ParseContext()` for parsing a command line into its intermediate context form without executing. +- Added `Terminate()` function to override the termination function. +- Added `UsageForContextWithTemplate()` for printing usage via a custom template. +- Added `UsageTemplate()` for overriding the default template to use. Two templates are included: + 1. `DefaultUsageTemplate` - default template. + 2. `CompactUsageTemplate` - compact command template for larger applications. + +## Versions + +Kingpin uses [gopkg.in](https://gopkg.in/alecthomas/kingpin) for versioning. + +The current stable version is [gopkg.in/alecthomas/kingpin.v2](https://gopkg.in/alecthomas/kingpin.v2). The previous version, [gopkg.in/alecthomas/kingpin.v1](https://gopkg.in/alecthomas/kingpin.v1), is deprecated and in maintenance mode. + +### [V2](https://gopkg.in/alecthomas/kingpin.v2) is the current stable version + +Installation: + +```sh +$ go get gopkg.in/alecthomas/kingpin.v2 +``` + +### [V1](https://gopkg.in/alecthomas/kingpin.v1) is the OLD stable version + +Installation: + +```sh +$ go get gopkg.in/alecthomas/kingpin.v1 +``` + +## Change History + +- *2015-09-19* -- Stable v2.1.0 release. + - Added `command.Default()` to specify a default command to use if no other + command matches. This allows for convenient user shortcuts. + - Exposed `HelpFlag` and `VersionFlag` for further customisation. + - `Action()` and `PreAction()` added and both now support an arbitrary + number of callbacks. + - `kingpin.SeparateOptionalFlagsUsageTemplate`. + - `--help-long` and `--help-man` (hidden by default) flags. + - Flags are "interspersed" by default, but can be disabled with `app.Interspersed(false)`. + - Added flags for all simple builtin types (int8, uint16, etc.) and slice variants. + - Use `app.Writer(os.Writer)` to specify the default writer for all output functions. + - Dropped `os.Writer` prefix from all printf-like functions. + +- *2015-05-22* -- Stable v2.0.0 release. + - Initial stable release of v2.0.0. + - Fully supports interspersed flags, commands and arguments. + - Flags can be present at any point after their logical definition. + - Application.Parse() terminates if commands are present and a command is not parsed. + - Dispatch() -> Action(). + - Actions are dispatched after all values are populated. + - Override termination function (defaults to os.Exit). + - Override output stream (defaults to os.Stderr). + - Templatised usage help, with default and compact templates.
+ - Make error/usage functions more consistent. + - Support argument expansion from files by default (with @). + - Fully public data model is available via .Model(). + - Parser has been completely refactored. + - Parsing and execution have been split into distinct stages. + - Use `go generate` to generate repeated flags. + - Support combined short-flag+argument: -fARG. + +- *2015-01-23* -- Stable v1.3.4 release. + - Support "--" for separating flags from positional arguments. + - Support loading flags from files (ParseWithFileExpansion()). Use @FILE as an argument. + - Add post-app and post-cmd validation hooks. This allows arbitrary validation to be added. + - A bunch of improvements to help usage and formatting. + - Support arbitrarily nested sub-commands. + +- *2014-07-08* -- Stable v1.2.0 release. + - Pass any value through to `Strings()` when final argument. + Allows for values that look like flags to be processed. + - Allow `--help` to be used with commands. + - Support `Hidden()` flags. + - Parser for [units.Base2Bytes](https://github.com/alecthomas/units) + type. Allows for flags like `--ram=512MB` or `--ram=1GB`. + - Add an `Enum()` value, allowing only one of a set of values + to be selected. eg. `Flag(...).Enum("debug", "info", "warning")`. + +- *2014-06-27* -- Stable v1.1.0 release. + - Bug fixes. + - Always return an error (rather than panicking) when misconfigured. + - `OpenFile(flag, perm)` value type added, for finer control over opening files. + - Significantly improved usage formatting. + +- *2014-06-19* -- Stable v1.0.0 release. + - Support [cumulative positional](#consuming-all-remaining-arguments) arguments. + - Return error rather than panic when there are fatal errors not caught by + the type system. eg. when a default value is invalid. + - Use gopkg.in. + +- *2014-06-10* -- Place-holder streamlining. + - Renamed `MetaVar` to `PlaceHolder`. + - Removed `MetaVarFromDefault`. Kingpin now uses [heuristics](#place-holders-in-help) + to determine what to display. + +## Examples + +### Simple Example + +Kingpin can be used for simple flag+arg applications like so: + +``` +$ ping --help +usage: ping [<flags>] <ip> [<count>] + +Flags: + --debug Enable debug mode. + --help Show help. + -t, --timeout=5s Timeout waiting for ping. + +Args: + <ip> IP address to ping. + [<count>] Number of packets to send +$ ping 1.2.3.4 5 +Would ping: 1.2.3.4 with timeout 5s and count 0 +``` + +From the following source: + +```go +package main + +import ( + "fmt" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + debug = kingpin.Flag("debug", "Enable debug mode.").Bool() + timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").Default("5s").OverrideDefaultFromEnvar("PING_TIMEOUT").Short('t').Duration() + ip = kingpin.Arg("ip", "IP address to ping.").Required().IP() + count = kingpin.Arg("count", "Number of packets to send").Int() +) + +func main() { + kingpin.Version("0.0.1") + kingpin.Parse() + fmt.Printf("Would ping: %s with timeout %s and count %d", *ip, *timeout, *count) +} +``` + +### Complex Example + +Kingpin can also produce complex command-line applications with global flags, +subcommands, and per-subcommand flags, like this: + +``` +$ chat --help +usage: chat [<flags>] <command> [<flags>] [<args> ...] + +A command-line chat application. + +Flags: + --help Show help. + --debug Enable debug mode. + --server=127.0.0.1 Server address. + +Commands: + help [<command>] + Show help for a command. + + register <nick> <name> + Register a new user. + + post [<flags>] <channel> [<text>] + Post a message to a channel. + +$ chat help post +usage: chat [<flags>] post [<flags>] <channel> [<text>] + +Post a message to a channel.
+ +Flags: + --image=IMAGE Image to post. + +Args: + <channel> Channel to post to. + [<text>] Text to post. + +$ chat post --image=~/Downloads/owls.jpg pics +... +``` + +From this code: + +```go +package main + +import ( + "os" + "strings" + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + app = kingpin.New("chat", "A command-line chat application.") + debug = app.Flag("debug", "Enable debug mode.").Bool() + serverIP = app.Flag("server", "Server address.").Default("127.0.0.1").IP() + + register = app.Command("register", "Register a new user.") + registerNick = register.Arg("nick", "Nickname for user.").Required().String() + registerName = register.Arg("name", "Name of user.").Required().String() + + post = app.Command("post", "Post a message to a channel.") + postImage = post.Flag("image", "Image to post.").File() + postChannel = post.Arg("channel", "Channel to post to.").Required().String() + postText = post.Arg("text", "Text to post.").Strings() +) + +func main() { + switch kingpin.MustParse(app.Parse(os.Args[1:])) { + // Register user + case register.FullCommand(): + println(*registerNick) + + // Post message + case post.FullCommand(): + if *postImage != nil { + } + text := strings.Join(*postText, " ") + println("Post:", text) + } +} +``` + +## Reference Documentation + +### Displaying errors and usage information + +Kingpin exports a set of functions to provide consistent errors and usage +information to the user. + +Error messages look something like this: + + <app>: error: <message> + +The functions on `Application` are: + +Function | Purpose +---------|-------------- +`Errorf(format, args)` | Display a printf formatted error to the user. +`Fatalf(format, args)` | As with Errorf, but also call the termination handler. +`FatalUsage(format, args)` | As with Fatalf, but also print contextual usage information. +`FatalUsageContext(context, format, args)` | As with Fatalf, but also print contextual usage information from a `ParseContext`. +`FatalIfError(err, format, args)` | Conditionally print an error prefixed with format+args, then call the termination handler. + +There are equivalent global functions in the kingpin namespace for the default +`kingpin.CommandLine` instance. + +### Sub-commands + +Kingpin supports nested sub-commands, with separate flag and positional +arguments per sub-command. Note that positional arguments may only occur after +sub-commands. + +For example: + +```go +var ( + deleteCommand = kingpin.Command("delete", "Delete an object.") + deleteUserCommand = deleteCommand.Command("user", "Delete a user.") + deleteUserUIDFlag = deleteUserCommand.Flag("uid", "Delete user by UID rather than username.") + deleteUserUsername = deleteUserCommand.Arg("username", "Username to delete.") + deletePostCommand = deleteCommand.Command("post", "Delete a post.") +) + +func main() { + switch kingpin.Parse() { + case "delete user": + case "delete post": + } +} +``` + +### Custom Parsers + +Kingpin supports both flag and positional argument parsers for converting to +Go types. For example, some included parsers are `Int()`, `Float()`, +`Duration()` and `ExistingFile()`. + +Parsers conform to Go's [`flag.Value`](http://godoc.org/flag#Value) +interface, so any existing implementations will work.
+ +For example, a parser for accumulating HTTP header values might look like this: + +```go +type HTTPHeaderValue http.Header + +func (h *HTTPHeaderValue) Set(value string) error { + parts := strings.SplitN(value, ":", 2) + if len(parts) != 2 { + return fmt.Errorf("expected HEADER:VALUE got '%s'", value) + } + (*http.Header)(h).Add(parts[0], parts[1]) + return nil +} + +func (h *HTTPHeaderValue) String() string { + return "" +} +``` + +As a convenience, I would recommend something like this: + +```go +func HTTPHeader(s Settings) (target *http.Header) { + target = new(http.Header) + s.SetValue((*HTTPHeaderValue)(target)) + return +} +``` + +You would use it like so: + +```go +headers = HTTPHeader(kingpin.Flag("header", "Add a HTTP header to the request.").Short('H')) +``` + +### Repeatable flags + +Depending on the `Value` they hold, some flags may be repeated. The +`IsCumulative() bool` function on `Value` tells if it's safe to call `Set()` +multiple times or if an error should be raised if several values are passed. + +The built-in `Value`s returning slices and maps, as well as `Counter`, are +examples of `Value`s that make a flag repeatable. + +### Boolean values + +Boolean values are uniquely managed by Kingpin. Each boolean flag will have a negative complement: +`--<name>` and `--no-<name>`. + +### Default Values + +The default value is the zero value for a type. This can be overridden with +the `Default(value...)` function on flags and arguments. This function accepts +one or several strings, which are parsed by the value itself, so they *must* +be compliant with the format expected. + +### Place-holders in Help + +The place-holder value for a flag is the value used in the help to describe +the value of a non-boolean flag. + +The value provided to PlaceHolder() is used if provided, then the value +provided by Default() if provided, then finally the capitalised flag name is +used. + +Here are some examples of flags with various permutations: + + --name=NAME // Flag(...).String() + --name="Harry" // Flag(...).Default("Harry").String() + --name=FULL-NAME // Flag(...).PlaceHolder("FULL-NAME").Default("Harry").String() + +### Consuming all remaining arguments + +A common command-line idiom is to use all remaining arguments for some +purpose. eg. The following command accepts an arbitrary number of +IP addresses as positional arguments: + + ./cmd ping 10.1.1.1 192.168.1.1 + +Such arguments are similar to [repeatable flags](#repeatable-flags), but for +arguments. Therefore they use the same `IsCumulative() bool` function on the +underlying `Value`, so the built-in `Value`s for which the `Set()` function +can be called several times will consume multiple arguments. + +To implement the above example with a custom `Value`, we might do something +like this: + +```go +type ipList []net.IP + +func (i *ipList) Set(value string) error { + if ip := net.ParseIP(value); ip == nil { + return fmt.Errorf("'%s' is not an IP address", value) + } else { + *i = append(*i, ip) + return nil + } +} + +func (i *ipList) String() string { + return "" +} + +func (i *ipList) IsCumulative() bool { + return true +} + +func IPList(s Settings) (target *[]net.IP) { + target = new([]net.IP) + s.SetValue((*ipList)(target)) + return +} +``` + +And use it like so: + +```go +ips := IPList(kingpin.Arg("ips", "IP addresses to ping.")) +``` + +### Bash/ZSH Shell Completion + +By default, all flags and commands/subcommands generate completions +internally.
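Before the completion details below, here is a minimal, self-contained sketch tying together the preceding reference sections: a repeatable flag via the built-in cumulative `Strings()` value, a boolean flag with its automatic `--no-` complement, and a parsed `Default()`. The flag names are illustrative only:

```go
package main

import (
	"fmt"

	"gopkg.in/alecthomas/kingpin.v2"
)

var (
	// Strings() holds a cumulative Value, so --tag may be repeated.
	tags = kingpin.Flag("tag", "Tag to apply (repeatable).").Strings()
	// Bool() flags automatically gain a --no-verbose complement.
	verbose = kingpin.Flag("verbose", "Verbose mode.").Bool()
	// The default string is parsed by the Int value itself, so it must be valid.
	retries = kingpin.Flag("retries", "Retry count.").Default("3").Int()
)

func main() {
	kingpin.Parse()
	fmt.Printf("tags=%v verbose=%v retries=%d\n", *tags, *verbose, *retries)
}
```

Invoked as, say, `./app --tag=a --tag=b --no-verbose`, the repeated `--tag` values accumulate into the slice while `--no-verbose` forces the boolean off.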
+ +Out of the box, CLI tools using kingpin should be able to take advantage +of completion hinting for flags and commands. By specifying +`--completion-bash` as the first argument, your CLI tool will show +possible subcommands. By ending your argv with `--`, hints for flags +will be shown. + +To allow your end users to take advantage, you must package a +`/etc/bash_completion.d` script with your distribution (or the equivalent +for your target platform/shell). An alternative is to instruct your end +user to source a script from their `bash_profile` (or equivalent). + +Fortunately, Kingpin makes it easy to generate or source a script for use +with end users' shells. `./yourtool --completion-script-bash` and +`./yourtool --completion-script-zsh` will generate these scripts for you. + +**Installation by Package** + +For the best user experience, you should bundle your pre-created +completion script with your CLI tool and install it inside +`/etc/bash_completion.d` (or equivalent). A good suggestion is to add +this as an automated step to your build pipeline, so the script is +regenerated as the implementation is improved and bugs are fixed. + +**Installation by `bash_profile`** + +Alternatively, instruct your users to add an additional statement to +their `bash_profile` (or equivalent): + +``` +eval "$(your-cli-tool --completion-script-bash)" +``` + +Or for ZSH: + +``` +eval "$(your-cli-tool --completion-script-zsh)" +``` + +#### Additional API +To provide more flexibility, a completion option API has been +exposed for flags to allow user-defined completion options, to extend +completions further than just EnumVar/Enum. + + +**Provide Static Options** + +When using an `Enum` or `EnumVar`, users are limited to only the options +given. Maybe we wish to hint possible options to the user, but also +allow them to provide their own custom option. `HintOptions` gives +this functionality to flags. + +``` +app := kingpin.New("completion", "My application with bash completion.") +app.Flag("port", "Provide a port to connect to"). + Required(). + HintOptions("80", "443", "8080"). + IntVar(&c.port) +``` + +**Provide Dynamic Options** +Consider the case that you needed to read a local database or a file to +provide suggestions. You can dynamically generate the options: + +``` +func listHosts(args []string) []string { + // Provide a dynamic list of hosts from a hosts file or otherwise + // for bash completion. In this example we simply return static slice. + + // You could use this functionality to reach into a hosts file to provide + // completion for a list of known hosts. + return []string{"sshhost.example", "webhost.example", "ftphost.example"} +} + +app := kingpin.New("completion", "My application with bash completion.") +app.Flag("flag-1", "").HintAction(listHosts).String() +``` + +**EnumVar/Enum** +When using `Enum` or `EnumVar`, any provided options will be automatically +used for bash autocompletion. However, if you wish to provide a subset or +different options, you can use `HintOptions` or `HintAction` which will override +the default completion options for `Enum`/`EnumVar`. + + +**Examples** +You can see an in-depth example of the completion API within +`examples/completion/main.go`. + + +### Supporting -h for help + +`kingpin.CommandLine.HelpFlag.Short('h')` + +### Custom help + +Kingpin v2 supports templatised help using the text/template library (actually, [a fork](https://github.com/alecthomas/template)).
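For instance, switching an application to the compact template described below is a one-liner; a minimal sketch, with `myapp` as a placeholder name:

```go
package main

import (
	"os"

	"gopkg.in/alecthomas/kingpin.v2"
)

func main() {
	app := kingpin.New("myapp", "An app using the compact help template.")
	app.Flag("debug", "Enable debug mode.").Bool()
	// Replace the default usage template with the compact variant.
	app.UsageTemplate(kingpin.CompactUsageTemplate)
	kingpin.MustParse(app.Parse(os.Args[1:]))
}
```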
+ +You can specify the template to use with the [Application.UsageTemplate()](http://godoc.org/gopkg.in/alecthomas/kingpin.v2#Application.UsageTemplate) function. + +There are four included templates: `kingpin.DefaultUsageTemplate` is the default, +`kingpin.CompactUsageTemplate` provides a more compact representation for more complex command-line structures, +`kingpin.SeparateOptionalFlagsUsageTemplate` looks like the default template, but splits required +and optional command flags into separate lists, and `kingpin.ManPageTemplate` is used to generate man pages. + +See the above templates for examples of usage, and the [UsageForContextWithTemplate()](https://github.com/alecthomas/kingpin/blob/master/usage.go#L198) method for details on the context. + +#### Default help template + +``` +$ go run ./examples/curl/curl.go --help +usage: curl [<flags>] <command> [<args> ...] + +An example implementation of curl. + +Flags: + --help Show help. + -t, --timeout=5s Set connection timeout. + -H, --headers=HEADER=VALUE + Add HTTP headers to the request. + +Commands: + help [<command>...] + Show help. + + get url <url> + Retrieve a URL. + + get file <file> + Retrieve a file. + + post [<flags>] <url> + POST a resource. +``` + +#### Compact help template + +``` +$ go run ./examples/curl/curl.go --help +usage: curl [<flags>] <command> [<args> ...] + +An example implementation of curl. + +Flags: + --help Show help. + -t, --timeout=5s Set connection timeout. + -H, --headers=HEADER=VALUE + Add HTTP headers to the request. + +Commands: + help [<command>...] + get [<flags>] + url <url> + file <file> + post [<flags>] <url> +``` diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat1/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat1/main.go new file mode 100644 index 0000000..2a233fc --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat1/main.go @@ -0,0 +1,20 @@ +package main + +import ( + "fmt" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + debug = kingpin.Flag("debug", "Enable debug mode.").Bool() + timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").Default("5s").OverrideDefaultFromEnvar("PING_TIMEOUT").Short('t').Duration() + ip = kingpin.Arg("ip", "IP address to ping.").Required().IP() + count = kingpin.Arg("count", "Number of packets to send").Int() +) + +func main() { + kingpin.Version("0.0.1") + kingpin.Parse() + fmt.Printf("Would ping: %s with timeout %s and count %d", *ip, *timeout, *count) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat2/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat2/main.go new file mode 100644 index 0000000..83891a7 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/chat2/main.go @@ -0,0 +1,38 @@ +package main + +import ( + "os" + "strings" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + app = kingpin.New("chat", "A command-line chat application.") + debug = app.Flag("debug", "Enable debug mode.").Bool() + serverIP = app.Flag("server", "Server address.").Default("127.0.0.1").IP() + + register = app.Command("register", "Register a new user.") + registerNick = register.Arg("nick", "Nickname for user.").Required().String() + registerName = register.Arg("name", "Name of user.").Required().String() + + post = app.Command("post", "Post a message to a channel.") + postImage = post.Flag("image", "Image to post.").File() + postChannel = post.Arg("channel", "Channel to post to.").Required().String() + postText = post.Arg("text", "Text to post.").Strings() +) + +func main() { + switch kingpin.MustParse(app.Parse(os.Args[1:])) { + // Register user + case register.FullCommand():
println(*registerNick) + + // Post message + case post.FullCommand(): + if *postImage != nil { + } + text := strings.Join(*postText, " ") + println("Post:", text) + } +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/completion/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/completion/main.go new file mode 100644 index 0000000..fe17b52 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/completion/main.go @@ -0,0 +1,96 @@ +package main + +import ( + "fmt" + "os" + + "github.com/alecthomas/kingpin" +) + +func listHosts() []string { + // Provide a dynamic list of hosts from a hosts file or otherwise + // for bash completion. In this example we simply return static slice. + + // You could use this functionality to reach into a hosts file to provide + // completion for a list of known hosts. + return []string{"sshhost.example", "webhost.example", "ftphost.example"} +} + +type NetcatCommand struct { + hostName string + port int + format string +} + +func (n *NetcatCommand) run(c *kingpin.ParseContext) error { + fmt.Printf("Would have run netcat to hostname %v, port %d, and output format %v\n", n.hostName, n.port, n.format) + return nil +} + +func configureNetcatCommand(app *kingpin.Application) { + c := &NetcatCommand{} + nc := app.Command("nc", "Connect to a Host").Action(c.run) + nc.Flag("nop-flag", "Example of a flag with no options").Bool() + + // You can provide hint options using a function to generate them + nc.Flag("host", "Provide a hostname to nc"). + Required(). + HintAction(listHosts). + StringVar(&c.hostName) + + // You can provide hint options statically + nc.Flag("port", "Provide a port to connect to"). + Required(). + HintOptions("80", "443", "8080"). + IntVar(&c.port) + + // Enum/EnumVar options will be turned into completion options automatically + nc.Flag("format", "Define the output format"). + Default("raw"). + EnumVar(&c.format, "raw", "json") + + // You can combine HintOptions with HintAction too + nc.Flag("host-with-multi", "Define a hostname"). + HintAction(listHosts). + HintOptions("myhost.com"). + String() + + // And combine with themselves + nc.Flag("host-with-multi-options", "Define a hostname"). + HintOptions("myhost.com"). + HintOptions("myhost2.com"). + String() + + // If you specify HintOptions/HintActions for Enum/EnumVar, the options + // provided for Enum/EnumVar will be overridden. + nc.Flag("format-with-override-1", "Define a format"). + HintAction(listHosts). + Enum("option1", "option2") + + nc.Flag("format-with-override-2", "Define a format"). + HintOptions("myhost.com", "myhost2.com"). 
+ Enum("option1", "option2") +} + +func addSubCommand(app *kingpin.Application, name string, description string) { + c := app.Command(name, description).Action(func(c *kingpin.ParseContext) error { + fmt.Printf("Would have run command %s.\n", name) + return nil + }) + c.Flag("nop-flag", "Example of a flag with no options").Bool() +} + +func main() { + app := kingpin.New("completion", "My application with bash completion.") + app.Flag("flag-1", "").String() + app.Flag("flag-2", "").HintOptions("opt1", "opt2").String() + + configureNetcatCommand(app) + + // Add some additional top level commands + addSubCommand(app, "ls", "Additional top level command to show command completion") + addSubCommand(app, "ping", "Additional top level command to show command completion") + addSubCommand(app, "nmap", "Additional top level command to show command completion") + + kingpin.MustParse(app.Parse(os.Args[1:])) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/curl/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/curl/main.go new file mode 100644 index 0000000..a877e7b --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/curl/main.go @@ -0,0 +1,105 @@ +// A curl-like HTTP command-line client. +package main + +import ( + "errors" + "fmt" + "io" + "net/http" + "os" + "strings" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + timeout = kingpin.Flag("timeout", "Set connection timeout.").Short('t').Default("5s").Duration() + headers = HTTPHeader(kingpin.Flag("headers", "Add HTTP headers to the request.").Short('H').PlaceHolder("HEADER=VALUE")) + + get = kingpin.Command("get", "GET a resource.").Default() + getFlag = get.Flag("test", "Test flag").Bool() + getURL = get.Command("url", "Retrieve a URL.").Default() + getURLURL = getURL.Arg("url", "URL to GET.").Required().URL() + getFile = get.Command("file", "Retrieve a file.") + getFileFile = getFile.Arg("file", "File to retrieve.").Required().ExistingFile() + + post = kingpin.Command("post", "POST a resource.") + postData = post.Flag("data", "Key-value data to POST").Short('d').PlaceHolder("KEY:VALUE").StringMap() + postBinaryFile = post.Flag("data-binary", "File with binary data to POST.").File() + postURL = post.Arg("url", "URL to POST to.").Required().URL() +) + +type HTTPHeaderValue http.Header + +func (h HTTPHeaderValue) Set(value string) error { + parts := strings.SplitN(value, "=", 2) + if len(parts) != 2 { + return fmt.Errorf("expected HEADER=VALUE got '%s'", value) + } + (http.Header)(h).Add(parts[0], parts[1]) + return nil +} + +func (h HTTPHeaderValue) String() string { + return "" +} + +func HTTPHeader(s kingpin.Settings) (target *http.Header) { + target = &http.Header{} + s.SetValue((*HTTPHeaderValue)(target)) + return +} + +func applyRequest(req *http.Request) error { + req.Header = *headers + resp, err := http.DefaultClient.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode > 299 { + return fmt.Errorf("HTTP request failed: %s", resp.Status) + } + _, err = io.Copy(os.Stdout, resp.Body) + return err +} + +func apply(method string, url string) error { + req, err := http.NewRequest(method, url, nil) + if err != nil { + return err + } + return applyRequest(req) +} + +func applyPOST() error { + req, err := http.NewRequest("POST", (*postURL).String(), nil) + if err != nil { + return err + } + if len(*postData) > 0 { + for key, value := range *postData { + req.Form.Set(key, value) + } + } else if postBinaryFile != nil { + if headers.Get("Content-Type") 
!= "" { + headers.Set("Content-Type", "application/octet-stream") + } + req.Body = *postBinaryFile + } else { + return errors.New("--data or --data-binary must be provided to POST") + } + return applyRequest(req) +} + +func main() { + kingpin.UsageTemplate(kingpin.CompactUsageTemplate).Version("1.0").Author("Alec Thomas") + kingpin.CommandLine.Help = "An example implementation of curl." + switch kingpin.Parse() { + case "get url": + kingpin.FatalIfError(apply("GET", (*getURLURL).String()), "GET failed") + + case "post": + kingpin.FatalIfError(applyPOST(), "POST failed") + } +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/modular/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/modular/main.go new file mode 100644 index 0000000..34cfa0b --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/modular/main.go @@ -0,0 +1,30 @@ +package main + +import ( + "fmt" + "os" + + "gopkg.in/alecthomas/kingpin.v2" +) + +// Context for "ls" command +type LsCommand struct { + All bool +} + +func (l *LsCommand) run(c *kingpin.ParseContext) error { + fmt.Printf("all=%v\n", l.All) + return nil +} + +func configureLsCommand(app *kingpin.Application) { + c := &LsCommand{} + ls := app.Command("ls", "List files.").Action(c.run) + ls.Flag("all", "List all files.").Short('a').BoolVar(&c.All) +} + +func main() { + app := kingpin.New("modular", "My modular application.") + configureLsCommand(app) + kingpin.MustParse(app.Parse(os.Args[1:])) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/ping/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/ping/main.go new file mode 100644 index 0000000..41ea263 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/_examples/ping/main.go @@ -0,0 +1,20 @@ +package main + +import ( + "fmt" + + "gopkg.in/alecthomas/kingpin.v2" +) + +var ( + debug = kingpin.Flag("debug", "Enable debug mode.").Bool() + timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").OverrideDefaultFromEnvar("PING_TIMEOUT").Required().Short('t').Duration() + ip = kingpin.Arg("ip", "IP address to ping.").Required().IP() + count = kingpin.Arg("count", "Number of packets to send").Int() +) + +func main() { + kingpin.Version("0.0.1") + kingpin.Parse() + fmt.Printf("Would ping: %s with timeout %s and count %d", *ip, *timeout, *count) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go b/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go new file mode 100644 index 0000000..72d6cbd --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go @@ -0,0 +1,42 @@ +package kingpin + +// Action callback executed at various stages after all values are populated. +// The application, commands, arguments and flags all have corresponding +// actions. 
+type Action func(*ParseContext) error + +type actionMixin struct { + actions []Action + preActions []Action +} + +type actionApplier interface { + applyActions(*ParseContext) error + applyPreActions(*ParseContext) error +} + +func (a *actionMixin) addAction(action Action) { + a.actions = append(a.actions, action) +} + +func (a *actionMixin) addPreAction(action Action) { + a.preActions = append(a.preActions, action) +} + +func (a *actionMixin) applyActions(context *ParseContext) error { + for _, action := range a.actions { + if err := action(context); err != nil { + return err + } + } + return nil +} + +func (a *actionMixin) applyPreActions(context *ParseContext) error { + for _, preAction := range a.preActions { + if err := preAction(context); err != nil { + return err + } + } + return nil +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/app.go b/vendor/gopkg.in/alecthomas/kingpin.v2/app.go new file mode 100644 index 0000000..9909131 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/app.go @@ -0,0 +1,686 @@ +package kingpin + +import ( + "fmt" + "io" + "os" + "regexp" + "strings" +) + +var ( + ErrCommandNotSpecified = fmt.Errorf("command not specified") +) + +var ( + envarTransformRegexp = regexp.MustCompile(`[^a-zA-Z_]+`) +) + +type ApplicationValidator func(*Application) error + +// An Application contains the definitions of flags, arguments and commands +// for an application. +type Application struct { + cmdMixin + initialized bool + + Name string + Help string + + author string + version string + writer io.Writer // Destination for usage and errors. + usageTemplate string + validator ApplicationValidator + terminate func(status int) // See Terminate() + noInterspersed bool // can flags be interspersed with args (or must they come first) + defaultEnvars bool + completion bool + + // Help flag. Exposed for user customisation. + HelpFlag *FlagClause + // Help command. Exposed for user customisation. May be nil. + HelpCommand *CmdClause + // Version flag. Exposed for user customisation. May be nil. + VersionFlag *FlagClause +} + +// New creates a new Kingpin application instance. 
+func New(name, help string) *Application { + a := &Application{ + Name: name, + Help: help, + writer: os.Stderr, + usageTemplate: DefaultUsageTemplate, + terminate: os.Exit, + } + a.flagGroup = newFlagGroup() + a.argGroup = newArgGroup() + a.cmdGroup = newCmdGroup(a) + a.HelpFlag = a.Flag("help", "Show context-sensitive help (also try --help-long and --help-man).") + a.HelpFlag.Bool() + a.Flag("help-long", "Generate long help.").Hidden().PreAction(a.generateLongHelp).Bool() + a.Flag("help-man", "Generate a man page.").Hidden().PreAction(a.generateManPage).Bool() + a.Flag("completion-bash", "Output possible completions for the given args.").Hidden().BoolVar(&a.completion) + a.Flag("completion-script-bash", "Generate completion script for bash.").Hidden().PreAction(a.generateBashCompletionScript).Bool() + a.Flag("completion-script-zsh", "Generate completion script for ZSH.").Hidden().PreAction(a.generateZSHCompletionScript).Bool() + + return a +} + +func (a *Application) generateLongHelp(c *ParseContext) error { + a.Writer(os.Stdout) + if err := a.UsageForContextWithTemplate(c, 2, LongHelpTemplate); err != nil { + return err + } + a.terminate(0) + return nil +} + +func (a *Application) generateManPage(c *ParseContext) error { + a.Writer(os.Stdout) + if err := a.UsageForContextWithTemplate(c, 2, ManPageTemplate); err != nil { + return err + } + a.terminate(0) + return nil +} + +func (a *Application) generateBashCompletionScript(c *ParseContext) error { + a.Writer(os.Stdout) + if err := a.UsageForContextWithTemplate(c, 2, BashCompletionTemplate); err != nil { + return err + } + a.terminate(0) + return nil +} + +func (a *Application) generateZSHCompletionScript(c *ParseContext) error { + a.Writer(os.Stdout) + if err := a.UsageForContextWithTemplate(c, 2, ZshCompletionTemplate); err != nil { + return err + } + a.terminate(0) + return nil +} + +// DefaultEnvars configures all flags (that do not already have an associated +// envar) to use a default environment variable in the form "<app>_<flag>". +// +// For example, if the application is named "foo" and a flag is named "bar- +// waz" the environment variable will be: "FOO_BAR_WAZ". +func (a *Application) DefaultEnvars() *Application { + a.defaultEnvars = true + return a +} + +// Terminate specifies the termination handler. Defaults to os.Exit(status). +// If nil is passed, a no-op function will be used. +func (a *Application) Terminate(terminate func(int)) *Application { + if terminate == nil { + terminate = func(int) {} + } + a.terminate = terminate + return a +} + +// Specify the writer to use for usage and errors. Defaults to os.Stderr. +func (a *Application) Writer(w io.Writer) *Application { + a.writer = w + return a +} + +// UsageTemplate specifies the text template to use when displaying usage +// information. The default is UsageTemplate. +func (a *Application) UsageTemplate(template string) *Application { + a.usageTemplate = template + return a +} + +// Validate sets a validation function to run when parsing. +func (a *Application) Validate(validator ApplicationValidator) *Application { + a.validator = validator + return a +} + +// ParseContext parses the given command line and returns the fully populated +// ParseContext.
+func (a *Application) ParseContext(args []string) (*ParseContext, error) {
+	return a.parseContext(false, args)
+}
+
+func (a *Application) parseContext(ignoreDefault bool, args []string) (*ParseContext, error) {
+	if err := a.init(); err != nil {
+		return nil, err
+	}
+	context := tokenize(args, ignoreDefault)
+	err := parse(context, a)
+	return context, err
+}
+
+// Parse parses command-line arguments. It returns the selected command and an
+// error. The selected command will be a space-separated subcommand, if
+// subcommands have been configured.
+//
+// This will populate all flag and argument values, call all callbacks, and so
+// on.
+func (a *Application) Parse(args []string) (command string, err error) {
+
+	context, parseErr := a.ParseContext(args)
+	selected := []string{}
+	var setValuesErr error
+
+	if context == nil {
+		// Since we do not return the parse error immediately, the context may
+		// come back nil. Protect against that.
+		return "", parseErr
+	}
+
+	if err = a.setDefaults(context); err != nil {
+		return "", err
+	}
+
+	selected, setValuesErr = a.setValues(context)
+
+	if err = a.applyPreActions(context, !a.completion); err != nil {
+		return "", err
+	}
+
+	if a.completion {
+		a.generateBashCompletion(context)
+		a.terminate(0)
+	} else {
+		if parseErr != nil {
+			return "", parseErr
+		}
+
+		a.maybeHelp(context)
+		if !context.EOL() {
+			return "", fmt.Errorf("unexpected argument '%s'", context.Peek())
+		}
+
+		if setValuesErr != nil {
+			return "", setValuesErr
+		}
+
+		command, err = a.execute(context, selected)
+		if err == ErrCommandNotSpecified {
+			a.writeUsage(context, nil)
+		}
+	}
+	return command, err
+}
+
+func (a *Application) writeUsage(context *ParseContext, err error) {
+	if err != nil {
+		a.Errorf("%s", err)
+	}
+	if err := a.UsageForContext(context); err != nil {
+		panic(err)
+	}
+	a.terminate(1)
+}
+
+func (a *Application) maybeHelp(context *ParseContext) {
+	for _, element := range context.Elements {
+		if flag, ok := element.Clause.(*FlagClause); ok && flag == a.HelpFlag {
+			a.writeUsage(context, nil)
+		}
+	}
+}
+
+// findCommandFromArgs finds a command (if any) from the given command line arguments.
+func (a *Application) findCommandFromArgs(args []string) (command string, err error) {
+	if err := a.init(); err != nil {
+		return "", err
+	}
+	context := tokenize(args, false)
+	if _, err := a.parse(context); err != nil {
+		return "", err
+	}
+	return a.findCommandFromContext(context), nil
+}
+
+// findCommandFromContext finds a command (if any) from a parsed context.
+func (a *Application) findCommandFromContext(context *ParseContext) string {
+	commands := []string{}
+	for _, element := range context.Elements {
+		if c, ok := element.Clause.(*CmdClause); ok {
+			commands = append(commands, c.name)
+		}
+	}
+	return strings.Join(commands, " ")
+}
+
+// Version adds a --version flag for displaying the application version.
+func (a *Application) Version(version string) *Application {
+	a.version = version
+	a.VersionFlag = a.Flag("version", "Show application version.").PreAction(func(*ParseContext) error {
+		fmt.Fprintln(a.writer, version)
+		a.terminate(0)
+		return nil
+	})
+	a.VersionFlag.Bool()
+	return a
+}
+
+// Author sets the author name for usage templates.
+func (a *Application) Author(author string) *Application {
+	a.author = author
+	return a
+}
+
+// Action adds a callback to call when all values are populated and parsing is
+// complete, but before any command, flag or argument actions.
+//
+// All Action() callbacks are called in the order they are encountered on the
+// command line.
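+//
+// Sketch of a typical callback (the message is illustrative):
+//
+//	app.Action(func(c *kingpin.ParseContext) error {
+//		fmt.Println("values are populated; command actions run next")
+//		return nil
+//	})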
+func (a *Application) Action(action Action) *Application {
+	a.addAction(action)
+	return a
+}
+
+// PreAction adds a callback to call after parsing completes but before validation and execution.
+func (a *Application) PreAction(action Action) *Application {
+	a.addPreAction(action)
+	return a
+}
+
+// Command adds a new top-level command.
+func (a *Application) Command(name, help string) *CmdClause {
+	return a.addCommand(name, help)
+}
+
+// Interspersed controls whether flags can be interspersed with positional arguments.
+//
+// true (the default) means that they can, false means that all the flags must appear before the first positional argument.
+func (a *Application) Interspersed(interspersed bool) *Application {
+	a.noInterspersed = !interspersed
+	return a
+}
+
+func (a *Application) defaultEnvarPrefix() string {
+	if a.defaultEnvars {
+		return a.Name
+	}
+	return ""
+}
+
+func (a *Application) init() error {
+	if a.initialized {
+		return nil
+	}
+	if a.cmdGroup.have() && a.argGroup.have() {
+		return fmt.Errorf("can't mix top-level Arg()s with Command()s")
+	}
+
+	// If we have subcommands, add a help command at the top-level.
+	if a.cmdGroup.have() {
+		var command []string
+		a.HelpCommand = a.Command("help", "Show help.").PreAction(func(context *ParseContext) error {
+			a.Usage(command)
+			a.terminate(0)
+			return nil
+		})
+		a.HelpCommand.Arg("command", "Show help on command.").StringsVar(&command)
+		// Make help the first command.
+		l := len(a.commandOrder)
+		a.commandOrder = append(a.commandOrder[l-1:l], a.commandOrder[:l-1]...)
+	}
+
+	if err := a.flagGroup.init(a.defaultEnvarPrefix()); err != nil {
+		return err
+	}
+	if err := a.cmdGroup.init(); err != nil {
+		return err
+	}
+	if err := a.argGroup.init(); err != nil {
+		return err
+	}
+	for _, cmd := range a.commands {
+		if err := cmd.init(); err != nil {
+			return err
+		}
+	}
+	flagGroups := []*flagGroup{a.flagGroup}
+	for _, cmd := range a.commandOrder {
+		if err := checkDuplicateFlags(cmd, flagGroups); err != nil {
+			return err
+		}
+	}
+	a.initialized = true
+	return nil
+}
+
+// checkDuplicateFlags recursively checks commands for duplicate flags.
+func checkDuplicateFlags(current *CmdClause, flagGroups []*flagGroup) error {
+	// Check for duplicates.
+	for _, flags := range flagGroups {
+		for _, flag := range current.flagOrder {
+			if flag.shorthand != 0 {
+				if _, ok := flags.short[string(flag.shorthand)]; ok {
+					return fmt.Errorf("duplicate short flag -%c", flag.shorthand)
+				}
+			}
+			if _, ok := flags.long[flag.name]; ok {
+				return fmt.Errorf("duplicate long flag --%s", flag.name)
+			}
+		}
+	}
+	flagGroups = append(flagGroups, current.flagGroup)
+	// Check subcommands.
+ for _, subcmd := range current.commandOrder { + if err := checkDuplicateFlags(subcmd, flagGroups); err != nil { + return err + } + } + return nil +} + +func (a *Application) execute(context *ParseContext, selected []string) (string, error) { + var err error + + if err = a.validateRequired(context); err != nil { + return "", err + } + + if err = a.applyValidators(context); err != nil { + return "", err + } + + if err = a.applyActions(context); err != nil { + return "", err + } + + command := strings.Join(selected, " ") + if command == "" && a.cmdGroup.have() { + return "", ErrCommandNotSpecified + } + return command, err +} + +func (a *Application) setDefaults(context *ParseContext) error { + flagElements := map[string]*ParseElement{} + for _, element := range context.Elements { + if flag, ok := element.Clause.(*FlagClause); ok { + flagElements[flag.name] = element + } + } + + argElements := map[string]*ParseElement{} + for _, element := range context.Elements { + if arg, ok := element.Clause.(*ArgClause); ok { + argElements[arg.name] = element + } + } + + // Check required flags and set defaults. + for _, flag := range context.flags.long { + if flagElements[flag.name] == nil { + if err := flag.setDefault(); err != nil { + return err + } + } + } + + for _, arg := range context.arguments.args { + if argElements[arg.name] == nil { + // Set defaults, if any. + for _, defaultValue := range arg.defaultValues { + if err := arg.value.Set(defaultValue); err != nil { + return err + } + } + } + } + + return nil +} + +func (a *Application) validateRequired(context *ParseContext) error { + flagElements := map[string]*ParseElement{} + for _, element := range context.Elements { + if flag, ok := element.Clause.(*FlagClause); ok { + flagElements[flag.name] = element + } + } + + argElements := map[string]*ParseElement{} + for _, element := range context.Elements { + if arg, ok := element.Clause.(*ArgClause); ok { + argElements[arg.name] = element + } + } + + // Check required flags and set defaults. + for _, flag := range context.flags.long { + if flagElements[flag.name] == nil { + // Check required flags were provided. + if flag.needsValue() { + return fmt.Errorf("required flag --%s not provided", flag.name) + } + } + } + + for _, arg := range context.arguments.args { + if argElements[arg.name] == nil { + if arg.required { + return fmt.Errorf("required argument '%s' not provided", arg.name) + } + } + } + return nil +} + +func (a *Application) setValues(context *ParseContext) (selected []string, err error) { + // Set all arg and flag values. 
+ var ( + lastCmd *CmdClause + flagSet = map[string]struct{}{} + ) + for _, element := range context.Elements { + switch clause := element.Clause.(type) { + case *FlagClause: + if _, ok := flagSet[clause.name]; ok { + if v, ok := clause.value.(repeatableFlag); !ok || !v.IsCumulative() { + return nil, fmt.Errorf("flag '%s' cannot be repeated", clause.name) + } + } + if err = clause.value.Set(*element.Value); err != nil { + return + } + flagSet[clause.name] = struct{}{} + + case *ArgClause: + if err = clause.value.Set(*element.Value); err != nil { + return + } + + case *CmdClause: + if clause.validator != nil { + if err = clause.validator(clause); err != nil { + return + } + } + selected = append(selected, clause.name) + lastCmd = clause + } + } + + if lastCmd != nil && len(lastCmd.commands) > 0 { + return nil, fmt.Errorf("must select a subcommand of '%s'", lastCmd.FullCommand()) + } + + return +} + +func (a *Application) applyValidators(context *ParseContext) (err error) { + // Call command validation functions. + for _, element := range context.Elements { + if cmd, ok := element.Clause.(*CmdClause); ok && cmd.validator != nil { + if err = cmd.validator(cmd); err != nil { + return err + } + } + } + + if a.validator != nil { + err = a.validator(a) + } + return err +} + +func (a *Application) applyPreActions(context *ParseContext, dispatch bool) error { + if err := a.actionMixin.applyPreActions(context); err != nil { + return err + } + // Dispatch to actions. + if dispatch { + for _, element := range context.Elements { + if applier, ok := element.Clause.(actionApplier); ok { + if err := applier.applyPreActions(context); err != nil { + return err + } + } + } + } + + return nil +} + +func (a *Application) applyActions(context *ParseContext) error { + if err := a.actionMixin.applyActions(context); err != nil { + return err + } + // Dispatch to actions. + for _, element := range context.Elements { + if applier, ok := element.Clause.(actionApplier); ok { + if err := applier.applyActions(context); err != nil { + return err + } + } + } + return nil +} + +// Errorf prints an error message to w in the format ": error: ". +func (a *Application) Errorf(format string, args ...interface{}) { + fmt.Fprintf(a.writer, a.Name+": error: "+format+"\n", args...) +} + +// Fatalf writes a formatted error to w then terminates with exit status 1. +func (a *Application) Fatalf(format string, args ...interface{}) { + a.Errorf(format, args...) + a.terminate(1) +} + +// FatalUsage prints an error message followed by usage information, then +// exits with a non-zero status. +func (a *Application) FatalUsage(format string, args ...interface{}) { + a.Errorf(format, args...) + a.Usage([]string{}) + a.terminate(1) +} + +// FatalUsageContext writes a printf formatted error message to w, then usage +// information for the given ParseContext, before exiting. +func (a *Application) FatalUsageContext(context *ParseContext, format string, args ...interface{}) { + a.Errorf(format, args...) + if err := a.UsageForContext(context); err != nil { + panic(err) + } + a.terminate(1) +} + +// FatalIfError prints an error and exits if err is not nil. The error is printed +// with the given formatted string, if any. +func (a *Application) FatalIfError(err error, format string, args ...interface{}) { + if err != nil { + prefix := "" + if format != "" { + prefix = fmt.Sprintf(format, args...) 
+ ": " + } + a.Errorf(prefix+"%s", err) + a.terminate(1) + } +} + +func (a *Application) completionOptions(context *ParseContext) []string { + args := context.rawArgs + + var ( + currArg string + prevArg string + target cmdMixin + ) + + numArgs := len(args) + if numArgs > 1 { + args = args[1:] + currArg = args[len(args)-1] + } + if numArgs > 2 { + prevArg = args[len(args)-2] + } + + target = a.cmdMixin + if context.SelectedCommand != nil { + // A subcommand was in use. We will use it as the target + target = context.SelectedCommand.cmdMixin + } + + if (currArg != "" && strings.HasPrefix(currArg, "--")) || strings.HasPrefix(prevArg, "--") { + // Perform completion for A flag. The last/current argument started with "-" + var ( + flagName string // The name of a flag if given (could be half complete) + flagValue string // The value assigned to a flag (if given) (could be half complete) + ) + + if strings.HasPrefix(prevArg, "--") && !strings.HasPrefix(currArg, "--") { + // Matches: ./myApp --flag value + // Wont Match: ./myApp --flag -- + flagName = prevArg[2:] // Strip the "--" + flagValue = currArg + } else if strings.HasPrefix(currArg, "--") { + // Matches: ./myApp --flag -- + // Matches: ./myApp --flag somevalue -- + // Matches: ./myApp -- + flagName = currArg[2:] // Strip the "--" + } + + options, flagMatched, valueMatched := target.FlagCompletion(flagName, flagValue) + if valueMatched { + // Value Matched. Show cmdCompletions + return target.CmdCompletion() + } + + // Add top level flags if we're not at the top level and no match was found. + if context.SelectedCommand != nil && flagMatched == false { + topOptions, topFlagMatched, topValueMatched := a.FlagCompletion(flagName, flagValue) + if topValueMatched { + // Value Matched. Back to cmdCompletions + return target.CmdCompletion() + } + + if topFlagMatched { + // Top level had a flag which matched the input. Return it's options. + options = topOptions + } else { + // Add top level flags + options = append(options, topOptions...) + } + } + return options + } else { + // Perform completion for sub commands. 
+ return target.CmdCompletion() + } +} + +func (a *Application) generateBashCompletion(context *ParseContext) { + options := a.completionOptions(context) + fmt.Printf("%s", strings.Join(options, "\n")) +} + +func envarTransform(name string) string { + return strings.ToUpper(envarTransformRegexp.ReplaceAllString(name, "_")) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/app_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/app_test.go new file mode 100644 index 0000000..75860f3 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/app_test.go @@ -0,0 +1,349 @@ +package kingpin + +import ( + "io/ioutil" + + "github.com/alecthomas/assert" + + "sort" + "strings" + "testing" + "time" +) + +func newTestApp() *Application { + return New("test", "").Terminate(nil) +} + +func TestCommander(t *testing.T) { + c := newTestApp() + ping := c.Command("ping", "Ping an IP address.") + pingTTL := ping.Flag("ttl", "TTL for ICMP packets").Short('t').Default("5s").Duration() + + selected, err := c.Parse([]string{"ping"}) + assert.NoError(t, err) + assert.Equal(t, "ping", selected) + assert.Equal(t, 5*time.Second, *pingTTL) + + selected, err = c.Parse([]string{"ping", "--ttl=10s"}) + assert.NoError(t, err) + assert.Equal(t, "ping", selected) + assert.Equal(t, 10*time.Second, *pingTTL) +} + +func TestRequiredFlags(t *testing.T) { + c := newTestApp() + c.Flag("a", "a").String() + c.Flag("b", "b").Required().String() + + _, err := c.Parse([]string{"--a=foo"}) + assert.Error(t, err) + _, err = c.Parse([]string{"--b=foo"}) + assert.NoError(t, err) +} + +func TestRepeatableFlags(t *testing.T) { + c := newTestApp() + c.Flag("a", "a").String() + c.Flag("b", "b").Strings() + _, err := c.Parse([]string{"--a=foo", "--a=bar"}) + assert.Error(t, err) + _, err = c.Parse([]string{"--b=foo", "--b=bar"}) + assert.NoError(t, err) +} + +func TestInvalidDefaultFlagValueErrors(t *testing.T) { + c := newTestApp() + c.Flag("foo", "foo").Default("a").Int() + _, err := c.Parse([]string{}) + assert.Error(t, err) +} + +func TestInvalidDefaultArgValueErrors(t *testing.T) { + c := newTestApp() + cmd := c.Command("cmd", "cmd") + cmd.Arg("arg", "arg").Default("one").Int() + _, err := c.Parse([]string{"cmd"}) + assert.Error(t, err) +} + +func TestArgsRequiredAfterNonRequiredErrors(t *testing.T) { + c := newTestApp() + cmd := c.Command("cmd", "") + cmd.Arg("a", "a").String() + cmd.Arg("b", "b").Required().String() + _, err := c.Parse([]string{"cmd"}) + assert.Error(t, err) +} + +func TestArgsMultipleRequiredThenNonRequired(t *testing.T) { + c := newTestApp().Writer(ioutil.Discard) + cmd := c.Command("cmd", "") + cmd.Arg("a", "a").Required().String() + cmd.Arg("b", "b").Required().String() + cmd.Arg("c", "c").String() + cmd.Arg("d", "d").String() + _, err := c.Parse([]string{"cmd", "a", "b"}) + assert.NoError(t, err) + _, err = c.Parse([]string{}) + assert.Error(t, err) +} + +func TestDispatchCallbackIsCalled(t *testing.T) { + dispatched := false + c := newTestApp() + c.Command("cmd", "").Action(func(*ParseContext) error { + dispatched = true + return nil + }) + + _, err := c.Parse([]string{"cmd"}) + assert.NoError(t, err) + assert.True(t, dispatched) +} + +func TestTopLevelArgWorks(t *testing.T) { + c := newTestApp() + s := c.Arg("arg", "help").String() + _, err := c.Parse([]string{"foo"}) + assert.NoError(t, err) + assert.Equal(t, "foo", *s) +} + +func TestTopLevelArgCantBeUsedWithCommands(t *testing.T) { + c := newTestApp() + c.Arg("arg", "help").String() + c.Command("cmd", "help") + _, err := c.Parse([]string{}) + assert.Error(t, 
err) +} + +func TestTooManyArgs(t *testing.T) { + a := newTestApp() + a.Arg("a", "").String() + _, err := a.Parse([]string{"a", "b"}) + assert.Error(t, err) +} + +func TestTooManyArgsAfterCommand(t *testing.T) { + a := newTestApp() + a.Command("a", "") + assert.NoError(t, a.init()) + _, err := a.Parse([]string{"a", "b"}) + assert.Error(t, err) +} + +func TestArgsLooksLikeFlagsWithConsumeRemainder(t *testing.T) { + a := newTestApp() + a.Arg("opts", "").Required().Strings() + _, err := a.Parse([]string{"hello", "-world"}) + assert.Error(t, err) +} + +func TestCommandParseDoesNotResetFlagsToDefault(t *testing.T) { + app := newTestApp() + flag := app.Flag("flag", "").Default("default").String() + app.Command("cmd", "") + + _, err := app.Parse([]string{"--flag=123", "cmd"}) + assert.NoError(t, err) + assert.Equal(t, "123", *flag) +} + +func TestCommandParseDoesNotFailRequired(t *testing.T) { + app := newTestApp() + flag := app.Flag("flag", "").Required().String() + app.Command("cmd", "") + + _, err := app.Parse([]string{"cmd", "--flag=123"}) + assert.NoError(t, err) + assert.Equal(t, "123", *flag) +} + +func TestSelectedCommand(t *testing.T) { + app := newTestApp() + c0 := app.Command("c0", "") + c0.Command("c1", "") + s, err := app.Parse([]string{"c0", "c1"}) + assert.NoError(t, err) + assert.Equal(t, "c0 c1", s) +} + +func TestSubCommandRequired(t *testing.T) { + app := newTestApp() + c0 := app.Command("c0", "") + c0.Command("c1", "") + _, err := app.Parse([]string{"c0"}) + assert.Error(t, err) +} + +func TestInterspersedFalse(t *testing.T) { + app := newTestApp().Interspersed(false) + a1 := app.Arg("a1", "").String() + a2 := app.Arg("a2", "").String() + f1 := app.Flag("flag", "").String() + + _, err := app.Parse([]string{"a1", "--flag=flag"}) + assert.NoError(t, err) + assert.Equal(t, "a1", *a1) + assert.Equal(t, "--flag=flag", *a2) + assert.Equal(t, "", *f1) +} + +func TestInterspersedTrue(t *testing.T) { + // test once with the default value and once with explicit true + for i := 0; i < 2; i++ { + app := newTestApp() + if i != 0 { + t.Log("Setting explicit") + app.Interspersed(true) + } else { + t.Log("Using default") + } + a1 := app.Arg("a1", "").String() + a2 := app.Arg("a2", "").String() + f1 := app.Flag("flag", "").String() + + _, err := app.Parse([]string{"a1", "--flag=flag"}) + assert.NoError(t, err) + assert.Equal(t, "a1", *a1) + assert.Equal(t, "", *a2) + assert.Equal(t, "flag", *f1) + } +} + +func TestDefaultEnvars(t *testing.T) { + a := New("some-app", "").Terminate(nil).DefaultEnvars() + f0 := a.Flag("some-flag", "") + f0.Bool() + f1 := a.Flag("some-other-flag", "").NoEnvar() + f1.Bool() + _, err := a.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, "SOME_APP_SOME_FLAG", f0.envar) + assert.Equal(t, "", f1.envar) +} + +func TestBashCompletionOptionsWithEmptyApp(t *testing.T) { + a := newTestApp() + context, err := a.ParseContext([]string{"--completion-bash"}) + if err != nil { + t.Errorf("Unexpected error whilst parsing context: [%v]", err) + } + args := a.completionOptions(context) + assert.Equal(t, []string(nil), args) +} + +func TestBashCompletionOptions(t *testing.T) { + a := newTestApp() + a.Command("one", "") + a.Flag("flag-0", "").String() + a.Flag("flag-1", "").HintOptions("opt1", "opt2", "opt3").String() + + two := a.Command("two", "") + two.Flag("flag-2", "").String() + two.Flag("flag-3", "").HintOptions("opt4", "opt5", "opt6").String() + + cases := []struct { + Args string + ExpectedOptions []string + }{ + { + Args: "--completion-bash", + ExpectedOptions: 
[]string{"help", "one", "two"}, + }, + { + Args: "--completion-bash --", + ExpectedOptions: []string{"--flag-0", "--flag-1", "--help"}, + }, + { + Args: "--completion-bash --fla", + ExpectedOptions: []string{"--flag-0", "--flag-1", "--help"}, + }, + { + // No options available for flag-0, return to cmd completion + Args: "--completion-bash --flag-0", + ExpectedOptions: []string{"help", "one", "two"}, + }, + { + Args: "--completion-bash --flag-0 --", + ExpectedOptions: []string{"--flag-0", "--flag-1", "--help"}, + }, + { + Args: "--completion-bash --flag-1", + ExpectedOptions: []string{"opt1", "opt2", "opt3"}, + }, + { + Args: "--completion-bash --flag-1 opt", + ExpectedOptions: []string{"opt1", "opt2", "opt3"}, + }, + { + Args: "--completion-bash --flag-1 opt1", + ExpectedOptions: []string{"help", "one", "two"}, + }, + { + Args: "--completion-bash --flag-1 opt1 --", + ExpectedOptions: []string{"--flag-0", "--flag-1", "--help"}, + }, + + // Try Subcommand + { + Args: "--completion-bash two", + ExpectedOptions: []string(nil), + }, + { + Args: "--completion-bash two --", + ExpectedOptions: []string{"--help", "--flag-2", "--flag-3", "--flag-0", "--flag-1"}, + }, + { + Args: "--completion-bash two --flag", + ExpectedOptions: []string{"--help", "--flag-2", "--flag-3", "--flag-0", "--flag-1"}, + }, + { + Args: "--completion-bash two --flag-2", + ExpectedOptions: []string(nil), + }, + { + // Top level flags carry downwards + Args: "--completion-bash two --flag-1", + ExpectedOptions: []string{"opt1", "opt2", "opt3"}, + }, + { + // Top level flags carry downwards + Args: "--completion-bash two --flag-1 opt", + ExpectedOptions: []string{"opt1", "opt2", "opt3"}, + }, + { + // Top level flags carry downwards + Args: "--completion-bash two --flag-1 opt1", + ExpectedOptions: []string(nil), + }, + { + Args: "--completion-bash two --flag-3", + ExpectedOptions: []string{"opt4", "opt5", "opt6"}, + }, + { + Args: "--completion-bash two --flag-3 opt", + ExpectedOptions: []string{"opt4", "opt5", "opt6"}, + }, + { + Args: "--completion-bash two --flag-3 opt4", + ExpectedOptions: []string(nil), + }, + { + Args: "--completion-bash two --flag-3 opt4 --", + ExpectedOptions: []string{"--help", "--flag-2", "--flag-3", "--flag-0", "--flag-1"}, + }, + } + + for _, c := range cases { + context, _ := a.ParseContext(strings.Split(c.Args, " ")) + args := a.completionOptions(context) + + sort.Strings(args) + sort.Strings(c.ExpectedOptions) + + assert.Equal(t, c.ExpectedOptions, args, "Expected != Actual: [%v] != [%v]. \nInput was: [%v]", c.ExpectedOptions, args, c.Args) + } + +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/args.go b/vendor/gopkg.in/alecthomas/kingpin.v2/args.go new file mode 100644 index 0000000..9dd0c0e --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/args.go @@ -0,0 +1,118 @@ +package kingpin + +import "fmt" + +type argGroup struct { + args []*ArgClause +} + +func newArgGroup() *argGroup { + return &argGroup{} +} + +func (a *argGroup) have() bool { + return len(a.args) > 0 +} + +// GetArg gets an argument definition. +// +// This allows existing arguments to be modified after definition but before parsing. Useful for +// modular applications. 
+func (a *argGroup) GetArg(name string) *ArgClause { + for _, arg := range a.args { + if arg.name == name { + return arg + } + } + return nil +} + +func (a *argGroup) Arg(name, help string) *ArgClause { + arg := newArg(name, help) + a.args = append(a.args, arg) + return arg +} + +func (a *argGroup) init() error { + required := 0 + seen := map[string]struct{}{} + previousArgMustBeLast := false + for i, arg := range a.args { + if previousArgMustBeLast { + return fmt.Errorf("Args() can't be followed by another argument '%s'", arg.name) + } + if arg.consumesRemainder() { + previousArgMustBeLast = true + } + if _, ok := seen[arg.name]; ok { + return fmt.Errorf("duplicate argument '%s'", arg.name) + } + seen[arg.name] = struct{}{} + if arg.required && required != i { + return fmt.Errorf("required arguments found after non-required") + } + if arg.required { + required++ + } + if err := arg.init(); err != nil { + return err + } + } + return nil +} + +type ArgClause struct { + actionMixin + parserMixin + name string + help string + defaultValues []string + required bool +} + +func newArg(name, help string) *ArgClause { + a := &ArgClause{ + name: name, + help: help, + } + return a +} + +func (a *ArgClause) consumesRemainder() bool { + if r, ok := a.value.(remainderArg); ok { + return r.IsCumulative() + } + return false +} + +// Required arguments must be input by the user. They can not have a Default() value provided. +func (a *ArgClause) Required() *ArgClause { + a.required = true + return a +} + +// Default values for this argument. They *must* be parseable by the value of the argument. +func (a *ArgClause) Default(values ...string) *ArgClause { + a.defaultValues = values + return a +} + +func (a *ArgClause) Action(action Action) *ArgClause { + a.addAction(action) + return a +} + +func (a *ArgClause) PreAction(action Action) *ArgClause { + a.addPreAction(action) + return a +} + +func (a *ArgClause) init() error { + if a.required && len(a.defaultValues) > 0 { + return fmt.Errorf("required argument '%s' with unusable default value", a.name) + } + if a.value == nil { + return fmt.Errorf("no parser defined for arg '%s'", a.name) + } + return nil +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/args_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/args_test.go new file mode 100644 index 0000000..d9668c0 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/args_test.go @@ -0,0 +1,57 @@ +package kingpin + +import ( + "io/ioutil" + "testing" + + "github.com/alecthomas/assert" +) + +func TestArgRemainder(t *testing.T) { + app := New("test", "") + v := app.Arg("test", "").Strings() + args := []string{"hello", "world"} + _, err := app.Parse(args) + assert.NoError(t, err) + assert.Equal(t, args, *v) +} + +func TestArgRemainderErrorsWhenNotLast(t *testing.T) { + a := newArgGroup() + a.Arg("test", "").Strings() + a.Arg("test2", "").String() + assert.Error(t, a.init()) +} + +func TestArgMultipleRequired(t *testing.T) { + terminated := false + app := New("test", "") + app.Version("0.0.0").Writer(ioutil.Discard) + app.Arg("a", "").Required().String() + app.Arg("b", "").Required().String() + app.Terminate(func(int) { terminated = true }) + + _, err := app.Parse([]string{}) + assert.Error(t, err) + _, err = app.Parse([]string{"A"}) + assert.Error(t, err) + _, err = app.Parse([]string{"A", "B"}) + assert.NoError(t, err) + _, err = app.Parse([]string{"--version"}) + assert.True(t, terminated) +} + +func TestInvalidArgsDefaultCanBeOverridden(t *testing.T) { + app := New("test", "") + app.Arg("a", 
"").Default("invalid").Bool() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TesArgtMultipleValuesDefault(t *testing.T) { + app := New("test", "") + a := app.Arg("a", "").Default("default1", "default2").Strings() + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, []string{"default1", "default2"}, *a) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go new file mode 100644 index 0000000..b8cbb9b --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go @@ -0,0 +1,244 @@ +package kingpin + +import ( + "fmt" + "strings" +) + +type cmdMixin struct { + *flagGroup + *argGroup + *cmdGroup + actionMixin +} + +func (c *cmdMixin) CmdCompletion() []string { + rv := []string{} + if len(c.cmdGroup.commandOrder) > 0 { + // This command has subcommands. We should + // show these to the user. + for _, option := range c.cmdGroup.commandOrder { + rv = append(rv, option.name) + } + } else { + // No subcommands + rv = nil + } + return rv +} + +func (c *cmdMixin) FlagCompletion(flagName string, flagValue string) (choices []string, flagMatch bool, optionMatch bool) { + // Check if flagName matches a known flag. + // If it does, show the options for the flag + // Otherwise, show all flags + + options := []string{} + + for _, flag := range c.flagGroup.flagOrder { + // Loop through each flag and determine if a match exists + if flag.name == flagName { + // User typed entire flag. Need to look for flag options. + options = flag.resolveCompletions() + if len(options) == 0 { + // No Options to Choose From, Assume Match. + return options, true, true + } + + // Loop options to find if the user specified value matches + isPrefix := false + matched := false + + for _, opt := range options { + if flagValue == opt { + matched = true + } else if strings.HasPrefix(opt, flagValue) { + isPrefix = true + } + } + + // Matched Flag Directly + // Flag Value Not Prefixed, and Matched Directly + return options, true, !isPrefix && matched + } + + if !flag.hidden { + options = append(options, "--"+flag.name) + } + } + // No Flag directly matched. + return options, false, false + +} + +type cmdGroup struct { + app *Application + parent *CmdClause + commands map[string]*CmdClause + commandOrder []*CmdClause +} + +func (c *cmdGroup) defaultSubcommand() *CmdClause { + for _, cmd := range c.commandOrder { + if cmd.isDefault { + return cmd + } + } + return nil +} + +// GetArg gets a command definition. +// +// This allows existing commands to be modified after definition but before parsing. Useful for +// modular applications. +func (c *cmdGroup) GetCommand(name string) *CmdClause { + return c.commands[name] +} + +func newCmdGroup(app *Application) *cmdGroup { + return &cmdGroup{ + app: app, + commands: make(map[string]*CmdClause), + } +} + +func (c *cmdGroup) flattenedCommands() (out []*CmdClause) { + for _, cmd := range c.commandOrder { + if len(cmd.commands) == 0 { + out = append(out, cmd) + } + out = append(out, cmd.flattenedCommands()...) 
+ } + return +} + +func (c *cmdGroup) addCommand(name, help string) *CmdClause { + cmd := newCommand(c.app, name, help) + c.commands[name] = cmd + c.commandOrder = append(c.commandOrder, cmd) + return cmd +} + +func (c *cmdGroup) init() error { + seen := map[string]bool{} + if c.defaultSubcommand() != nil && !c.have() { + return fmt.Errorf("default subcommand %q provided but no subcommands defined", c.defaultSubcommand().name) + } + defaults := []string{} + for _, cmd := range c.commandOrder { + if cmd.isDefault { + defaults = append(defaults, cmd.name) + } + if seen[cmd.name] { + return fmt.Errorf("duplicate command %q", cmd.name) + } + seen[cmd.name] = true + for _, alias := range cmd.aliases { + if seen[alias] { + return fmt.Errorf("alias duplicates existing command %q", alias) + } + c.commands[alias] = cmd + } + if err := cmd.init(); err != nil { + return err + } + } + if len(defaults) > 1 { + return fmt.Errorf("more than one default subcommand exists: %s", strings.Join(defaults, ", ")) + } + return nil +} + +func (c *cmdGroup) have() bool { + return len(c.commands) > 0 +} + +type CmdClauseValidator func(*CmdClause) error + +// A CmdClause is a single top-level command. It encapsulates a set of flags +// and either subcommands or positional arguments. +type CmdClause struct { + cmdMixin + app *Application + name string + aliases []string + help string + isDefault bool + validator CmdClauseValidator + hidden bool +} + +func newCommand(app *Application, name, help string) *CmdClause { + c := &CmdClause{ + app: app, + name: name, + help: help, + } + c.flagGroup = newFlagGroup() + c.argGroup = newArgGroup() + c.cmdGroup = newCmdGroup(app) + return c +} + +// Add an Alias for this command. +func (c *CmdClause) Alias(name string) *CmdClause { + c.aliases = append(c.aliases, name) + return c +} + +// Validate sets a validation function to run when parsing. +func (c *CmdClause) Validate(validator CmdClauseValidator) *CmdClause { + c.validator = validator + return c +} + +func (c *CmdClause) FullCommand() string { + out := []string{c.name} + for p := c.parent; p != nil; p = p.parent { + out = append([]string{p.name}, out...) + } + return strings.Join(out, " ") +} + +// Command adds a new sub-command. +func (c *CmdClause) Command(name, help string) *CmdClause { + cmd := c.addCommand(name, help) + cmd.parent = c + return cmd +} + +// Default makes this command the default if commands don't match. 
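+//
+// For example (sketch; the command name is illustrative):
+//
+//	run := app.Command("run", "Run the service.").Default()
+//	// "myapp" with no command now behaves like "myapp run".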
+func (c *CmdClause) Default() *CmdClause { + c.isDefault = true + return c +} + +func (c *CmdClause) Action(action Action) *CmdClause { + c.addAction(action) + return c +} + +func (c *CmdClause) PreAction(action Action) *CmdClause { + c.addPreAction(action) + return c +} + +func (c *CmdClause) init() error { + if err := c.flagGroup.init(c.app.defaultEnvarPrefix()); err != nil { + return err + } + if c.argGroup.have() && c.cmdGroup.have() { + return fmt.Errorf("can't mix Arg()s with Command()s") + } + if err := c.argGroup.init(); err != nil { + return err + } + if err := c.cmdGroup.init(); err != nil { + return err + } + return nil +} + +func (c *CmdClause) Hidden() *CmdClause { + c.hidden = true + return c +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd/genvalues/main.go b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd/genvalues/main.go new file mode 100644 index 0000000..1411539 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd/genvalues/main.go @@ -0,0 +1,134 @@ +package main + +import ( + "encoding/json" + "os" + "os/exec" + "strings" + + "github.com/alecthomas/template" +) + +const ( + tmpl = `package kingpin + +// This file is autogenerated by "go generate .". Do not modify. + +{{range .}} +{{if not .NoValueParser}} +// -- {{.Type}} Value +type {{.|ValueName}} struct { v *{{.Type}} } + +func new{{.|Name}}Value(p *{{.Type}}) *{{.|ValueName}} { + return &{{.|ValueName}}{p} +} + +func (f *{{.|ValueName}}) Set(s string) error { + v, err := {{.Parser}} + if err == nil { + *f.v = ({{.Type}})(v) + } + return err +} + +func (f *{{.|ValueName}}) Get() interface{} { return ({{.Type}})(*f.v) } + +func (f *{{.|ValueName}}) String() string { return {{.|Format}} } + +{{if .Help}} +// {{.Help}} +{{else}}\ +// {{.|Name}} parses the next command-line value as {{.Type}}. +{{end}}\ +func (p *parserMixin) {{.|Name}}() (target *{{.Type}}) { + target = new({{.Type}}) + p.{{.|Name}}Var(target) + return +} + +func (p *parserMixin) {{.|Name}}Var(target *{{.Type}}) { + p.SetValue(new{{.|Name}}Value(target)) +} + +{{end}} +// {{.|Plural}} accumulates {{.Type}} values into a slice. 
+func (p *parserMixin) {{.|Plural}}() (target *[]{{.Type}}) { + target = new([]{{.Type}}) + p.{{.|Plural}}Var(target) + return +} + +func (p *parserMixin) {{.|Plural}}Var(target *[]{{.Type}}) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return new{{.|Name}}Value(v.(*{{.Type}})) + })) +} + +{{end}} +` +) + +type Value struct { + Name string `json:"name"` + NoValueParser bool `json:"no_value_parser"` + Type string `json:"type"` + Parser string `json:"parser"` + Format string `json:"format"` + Plural string `json:"plural"` + Help string `json:"help"` +} + +func fatalIfError(err error) { + if err != nil { + panic(err) + } +} + +func main() { + r, err := os.Open("values.json") + fatalIfError(err) + defer r.Close() + + v := []Value{} + err = json.NewDecoder(r).Decode(&v) + fatalIfError(err) + + valueName := func(v *Value) string { + if v.Name != "" { + return v.Name + } + return strings.Title(v.Type) + } + + t, err := template.New("genvalues").Funcs(template.FuncMap{ + "Lower": strings.ToLower, + "Format": func(v *Value) string { + if v.Format != "" { + return v.Format + } + return "fmt.Sprintf(\"%v\", *f)" + }, + "ValueName": func(v *Value) string { + name := valueName(v) + return strings.ToLower(name[0:1]) + name[1:] + "Value" + }, + "Name": valueName, + "Plural": func(v *Value) string { + if v.Plural != "" { + return v.Plural + } + return valueName(v) + "List" + }, + }).Parse(tmpl) + fatalIfError(err) + + w, err := os.Create("values_generated.go") + fatalIfError(err) + defer w.Close() + + err = t.Execute(w, v) + fatalIfError(err) + + err = exec.Command("goimports", "-w", "values_generated.go").Run() + fatalIfError(err) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd_test.go new file mode 100644 index 0000000..7d59b8d --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd_test.go @@ -0,0 +1,276 @@ +package kingpin + +import ( + "strings" + + "github.com/alecthomas/assert" + + "testing" +) + +func parseAndExecute(app *Application, context *ParseContext) (string, error) { + if err := parse(context, app); err != nil { + return "", err + } + + selected, err := app.setValues(context) + if err != nil { + return "", err + } + + return app.execute(context, selected) +} + +func TestNestedCommands(t *testing.T) { + app := New("app", "") + sub1 := app.Command("sub1", "") + sub1.Flag("sub1", "") + subsub1 := sub1.Command("sub1sub1", "") + subsub1.Command("sub1sub1end", "") + + sub2 := app.Command("sub2", "") + sub2.Flag("sub2", "") + sub2.Command("sub2sub1", "") + + context := tokenize([]string{"sub1", "sub1sub1", "sub1sub1end"}, false) + selected, err := parseAndExecute(app, context) + assert.NoError(t, err) + assert.True(t, context.EOL()) + assert.Equal(t, "sub1 sub1sub1 sub1sub1end", selected) +} + +func TestNestedCommandsWithArgs(t *testing.T) { + app := New("app", "") + cmd := app.Command("a", "").Command("b", "") + a := cmd.Arg("a", "").String() + b := cmd.Arg("b", "").String() + context := tokenize([]string{"a", "b", "c", "d"}, false) + selected, err := parseAndExecute(app, context) + assert.NoError(t, err) + assert.True(t, context.EOL()) + assert.Equal(t, "a b", selected) + assert.Equal(t, "c", *a) + assert.Equal(t, "d", *b) +} + +func TestNestedCommandsWithFlags(t *testing.T) { + app := New("app", "") + cmd := app.Command("a", "").Command("b", "") + a := cmd.Flag("aaa", "").Short('a').String() + b := cmd.Flag("bbb", "").Short('b').String() + err := app.init() + assert.NoError(t, err) + context := 
tokenize(strings.Split("a b --aaa x -b x", " "), false) + selected, err := parseAndExecute(app, context) + assert.NoError(t, err) + assert.True(t, context.EOL()) + assert.Equal(t, "a b", selected) + assert.Equal(t, "x", *a) + assert.Equal(t, "x", *b) +} + +func TestNestedCommandWithMergedFlags(t *testing.T) { + app := New("app", "") + cmd0 := app.Command("a", "") + cmd0f0 := cmd0.Flag("aflag", "").Bool() + // cmd1 := app.Command("b", "") + // cmd1f0 := cmd0.Flag("bflag", "").Bool() + cmd00 := cmd0.Command("aa", "") + cmd00f0 := cmd00.Flag("aaflag", "").Bool() + err := app.init() + assert.NoError(t, err) + context := tokenize(strings.Split("a aa --aflag --aaflag", " "), false) + selected, err := parseAndExecute(app, context) + assert.NoError(t, err) + assert.True(t, *cmd0f0) + assert.True(t, *cmd00f0) + assert.Equal(t, "a aa", selected) +} + +func TestNestedCommandWithDuplicateFlagErrors(t *testing.T) { + app := New("app", "") + app.Flag("test", "").Bool() + app.Command("cmd0", "").Flag("test", "").Bool() + err := app.init() + assert.Error(t, err) +} + +func TestNestedCommandWithArgAndMergedFlags(t *testing.T) { + app := New("app", "") + cmd0 := app.Command("a", "") + cmd0f0 := cmd0.Flag("aflag", "").Bool() + // cmd1 := app.Command("b", "") + // cmd1f0 := cmd0.Flag("bflag", "").Bool() + cmd00 := cmd0.Command("aa", "") + cmd00a0 := cmd00.Arg("arg", "").String() + cmd00f0 := cmd00.Flag("aaflag", "").Bool() + err := app.init() + assert.NoError(t, err) + context := tokenize(strings.Split("a aa hello --aflag --aaflag", " "), false) + selected, err := parseAndExecute(app, context) + assert.NoError(t, err) + assert.True(t, *cmd0f0) + assert.True(t, *cmd00f0) + assert.Equal(t, "a aa", selected) + assert.Equal(t, "hello", *cmd00a0) +} + +func TestDefaultSubcommandEOL(t *testing.T) { + app := newTestApp() + c0 := app.Command("c0", "").Default() + c0.Command("c01", "").Default() + c0.Command("c02", "") + + cmd, err := app.Parse([]string{"c0"}) + assert.NoError(t, err) + assert.Equal(t, "c0 c01", cmd) +} + +func TestDefaultSubcommandWithArg(t *testing.T) { + app := newTestApp() + c0 := app.Command("c0", "").Default() + c01 := c0.Command("c01", "").Default() + c012 := c01.Command("c012", "").Default() + a0 := c012.Arg("a0", "").String() + c0.Command("c02", "") + + cmd, err := app.Parse([]string{"c0", "hello"}) + assert.NoError(t, err) + assert.Equal(t, "c0 c01 c012", cmd) + assert.Equal(t, "hello", *a0) +} + +func TestDefaultSubcommandWithFlags(t *testing.T) { + app := newTestApp() + c0 := app.Command("c0", "").Default() + _ = c0.Flag("f0", "").Int() + c0c1 := c0.Command("c1", "").Default() + c0c1f1 := c0c1.Flag("f1", "").Int() + selected, err := app.Parse([]string{"--f1=2"}) + assert.NoError(t, err) + assert.Equal(t, "c0 c1", selected) + assert.Equal(t, 2, *c0c1f1) + _, err = app.Parse([]string{"--f2"}) + assert.Error(t, err) +} + +func TestMultipleDefaultCommands(t *testing.T) { + app := newTestApp() + app.Command("c0", "").Default() + app.Command("c1", "").Default() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TestAliasedCommand(t *testing.T) { + app := newTestApp() + app.Command("one", "").Alias("two") + selected, _ := app.Parse([]string{"one"}) + assert.Equal(t, "one", selected) + selected, _ = app.Parse([]string{"two"}) + assert.Equal(t, "one", selected) + // 2 due to "help" and "one" + assert.Equal(t, 2, len(app.Model().FlattenedCommands())) +} + +func TestDuplicateAlias(t *testing.T) { + app := newTestApp() + app.Command("one", "") + app.Command("two", "").Alias("one") + 
_, err := app.Parse([]string{"one"})
+	assert.Error(t, err)
+}
+
+func TestFlagCompletion(t *testing.T) {
+	app := newTestApp()
+	app.Command("one", "")
+	two := app.Command("two", "")
+	two.Flag("flag-1", "")
+	two.Flag("flag-2", "").HintOptions("opt1", "opt2", "opt3")
+	two.Flag("flag-3", "")
+
+	cases := []struct {
+		target              cmdMixin
+		flagName            string
+		flagValue           string
+		expectedFlagMatch   bool
+		expectedOptionMatch bool
+		expectedFlags       []string
+	}{
+		{
+			// Test top-level flags.
+			target:              app.cmdMixin,
+			flagName:            "",
+			flagValue:           "",
+			expectedFlagMatch:   false,
+			expectedOptionMatch: false,
+			expectedFlags:       []string{"--help"},
+		},
+		{
+			// Test no flag passed.
+			target:              two.cmdMixin,
+			flagName:            "",
+			flagValue:           "",
+			expectedFlagMatch:   false,
+			expectedOptionMatch: false,
+			expectedFlags:       []string{"--flag-1", "--flag-2", "--flag-3"},
+		},
+		{
+			// Test an incomplete flag. Should still give all options, as if the flag hadn't been given at all.
+			target:              two.cmdMixin,
+			flagName:            "flag-",
+			flagValue:           "",
+			expectedFlagMatch:   false,
+			expectedOptionMatch: false,
+			expectedFlags:       []string{"--flag-1", "--flag-2", "--flag-3"},
+		},
+		{
+			// Test with a complete flag. Should show available choices for the flag.
+			// This flag has no options, so none should be produced.
+			// Should also report that an option was matched.
+			target:              two.cmdMixin,
+			flagName:            "flag-1",
+			flagValue:           "",
+			expectedFlagMatch:   true,
+			expectedOptionMatch: true,
+			expectedFlags:       []string(nil),
+		},
+		{
+			// Test with a complete flag. Should show available choices for the flag.
+			target:              two.cmdMixin,
+			flagName:            "flag-2",
+			flagValue:           "",
+			expectedFlagMatch:   true,
+			expectedOptionMatch: false,
+			expectedFlags:       []string{"opt1", "opt2", "opt3"},
+		},
+		{
+			// Test with a complete flag and a complete option for that flag.
+			target:              two.cmdMixin,
+			flagName:            "flag-2",
+			flagValue:           "opt1",
+			expectedFlagMatch:   true,
+			expectedOptionMatch: true,
+			expectedFlags:       []string{"opt1", "opt2", "opt3"},
+		},
+	}
+
+	for i, c := range cases {
+		choices, flagMatch, optionMatch := c.target.FlagCompletion(c.flagName, c.flagValue)
+		assert.Equal(t, c.expectedFlags, choices, "Test case %d: expectedFlags != actual flags", i+1)
+		assert.Equal(t, c.expectedFlagMatch, flagMatch, "Test case %d: expectedFlagMatch != flagMatch", i+1)
+		assert.Equal(t, c.expectedOptionMatch, optionMatch, "Test case %d: expectedOptionMatch != optionMatch", i+1)
+	}
+}
+
+func TestCmdCompletion(t *testing.T) {
+	app := newTestApp()
+	app.Command("one", "")
+	two := app.Command("two", "")
+	two.Command("sub1", "")
+	two.Command("sub2", "")
+
+	assert.Equal(t, []string{"one", "two"}, app.CmdCompletion())
+	assert.Equal(t, []string{"sub1", "sub2"}, two.CmdCompletion())
+}
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go b/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go
new file mode 100644
index 0000000..6e7b409
--- /dev/null
+++ b/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go
@@ -0,0 +1,33 @@
+package kingpin
+
+// HintAction is a function type that is expected to return a slice of possible
+// command line arguments.
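+//
+// For example (sketch; the flag and hints are illustrative):
+//
+//	app.Flag("theme", "UI theme.").HintAction(func() []string {
+//		return []string{"dark", "light"}
+//	}).String()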
+type HintAction func() []string
+
+type completionsMixin struct {
+	hintActions        []HintAction
+	builtinHintActions []HintAction
+}
+
+func (a *completionsMixin) addHintAction(action HintAction) {
+	a.hintActions = append(a.hintActions, action)
+}
+
+// addHintActionBuiltin allows adding of HintActions which are added internally, e.g. by EnumVar.
+func (a *completionsMixin) addHintActionBuiltin(action HintAction) {
+	a.builtinHintActions = append(a.builtinHintActions, action)
+}
+
+func (a *completionsMixin) resolveCompletions() []string {
+	var hints []string
+
+	options := a.builtinHintActions
+	if len(a.hintActions) > 0 {
+		// The user specified their own hintActions. Use those instead.
+		options = a.hintActions
+	}
+
+	for _, hintAction := range options {
+		hints = append(hints, hintAction()...)
+	}
+	return hints
+}
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/completions_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/completions_test.go
new file mode 100644
index 0000000..74656ea
--- /dev/null
+++ b/vendor/gopkg.in/alecthomas/kingpin.v2/completions_test.go
@@ -0,0 +1,78 @@
+package kingpin
+
+import (
+	"testing"
+
+	"github.com/alecthomas/assert"
+)
+
+func TestResolveWithBuiltin(t *testing.T) {
+	a := completionsMixin{}
+
+	hintAction1 := func() []string {
+		return []string{"opt1", "opt2"}
+	}
+	hintAction2 := func() []string {
+		return []string{"opt3", "opt4"}
+	}
+
+	a.builtinHintActions = []HintAction{hintAction1, hintAction2}
+
+	args := a.resolveCompletions()
+	assert.Equal(t, []string{"opt1", "opt2", "opt3", "opt4"}, args)
+}
+
+func TestResolveWithUser(t *testing.T) {
+	a := completionsMixin{}
+	hintAction1 := func() []string {
+		return []string{"opt1", "opt2"}
+	}
+	hintAction2 := func() []string {
+		return []string{"opt3", "opt4"}
+	}
+
+	a.hintActions = []HintAction{hintAction1, hintAction2}
+
+	args := a.resolveCompletions()
+	assert.Equal(t, []string{"opt1", "opt2", "opt3", "opt4"}, args)
+}
+
+func TestResolveWithCombination(t *testing.T) {
+	a := completionsMixin{}
+	builtin := func() []string {
+		return []string{"opt1", "opt2"}
+	}
+	user := func() []string {
+		return []string{"opt3", "opt4"}
+	}
+
+	a.builtinHintActions = []HintAction{builtin}
+	a.hintActions = []HintAction{user}
+
+	args := a.resolveCompletions()
+	// User-provided args take preference over builtin (enum-defined) args.
+	assert.Equal(t, []string{"opt3", "opt4"}, args)
+}
+
+func TestAddHintAction(t *testing.T) {
+	a := completionsMixin{}
+	hintFunc := func() []string {
+		return []string{"opt1", "opt2"}
+	}
+	a.addHintAction(hintFunc)
+
+	args := a.resolveCompletions()
+	assert.Equal(t, []string{"opt1", "opt2"}, args)
+}
+
+func TestAddHintActionBuiltin(t *testing.T) {
+	a := completionsMixin{}
+	hintFunc := func() []string {
+		return []string{"opt1", "opt2"}
+	}
+
+	a.addHintActionBuiltin(hintFunc)
+
+	args := a.resolveCompletions()
+	assert.Equal(t, []string{"opt1", "opt2"}, args)
+}
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go b/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
new file mode 100644
index 0000000..c14762c
--- /dev/null
+++ b/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
@@ -0,0 +1,68 @@
+// Package kingpin provides command line interfaces like this:
+//
+//     $ chat
+//     usage: chat [<flags>] <command> [<flags>] [<args> ...]
+//
+//     Flags:
+//       --debug              enable debug mode
+//       --help               Show help.
+//       --server=127.0.0.1   server address
+//
+//     Commands:
+//       help <command>
+//         Show help for a command.
+//
+//       post [<flags>] <channel>
+//         Post a message to a channel.
+//
+//       register <nick> <name>
+//         Register a new user.
+//
+//     $ chat help post
+//     usage: chat [<flags>] post [<flags>] <channel> [<text>]
+//
+//     Post a message to a channel.
+//
+//     Flags:
+//       --image=IMAGE  image to post
+//
+//     Args:
+//       <channel>  channel to post to
+//       [<text>]   text to post
+//
+//     $ chat post --image=~/Downloads/owls.jpg pics
+//
+// From code like this:
+//
+//     package main
+//
+//     import "gopkg.in/alecthomas/kingpin.v2"
+//
+//     var (
+//       debug    = kingpin.Flag("debug", "enable debug mode").Default("false").Bool()
+//       serverIP = kingpin.Flag("server", "server address").Default("127.0.0.1").IP()
+//
+//       register     = kingpin.Command("register", "Register a new user.")
+//       registerNick = register.Arg("nick", "nickname for user").Required().String()
+//       registerName = register.Arg("name", "name of user").Required().String()
+//
+//       post        = kingpin.Command("post", "Post a message to a channel.")
+//       postImage   = post.Flag("image", "image to post").ExistingFile()
+//       postChannel = post.Arg("channel", "channel to post to").Required().String()
+//       postText    = post.Arg("text", "text to post").String()
+//     )
+//
+//     func main() {
+//       switch kingpin.Parse() {
+//       // Register user
+//       case "register":
+//         println(*registerNick)
+//
+//       // Post message
+//       case "post":
+//         if *postImage != "" {
+//         }
+//         if *postText != "" {
+//         }
+//       }
+//     }
+package kingpin
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/examples_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/examples_test.go
new file mode 100644
index 0000000..7c3e34f
--- /dev/null
+++ b/vendor/gopkg.in/alecthomas/kingpin.v2/examples_test.go
@@ -0,0 +1,46 @@
+package kingpin
+
+import (
+	"fmt"
+	"net/http"
+	"strings"
+)
+
+type HTTPHeaderValue http.Header
+
+func (h *HTTPHeaderValue) Set(value string) error {
+	parts := strings.SplitN(value, ":", 2)
+	if len(parts) != 2 {
+		return fmt.Errorf("expected HEADER:VALUE got '%s'", value)
+	}
+	(*http.Header)(h).Add(parts[0], parts[1])
+	return nil
+}
+
+func (h *HTTPHeaderValue) Get() interface{} {
+	return (http.Header)(*h)
+}
+
+func (h *HTTPHeaderValue) String() string {
+	return ""
+}
+
+func HTTPHeader(s Settings) (target *http.Header) {
+	target = new(http.Header)
+	s.SetValue((*HTTPHeaderValue)(target))
+	return
+}
+
+// This example illustrates how to define custom parsers. HTTPHeader
+// cumulatively parses each encountered --header flag into a http.Header struct.
+func ExampleValue() {
+	var (
+		curl    = New("curl", "transfer a URL")
+		headers = HTTPHeader(curl.Flag("headers", "Add HTTP headers to the request.").Short('H').PlaceHolder("HEADER:VALUE"))
+	)
+
+	curl.Parse([]string{"-H Content-Type:application/octet-stream"})
+	for key, value := range *headers {
+		fmt.Printf("%s = %s\n", key, value)
+	}
+}
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go b/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
new file mode 100644
index 0000000..ef0e845
--- /dev/null
+++ b/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
@@ -0,0 +1,329 @@
+package kingpin
+
+import (
+	"fmt"
+	"os"
+	"regexp"
+	"strings"
+)
+
+var (
+	envVarValuesSeparator = "\r?\n"
+	envVarValuesTrimmer   = regexp.MustCompile(envVarValuesSeparator + "$")
+	envVarValuesSplitter  = regexp.MustCompile(envVarValuesSeparator)
+)
+
+type flagGroup struct {
+	short     map[string]*FlagClause
+	long      map[string]*FlagClause
+	flagOrder []*FlagClause
+}
+
+func newFlagGroup() *flagGroup {
+	return &flagGroup{
+		short: map[string]*FlagClause{},
+		long:  map[string]*FlagClause{},
+	}
+}
+
+// GetFlag gets a flag definition.
+//
+// This allows existing flags to be modified after definition but before parsing.
Useful for +// modular applications. +func (f *flagGroup) GetFlag(name string) *FlagClause { + return f.long[name] +} + +// Flag defines a new flag with the given long name and help. +func (f *flagGroup) Flag(name, help string) *FlagClause { + flag := newFlag(name, help) + f.long[name] = flag + f.flagOrder = append(f.flagOrder, flag) + return flag +} + +func (f *flagGroup) init(defaultEnvarPrefix string) error { + if err := f.checkDuplicates(); err != nil { + return err + } + for _, flag := range f.long { + if defaultEnvarPrefix != "" && !flag.noEnvar && flag.envar == "" { + flag.envar = envarTransform(defaultEnvarPrefix + "_" + flag.name) + } + if err := flag.init(); err != nil { + return err + } + if flag.shorthand != 0 { + f.short[string(flag.shorthand)] = flag + } + } + return nil +} + +func (f *flagGroup) checkDuplicates() error { + seenShort := map[byte]bool{} + seenLong := map[string]bool{} + for _, flag := range f.flagOrder { + if flag.shorthand != 0 { + if _, ok := seenShort[flag.shorthand]; ok { + return fmt.Errorf("duplicate short flag -%c", flag.shorthand) + } + seenShort[flag.shorthand] = true + } + if _, ok := seenLong[flag.name]; ok { + return fmt.Errorf("duplicate long flag --%s", flag.name) + } + seenLong[flag.name] = true + } + return nil +} + +func (f *flagGroup) parse(context *ParseContext) (*FlagClause, error) { + var token *Token + +loop: + for { + token = context.Peek() + switch token.Type { + case TokenEOL: + break loop + + case TokenLong, TokenShort: + flagToken := token + defaultValue := "" + var flag *FlagClause + var ok bool + invert := false + + name := token.Value + if token.Type == TokenLong { + if strings.HasPrefix(name, "no-") { + name = name[3:] + invert = true + } + flag, ok = f.long[name] + if !ok { + return nil, fmt.Errorf("unknown long flag '%s'", flagToken) + } + } else { + flag, ok = f.short[name] + if !ok { + return nil, fmt.Errorf("unknown short flag '%s'", flagToken) + } + } + + context.Next() + + fb, ok := flag.value.(boolFlag) + if ok && fb.IsBoolFlag() { + if invert { + defaultValue = "false" + } else { + defaultValue = "true" + } + } else { + if invert { + context.Push(token) + return nil, fmt.Errorf("unknown long flag '%s'", flagToken) + } + token = context.Peek() + if token.Type != TokenArg { + context.Push(token) + return nil, fmt.Errorf("expected argument for flag '%s'", flagToken) + } + context.Next() + defaultValue = token.Value + } + + context.matchedFlag(flag, defaultValue) + return flag, nil + + default: + break loop + } + } + return nil, nil +} + +func (f *flagGroup) visibleFlags() int { + count := 0 + for _, flag := range f.long { + if !flag.hidden { + count++ + } + } + return count +} + +// FlagClause is a fluid interface used to build flags. +type FlagClause struct { + parserMixin + actionMixin + completionsMixin + name string + shorthand byte + help string + envar string + noEnvar bool + defaultValues []string + placeholder string + hidden bool +} + +func newFlag(name, help string) *FlagClause { + f := &FlagClause{ + name: name, + help: help, + } + return f +} + +func (f *FlagClause) setDefault() error { + if !f.noEnvar && f.envar != "" { + if envarValue := os.Getenv(f.envar); envarValue != "" { + if v, ok := f.value.(repeatableFlag); !ok || !v.IsCumulative() { + // Use the value as-is + return f.value.Set(envarValue) + } else { + // Split by new line to extract multiple values, if any. 
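+				// Illustrative: a cumulative Strings() flag whose envar
+				// contains "a\nb" receives the two values "a" and "b".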
+ trimmed := envVarValuesTrimmer.ReplaceAllString(envarValue, "") + for _, value := range envVarValuesSplitter.Split(trimmed, -1) { + if err := f.value.Set(value); err != nil { + return err + } + } + return nil + } + } + } + + if len(f.defaultValues) > 0 { + for _, defaultValue := range f.defaultValues { + if err := f.value.Set(defaultValue); err != nil { + return err + } + } + return nil + } + + return nil +} + +func (f *FlagClause) needsValue() bool { + haveDefault := len(f.defaultValues) > 0 + haveEnvar := !f.noEnvar && f.envar != "" && os.Getenv(f.envar) != "" + return f.required && !(haveDefault || haveEnvar) +} + +func (f *FlagClause) init() error { + if f.required && len(f.defaultValues) > 0 { + return fmt.Errorf("required flag '--%s' with default value that will never be used", f.name) + } + if f.value == nil { + return fmt.Errorf("no type defined for --%s (eg. .String())", f.name) + } + if v, ok := f.value.(repeatableFlag); (!ok || !v.IsCumulative()) && len(f.defaultValues) > 1 { + return fmt.Errorf("invalid default for '--%s', expecting single value", f.name) + } + return nil +} + +// Dispatch to the given function after the flag is parsed and validated. +func (f *FlagClause) Action(action Action) *FlagClause { + f.addAction(action) + return f +} + +func (f *FlagClause) PreAction(action Action) *FlagClause { + f.addPreAction(action) + return f +} + +// HintAction registers a HintAction (function) for the flag to provide completions +func (a *FlagClause) HintAction(action HintAction) *FlagClause { + a.addHintAction(action) + return a +} + +// HintOptions registers any number of options for the flag to provide completions +func (a *FlagClause) HintOptions(options ...string) *FlagClause { + a.addHintAction(func() []string { + return options + }) + return a +} + +func (a *FlagClause) EnumVar(target *string, options ...string) { + a.parserMixin.EnumVar(target, options...) + a.addHintActionBuiltin(func() []string { + return options + }) +} + +func (a *FlagClause) Enum(options ...string) (target *string) { + a.addHintActionBuiltin(func() []string { + return options + }) + return a.parserMixin.Enum(options...) +} + +// Default values for this flag. They *must* be parseable by the value of the flag. +func (f *FlagClause) Default(values ...string) *FlagClause { + f.defaultValues = values + return f +} + +// DEPRECATED: Use Envar(name) instead. +func (f *FlagClause) OverrideDefaultFromEnvar(envar string) *FlagClause { + return f.Envar(envar) +} + +// Envar overrides the default value(s) for a flag from an environment variable, +// if it is set. Several default values can be provided by using new lines to +// separate them. +func (f *FlagClause) Envar(name string) *FlagClause { + f.envar = name + f.noEnvar = false + return f +} + +// NoEnvar forces environment variable defaults to be disabled for this flag. +// Most useful in conjunction with app.DefaultEnvars(). +func (f *FlagClause) NoEnvar() *FlagClause { + f.envar = "" + f.noEnvar = true + return f +} + +// PlaceHolder sets the place-holder string used for flag values in the help. The +// default behaviour is to use the value provided by Default() if provided, +// then fall back on the capitalized flag name. +func (f *FlagClause) PlaceHolder(placeholder string) *FlagClause { + f.placeholder = placeholder + return f +} + +// Hidden hides a flag from usage but still allows it to be used. +func (f *FlagClause) Hidden() *FlagClause { + f.hidden = true + return f +} + +// Required makes the flag required. 
You can not provide a Default() value to a Required() flag. +func (f *FlagClause) Required() *FlagClause { + f.required = true + return f +} + +// Short sets the short flag name. +func (f *FlagClause) Short(name byte) *FlagClause { + f.shorthand = name + return f +} + +// Bool makes this flag a boolean flag. +func (f *FlagClause) Bool() (target *bool) { + target = new(bool) + f.SetValue(newBoolValue(target)) + return +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/flags_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/flags_test.go new file mode 100644 index 0000000..2552da3 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/flags_test.go @@ -0,0 +1,319 @@ +package kingpin + +import ( + "io/ioutil" + "os" + + "github.com/alecthomas/assert" + + "testing" +) + +func TestBool(t *testing.T) { + app := newTestApp() + b := app.Flag("b", "").Bool() + _, err := app.Parse([]string{"--b"}) + assert.NoError(t, err) + assert.True(t, *b) +} + +func TestNoBool(t *testing.T) { + fg := newFlagGroup() + f := fg.Flag("b", "").Default("true") + b := f.Bool() + fg.init("") + tokens := tokenize([]string{"--no-b"}, false) + _, err := fg.parse(tokens) + assert.NoError(t, err) + assert.False(t, *b) +} + +func TestNegateNonBool(t *testing.T) { + fg := newFlagGroup() + f := fg.Flag("b", "") + f.Int() + fg.init("") + tokens := tokenize([]string{"--no-b"}, false) + _, err := fg.parse(tokens) + assert.Error(t, err) +} + +func TestInvalidFlagDefaultCanBeOverridden(t *testing.T) { + app := newTestApp() + app.Flag("a", "").Default("invalid").Bool() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TestRequiredFlag(t *testing.T) { + app := newTestApp() + app.Version("0.0.0").Writer(ioutil.Discard) + exits := 0 + app.Terminate(func(int) { exits++ }) + app.Flag("a", "").Required().Bool() + _, err := app.Parse([]string{"--a"}) + assert.NoError(t, err) + _, err = app.Parse([]string{}) + assert.Error(t, err) + _, err = app.Parse([]string{"--version"}) + assert.Equal(t, 1, exits) +} + +func TestShortFlag(t *testing.T) { + app := newTestApp() + f := app.Flag("long", "").Short('s').Bool() + _, err := app.Parse([]string{"-s"}) + assert.NoError(t, err) + assert.True(t, *f) +} + +func TestCombinedShortFlags(t *testing.T) { + app := newTestApp() + a := app.Flag("short0", "").Short('0').Bool() + b := app.Flag("short1", "").Short('1').Bool() + c := app.Flag("short2", "").Short('2').Bool() + _, err := app.Parse([]string{"-01"}) + assert.NoError(t, err) + assert.True(t, *a) + assert.True(t, *b) + assert.False(t, *c) +} + +func TestCombinedShortFlagArg(t *testing.T) { + a := newTestApp() + n := a.Flag("short", "").Short('s').Int() + _, err := a.Parse([]string{"-s10"}) + assert.NoError(t, err) + assert.Equal(t, 10, *n) +} + +func TestEmptyShortFlagIsAnError(t *testing.T) { + _, err := newTestApp().Parse([]string{"-"}) + assert.Error(t, err) +} + +func TestRequiredWithEnvarMissingErrors(t *testing.T) { + app := newTestApp() + app.Flag("t", "").OverrideDefaultFromEnvar("TEST_ENVAR").Required().Int() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TestRequiredWithEnvar(t *testing.T) { + os.Setenv("TEST_ENVAR", "123") + app := newTestApp() + flag := app.Flag("t", "").Envar("TEST_ENVAR").Required().Int() + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, 123, *flag) +} + +func TestSubcommandFlagRequiredWithEnvar(t *testing.T) { + os.Setenv("TEST_ENVAR", "123") + app := newTestApp() + cmd := app.Command("command", "") + flag := cmd.Flag("t", 
"").Envar("TEST_ENVAR").Required().Int() + _, err := app.Parse([]string{"command"}) + assert.NoError(t, err) + assert.Equal(t, 123, *flag) +} + +func TestRegexp(t *testing.T) { + app := newTestApp() + flag := app.Flag("reg", "").Regexp() + _, err := app.Parse([]string{"--reg", "^abc$"}) + assert.NoError(t, err) + assert.NotNil(t, *flag) + assert.Equal(t, "^abc$", (*flag).String()) + assert.Regexp(t, *flag, "abc") + assert.NotRegexp(t, *flag, "abcd") +} + +func TestDuplicateShortFlag(t *testing.T) { + app := newTestApp() + app.Flag("a", "").Short('a').String() + app.Flag("b", "").Short('a').String() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TestDuplicateLongFlag(t *testing.T) { + app := newTestApp() + app.Flag("a", "").String() + app.Flag("a", "").String() + _, err := app.Parse([]string{}) + assert.Error(t, err) +} + +func TestGetFlagAndOverrideDefault(t *testing.T) { + app := newTestApp() + a := app.Flag("a", "").Default("default").String() + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, "default", *a) + app.GetFlag("a").Default("new") + _, err = app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, "new", *a) +} + +func TestEnvarOverrideDefault(t *testing.T) { + os.Setenv("TEST_ENVAR", "123") + app := newTestApp() + flag := app.Flag("t", "").Default("default").Envar("TEST_ENVAR").String() + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, "123", *flag) +} + +func TestFlagMultipleValuesDefault(t *testing.T) { + app := newTestApp() + a := app.Flag("a", "").Default("default1", "default2").Strings() + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, []string{"default1", "default2"}, *a) +} + +func TestFlagMultipleValuesDefaultNonRepeatable(t *testing.T) { + c := newTestApp() + c.Flag("foo", "foo").Default("a", "b").String() + _, err := c.Parse([]string{}) + assert.Error(t, err) +} + +func TestFlagMultipleValuesDefaultEnvarUnix(t *testing.T) { + app := newTestApp() + a := app.Flag("a", "").Envar("TEST_MULTIPLE_VALUES").Strings() + os.Setenv("TEST_MULTIPLE_VALUES", "123\n456\n") + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, []string{"123", "456"}, *a) +} + +func TestFlagMultipleValuesDefaultEnvarWindows(t *testing.T) { + app := newTestApp() + a := app.Flag("a", "").Envar("TEST_MULTIPLE_VALUES").Strings() + os.Setenv("TEST_MULTIPLE_VALUES", "123\r\n456\r\n") + _, err := app.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, []string{"123", "456"}, *a) +} + +func TestFlagMultipleValuesDefaultEnvarNonRepeatable(t *testing.T) { + c := newTestApp() + a := c.Flag("foo", "foo").Envar("TEST_MULTIPLE_VALUES_NON_REPEATABLE").String() + os.Setenv("TEST_MULTIPLE_VALUES_NON_REPEATABLE", "123\n456") + _, err := c.Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, "123\n456", *a) +} + +func TestFlagHintAction(t *testing.T) { + c := newTestApp() + + action := func() []string { + return []string{"opt1", "opt2"} + } + + a := c.Flag("foo", "foo").HintAction(action) + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) +} + +func TestFlagHintOptions(t *testing.T) { + c := newTestApp() + + a := c.Flag("foo", "foo").HintOptions("opt1", "opt2") + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) +} + +func TestFlagEnumVar(t *testing.T) { + c := newTestApp() + var bar string + + a := c.Flag("foo", "foo") + a.Enum("opt1", "opt2") + b := c.Flag("bar", "bar") + b.EnumVar(&bar, "opt3", "opt4") + + 
args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) + + args = b.resolveCompletions() + assert.Equal(t, []string{"opt3", "opt4"}, args) +} + +func TestMultiHintOptions(t *testing.T) { + c := newTestApp() + + a := c.Flag("foo", "foo").HintOptions("opt1").HintOptions("opt2") + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) +} +func TestMultiHintActions(t *testing.T) { + c := newTestApp() + + a := c.Flag("foo", "foo"). + HintAction(func() []string { + return []string{"opt1"} + }). + HintAction(func() []string { + return []string{"opt2"} + }) + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) +} + +func TestCombinationHintActionsOptions(t *testing.T) { + c := newTestApp() + + a := c.Flag("foo", "foo").HintAction(func() []string { + return []string{"opt1"} + }).HintOptions("opt2") + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) +} + +func TestCombinationEnumActions(t *testing.T) { + c := newTestApp() + var foo string + + a := c.Flag("foo", "foo"). + HintAction(func() []string { + return []string{"opt1", "opt2"} + }) + a.Enum("opt3", "opt4") + + b := c.Flag("bar", "bar"). + HintAction(func() []string { + return []string{"opt5", "opt6"} + }) + b.EnumVar(&foo, "opt3", "opt4") + + // Provided HintActions should override automatically generated Enum options. + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) + + args = b.resolveCompletions() + assert.Equal(t, []string{"opt5", "opt6"}, args) +} + +func TestCombinationEnumOptions(t *testing.T) { + c := newTestApp() + var foo string + + a := c.Flag("foo", "foo").HintOptions("opt1", "opt2") + a.Enum("opt3", "opt4") + + b := c.Flag("bar", "bar").HintOptions("opt5", "opt6") + b.EnumVar(&foo, "opt3", "opt4") + + // Provided HintOptions should override automatically generated Enum options. + args := a.resolveCompletions() + assert.Equal(t, []string{"opt1", "opt2"}, args) + + args = b.resolveCompletions() + assert.Equal(t, []string{"opt5", "opt6"}, args) + +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/global.go b/vendor/gopkg.in/alecthomas/kingpin.v2/global.go new file mode 100644 index 0000000..10a2913 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/global.go @@ -0,0 +1,94 @@ +package kingpin + +import ( + "os" + "path/filepath" +) + +var ( + // CommandLine is the default Kingpin parser. + CommandLine = New(filepath.Base(os.Args[0]), "") + // Global help flag. Exposed for user customisation. + HelpFlag = CommandLine.HelpFlag + // Top-level help command. Exposed for user customisation. May be nil. + HelpCommand = CommandLine.HelpCommand + // Global version flag. Exposed for user customisation. May be nil. + VersionFlag = CommandLine.VersionFlag +) + +// Command adds a new command to the default parser. +func Command(name, help string) *CmdClause { + return CommandLine.Command(name, help) +} + +// Flag adds a new flag to the default parser. +func Flag(name, help string) *FlagClause { + return CommandLine.Flag(name, help) +} + +// Arg adds a new argument to the top-level of the default parser. +func Arg(name, help string) *ArgClause { + return CommandLine.Arg(name, help) +} + +// Parse and return the selected command. Will call the termination handler if +// an error is encountered. 
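+// +// A minimal sketch of typical use of this default parser (the flag and argument names are illustrative): +// +// verbose := kingpin.Flag("verbose", "Verbose mode.").Short('v').Bool() +// name := kingpin.Arg("name", "Name to greet.").Required().String() +// kingpin.Parse()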
+func Parse() string { + selected := MustParse(CommandLine.Parse(os.Args[1:])) + if selected == "" && CommandLine.cmdGroup.have() { + Usage() + CommandLine.terminate(0) + } + return selected +} + +// Errorf prints an error message to stderr. +func Errorf(format string, args ...interface{}) { + CommandLine.Errorf(format, args...) +} + +// Fatalf prints an error message to stderr and exits. +func Fatalf(format string, args ...interface{}) { + CommandLine.Fatalf(format, args...) +} + +// FatalIfError prints an error and exits if err is not nil. The error is printed +// with the given prefix. +func FatalIfError(err error, format string, args ...interface{}) { + CommandLine.FatalIfError(err, format, args...) +} + +// FatalUsage prints an error message followed by usage information, then +// exits with a non-zero status. +func FatalUsage(format string, args ...interface{}) { + CommandLine.FatalUsage(format, args...) +} + +// FatalUsageContext writes a printf formatted error message to stderr, then +// usage information for the given ParseContext, before exiting. +func FatalUsageContext(context *ParseContext, format string, args ...interface{}) { + CommandLine.FatalUsageContext(context, format, args...) +} + +// Usage prints usage to stderr. +func Usage() { + CommandLine.Usage(os.Args[1:]) +} + +// Set global usage template to use (defaults to DefaultUsageTemplate). +func UsageTemplate(template string) *Application { + return CommandLine.UsageTemplate(template) +} + +// MustParse can be used with app.Parse(args) to exit with an error if parsing fails. +func MustParse(command string, err error) string { + if err != nil { + Fatalf("%s, try --help", err) + } + return command +} + +// Version adds a flag for displaying the application version number. +func Version(version string) *Application { + return CommandLine.Version(version) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go new file mode 100644 index 0000000..a269531 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go @@ -0,0 +1,9 @@ +// +build appengine !linux,!freebsd,!darwin,!dragonfly,!netbsd,!openbsd + +package kingpin + +import "io" + +func guessWidth(w io.Writer) int { + return 80 +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go new file mode 100644 index 0000000..ad8163f --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go @@ -0,0 +1,38 @@ +// +build !appengine,linux freebsd darwin dragonfly netbsd openbsd + +package kingpin + +import ( + "io" + "os" + "strconv" + "syscall" + "unsafe" +) + +func guessWidth(w io.Writer) int { + // check if COLUMNS env is set to comply with + // http://pubs.opengroup.org/onlinepubs/009604499/basedefs/xbd_chap08.html + colsStr := os.Getenv("COLUMNS") + if colsStr != "" { + if cols, err := strconv.Atoi(colsStr); err == nil { + return cols + } + } + + if t, ok := w.(*os.File); ok { + fd := t.Fd() + var dimensions [4]uint16 + + if _, _, err := syscall.Syscall6( + syscall.SYS_IOCTL, + uintptr(fd), + uintptr(syscall.TIOCGWINSZ), + uintptr(unsafe.Pointer(&dimensions)), + 0, 0, 0, + ); err == 0 { + return int(dimensions[1]) + } + } + return 80 +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/model.go b/vendor/gopkg.in/alecthomas/kingpin.v2/model.go new file mode 100644 index 0000000..ee770b0 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/model.go @@ -0,0 +1,225 @@ +package kingpin + +import ( + "fmt" 
+ "strconv" + "strings" +) + +// Data model for Kingpin command-line structure. + +type FlagGroupModel struct { + Flags []*FlagModel +} + +func (f *FlagGroupModel) FlagSummary() string { + out := []string{} + count := 0 + for _, flag := range f.Flags { + if flag.Name != "help" { + count++ + } + if flag.Required { + if flag.IsBoolFlag() { + out = append(out, fmt.Sprintf("--[no-]%s", flag.Name)) + } else { + out = append(out, fmt.Sprintf("--%s=%s", flag.Name, flag.FormatPlaceHolder())) + } + } + } + if count != len(out) { + out = append(out, "[]") + } + return strings.Join(out, " ") +} + +type FlagModel struct { + Name string + Help string + Short rune + Default []string + Envar string + PlaceHolder string + Required bool + Hidden bool + Value Value +} + +func (f *FlagModel) String() string { + return f.Value.String() +} + +func (f *FlagModel) IsBoolFlag() bool { + if fl, ok := f.Value.(boolFlag); ok { + return fl.IsBoolFlag() + } + return false +} + +func (f *FlagModel) FormatPlaceHolder() string { + if f.PlaceHolder != "" { + return f.PlaceHolder + } + if len(f.Default) > 0 { + ellipsis := "" + if len(f.Default) > 1 { + ellipsis = "..." + } + if _, ok := f.Value.(*stringValue); ok { + return strconv.Quote(f.Default[0]) + ellipsis + } + return f.Default[0] + ellipsis + } + return strings.ToUpper(f.Name) +} + +type ArgGroupModel struct { + Args []*ArgModel +} + +func (a *ArgGroupModel) ArgSummary() string { + depth := 0 + out := []string{} + for _, arg := range a.Args { + h := "<" + arg.Name + ">" + if !arg.Required { + h = "[" + h + depth++ + } + out = append(out, h) + } + out[len(out)-1] = out[len(out)-1] + strings.Repeat("]", depth) + return strings.Join(out, " ") +} + +type ArgModel struct { + Name string + Help string + Default []string + Required bool + Value Value +} + +func (a *ArgModel) String() string { + return a.Value.String() +} + +type CmdGroupModel struct { + Commands []*CmdModel +} + +func (c *CmdGroupModel) FlattenedCommands() (out []*CmdModel) { + for _, cmd := range c.Commands { + if len(cmd.Commands) == 0 { + out = append(out, cmd) + } + out = append(out, cmd.FlattenedCommands()...) 
+ } + return +} + +type CmdModel struct { + Name string + Aliases []string + Help string + FullCommand string + Depth int + Hidden bool + Default bool + *FlagGroupModel + *ArgGroupModel + *CmdGroupModel +} + +func (c *CmdModel) String() string { + return c.FullCommand +} + +type ApplicationModel struct { + Name string + Help string + Version string + Author string + *ArgGroupModel + *CmdGroupModel + *FlagGroupModel +} + +func (a *Application) Model() *ApplicationModel { + return &ApplicationModel{ + Name: a.Name, + Help: a.Help, + Version: a.version, + Author: a.author, + FlagGroupModel: a.flagGroup.Model(), + ArgGroupModel: a.argGroup.Model(), + CmdGroupModel: a.cmdGroup.Model(), + } +} + +func (a *argGroup) Model() *ArgGroupModel { + m := &ArgGroupModel{} + for _, arg := range a.args { + m.Args = append(m.Args, arg.Model()) + } + return m +} + +func (a *ArgClause) Model() *ArgModel { + return &ArgModel{ + Name: a.name, + Help: a.help, + Default: a.defaultValues, + Required: a.required, + Value: a.value, + } +} + +func (f *flagGroup) Model() *FlagGroupModel { + m := &FlagGroupModel{} + for _, fl := range f.flagOrder { + m.Flags = append(m.Flags, fl.Model()) + } + return m +} + +func (f *FlagClause) Model() *FlagModel { + return &FlagModel{ + Name: f.name, + Help: f.help, + Short: rune(f.shorthand), + Default: f.defaultValues, + Envar: f.envar, + PlaceHolder: f.placeholder, + Required: f.required, + Hidden: f.hidden, + Value: f.value, + } +} + +func (c *cmdGroup) Model() *CmdGroupModel { + m := &CmdGroupModel{} + for _, cm := range c.commandOrder { + m.Commands = append(m.Commands, cm.Model()) + } + return m +} + +func (c *CmdClause) Model() *CmdModel { + depth := 0 + for i := c; i != nil; i = i.parent { + depth++ + } + return &CmdModel{ + Name: c.name, + Aliases: c.aliases, + Help: c.help, + Depth: depth, + Hidden: c.hidden, + Default: c.isDefault, + FullCommand: c.FullCommand(), + FlagGroupModel: c.flagGroup.Model(), + ArgGroupModel: c.argGroup.Model(), + CmdGroupModel: c.cmdGroup.Model(), + } +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go new file mode 100644 index 0000000..9f3f7e5 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go @@ -0,0 +1,376 @@ +package kingpin + +import ( + "bufio" + "fmt" + "os" + "strings" +) + +type TokenType int + +// Token types. +const ( + TokenShort TokenType = iota + TokenLong + TokenArg + TokenError + TokenEOL +) + +func (t TokenType) String() string { + switch t { + case TokenShort: + return "short flag" + case TokenLong: + return "long flag" + case TokenArg: + return "argument" + case TokenError: + return "error" + case TokenEOL: + return "" + } + return "?" +} + +var ( + TokenEOLMarker = Token{-1, TokenEOL, ""} +) + +type Token struct { + Index int + Type TokenType + Value string +} + +func (t *Token) Equal(o *Token) bool { + return t.Index == o.Index +} + +func (t *Token) IsFlag() bool { + return t.Type == TokenShort || t.Type == TokenLong +} + +func (t *Token) IsEOF() bool { + return t.Type == TokenEOL +} + +func (t *Token) String() string { + switch t.Type { + case TokenShort: + return "-" + t.Value + case TokenLong: + return "--" + t.Value + case TokenArg: + return t.Value + case TokenError: + return "error: " + t.Value + case TokenEOL: + return "" + default: + panic("unhandled type") + } +} + +// A union of possible elements in a parse stack. +type ParseElement struct { + // Clause is either *CmdClause, *ArgClause or *FlagClause. 
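+ // A consumer can recover the concrete type with a type switch, e.g. (sketch): switch c := element.Clause.(type) { case *FlagClause: ... }, where "element" is illustrative.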
+ Clause interface{} + // Value is corresponding value for an ArgClause or FlagClause (if any). + Value *string +} + +// ParseContext holds the current context of the parser. When passed to +// Action() callbacks Elements will be fully populated with *FlagClause, +// *ArgClause and *CmdClause values and their corresponding arguments (if +// any). +type ParseContext struct { + SelectedCommand *CmdClause + ignoreDefault bool + argsOnly bool + peek []*Token + argi int // Index of current command-line arg we're processing. + args []string + rawArgs []string + flags *flagGroup + arguments *argGroup + argumenti int // Cursor into arguments + // Flags, arguments and commands encountered and collected during parse. + Elements []*ParseElement +} + +func (p *ParseContext) nextArg() *ArgClause { + if p.argumenti >= len(p.arguments.args) { + return nil + } + arg := p.arguments.args[p.argumenti] + if !arg.consumesRemainder() { + p.argumenti++ + } + return arg +} + +func (p *ParseContext) next() { + p.argi++ + p.args = p.args[1:] +} + +// HasTrailingArgs returns true if there are unparsed command-line arguments. +// This can occur if the parser can not match remaining arguments. +func (p *ParseContext) HasTrailingArgs() bool { + return len(p.args) > 0 +} + +func tokenize(args []string, ignoreDefault bool) *ParseContext { + return &ParseContext{ + ignoreDefault: ignoreDefault, + args: args, + rawArgs: args, + flags: newFlagGroup(), + arguments: newArgGroup(), + } +} + +func (p *ParseContext) mergeFlags(flags *flagGroup) { + for _, flag := range flags.flagOrder { + if flag.shorthand != 0 { + p.flags.short[string(flag.shorthand)] = flag + } + p.flags.long[flag.name] = flag + p.flags.flagOrder = append(p.flags.flagOrder, flag) + } +} + +func (p *ParseContext) mergeArgs(args *argGroup) { + for _, arg := range args.args { + p.arguments.args = append(p.arguments.args, arg) + } +} + +func (p *ParseContext) EOL() bool { + return p.Peek().Type == TokenEOL +} + +// Next token in the parse context. +func (p *ParseContext) Next() *Token { + if len(p.peek) > 0 { + return p.pop() + } + + // End of tokens. + if len(p.args) == 0 { + return &Token{Index: p.argi, Type: TokenEOL} + } + + arg := p.args[0] + p.next() + + if p.argsOnly { + return &Token{p.argi, TokenArg, arg} + } + + // All remaining args are passed directly. + if arg == "--" { + p.argsOnly = true + return p.Next() + } + + if strings.HasPrefix(arg, "--") { + parts := strings.SplitN(arg[2:], "=", 2) + token := &Token{p.argi, TokenLong, parts[0]} + if len(parts) == 2 { + p.Push(&Token{p.argi, TokenArg, parts[1]}) + } + return token + } + + if strings.HasPrefix(arg, "-") { + if len(arg) == 1 { + return &Token{Index: p.argi, Type: TokenShort} + } + short := arg[1:2] + flag, ok := p.flags.short[short] + // Not a known short flag, we'll just return it anyway. + if !ok { + } else if fb, ok := flag.value.(boolFlag); ok && fb.IsBoolFlag() { + // Bool short flag. + } else { + // Short flag with combined argument: -fARG + token := &Token{p.argi, TokenShort, short} + if len(arg) > 2 { + p.Push(&Token{p.argi, TokenArg, arg[2:]}) + } + return token + } + + if len(arg) > 2 { + p.args = append([]string{"-" + arg[2:]}, p.args...) + } + return &Token{p.argi, TokenShort, short} + } else if strings.HasPrefix(arg, "@") { + expanded, err := ExpandArgsFromFile(arg[1:]) + if err != nil { + return &Token{p.argi, TokenError, err.Error()} + } + if p.argi >= len(p.args) { + p.args = append(p.args[:p.argi-1], expanded...) 
+ } else { + p.args = append(p.args[:p.argi-1], append(expanded, p.args[p.argi+1:]...)...) + } + return p.Next() + } + + return &Token{p.argi, TokenArg, arg} +} + +func (p *ParseContext) Peek() *Token { + if len(p.peek) == 0 { + return p.Push(p.Next()) + } + return p.peek[len(p.peek)-1] +} + +func (p *ParseContext) Push(token *Token) *Token { + p.peek = append(p.peek, token) + return token +} + +func (p *ParseContext) pop() *Token { + end := len(p.peek) - 1 + token := p.peek[end] + p.peek = p.peek[0:end] + return token +} + +func (p *ParseContext) String() string { + return p.SelectedCommand.FullCommand() +} + +func (p *ParseContext) matchedFlag(flag *FlagClause, value string) { + p.Elements = append(p.Elements, &ParseElement{Clause: flag, Value: &value}) +} + +func (p *ParseContext) matchedArg(arg *ArgClause, value string) { + p.Elements = append(p.Elements, &ParseElement{Clause: arg, Value: &value}) +} + +func (p *ParseContext) matchedCmd(cmd *CmdClause) { + p.Elements = append(p.Elements, &ParseElement{Clause: cmd}) + p.mergeFlags(cmd.flagGroup) + p.mergeArgs(cmd.argGroup) + p.SelectedCommand = cmd +} + +// Expand arguments from a file. Lines starting with # will be treated as comments. +func ExpandArgsFromFile(filename string) (out []string, err error) { + r, err := os.Open(filename) + if err != nil { + return nil, err + } + defer r.Close() + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, "#") { + continue + } + out = append(out, line) + } + err = scanner.Err() + return +} + +func parse(context *ParseContext, app *Application) (err error) { + context.mergeFlags(app.flagGroup) + context.mergeArgs(app.argGroup) + + cmds := app.cmdGroup + ignoreDefault := context.ignoreDefault + +loop: + for !context.EOL() { + token := context.Peek() + + switch token.Type { + case TokenLong, TokenShort: + if flag, err := context.flags.parse(context); err != nil { + if !ignoreDefault { + if cmd := cmds.defaultSubcommand(); cmd != nil { + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + break + } + } + return err + } else if flag == HelpFlag { + ignoreDefault = true + } + + case TokenArg: + if cmds.have() { + selectedDefault := false + cmd, ok := cmds.commands[token.String()] + if !ok { + if !ignoreDefault { + if cmd = cmds.defaultSubcommand(); cmd != nil { + selectedDefault = true + } + } + if cmd == nil { + return fmt.Errorf("expected command but got %q", token) + } + } + if cmd == HelpCommand { + ignoreDefault = true + } + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + if !selectedDefault { + context.Next() + } + } else if context.arguments.have() { + if app.noInterspersed { + // no more flags + context.argsOnly = true + } + arg := context.nextArg() + if arg == nil { + break loop + } + context.matchedArg(arg, token.String()) + context.Next() + } else { + break loop + } + + case TokenEOL: + break loop + } + } + + // Move to innermost default command. + for !ignoreDefault { + if cmd := cmds.defaultSubcommand(); cmd != nil { + context.matchedCmd(cmd) + cmds = cmd.cmdGroup + } else { + break + } + } + + if !context.EOL() { + return fmt.Errorf("unexpected %s", context.Peek()) + } + + // Set defaults for all remaining args. 
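+ // (Arguments not supplied on the command line still receive their Default() values here; the sweep stops at a remainder-consuming argument.)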
+ for arg := context.nextArg(); arg != nil && !arg.consumesRemainder(); arg = context.nextArg() { + for _, defaultValue := range arg.defaultValues { + if err := arg.value.Set(defaultValue); err != nil { + return fmt.Errorf("invalid default value '%s' for argument '%s'", defaultValue, arg.name) + } + } + } + + return +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parser_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parser_test.go new file mode 100644 index 0000000..f6667ff --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/parser_test.go @@ -0,0 +1,42 @@ +package kingpin + +import ( + "io/ioutil" + "os" + "testing" + + "github.com/alecthomas/assert" +) + +func TestParserExpandFromFile(t *testing.T) { + f, err := ioutil.TempFile("", "") + assert.NoError(t, err) + defer os.Remove(f.Name()) + f.WriteString("hello\nworld\n") + f.Close() + + app := New("test", "") + arg0 := app.Arg("arg0", "").String() + arg1 := app.Arg("arg1", "").String() + + _, err = app.Parse([]string{"@" + f.Name()}) + assert.NoError(t, err) + assert.Equal(t, "hello", *arg0) + assert.Equal(t, "world", *arg1) +} + +func TestParseContextPush(t *testing.T) { + app := New("test", "") + app.Command("foo", "").Command("bar", "") + c := tokenize([]string{"foo", "bar"}, false) + a := c.Next() + assert.Equal(t, TokenArg, a.Type) + b := c.Next() + assert.Equal(t, TokenArg, b.Type) + c.Push(b) + c.Push(a) + a = c.Next() + assert.Equal(t, "foo", a.Value) + b = c.Next() + assert.Equal(t, "bar", b.Value) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go new file mode 100644 index 0000000..d9ad57e --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go @@ -0,0 +1,212 @@ +package kingpin + +import ( + "net" + "net/url" + "os" + "time" + + "github.com/alecthomas/units" +) + +type Settings interface { + SetValue(value Value) +} + +type parserMixin struct { + value Value + required bool +} + +func (p *parserMixin) SetValue(value Value) { + p.value = value +} + +// StringMap provides key=value parsing into a map. +func (p *parserMixin) StringMap() (target *map[string]string) { + target = &(map[string]string{}) + p.StringMapVar(target) + return +} + +// Duration sets the parser to a time.Duration parser. +func (p *parserMixin) Duration() (target *time.Duration) { + target = new(time.Duration) + p.DurationVar(target) + return +} + +// Bytes parses numeric byte units. eg. 1.5KB +func (p *parserMixin) Bytes() (target *units.Base2Bytes) { + target = new(units.Base2Bytes) + p.BytesVar(target) + return +} + +// IP sets the parser to a net.IP parser. +func (p *parserMixin) IP() (target *net.IP) { + target = new(net.IP) + p.IPVar(target) + return +} + +// TCP (host:port) address. +func (p *parserMixin) TCP() (target **net.TCPAddr) { + target = new(*net.TCPAddr) + p.TCPVar(target) + return +} + +// TCPVar (host:port) address. +func (p *parserMixin) TCPVar(target **net.TCPAddr) { + p.SetValue(newTCPAddrValue(target)) +} + +// ExistingFile sets the parser to one that requires and returns an existing file. +func (p *parserMixin) ExistingFile() (target *string) { + target = new(string) + p.ExistingFileVar(target) + return +} + +// ExistingDir sets the parser to one that requires and returns an existing directory. +func (p *parserMixin) ExistingDir() (target *string) { + target = new(string) + p.ExistingDirVar(target) + return +} + +// ExistingFileOrDir sets the parser to one that requires and returns an existing file OR directory. 
+func (p *parserMixin) ExistingFileOrDir() (target *string) { + target = new(string) + p.ExistingFileOrDirVar(target) + return +} + +// File returns an os.File against an existing file. +func (p *parserMixin) File() (target **os.File) { + target = new(*os.File) + p.FileVar(target) + return +} + +// OpenFile attempts to open a file with os.OpenFile(flag, perm). +func (p *parserMixin) OpenFile(flag int, perm os.FileMode) (target **os.File) { + target = new(*os.File) + p.OpenFileVar(target, flag, perm) + return +} + +// URL provides a valid, parsed url.URL. +func (p *parserMixin) URL() (target **url.URL) { + target = new(*url.URL) + p.URLVar(target) + return +} + +// StringMapVar provides key=value parsing into a map. +func (p *parserMixin) StringMapVar(target *map[string]string) { + p.SetValue(newStringMapValue(target)) +} + +// Float sets the parser to a float64 parser. +func (p *parserMixin) Float() (target *float64) { + return p.Float64() +} + +// FloatVar sets the parser to a float64 parser. +func (p *parserMixin) FloatVar(target *float64) { + p.Float64Var(target) +} + +// DurationVar sets the parser to a time.Duration parser. +func (p *parserMixin) DurationVar(target *time.Duration) { + p.SetValue(newDurationValue(target)) +} + +// BytesVar parses numeric byte units. eg. 1.5KB +func (p *parserMixin) BytesVar(target *units.Base2Bytes) { + p.SetValue(newBytesValue(target)) +} + +// IPVar sets the parser to a net.IP parser. +func (p *parserMixin) IPVar(target *net.IP) { + p.SetValue(newIPValue(target)) +} + +// ExistingFileVar sets the parser to one that requires and returns an existing file. +func (p *parserMixin) ExistingFileVar(target *string) { + p.SetValue(newExistingFileValue(target)) +} + +// ExistingDirVar sets the parser to one that requires and returns an existing directory. +func (p *parserMixin) ExistingDirVar(target *string) { + p.SetValue(newExistingDirValue(target)) +} + +// ExistingFileOrDirVar sets the parser to one that requires and returns an existing file or directory. +func (p *parserMixin) ExistingFileOrDirVar(target *string) { + p.SetValue(newExistingFileOrDirValue(target)) +} + +// FileVar opens an existing file. +func (p *parserMixin) FileVar(target **os.File) { + p.SetValue(newFileValue(target, os.O_RDONLY, 0)) +} + +// OpenFileVar calls os.OpenFile(flag, perm) +func (p *parserMixin) OpenFileVar(target **os.File, flag int, perm os.FileMode) { + p.SetValue(newFileValue(target, flag, perm)) +} + +// URLVar provides a valid, parsed url.URL. +func (p *parserMixin) URLVar(target **url.URL) { + p.SetValue(newURLValue(target)) +} + +// URLList provides a parsed list of url.URL values. +func (p *parserMixin) URLList() (target *[]*url.URL) { + target = new([]*url.URL) + p.URLListVar(target) + return +} + +// URLListVar provides a parsed list of url.URL values. +func (p *parserMixin) URLListVar(target *[]*url.URL) { + p.SetValue(newURLListValue(target)) +} + +// Enum allows a value from a set of options. +func (p *parserMixin) Enum(options ...string) (target *string) { + target = new(string) + p.EnumVar(target, options...) + return +} + +// EnumVar allows a value from a set of options. +func (p *parserMixin) EnumVar(target *string, options ...string) { + p.SetValue(newEnumFlag(target, options...)) +} + +// Enums allows a set of values from a set of options. +func (p *parserMixin) Enums(options ...string) (target *[]string) { + target = new([]string) + p.EnumsVar(target, options...) + return +} + +// EnumsVar allows a set of values from a set of options.
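+// +// A sketch of the enum family in use (option strings illustrative): +// +// level := app.Flag("level", "Log level.").Default("info").Enum("debug", "info", "warn")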
+func (p *parserMixin) EnumsVar(target *[]string, options ...string) { + p.SetValue(newEnumsFlag(target, options...)) +} + +// A Counter increments a number each time it is encountered. +func (p *parserMixin) Counter() (target *int) { + target = new(int) + p.CounterVar(target) + return +} + +func (p *parserMixin) CounterVar(target *int) { + p.SetValue(newCounterValue(target)) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parsers_test.go new file mode 100644 index 0000000..04e836c --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/parsers_test.go @@ -0,0 +1,98 @@ +package kingpin + +import ( + "io/ioutil" + "net" + "net/url" + "os" + + "github.com/alecthomas/assert" + + "testing" +) + +func TestParseStrings(t *testing.T) { + p := parserMixin{} + v := p.Strings() + p.value.Set("a") + p.value.Set("b") + assert.Equal(t, []string{"a", "b"}, *v) +} + +func TestStringsStringer(t *testing.T) { + target := []string{} + v := newAccumulator(&target, func(v interface{}) Value { return newStringValue(v.(*string)) }) + v.Set("hello") + v.Set("world") + assert.Equal(t, "hello,world", v.String()) +} + +func TestParseStringMap(t *testing.T) { + p := parserMixin{} + v := p.StringMap() + p.value.Set("a:b") + p.value.Set("b:c") + assert.Equal(t, map[string]string{"a": "b", "b": "c"}, *v) +} + +func TestParseIP(t *testing.T) { + p := parserMixin{} + v := p.IP() + p.value.Set("10.1.1.2") + ip := net.ParseIP("10.1.1.2") + assert.Equal(t, ip, *v) +} + +func TestParseURL(t *testing.T) { + p := parserMixin{} + v := p.URL() + p.value.Set("http://w3.org") + u, err := url.Parse("http://w3.org") + assert.NoError(t, err) + assert.Equal(t, *u, **v) +} + +func TestParseExistingFile(t *testing.T) { + f, err := ioutil.TempFile("", "") + if err != nil { + t.Fatal(err) + } + defer f.Close() + defer os.Remove(f.Name()) + + p := parserMixin{} + v := p.ExistingFile() + err = p.value.Set(f.Name()) + assert.NoError(t, err) + assert.Equal(t, f.Name(), *v) + err = p.value.Set("/etc/hostsDEFINITELYMISSING") + assert.Error(t, err) +} + +func TestParseTCPAddr(t *testing.T) { + p := parserMixin{} + v := p.TCP() + err := p.value.Set("127.0.0.1:1234") + assert.NoError(t, err) + expected, err := net.ResolveTCPAddr("tcp", "127.0.0.1:1234") + assert.NoError(t, err) + assert.Equal(t, *expected, **v) +} + +func TestParseTCPAddrList(t *testing.T) { + p := parserMixin{} + _ = p.TCPList() + err := p.value.Set("127.0.0.1:1234") + assert.NoError(t, err) + err = p.value.Set("127.0.0.1:1235") + assert.NoError(t, err) + assert.Equal(t, "127.0.0.1:1234,127.0.0.1:1235", p.value.String()) +} + +func TestFloat32(t *testing.T) { + p := parserMixin{} + v := p.Float32() + err := p.value.Set("123.45") + assert.NoError(t, err) + assert.InEpsilon(t, 123.45, *v, 0.001) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go b/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go new file mode 100644 index 0000000..97b5c9f --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go @@ -0,0 +1,262 @@ +package kingpin + +// Default usage template. 
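+// +// Any of the templates below can be swapped in at runtime, e.g. (sketch): app.UsageTemplate(kingpin.CompactUsageTemplate).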
+var DefaultUsageTemplate = `{{define "FormatCommand"}}\ +{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ +{{end}}\ + +{{define "FormatCommands"}}\ +{{range .FlattenedCommands}}\ +{{if not .Hidden}}\ + {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} +{{.Help|Wrap 4}} +{{end}}\ +{{end}}\ +{{end}}\ + +{{define "FormatUsage"}}\ +{{template "FormatCommand" .}}{{if .Commands}} <command> [<args> ...]{{end}} +{{if .Help}} +{{.Help|Wrap 0}}\ +{{end}}\ + +{{end}}\ + +{{if .Context.SelectedCommand}}\ +usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} +{{else}}\ +usage: {{.App.Name}}{{template "FormatUsage" .App}} +{{end}}\ +{{if .Context.Flags}}\ +Flags: +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.Args}}\ +Args: +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.SelectedCommand}}\ +{{if len .Context.SelectedCommand.Commands}}\ +Subcommands: +{{template "FormatCommands" .Context.SelectedCommand}} +{{end}}\ +{{else if .App.Commands}}\ +Commands: +{{template "FormatCommands" .App}} +{{end}}\ +` + +// Usage template where command's optional flags are listed separately +var SeparateOptionalFlagsUsageTemplate = `{{define "FormatCommand"}}\ +{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ +{{end}}\ + +{{define "FormatCommands"}}\ +{{range .FlattenedCommands}}\ +{{if not .Hidden}}\ + {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} +{{.Help|Wrap 4}} +{{end}}\ +{{end}}\ +{{end}}\ + +{{define "FormatUsage"}}\ +{{template "FormatCommand" .}}{{if .Commands}} <command> [<args> ...]{{end}} +{{if .Help}} +{{.Help|Wrap 0}}\ +{{end}}\ + +{{end}}\ +{{if .Context.SelectedCommand}}\ +usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} +{{else}}\ +usage: {{.App.Name}}{{template "FormatUsage" .App}} +{{end}}\ + +{{if .Context.Flags|RequiredFlags}}\ +Required flags: +{{.Context.Flags|RequiredFlags|FlagsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.Flags|OptionalFlags}}\ +Optional flags: +{{.Context.Flags|OptionalFlags|FlagsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.Args}}\ +Args: +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.SelectedCommand}}\ +Subcommands: +{{if .Context.SelectedCommand.Commands}}\ +{{template "FormatCommands" .Context.SelectedCommand}} +{{end}}\ +{{else if .App.Commands}}\ +Commands: +{{template "FormatCommands" .App}} +{{end}}\ +` + +// Usage template with compactly formatted commands.
+var CompactUsageTemplate = `{{define "FormatCommand"}}\ +{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ +{{end}}\ + +{{define "FormatCommandList"}}\ +{{range .}}\ +{{if not .Hidden}}\ +{{.Depth|Indent}}{{.Name}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} +{{end}}\ +{{template "FormatCommandList" .Commands}}\ +{{end}}\ +{{end}}\ + +{{define "FormatUsage"}}\ +{{template "FormatCommand" .}}{{if .Commands}} <command> [<args> ...]{{end}} +{{if .Help}} +{{.Help|Wrap 0}}\ +{{end}}\ + +{{end}}\ + +{{if .Context.SelectedCommand}}\ +usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} +{{else}}\ +usage: {{.App.Name}}{{template "FormatUsage" .App}} +{{end}}\ +{{if .Context.Flags}}\ +Flags: +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.Args}}\ +Args: +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.SelectedCommand}}\ +{{if .Context.SelectedCommand.Commands}}\ +Commands: + {{.Context.SelectedCommand}} +{{template "FormatCommandList" .Context.SelectedCommand.Commands}} +{{end}}\ +{{else if .App.Commands}}\ +Commands: +{{template "FormatCommandList" .App.Commands}} +{{end}}\ +` + +var ManPageTemplate = `{{define "FormatFlags"}}\ +{{range .Flags}}\ +{{if not .Hidden}}\ +.TP +\fB{{if .Short}}-{{.Short|Char}}, {{end}}--{{.Name}}{{if not .IsBoolFlag}}={{.FormatPlaceHolder}}{{end}}\\fR +{{.Help}} +{{end}}\ +{{end}}\ +{{end}}\ + +{{define "FormatCommand"}}\ +{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}{{if .Default}}*{{end}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ +{{end}}\ + +{{define "FormatCommands"}}\ +{{range .FlattenedCommands}}\ +{{if not .Hidden}}\ +.SS +\fB{{.FullCommand}}{{template "FormatCommand" .}}\\fR +.PP +{{.Help}} +{{template "FormatFlags" .}}\ +{{end}}\ +{{end}}\ +{{end}}\ + +{{define "FormatUsage"}}\ +{{template "FormatCommand" .}}{{if .Commands}} <command> [<args> ...]{{end}}\\fR +{{end}}\ + +.TH {{.App.Name}} 1 {{.App.Version}} "{{.App.Author}}" +.SH "NAME" +{{.App.Name}} +.SH "SYNOPSIS" +.TP +\fB{{.App.Name}}{{template "FormatUsage" .App}} +.SH "DESCRIPTION" +{{.App.Help}} +.SH "OPTIONS" +{{template "FormatFlags" .App}}\ +{{if .App.Commands}}\ +.SH "COMMANDS" +{{template "FormatCommands" .App}}\ +{{end}}\ +` + +// Long-form help usage template. +var LongHelpTemplate = `{{define "FormatCommand"}}\ +{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ +{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ +{{end}}\ + +{{define "FormatCommands"}}\ +{{range .FlattenedCommands}}\ +{{if not .Hidden}}\ + {{.FullCommand}}{{template "FormatCommand" .}} +{{.Help|Wrap 4}} +{{with .Flags|FlagsToTwoColumns}}{{FormatTwoColumnsWithIndent .
4 2}}{{end}} +{{end}}\ +{{end}}\ +{{end}}\ + +{{define "FormatUsage"}}\ +{{template "FormatCommand" .}}{{if .Commands}} <command> [<args> ...]{{end}} +{{if .Help}} +{{.Help|Wrap 0}}\ +{{end}}\ + +{{end}}\ + +usage: {{.App.Name}}{{template "FormatUsage" .App}} +{{if .Context.Flags}}\ +Flags: +{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .Context.Args}}\ +Args: +{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} +{{end}}\ +{{if .App.Commands}}\ +Commands: +{{template "FormatCommands" .App}} +{{end}}\ +` + +var BashCompletionTemplate = ` +_{{.App.Name}}_bash_autocomplete() { + local cur prev opts base + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + return 0 +} +complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} + +` + +var ZshCompletionTemplate = ` +#compdef {{.App.Name}} +autoload -U compinit && compinit +autoload -U bashcompinit && bashcompinit + +_{{.App.Name}}_bash_autocomplete() { + local cur prev opts base + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + return 0 +} +complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} +` diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go b/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go new file mode 100644 index 0000000..6c9124e --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go @@ -0,0 +1,211 @@ +package kingpin + +import ( + "bytes" + "fmt" + "go/doc" + "io" + "strings" + + "github.com/alecthomas/template" +) + +var ( + preIndent = " " +) + +func formatTwoColumns(w io.Writer, indent, padding, width int, rows [][2]string) { + // Find size of first column. + s := 0 + for _, row := range rows { + if c := len(row[0]); c > s && c < 30 { + s = c + } + } + + indentStr := strings.Repeat(" ", indent) + offsetStr := strings.Repeat(" ", s+padding) + + for _, row := range rows { + buf := bytes.NewBuffer(nil) + doc.ToText(buf, row[1], "", preIndent, width-s-padding-indent) + lines := strings.Split(strings.TrimRight(buf.String(), "\n"), "\n") + fmt.Fprintf(w, "%s%-*s%*s", indentStr, s, row[0], padding, "") + if len(row[0]) >= 30 { + fmt.Fprintf(w, "\n%s%s", indentStr, offsetStr) + } + fmt.Fprintf(w, "%s\n", lines[0]) + for _, line := range lines[1:] { + fmt.Fprintf(w, "%s%s%s\n", indentStr, offsetStr, line) + } + } +} + +// Usage writes application usage to w. It parses args to determine +// appropriate help context, such as which command to show help for.
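+// +// For example, a.Usage([]string{"mycmd"}) renders help scoped to the subcommand named "mycmd" (a sketch; the name is illustrative).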
+func (a *Application) Usage(args []string) { + context, err := a.parseContext(true, args) + a.FatalIfError(err, "") + if err := a.UsageForContextWithTemplate(context, 2, a.usageTemplate); err != nil { + panic(err) + } +} + +func formatAppUsage(app *ApplicationModel) string { + s := []string{app.Name} + if len(app.Flags) > 0 { + s = append(s, app.FlagSummary()) + } + if len(app.Args) > 0 { + s = append(s, app.ArgSummary()) + } + return strings.Join(s, " ") +} + +func formatCmdUsage(app *ApplicationModel, cmd *CmdModel) string { + s := []string{app.Name, cmd.String()} + if len(app.Flags) > 0 { + s = append(s, app.FlagSummary()) + } + if len(app.Args) > 0 { + s = append(s, app.ArgSummary()) + } + return strings.Join(s, " ") +} + +func formatFlag(haveShort bool, flag *FlagModel) string { + flagString := "" + if flag.Short != 0 { + flagString += fmt.Sprintf("-%c, --%s", flag.Short, flag.Name) + } else { + if haveShort { + flagString += fmt.Sprintf(" --%s", flag.Name) + } else { + flagString += fmt.Sprintf("--%s", flag.Name) + } + } + if !flag.IsBoolFlag() { + flagString += fmt.Sprintf("=%s", flag.FormatPlaceHolder()) + } + if v, ok := flag.Value.(repeatableFlag); ok && v.IsCumulative() { + flagString += " ..." + } + return flagString +} + +type templateParseContext struct { + SelectedCommand *CmdModel + *FlagGroupModel + *ArgGroupModel +} + +type templateContext struct { + App *ApplicationModel + Width int + Context *templateParseContext +} + +// UsageForContext displays usage information from a ParseContext (obtained from +// Application.ParseContext() or Action(f) callbacks). +func (a *Application) UsageForContext(context *ParseContext) error { + return a.UsageForContextWithTemplate(context, 2, a.usageTemplate) +} + +// UsageForContextWithTemplate is the base usage function. You generally don't need to use this. 
+func (a *Application) UsageForContextWithTemplate(context *ParseContext, indent int, tmpl string) error { + width := guessWidth(a.writer) + funcs := template.FuncMap{ + "Indent": func(level int) string { + return strings.Repeat(" ", level*indent) + }, + "Wrap": func(indent int, s string) string { + buf := bytes.NewBuffer(nil) + indentText := strings.Repeat(" ", indent) + doc.ToText(buf, s, indentText, indentText, width-indent) + return buf.String() + }, + "FormatFlag": formatFlag, + "FlagsToTwoColumns": func(f []*FlagModel) [][2]string { + rows := [][2]string{} + haveShort := false + for _, flag := range f { + if flag.Short != 0 { + haveShort = true + break + } + } + for _, flag := range f { + if !flag.Hidden { + rows = append(rows, [2]string{formatFlag(haveShort, flag), flag.Help}) + } + } + return rows + }, + "RequiredFlags": func(f []*FlagModel) []*FlagModel { + requiredFlags := []*FlagModel{} + for _, flag := range f { + if flag.Required == true { + requiredFlags = append(requiredFlags, flag) + } + } + return requiredFlags + }, + "OptionalFlags": func(f []*FlagModel) []*FlagModel { + optionalFlags := []*FlagModel{} + for _, flag := range f { + if flag.Required == false { + optionalFlags = append(optionalFlags, flag) + } + } + return optionalFlags + }, + "ArgsToTwoColumns": func(a []*ArgModel) [][2]string { + rows := [][2]string{} + for _, arg := range a { + s := "<" + arg.Name + ">" + if !arg.Required { + s = "[" + s + "]" + } + rows = append(rows, [2]string{s, arg.Help}) + } + return rows + }, + "FormatTwoColumns": func(rows [][2]string) string { + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, indent, indent, width, rows) + return buf.String() + }, + "FormatTwoColumnsWithIndent": func(rows [][2]string, indent, padding int) string { + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, indent, padding, width, rows) + return buf.String() + }, + "FormatAppUsage": formatAppUsage, + "FormatCommandUsage": formatCmdUsage, + "IsCumulative": func(value Value) bool { + r, ok := value.(remainderArg) + return ok && r.IsCumulative() + }, + "Char": func(c rune) string { + return string(c) + }, + } + t, err := template.New("usage").Funcs(funcs).Parse(tmpl) + if err != nil { + return err + } + var selectedCommand *CmdModel + if context.SelectedCommand != nil { + selectedCommand = context.SelectedCommand.Model() + } + ctx := templateContext{ + App: a.Model(), + Width: width, + Context: &templateParseContext{ + SelectedCommand: selectedCommand, + FlagGroupModel: context.flags.Model(), + ArgGroupModel: context.arguments.Model(), + }, + } + return t.Execute(a.writer, ctx) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/usage_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/usage_test.go new file mode 100644 index 0000000..441b90c --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/usage_test.go @@ -0,0 +1,65 @@ +package kingpin + +import ( + "bytes" + "strings" + "testing" + + "github.com/alecthomas/assert" +) + +func TestFormatTwoColumns(t *testing.T) { + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, 2, 2, 20, [][2]string{ + {"--hello", "Hello world help with something that is cool."}, + }) + expected := ` --hello Hello + world + help with + something + that is + cool. 
+` + assert.Equal(t, expected, buf.String()) +} + +func TestFormatTwoColumnsWide(t *testing.T) { + samples := [][2]string{ + {strings.Repeat("x", 29), "29 chars"}, + {strings.Repeat("x", 30), "30 chars"}} + buf := bytes.NewBuffer(nil) + formatTwoColumns(buf, 0, 0, 200, samples) + expected := `xxxxxxxxxxxxxxxxxxxxxxxxxxxxx29 chars +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + 30 chars +` + assert.Equal(t, expected, buf.String()) +} + +func TestHiddenCommand(t *testing.T) { + templates := []struct{ name, template string }{ + {"default", DefaultUsageTemplate}, + {"Compact", CompactUsageTemplate}, + {"Long", LongHelpTemplate}, + {"Man", ManPageTemplate}, + } + + var buf bytes.Buffer + t.Log("1") + + a := New("test", "Test").Writer(&buf).Terminate(nil) + a.Command("visible", "visible") + a.Command("hidden", "hidden").Hidden() + + for _, tp := range templates { + buf.Reset() + a.UsageTemplate(tp.template) + a.Parse(nil) + // a.Parse([]string{"--help"}) + usage := buf.String() + t.Logf("Usage for %s is:\n%s\n", tp.name, usage) + + assert.NotContains(t, usage, "hidden") + assert.Contains(t, usage, "visible") + } +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values.go b/vendor/gopkg.in/alecthomas/kingpin.v2/values.go new file mode 100644 index 0000000..b986f12 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/values.go @@ -0,0 +1,466 @@ +package kingpin + +//go:generate go run ./cmd/genvalues/main.go + +import ( + "fmt" + "net" + "net/url" + "os" + "reflect" + "regexp" + "strings" + "time" + + "github.com/alecthomas/units" +) + +// NOTE: Most of the base type values were lifted from: +// http://golang.org/src/pkg/flag/flag.go?s=20146:20222 + +// Value is the interface to the dynamic value stored in a flag. +// (The default value is represented as a string.) +// +// If a Value has an IsBoolFlag() bool method returning true, the command-line +// parser makes --name equivalent to -name=true rather than using the next +// command-line argument, and adds a --no-name counterpart for negating the +// flag. +type Value interface { + String() string + Set(string) error +} + +// Getter is an interface that allows the contents of a Value to be retrieved. +// It wraps the Value interface, rather than being part of it, because it +// appeared after Go 1 and its compatibility rules. All Value types provided +// by this package satisfy the Getter interface. +type Getter interface { + Value + Get() interface{} +} + +// Optional interface to indicate boolean flags that don't accept a value, and +// implicitly have a --no- negation counterpart. +type boolFlag interface { + Value + IsBoolFlag() bool +} + +// Optional interface for arguments that cumulatively consume all remaining +// input. +type remainderArg interface { + Value + IsCumulative() bool +} + +// Optional interface for flags that can be repeated. +type repeatableFlag interface { + Value + IsCumulative() bool +} + +type accumulator struct { + element func(value interface{}) Value + typ reflect.Type + slice reflect.Value +} + +// Use reflection to accumulate values into a slice. 
+// +// target := []string{} +// newAccumulator(&target, func (value interface{}) Value { +// return newStringValue(value.(*string)) +// }) +func newAccumulator(slice interface{}, element func(value interface{}) Value) *accumulator { + typ := reflect.TypeOf(slice) + if typ.Kind() != reflect.Ptr || typ.Elem().Kind() != reflect.Slice { + panic("expected a pointer to a slice") + } + return &accumulator{ + element: element, + typ: typ.Elem().Elem(), + slice: reflect.ValueOf(slice), + } +} + +func (a *accumulator) String() string { + out := []string{} + s := a.slice.Elem() + for i := 0; i < s.Len(); i++ { + out = append(out, a.element(s.Index(i).Addr().Interface()).String()) + } + return strings.Join(out, ",") +} + +func (a *accumulator) Set(value string) error { + e := reflect.New(a.typ) + if err := a.element(e.Interface()).Set(value); err != nil { + return err + } + slice := reflect.Append(a.slice.Elem(), e.Elem()) + a.slice.Elem().Set(slice) + return nil +} + +func (a *accumulator) Get() interface{} { + return a.slice.Interface() +} + +func (a *accumulator) IsCumulative() bool { + return true +} + +func (b *boolValue) IsBoolFlag() bool { return true } + +// -- time.Duration Value +type durationValue time.Duration + +func newDurationValue(p *time.Duration) *durationValue { + return (*durationValue)(p) +} + +func (d *durationValue) Set(s string) error { + v, err := time.ParseDuration(s) + *d = durationValue(v) + return err +} + +func (d *durationValue) Get() interface{} { return time.Duration(*d) } + +func (d *durationValue) String() string { return (*time.Duration)(d).String() } + +// -- map[string]string Value +type stringMapValue map[string]string + +func newStringMapValue(p *map[string]string) *stringMapValue { + return (*stringMapValue)(p) +} + +var stringMapRegex = regexp.MustCompile("[:=]") + +func (s *stringMapValue) Set(value string) error { + parts := stringMapRegex.Split(value, 2) + if len(parts) != 2 { + return fmt.Errorf("expected KEY=VALUE got '%s'", value) + } + (*s)[parts[0]] = parts[1] + return nil +} + +func (s *stringMapValue) Get() interface{} { + return (map[string]string)(*s) +} + +func (s *stringMapValue) String() string { + return fmt.Sprintf("%s", map[string]string(*s)) +} + +func (s *stringMapValue) IsCumulative() bool { + return true +} + +// -- net.IP Value +type ipValue net.IP + +func newIPValue(p *net.IP) *ipValue { + return (*ipValue)(p) +} + +func (i *ipValue) Set(value string) error { + if ip := net.ParseIP(value); ip == nil { + return fmt.Errorf("'%s' is not an IP address", value) + } else { + *i = *(*ipValue)(&ip) + return nil + } +} + +func (i *ipValue) Get() interface{} { + return (net.IP)(*i) +} + +func (i *ipValue) String() string { + return (*net.IP)(i).String() +} + +// -- *net.TCPAddr Value +type tcpAddrValue struct { + addr **net.TCPAddr +} + +func newTCPAddrValue(p **net.TCPAddr) *tcpAddrValue { + return &tcpAddrValue{p} +} + +func (i *tcpAddrValue) Set(value string) error { + if addr, err := net.ResolveTCPAddr("tcp", value); err != nil { + return fmt.Errorf("'%s' is not a valid TCP address: %s", value, err) + } else { + *i.addr = addr + return nil + } +} + +func (t *tcpAddrValue) Get() interface{} { + return (*net.TCPAddr)(*t.addr) +} + +func (i *tcpAddrValue) String() string { + return (*i.addr).String() +} + +// -- existingFile Value + +type fileStatValue struct { + path *string + predicate func(os.FileInfo) error +} + +func newFileStatValue(p *string, predicate func(os.FileInfo) error) *fileStatValue { + return &fileStatValue{ + path: p, + 
predicate: predicate, + } +} + +func (e *fileStatValue) Set(value string) error { + if s, err := os.Stat(value); os.IsNotExist(err) { + return fmt.Errorf("path '%s' does not exist", value) + } else if err != nil { + return err + } else if err := e.predicate(s); err != nil { + return err + } + *e.path = value + return nil +} + +func (f *fileStatValue) Get() interface{} { + return (string)(*f.path) +} + +func (e *fileStatValue) String() string { + return *e.path +} + +// -- os.File value + +type fileValue struct { + f **os.File + flag int + perm os.FileMode +} + +func newFileValue(p **os.File, flag int, perm os.FileMode) *fileValue { + return &fileValue{p, flag, perm} +} + +func (f *fileValue) Set(value string) error { + if fd, err := os.OpenFile(value, f.flag, f.perm); err != nil { + return err + } else { + *f.f = fd + return nil + } +} + +func (f *fileValue) Get() interface{} { + return (*os.File)(*f.f) +} + +func (f *fileValue) String() string { + if *f.f == nil { + return "" + } + return (*f.f).Name() +} + +// -- url.URL Value +type urlValue struct { + u **url.URL +} + +func newURLValue(p **url.URL) *urlValue { + return &urlValue{p} +} + +func (u *urlValue) Set(value string) error { + if url, err := url.Parse(value); err != nil { + return fmt.Errorf("invalid URL: %s", err) + } else { + *u.u = url + return nil + } +} + +func (u *urlValue) Get() interface{} { + return (*url.URL)(*u.u) +} + +func (u *urlValue) String() string { + if *u.u == nil { + return "" + } + return (*u.u).String() +} + +// -- []*url.URL Value +type urlListValue []*url.URL + +func newURLListValue(p *[]*url.URL) *urlListValue { + return (*urlListValue)(p) +} + +func (u *urlListValue) Set(value string) error { + if url, err := url.Parse(value); err != nil { + return fmt.Errorf("invalid URL: %s", err) + } else { + *u = append(*u, url) + return nil + } +} + +func (u *urlListValue) Get() interface{} { + return ([]*url.URL)(*u) +} + +func (u *urlListValue) String() string { + out := []string{} + for _, url := range *u { + out = append(out, url.String()) + } + return strings.Join(out, ",") +} + +// A flag whose value must be in a set of options. 
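+// (Set rejects any value outside options; values reach this type via the Enum/EnumVar helpers in parsers.go.)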
+type enumValue struct { + value *string + options []string +} + +func newEnumFlag(target *string, options ...string) *enumValue { + return &enumValue{ + value: target, + options: options, + } +} + +func (a *enumValue) String() string { + return *a.value +} + +func (a *enumValue) Set(value string) error { + for _, v := range a.options { + if v == value { + *a.value = value + return nil + } + } + return fmt.Errorf("enum value must be one of %s, got '%s'", strings.Join(a.options, ","), value) +} + +func (e *enumValue) Get() interface{} { + return (string)(*e.value) +} + +// -- []string Enum Value +type enumsValue struct { + value *[]string + options []string +} + +func newEnumsFlag(target *[]string, options ...string) *enumsValue { + return &enumsValue{ + value: target, + options: options, + } +} + +func (s *enumsValue) Set(value string) error { + for _, v := range s.options { + if v == value { + *s.value = append(*s.value, value) + return nil + } + } + return fmt.Errorf("enum value must be one of %s, got '%s'", strings.Join(s.options, ","), value) +} + +func (e *enumsValue) Get() interface{} { + return ([]string)(*e.value) +} + +func (s *enumsValue) String() string { + return strings.Join(*s.value, ",") +} + +func (s *enumsValue) IsCumulative() bool { + return true +} + +// -- units.Base2Bytes Value +type bytesValue units.Base2Bytes + +func newBytesValue(p *units.Base2Bytes) *bytesValue { + return (*bytesValue)(p) +} + +func (d *bytesValue) Set(s string) error { + v, err := units.ParseBase2Bytes(s) + *d = bytesValue(v) + return err +} + +func (d *bytesValue) Get() interface{} { return units.Base2Bytes(*d) } + +func (d *bytesValue) String() string { return (*units.Base2Bytes)(d).String() } + +func newExistingFileValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { + if s.IsDir() { + return fmt.Errorf("'%s' is a directory", s.Name()) + } + return nil + }) +} + +func newExistingDirValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { + if !s.IsDir() { + return fmt.Errorf("'%s' is a file", s.Name()) + } + return nil + }) +} + +func newExistingFileOrDirValue(target *string) *fileStatValue { + return newFileStatValue(target, func(s os.FileInfo) error { return nil }) +} + +type counterValue int + +func newCounterValue(n *int) *counterValue { + return (*counterValue)(n) +} + +func (c *counterValue) Set(s string) error { + *c++ + return nil +} + +func (c *counterValue) Get() interface{} { return (int)(*c) } +func (c *counterValue) IsBoolFlag() bool { return true } +func (c *counterValue) String() string { return fmt.Sprintf("%d", *c) } +func (c *counterValue) IsCumulative() bool { return true } + +func resolveHost(value string) (net.IP, error) { + if ip := net.ParseIP(value); ip != nil { + return ip, nil + } else { + if addr, err := net.ResolveIPAddr("ip", value); err != nil { + return nil, err + } else { + return addr.IP, nil + } + } +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values.json b/vendor/gopkg.in/alecthomas/kingpin.v2/values.json new file mode 100644 index 0000000..23c6744 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/values.json @@ -0,0 +1,25 @@ +[ + {"type": "bool", "parser": "strconv.ParseBool(s)"}, + {"type": "string", "parser": "s, error(nil)", "format": "string(*f.v)", "plural": "Strings"}, + {"type": "uint", "parser": "strconv.ParseUint(s, 0, 64)", "plural": "Uints"}, + {"type": "uint8", "parser": "strconv.ParseUint(s, 0, 8)"}, + {"type": "uint16", "parser": 
"strconv.ParseUint(s, 0, 16)"}, + {"type": "uint32", "parser": "strconv.ParseUint(s, 0, 32)"}, + {"type": "uint64", "parser": "strconv.ParseUint(s, 0, 64)"}, + {"type": "int", "parser": "strconv.ParseFloat(s, 64)", "plural": "Ints"}, + {"type": "int8", "parser": "strconv.ParseInt(s, 0, 8)"}, + {"type": "int16", "parser": "strconv.ParseInt(s, 0, 16)"}, + {"type": "int32", "parser": "strconv.ParseInt(s, 0, 32)"}, + {"type": "int64", "parser": "strconv.ParseInt(s, 0, 64)"}, + {"type": "float64", "parser": "strconv.ParseFloat(s, 64)"}, + {"type": "float32", "parser": "strconv.ParseFloat(s, 32)"}, + {"name": "Duration", "type": "time.Duration", "no_value_parser": true}, + {"name": "IP", "type": "net.IP", "no_value_parser": true}, + {"name": "TCPAddr", "Type": "*net.TCPAddr", "plural": "TCPList", "no_value_parser": true}, + {"name": "ExistingFile", "Type": "string", "plural": "ExistingFiles", "no_value_parser": true}, + {"name": "ExistingDir", "Type": "string", "plural": "ExistingDirs", "no_value_parser": true}, + {"name": "ExistingFileOrDir", "Type": "string", "plural": "ExistingFilesOrDirs", "no_value_parser": true}, + {"name": "Regexp", "Type": "*regexp.Regexp", "parser": "regexp.Compile(s)"}, + {"name": "ResolvedIP", "Type": "net.IP", "parser": "resolveHost(s)", "help": "Resolve a hostname or IP to an IP."}, + {"name": "HexBytes", "Type": "[]byte", "parser": "hex.DecodeString(s)", "help": "Bytes as a hex string."} +] diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go b/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go new file mode 100644 index 0000000..602cfc9 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go @@ -0,0 +1,821 @@ +package kingpin + +import ( + "encoding/hex" + "fmt" + "net" + "regexp" + "strconv" + "time" +) + +// This file is autogenerated by "go generate .". Do not modify. + +// -- bool Value +type boolValue struct{ v *bool } + +func newBoolValue(p *bool) *boolValue { + return &boolValue{p} +} + +func (f *boolValue) Set(s string) error { + v, err := strconv.ParseBool(s) + if err == nil { + *f.v = (bool)(v) + } + return err +} + +func (f *boolValue) Get() interface{} { return (bool)(*f.v) } + +func (f *boolValue) String() string { return fmt.Sprintf("%v", *f) } + +// Bool parses the next command-line value as bool. +func (p *parserMixin) Bool() (target *bool) { + target = new(bool) + p.BoolVar(target) + return +} + +func (p *parserMixin) BoolVar(target *bool) { + p.SetValue(newBoolValue(target)) +} + +// BoolList accumulates bool values into a slice. +func (p *parserMixin) BoolList() (target *[]bool) { + target = new([]bool) + p.BoolListVar(target) + return +} + +func (p *parserMixin) BoolListVar(target *[]bool) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newBoolValue(v.(*bool)) + })) +} + +// -- string Value +type stringValue struct{ v *string } + +func newStringValue(p *string) *stringValue { + return &stringValue{p} +} + +func (f *stringValue) Set(s string) error { + v, err := s, error(nil) + if err == nil { + *f.v = (string)(v) + } + return err +} + +func (f *stringValue) Get() interface{} { return (string)(*f.v) } + +func (f *stringValue) String() string { return string(*f.v) } + +// String parses the next command-line value as string. +func (p *parserMixin) String() (target *string) { + target = new(string) + p.StringVar(target) + return +} + +func (p *parserMixin) StringVar(target *string) { + p.SetValue(newStringValue(target)) +} + +// Strings accumulates string values into a slice. 
+func (p *parserMixin) Strings() (target *[]string) { + target = new([]string) + p.StringsVar(target) + return +} + +func (p *parserMixin) StringsVar(target *[]string) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newStringValue(v.(*string)) + })) +} + +// -- uint Value +type uintValue struct{ v *uint } + +func newUintValue(p *uint) *uintValue { + return &uintValue{p} +} + +func (f *uintValue) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + if err == nil { + *f.v = (uint)(v) + } + return err +} + +func (f *uintValue) Get() interface{} { return (uint)(*f.v) } + +func (f *uintValue) String() string { return fmt.Sprintf("%v", *f) } + +// Uint parses the next command-line value as uint. +func (p *parserMixin) Uint() (target *uint) { + target = new(uint) + p.UintVar(target) + return +} + +func (p *parserMixin) UintVar(target *uint) { + p.SetValue(newUintValue(target)) +} + +// Uints accumulates uint values into a slice. +func (p *parserMixin) Uints() (target *[]uint) { + target = new([]uint) + p.UintsVar(target) + return +} + +func (p *parserMixin) UintsVar(target *[]uint) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newUintValue(v.(*uint)) + })) +} + +// -- uint8 Value +type uint8Value struct{ v *uint8 } + +func newUint8Value(p *uint8) *uint8Value { + return &uint8Value{p} +} + +func (f *uint8Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 8) + if err == nil { + *f.v = (uint8)(v) + } + return err +} + +func (f *uint8Value) Get() interface{} { return (uint8)(*f.v) } + +func (f *uint8Value) String() string { return fmt.Sprintf("%v", *f) } + +// Uint8 parses the next command-line value as uint8. +func (p *parserMixin) Uint8() (target *uint8) { + target = new(uint8) + p.Uint8Var(target) + return +} + +func (p *parserMixin) Uint8Var(target *uint8) { + p.SetValue(newUint8Value(target)) +} + +// Uint8List accumulates uint8 values into a slice. +func (p *parserMixin) Uint8List() (target *[]uint8) { + target = new([]uint8) + p.Uint8ListVar(target) + return +} + +func (p *parserMixin) Uint8ListVar(target *[]uint8) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newUint8Value(v.(*uint8)) + })) +} + +// -- uint16 Value +type uint16Value struct{ v *uint16 } + +func newUint16Value(p *uint16) *uint16Value { + return &uint16Value{p} +} + +func (f *uint16Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 16) + if err == nil { + *f.v = (uint16)(v) + } + return err +} + +func (f *uint16Value) Get() interface{} { return (uint16)(*f.v) } + +func (f *uint16Value) String() string { return fmt.Sprintf("%v", *f) } + +// Uint16 parses the next command-line value as uint16. +func (p *parserMixin) Uint16() (target *uint16) { + target = new(uint16) + p.Uint16Var(target) + return +} + +func (p *parserMixin) Uint16Var(target *uint16) { + p.SetValue(newUint16Value(target)) +} + +// Uint16List accumulates uint16 values into a slice. 
+func (p *parserMixin) Uint16List() (target *[]uint16) { + target = new([]uint16) + p.Uint16ListVar(target) + return +} + +func (p *parserMixin) Uint16ListVar(target *[]uint16) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newUint16Value(v.(*uint16)) + })) +} + +// -- uint32 Value +type uint32Value struct{ v *uint32 } + +func newUint32Value(p *uint32) *uint32Value { + return &uint32Value{p} +} + +func (f *uint32Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 32) + if err == nil { + *f.v = (uint32)(v) + } + return err +} + +func (f *uint32Value) Get() interface{} { return (uint32)(*f.v) } + +func (f *uint32Value) String() string { return fmt.Sprintf("%v", *f) } + +// Uint32 parses the next command-line value as uint32. +func (p *parserMixin) Uint32() (target *uint32) { + target = new(uint32) + p.Uint32Var(target) + return +} + +func (p *parserMixin) Uint32Var(target *uint32) { + p.SetValue(newUint32Value(target)) +} + +// Uint32List accumulates uint32 values into a slice. +func (p *parserMixin) Uint32List() (target *[]uint32) { + target = new([]uint32) + p.Uint32ListVar(target) + return +} + +func (p *parserMixin) Uint32ListVar(target *[]uint32) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newUint32Value(v.(*uint32)) + })) +} + +// -- uint64 Value +type uint64Value struct{ v *uint64 } + +func newUint64Value(p *uint64) *uint64Value { + return &uint64Value{p} +} + +func (f *uint64Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + if err == nil { + *f.v = (uint64)(v) + } + return err +} + +func (f *uint64Value) Get() interface{} { return (uint64)(*f.v) } + +func (f *uint64Value) String() string { return fmt.Sprintf("%v", *f) } + +// Uint64 parses the next command-line value as uint64. +func (p *parserMixin) Uint64() (target *uint64) { + target = new(uint64) + p.Uint64Var(target) + return +} + +func (p *parserMixin) Uint64Var(target *uint64) { + p.SetValue(newUint64Value(target)) +} + +// Uint64List accumulates uint64 values into a slice. +func (p *parserMixin) Uint64List() (target *[]uint64) { + target = new([]uint64) + p.Uint64ListVar(target) + return +} + +func (p *parserMixin) Uint64ListVar(target *[]uint64) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newUint64Value(v.(*uint64)) + })) +} + +// -- int Value +type intValue struct{ v *int } + +func newIntValue(p *int) *intValue { + return &intValue{p} +} + +func (f *intValue) Set(s string) error { + v, err := strconv.ParseFloat(s, 64) + if err == nil { + *f.v = (int)(v) + } + return err +} + +func (f *intValue) Get() interface{} { return (int)(*f.v) } + +func (f *intValue) String() string { return fmt.Sprintf("%v", *f) } + +// Int parses the next command-line value as int. +func (p *parserMixin) Int() (target *int) { + target = new(int) + p.IntVar(target) + return +} + +func (p *parserMixin) IntVar(target *int) { + p.SetValue(newIntValue(target)) +} + +// Ints accumulates int values into a slice. 
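+// Note (editorial): intValue above parses its input with
+// strconv.ParseFloat, as specified for "int" in values.json, so forms
+// such as "1e3" are accepted; the float64 result is then truncated by
+// the (int)(v) conversion.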
+func (p *parserMixin) Ints() (target *[]int) { + target = new([]int) + p.IntsVar(target) + return +} + +func (p *parserMixin) IntsVar(target *[]int) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newIntValue(v.(*int)) + })) +} + +// -- int8 Value +type int8Value struct{ v *int8 } + +func newInt8Value(p *int8) *int8Value { + return &int8Value{p} +} + +func (f *int8Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 8) + if err == nil { + *f.v = (int8)(v) + } + return err +} + +func (f *int8Value) Get() interface{} { return (int8)(*f.v) } + +func (f *int8Value) String() string { return fmt.Sprintf("%v", *f) } + +// Int8 parses the next command-line value as int8. +func (p *parserMixin) Int8() (target *int8) { + target = new(int8) + p.Int8Var(target) + return +} + +func (p *parserMixin) Int8Var(target *int8) { + p.SetValue(newInt8Value(target)) +} + +// Int8List accumulates int8 values into a slice. +func (p *parserMixin) Int8List() (target *[]int8) { + target = new([]int8) + p.Int8ListVar(target) + return +} + +func (p *parserMixin) Int8ListVar(target *[]int8) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newInt8Value(v.(*int8)) + })) +} + +// -- int16 Value +type int16Value struct{ v *int16 } + +func newInt16Value(p *int16) *int16Value { + return &int16Value{p} +} + +func (f *int16Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 16) + if err == nil { + *f.v = (int16)(v) + } + return err +} + +func (f *int16Value) Get() interface{} { return (int16)(*f.v) } + +func (f *int16Value) String() string { return fmt.Sprintf("%v", *f) } + +// Int16 parses the next command-line value as int16. +func (p *parserMixin) Int16() (target *int16) { + target = new(int16) + p.Int16Var(target) + return +} + +func (p *parserMixin) Int16Var(target *int16) { + p.SetValue(newInt16Value(target)) +} + +// Int16List accumulates int16 values into a slice. +func (p *parserMixin) Int16List() (target *[]int16) { + target = new([]int16) + p.Int16ListVar(target) + return +} + +func (p *parserMixin) Int16ListVar(target *[]int16) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newInt16Value(v.(*int16)) + })) +} + +// -- int32 Value +type int32Value struct{ v *int32 } + +func newInt32Value(p *int32) *int32Value { + return &int32Value{p} +} + +func (f *int32Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 32) + if err == nil { + *f.v = (int32)(v) + } + return err +} + +func (f *int32Value) Get() interface{} { return (int32)(*f.v) } + +func (f *int32Value) String() string { return fmt.Sprintf("%v", *f) } + +// Int32 parses the next command-line value as int32. +func (p *parserMixin) Int32() (target *int32) { + target = new(int32) + p.Int32Var(target) + return +} + +func (p *parserMixin) Int32Var(target *int32) { + p.SetValue(newInt32Value(target)) +} + +// Int32List accumulates int32 values into a slice. 
+func (p *parserMixin) Int32List() (target *[]int32) { + target = new([]int32) + p.Int32ListVar(target) + return +} + +func (p *parserMixin) Int32ListVar(target *[]int32) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newInt32Value(v.(*int32)) + })) +} + +// -- int64 Value +type int64Value struct{ v *int64 } + +func newInt64Value(p *int64) *int64Value { + return &int64Value{p} +} + +func (f *int64Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 64) + if err == nil { + *f.v = (int64)(v) + } + return err +} + +func (f *int64Value) Get() interface{} { return (int64)(*f.v) } + +func (f *int64Value) String() string { return fmt.Sprintf("%v", *f) } + +// Int64 parses the next command-line value as int64. +func (p *parserMixin) Int64() (target *int64) { + target = new(int64) + p.Int64Var(target) + return +} + +func (p *parserMixin) Int64Var(target *int64) { + p.SetValue(newInt64Value(target)) +} + +// Int64List accumulates int64 values into a slice. +func (p *parserMixin) Int64List() (target *[]int64) { + target = new([]int64) + p.Int64ListVar(target) + return +} + +func (p *parserMixin) Int64ListVar(target *[]int64) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newInt64Value(v.(*int64)) + })) +} + +// -- float64 Value +type float64Value struct{ v *float64 } + +func newFloat64Value(p *float64) *float64Value { + return &float64Value{p} +} + +func (f *float64Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 64) + if err == nil { + *f.v = (float64)(v) + } + return err +} + +func (f *float64Value) Get() interface{} { return (float64)(*f.v) } + +func (f *float64Value) String() string { return fmt.Sprintf("%v", *f) } + +// Float64 parses the next command-line value as float64. +func (p *parserMixin) Float64() (target *float64) { + target = new(float64) + p.Float64Var(target) + return +} + +func (p *parserMixin) Float64Var(target *float64) { + p.SetValue(newFloat64Value(target)) +} + +// Float64List accumulates float64 values into a slice. +func (p *parserMixin) Float64List() (target *[]float64) { + target = new([]float64) + p.Float64ListVar(target) + return +} + +func (p *parserMixin) Float64ListVar(target *[]float64) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newFloat64Value(v.(*float64)) + })) +} + +// -- float32 Value +type float32Value struct{ v *float32 } + +func newFloat32Value(p *float32) *float32Value { + return &float32Value{p} +} + +func (f *float32Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 32) + if err == nil { + *f.v = (float32)(v) + } + return err +} + +func (f *float32Value) Get() interface{} { return (float32)(*f.v) } + +func (f *float32Value) String() string { return fmt.Sprintf("%v", *f) } + +// Float32 parses the next command-line value as float32. +func (p *parserMixin) Float32() (target *float32) { + target = new(float32) + p.Float32Var(target) + return +} + +func (p *parserMixin) Float32Var(target *float32) { + p.SetValue(newFloat32Value(target)) +} + +// Float32List accumulates float32 values into a slice. +func (p *parserMixin) Float32List() (target *[]float32) { + target = new([]float32) + p.Float32ListVar(target) + return +} + +func (p *parserMixin) Float32ListVar(target *[]float32) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newFloat32Value(v.(*float32)) + })) +} + +// DurationList accumulates time.Duration values into a slice. 
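+// For example (editorial sketch; "app" as above):
+//
+//	waits := app.Flag("wait", "").DurationList()
+//	// --wait=1s --wait=250ms accumulates both durations into *waits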
+func (p *parserMixin) DurationList() (target *[]time.Duration) { + target = new([]time.Duration) + p.DurationListVar(target) + return +} + +func (p *parserMixin) DurationListVar(target *[]time.Duration) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newDurationValue(v.(*time.Duration)) + })) +} + +// IPList accumulates net.IP values into a slice. +func (p *parserMixin) IPList() (target *[]net.IP) { + target = new([]net.IP) + p.IPListVar(target) + return +} + +func (p *parserMixin) IPListVar(target *[]net.IP) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newIPValue(v.(*net.IP)) + })) +} + +// TCPList accumulates *net.TCPAddr values into a slice. +func (p *parserMixin) TCPList() (target *[]*net.TCPAddr) { + target = new([]*net.TCPAddr) + p.TCPListVar(target) + return +} + +func (p *parserMixin) TCPListVar(target *[]*net.TCPAddr) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newTCPAddrValue(v.(**net.TCPAddr)) + })) +} + +// ExistingFiles accumulates string values into a slice. +func (p *parserMixin) ExistingFiles() (target *[]string) { + target = new([]string) + p.ExistingFilesVar(target) + return +} + +func (p *parserMixin) ExistingFilesVar(target *[]string) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newExistingFileValue(v.(*string)) + })) +} + +// ExistingDirs accumulates string values into a slice. +func (p *parserMixin) ExistingDirs() (target *[]string) { + target = new([]string) + p.ExistingDirsVar(target) + return +} + +func (p *parserMixin) ExistingDirsVar(target *[]string) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newExistingDirValue(v.(*string)) + })) +} + +// ExistingFilesOrDirs accumulates string values into a slice. +func (p *parserMixin) ExistingFilesOrDirs() (target *[]string) { + target = new([]string) + p.ExistingFilesOrDirsVar(target) + return +} + +func (p *parserMixin) ExistingFilesOrDirsVar(target *[]string) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newExistingFileOrDirValue(v.(*string)) + })) +} + +// -- *regexp.Regexp Value +type regexpValue struct{ v **regexp.Regexp } + +func newRegexpValue(p **regexp.Regexp) *regexpValue { + return ®expValue{p} +} + +func (f *regexpValue) Set(s string) error { + v, err := regexp.Compile(s) + if err == nil { + *f.v = (*regexp.Regexp)(v) + } + return err +} + +func (f *regexpValue) Get() interface{} { return (*regexp.Regexp)(*f.v) } + +func (f *regexpValue) String() string { return fmt.Sprintf("%v", *f) } + +// Regexp parses the next command-line value as *regexp.Regexp. +func (p *parserMixin) Regexp() (target **regexp.Regexp) { + target = new(*regexp.Regexp) + p.RegexpVar(target) + return +} + +func (p *parserMixin) RegexpVar(target **regexp.Regexp) { + p.SetValue(newRegexpValue(target)) +} + +// RegexpList accumulates *regexp.Regexp values into a slice. 
+func (p *parserMixin) RegexpList() (target *[]*regexp.Regexp) { + target = new([]*regexp.Regexp) + p.RegexpListVar(target) + return +} + +func (p *parserMixin) RegexpListVar(target *[]*regexp.Regexp) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newRegexpValue(v.(**regexp.Regexp)) + })) +} + +// -- net.IP Value +type resolvedIPValue struct{ v *net.IP } + +func newResolvedIPValue(p *net.IP) *resolvedIPValue { + return &resolvedIPValue{p} +} + +func (f *resolvedIPValue) Set(s string) error { + v, err := resolveHost(s) + if err == nil { + *f.v = (net.IP)(v) + } + return err +} + +func (f *resolvedIPValue) Get() interface{} { return (net.IP)(*f.v) } + +func (f *resolvedIPValue) String() string { return fmt.Sprintf("%v", *f) } + +// Resolve a hostname or IP to an IP. +func (p *parserMixin) ResolvedIP() (target *net.IP) { + target = new(net.IP) + p.ResolvedIPVar(target) + return +} + +func (p *parserMixin) ResolvedIPVar(target *net.IP) { + p.SetValue(newResolvedIPValue(target)) +} + +// ResolvedIPList accumulates net.IP values into a slice. +func (p *parserMixin) ResolvedIPList() (target *[]net.IP) { + target = new([]net.IP) + p.ResolvedIPListVar(target) + return +} + +func (p *parserMixin) ResolvedIPListVar(target *[]net.IP) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newResolvedIPValue(v.(*net.IP)) + })) +} + +// -- []byte Value +type hexBytesValue struct{ v *[]byte } + +func newHexBytesValue(p *[]byte) *hexBytesValue { + return &hexBytesValue{p} +} + +func (f *hexBytesValue) Set(s string) error { + v, err := hex.DecodeString(s) + if err == nil { + *f.v = ([]byte)(v) + } + return err +} + +func (f *hexBytesValue) Get() interface{} { return ([]byte)(*f.v) } + +func (f *hexBytesValue) String() string { return fmt.Sprintf("%v", *f) } + +// Bytes as a hex string. +func (p *parserMixin) HexBytes() (target *[]byte) { + target = new([]byte) + p.HexBytesVar(target) + return +} + +func (p *parserMixin) HexBytesVar(target *[]byte) { + p.SetValue(newHexBytesValue(target)) +} + +// HexBytesList accumulates []byte values into a slice. 
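+// For example (editorial sketch; "app" as above):
+//
+//	keys := app.Flag("key", "").HexBytesList()
+//	// --key=dead --key=beef yields [][]byte{{0xde, 0xad}, {0xbe, 0xef}}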
+func (p *parserMixin) HexBytesList() (target *[][]byte) { + target = new([][]byte) + p.HexBytesListVar(target) + return +} + +func (p *parserMixin) HexBytesListVar(target *[][]byte) { + p.SetValue(newAccumulator(target, func(v interface{}) Value { + return newHexBytesValue(v.(*[]byte)) + })) +} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values_test.go b/vendor/gopkg.in/alecthomas/kingpin.v2/values_test.go new file mode 100644 index 0000000..7d1c375 --- /dev/null +++ b/vendor/gopkg.in/alecthomas/kingpin.v2/values_test.go @@ -0,0 +1,89 @@ +package kingpin + +import ( + "net" + + "github.com/alecthomas/assert" + + "testing" +) + +func TestAccumulatorStrings(t *testing.T) { + target := []string{} + acc := newAccumulator(&target, func(v interface{}) Value { return newStringValue(v.(*string)) }) + acc.Set("a") + assert.Equal(t, []string{"a"}, target) + acc.Set("b") + assert.Equal(t, []string{"a", "b"}, target) +} + +func TestStrings(t *testing.T) { + app := New("", "") + app.Arg("a", "").Required().String() + app.Arg("b", "").Required().String() + c := app.Arg("c", "").Required().Strings() + app.Parse([]string{"a", "b", "a", "b"}) + assert.Equal(t, []string{"a", "b"}, *c) +} + +func TestEnum(t *testing.T) { + app := New("", "") + a := app.Arg("a", "").Enum("one", "two", "three") + _, err := app.Parse([]string{"moo"}) + assert.Error(t, err) + _, err = app.Parse([]string{"one"}) + assert.NoError(t, err) + assert.Equal(t, "one", *a) +} + +func TestEnumVar(t *testing.T) { + app := New("", "") + var a string + app.Arg("a", "").EnumVar(&a, "one", "two", "three") + _, err := app.Parse([]string{"moo"}) + assert.Error(t, err) + _, err = app.Parse([]string{"one"}) + assert.NoError(t, err) + assert.Equal(t, "one", a) +} + +func TestCounter(t *testing.T) { + app := New("", "") + c := app.Flag("f", "").Counter() + _, err := app.Parse([]string{"--f", "--f", "--f"}) + assert.NoError(t, err) + assert.Equal(t, 3, *c) +} + +func TestIPv4Addr(t *testing.T) { + app := newTestApp() + flag := app.Flag("addr", "").ResolvedIP() + _, err := app.Parse([]string{"--addr", net.IPv4(1, 2, 3, 4).String()}) + assert.NoError(t, err) + assert.NotNil(t, *flag) + assert.Equal(t, net.IPv4(1, 2, 3, 4), *flag) +} + +func TestInvalidIPv4Addr(t *testing.T) { + app := newTestApp() + app.Flag("addr", "").ResolvedIP() + _, err := app.Parse([]string{"--addr", "1.2.3.256"}) + assert.Error(t, err) +} + +func TestIPv6Addr(t *testing.T) { + app := newTestApp() + flag := app.Flag("addr", "").ResolvedIP() + _, err := app.Parse([]string{"--addr", net.IPv6interfacelocalallnodes.String()}) + assert.NoError(t, err) + assert.NotNil(t, *flag) + assert.Equal(t, net.IPv6interfacelocalallnodes, *flag) +} + +func TestHexBytes(t *testing.T) { + app := newTestApp() + actual := app.Arg("bytes", "").HexBytes() + _, err := app.Parse([]string{"01020aff"}) + assert.NoError(t, err) + assert.Equal(t, []byte{0x01, 0x02, 0x0a, 0xff}, *actual) +} diff --git a/vendor/gopkg.in/yaml.v2/.travis.yml b/vendor/gopkg.in/yaml.v2/.travis.yml new file mode 100644 index 0000000..004172a --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/.travis.yml @@ -0,0 +1,9 @@ +language: go + +go: + - 1.4 + - 1.5 + - 1.6 + - tip + +go_import_path: gopkg.in/yaml.v2 diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE new file mode 100644 index 0000000..a68e67f --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/LICENSE @@ -0,0 +1,188 @@ + +Copyright (c) 2011-2014 - Canonical Inc. + +This software is licensed under the LGPLv3, included below. 
+ +As a special exception to the GNU Lesser General Public License version 3 +("LGPL3"), the copyright holders of this Library give you permission to +convey to a third party a Combined Work that links statically or dynamically +to this Library without providing any Minimal Corresponding Source or +Minimal Application Code as set out in 4d or providing the installation +information set out in section 4e, provided that you comply with the other +provisions of LGPL3 and provided that you meet, for the Application the +terms and conditions of the license(s) which apply to the Application. + +Except as stated in this special exception, the provisions of LGPL3 will +continue to comply in full to this Library. If you modify this Library, you +may apply this exception to your version of this Library, but you are not +obliged to do so. If you do not wish to do so, delete this exception +statement from your version. This exception does not (and cannot) modify any +license terms which apply to the Application, with which you must still +comply. + + + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. 
+ + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. 
If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml new file mode 100644 index 0000000..8da58fb --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml @@ -0,0 +1,31 @@ +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original copyright and license: + + apic.go + emitterc.go + parserc.go + readerc.go + scannerc.go + writerc.go + yamlh.go + yamlprivateh.go + +Copyright (c) 2006 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md new file mode 100644 index 0000000..7b8bd86 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/README.md @@ -0,0 +1,131 @@ +# YAML support for the Go language + +Introduction +------------ + +The yaml package enables Go programs to comfortably encode and decode YAML +values. It was developed within [Canonical](https://www.canonical.com) as +part of the [juju](https://juju.ubuntu.com) project, and is based on a +pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML) +C library to parse and generate YAML data quickly and reliably. + +Compatibility +------------- + +The yaml package supports most of YAML 1.1 and 1.2, including support for +anchors, tags, map merging, etc. Multi-document unmarshalling is not yet +implemented, and base-60 floats from YAML 1.1 are purposefully not +supported since they're a poor design and are gone in YAML 1.2. + +Installation and usage +---------------------- + +The import path for the package is *gopkg.in/yaml.v2*. + +To install it, run: + + go get gopkg.in/yaml.v2 + +API documentation +----------------- + +If opened in a browser, the import path itself leads to the API documentation: + + * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2) + +API stability +------------- + +The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in). + + +License +------- + +The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details. + + +Example +------- + +```Go +package main + +import ( + "fmt" + "log" + + "gopkg.in/yaml.v2" +) + +var data = ` +a: Easy! +b: + c: 2 + d: [3, 4] +` + +type T struct { + A string + B struct { + RenamedC int `yaml:"c"` + D []int `yaml:",flow"` + } +} + +func main() { + t := T{} + + err := yaml.Unmarshal([]byte(data), &t) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- t:\n%v\n\n", t) + + d, err := yaml.Marshal(&t) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- t dump:\n%s\n\n", string(d)) + + m := make(map[interface{}]interface{}) + + err = yaml.Unmarshal([]byte(data), &m) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- m:\n%v\n\n", m) + + d, err = yaml.Marshal(&m) + if err != nil { + log.Fatalf("error: %v", err) + } + fmt.Printf("--- m dump:\n%s\n\n", string(d)) +} +``` + +This example will generate the following output: + +``` +--- t: +{Easy! {2 [3 4]}} + +--- t dump: +a: Easy! +b: + c: 2 + d: [3, 4] + + +--- m: +map[a:Easy! b:map[c:2 d:[3 4]]] + +--- m dump: +a: Easy! +b: + c: 2 + d: + - 3 + - 4 +``` + diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go new file mode 100644 index 0000000..95ec014 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/apic.go @@ -0,0 +1,742 @@ +package yaml + +import ( + "io" + "os" +) + +func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { + //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) + + // Check if we can move the queue at the beginning of the buffer. 
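+	// (editorial note) Once the slice is full and tokens have already been
+	// consumed from its head, the pending tokens are copied to the front so
+	// that the append below can reuse the existing capacity.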
+ if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { + if parser.tokens_head != len(parser.tokens) { + copy(parser.tokens, parser.tokens[parser.tokens_head:]) + } + parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] + parser.tokens_head = 0 + } + parser.tokens = append(parser.tokens, *token) + if pos < 0 { + return + } + copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) + parser.tokens[parser.tokens_head+pos] = *token +} + +// Create a new parser object. +func yaml_parser_initialize(parser *yaml_parser_t) bool { + *parser = yaml_parser_t{ + raw_buffer: make([]byte, 0, input_raw_buffer_size), + buffer: make([]byte, 0, input_buffer_size), + } + return true +} + +// Destroy a parser object. +func yaml_parser_delete(parser *yaml_parser_t) { + *parser = yaml_parser_t{} +} + +// String read handler. +func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + if parser.input_pos == len(parser.input) { + return 0, io.EOF + } + n = copy(buffer, parser.input[parser.input_pos:]) + parser.input_pos += n + return n, nil +} + +// File read handler. +func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + return parser.input_file.Read(buffer) +} + +// Set a string input. +func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_string_read_handler + parser.input = input + parser.input_pos = 0 +} + +// Set a file input. +func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_file_read_handler + parser.input_file = file +} + +// Set the source encoding. +func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { + if parser.encoding != yaml_ANY_ENCODING { + panic("must set the encoding only once") + } + parser.encoding = encoding +} + +// Create a new emitter object. +func yaml_emitter_initialize(emitter *yaml_emitter_t) bool { + *emitter = yaml_emitter_t{ + buffer: make([]byte, output_buffer_size), + raw_buffer: make([]byte, 0, output_raw_buffer_size), + states: make([]yaml_emitter_state_t, 0, initial_stack_size), + events: make([]yaml_event_t, 0, initial_queue_size), + } + return true +} + +// Destroy an emitter object. +func yaml_emitter_delete(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{} +} + +// String write handler. +func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + *emitter.output_buffer = append(*emitter.output_buffer, buffer...) + return nil +} + +// File write handler. +func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + _, err := emitter.output_file.Write(buffer) + return err +} + +// Set a string output. +func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_string_write_handler + emitter.output_buffer = output_buffer +} + +// Set a file output. +func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_file_write_handler + emitter.output_file = file +} + +// Set the output encoding. 
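+// A setup sketch (editorial; yaml_UTF8_ENCODING is assumed to be one of
+// the yaml_encoding_t constants defined elsewhere in this package):
+//
+//	var em yaml_emitter_t
+//	yaml_emitter_initialize(&em)
+//	var out []byte
+//	yaml_emitter_set_output_string(&em, &out)
+//	yaml_emitter_set_encoding(&em, yaml_UTF8_ENCODING)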
+func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { + if emitter.encoding != yaml_ANY_ENCODING { + panic("must set the output encoding only once") + } + emitter.encoding = encoding +} + +// Set the canonical output style. +func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { + emitter.canonical = canonical +} + +//// Set the indentation increment. +func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { + if indent < 2 || indent > 9 { + indent = 2 + } + emitter.best_indent = indent +} + +// Set the preferred line width. +func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { + if width < 0 { + width = -1 + } + emitter.best_width = width +} + +// Set if unescaped non-ASCII characters are allowed. +func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { + emitter.unicode = unicode +} + +// Set the preferred line break character. +func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { + emitter.line_break = line_break +} + +///* +// * Destroy a token object. +// */ +// +//YAML_DECLARE(void) +//yaml_token_delete(yaml_token_t *token) +//{ +// assert(token); // Non-NULL token object expected. +// +// switch (token.type) +// { +// case YAML_TAG_DIRECTIVE_TOKEN: +// yaml_free(token.data.tag_directive.handle); +// yaml_free(token.data.tag_directive.prefix); +// break; +// +// case YAML_ALIAS_TOKEN: +// yaml_free(token.data.alias.value); +// break; +// +// case YAML_ANCHOR_TOKEN: +// yaml_free(token.data.anchor.value); +// break; +// +// case YAML_TAG_TOKEN: +// yaml_free(token.data.tag.handle); +// yaml_free(token.data.tag.suffix); +// break; +// +// case YAML_SCALAR_TOKEN: +// yaml_free(token.data.scalar.value); +// break; +// +// default: +// break; +// } +// +// memset(token, 0, sizeof(yaml_token_t)); +//} +// +///* +// * Check if a string is a valid UTF-8 sequence. +// * +// * Check 'reader.c' for more details on UTF-8 encoding. +// */ +// +//static int +//yaml_check_utf8(yaml_char_t *start, size_t length) +//{ +// yaml_char_t *end = start+length; +// yaml_char_t *pointer = start; +// +// while (pointer < end) { +// unsigned char octet; +// unsigned int width; +// unsigned int value; +// size_t k; +// +// octet = pointer[0]; +// width = (octet & 0x80) == 0x00 ? 1 : +// (octet & 0xE0) == 0xC0 ? 2 : +// (octet & 0xF0) == 0xE0 ? 3 : +// (octet & 0xF8) == 0xF0 ? 4 : 0; +// value = (octet & 0x80) == 0x00 ? octet & 0x7F : +// (octet & 0xE0) == 0xC0 ? octet & 0x1F : +// (octet & 0xF0) == 0xE0 ? octet & 0x0F : +// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; +// if (!width) return 0; +// if (pointer+width > end) return 0; +// for (k = 1; k < width; k ++) { +// octet = pointer[k]; +// if ((octet & 0xC0) != 0x80) return 0; +// value = (value << 6) + (octet & 0x3F); +// } +// if (!((width == 1) || +// (width == 2 && value >= 0x80) || +// (width == 3 && value >= 0x800) || +// (width == 4 && value >= 0x10000))) return 0; +// +// pointer += width; +// } +// +// return 1; +//} +// + +// Create STREAM-START. +func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool { + *event = yaml_event_t{ + typ: yaml_STREAM_START_EVENT, + encoding: encoding, + } + return true +} + +// Create STREAM-END. +func yaml_stream_end_event_initialize(event *yaml_event_t) bool { + *event = yaml_event_t{ + typ: yaml_STREAM_END_EVENT, + } + return true +} + +// Create DOCUMENT-START. 
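+// Like the other initializers above it fills in a fresh event; a sketch
+// (editorial):
+//
+//	var ev yaml_event_t
+//	yaml_document_start_event_initialize(&ev, nil, nil, true)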
+func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t, + tag_directives []yaml_tag_directive_t, implicit bool) bool { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_START_EVENT, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: implicit, + } + return true +} + +// Create DOCUMENT-END. +func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + implicit: implicit, + } + return true +} + +///* +// * Create ALIAS. +// */ +// +//YAML_DECLARE(int) +//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) +//{ +// mark yaml_mark_t = { 0, 0, 0 } +// anchor_copy *yaml_char_t = NULL +// +// assert(event) // Non-NULL event object is expected. +// assert(anchor) // Non-NULL anchor is expected. +// +// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 +// +// anchor_copy = yaml_strdup(anchor) +// if (!anchor_copy) +// return 0 +// +// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) +// +// return 1 +//} + +// Create SCALAR. +func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + anchor: anchor, + tag: tag, + value: value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-START. +func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-END. +func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + } + return true +} + +// Create MAPPING-START. +func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool { + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } + return true +} + +// Create MAPPING-END. +func yaml_mapping_end_event_initialize(event *yaml_event_t) bool { + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + } + return true +} + +// Destroy an event object. +func yaml_event_delete(event *yaml_event_t) { + *event = yaml_event_t{} +} + +///* +// * Create a document object. +// */ +// +//YAML_DECLARE(int) +//yaml_document_initialize(document *yaml_document_t, +// version_directive *yaml_version_directive_t, +// tag_directives_start *yaml_tag_directive_t, +// tag_directives_end *yaml_tag_directive_t, +// start_implicit int, end_implicit int) +//{ +// struct { +// error yaml_error_type_t +// } context +// struct { +// start *yaml_node_t +// end *yaml_node_t +// top *yaml_node_t +// } nodes = { NULL, NULL, NULL } +// version_directive_copy *yaml_version_directive_t = NULL +// struct { +// start *yaml_tag_directive_t +// end *yaml_tag_directive_t +// top *yaml_tag_directive_t +// } tag_directives_copy = { NULL, NULL, NULL } +// value yaml_tag_directive_t = { NULL, NULL } +// mark yaml_mark_t = { 0, 0, 0 } +// +// assert(document) // Non-NULL document object is expected. 
+// assert((tag_directives_start && tag_directives_end) || +// (tag_directives_start == tag_directives_end)) +// // Valid tag directives are expected. +// +// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error +// +// if (version_directive) { +// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) +// if (!version_directive_copy) goto error +// version_directive_copy.major = version_directive.major +// version_directive_copy.minor = version_directive.minor +// } +// +// if (tag_directives_start != tag_directives_end) { +// tag_directive *yaml_tag_directive_t +// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) +// goto error +// for (tag_directive = tag_directives_start +// tag_directive != tag_directives_end; tag_directive ++) { +// assert(tag_directive.handle) +// assert(tag_directive.prefix) +// if (!yaml_check_utf8(tag_directive.handle, +// strlen((char *)tag_directive.handle))) +// goto error +// if (!yaml_check_utf8(tag_directive.prefix, +// strlen((char *)tag_directive.prefix))) +// goto error +// value.handle = yaml_strdup(tag_directive.handle) +// value.prefix = yaml_strdup(tag_directive.prefix) +// if (!value.handle || !value.prefix) goto error +// if (!PUSH(&context, tag_directives_copy, value)) +// goto error +// value.handle = NULL +// value.prefix = NULL +// } +// } +// +// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, +// tag_directives_copy.start, tag_directives_copy.top, +// start_implicit, end_implicit, mark, mark) +// +// return 1 +// +//error: +// STACK_DEL(&context, nodes) +// yaml_free(version_directive_copy) +// while (!STACK_EMPTY(&context, tag_directives_copy)) { +// value yaml_tag_directive_t = POP(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// } +// STACK_DEL(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// +// return 0 +//} +// +///* +// * Destroy a document object. +// */ +// +//YAML_DECLARE(void) +//yaml_document_delete(document *yaml_document_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// tag_directive *yaml_tag_directive_t +// +// context.error = YAML_NO_ERROR // Eliminate a compliler warning. +// +// assert(document) // Non-NULL document object is expected. +// +// while (!STACK_EMPTY(&context, document.nodes)) { +// node yaml_node_t = POP(&context, document.nodes) +// yaml_free(node.tag) +// switch (node.type) { +// case YAML_SCALAR_NODE: +// yaml_free(node.data.scalar.value) +// break +// case YAML_SEQUENCE_NODE: +// STACK_DEL(&context, node.data.sequence.items) +// break +// case YAML_MAPPING_NODE: +// STACK_DEL(&context, node.data.mapping.pairs) +// break +// default: +// assert(0) // Should not happen. +// } +// } +// STACK_DEL(&context, document.nodes) +// +// yaml_free(document.version_directive) +// for (tag_directive = document.tag_directives.start +// tag_directive != document.tag_directives.end +// tag_directive++) { +// yaml_free(tag_directive.handle) +// yaml_free(tag_directive.prefix) +// } +// yaml_free(document.tag_directives.start) +// +// memset(document, 0, sizeof(yaml_document_t)) +//} +// +///** +// * Get a document node. +// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_node(document *yaml_document_t, index int) +//{ +// assert(document) // Non-NULL document object is expected. 
+// +// if (index > 0 && document.nodes.start + index <= document.nodes.top) { +// return document.nodes.start + index - 1 +// } +// return NULL +//} +// +///** +// * Get the root object. +// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_root_node(document *yaml_document_t) +//{ +// assert(document) // Non-NULL document object is expected. +// +// if (document.nodes.top != document.nodes.start) { +// return document.nodes.start +// } +// return NULL +//} +// +///* +// * Add a scalar node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_scalar(document *yaml_document_t, +// tag *yaml_char_t, value *yaml_char_t, length int, +// style yaml_scalar_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// value_copy *yaml_char_t = NULL +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// assert(value) // Non-NULL value is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (length < 0) { +// length = strlen((char *)value) +// } +// +// if (!yaml_check_utf8(value, length)) goto error +// value_copy = yaml_malloc(length+1) +// if (!value_copy) goto error +// memcpy(value_copy, value, length) +// value_copy[length] = '\0' +// +// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// yaml_free(tag_copy) +// yaml_free(value_copy) +// +// return 0 +//} +// +///* +// * Add a sequence node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_sequence(document *yaml_document_t, +// tag *yaml_char_t, style yaml_sequence_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_item_t +// end *yaml_node_item_t +// top *yaml_node_item_t +// } items = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error +// +// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, items) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Add a mapping node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_mapping(document *yaml_document_t, +// tag *yaml_char_t, style yaml_mapping_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_pair_t +// end *yaml_node_pair_t +// top *yaml_node_pair_t +// } pairs = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. 
+// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error +// +// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, pairs) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Append an item to a sequence node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_sequence_item(document *yaml_document_t, +// sequence int, item int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// assert(document) // Non-NULL document is required. +// assert(sequence > 0 +// && document.nodes.start + sequence <= document.nodes.top) +// // Valid sequence id is required. +// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) +// // A sequence node is required. +// assert(item > 0 && document.nodes.start + item <= document.nodes.top) +// // Valid item id is required. +// +// if (!PUSH(&context, +// document.nodes.start[sequence-1].data.sequence.items, item)) +// return 0 +// +// return 1 +//} +// +///* +// * Append a pair of a key and a value to a mapping node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_mapping_pair(document *yaml_document_t, +// mapping int, key int, value int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// pair yaml_node_pair_t +// +// assert(document) // Non-NULL document is required. +// assert(mapping > 0 +// && document.nodes.start + mapping <= document.nodes.top) +// // Valid mapping id is required. +// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) +// // A mapping node is required. +// assert(key > 0 && document.nodes.start + key <= document.nodes.top) +// // Valid key id is required. +// assert(value > 0 && document.nodes.start + value <= document.nodes.top) +// // Valid value id is required. +// +// pair.key = key +// pair.value = value +// +// if (!PUSH(&context, +// document.nodes.start[mapping-1].data.mapping.pairs, pair)) +// return 0 +// +// return 1 +//} +// +// diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go new file mode 100644 index 0000000..085cddc --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/decode.go @@ -0,0 +1,683 @@ +package yaml + +import ( + "encoding" + "encoding/base64" + "fmt" + "math" + "reflect" + "strconv" + "time" +) + +const ( + documentNode = 1 << iota + mappingNode + sequenceNode + scalarNode + aliasNode +) + +type node struct { + kind int + line, column int + tag string + value string + implicit bool + children []*node + anchors map[string]*node +} + +// ---------------------------------------------------------------------------- +// Parser, produces a node tree out of a libyaml event stream. 
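+//
+// [Go] Editor's sketch, not part of the upstream API: once p.parse() has
+// produced a tree, it can be inspected with a plain recursive walk over the
+// node type declared above. dumpNode is a hypothetical helper (fmt assumed
+// imported), shown only to illustrate the tree shape:
+//
+//	func dumpNode(n *node, depth int) {
+//		if n == nil {
+//			return
+//		}
+//		// Indent by depth, then print the node's kind, tag, and value.
+//		fmt.Printf("%*skind=%d tag=%q value=%q\n", depth*2, "", n.kind, n.tag, n.value)
+//		for _, c := range n.children {
+//			dumpNode(c, depth+1)
+//		}
+//	}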
+
+type parser struct {
+	parser yaml_parser_t
+	event  yaml_event_t
+	doc    *node
+}
+
+func newParser(b []byte) *parser {
+	p := parser{}
+	if !yaml_parser_initialize(&p.parser) {
+		panic("failed to initialize YAML parser")
+	}
+
+	if len(b) == 0 {
+		b = []byte{'\n'}
+	}
+
+	yaml_parser_set_input_string(&p.parser, b)
+
+	p.skip()
+	if p.event.typ != yaml_STREAM_START_EVENT {
+		panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
+	}
+	p.skip()
+	return &p
+}
+
+func (p *parser) destroy() {
+	if p.event.typ != yaml_NO_EVENT {
+		yaml_event_delete(&p.event)
+	}
+	yaml_parser_delete(&p.parser)
+}
+
+func (p *parser) skip() {
+	if p.event.typ != yaml_NO_EVENT {
+		if p.event.typ == yaml_STREAM_END_EVENT {
+			failf("attempted to go past the end of stream; corrupted value?")
+		}
+		yaml_event_delete(&p.event)
+	}
+	if !yaml_parser_parse(&p.parser, &p.event) {
+		p.fail()
+	}
+}
+
+func (p *parser) fail() {
+	var where string
+	var line int
+	if p.parser.problem_mark.line != 0 {
+		line = p.parser.problem_mark.line
+	} else if p.parser.context_mark.line != 0 {
+		line = p.parser.context_mark.line
+	}
+	if line != 0 {
+		where = "line " + strconv.Itoa(line) + ": "
+	}
+	var msg string
+	if len(p.parser.problem) > 0 {
+		msg = p.parser.problem
+	} else {
+		msg = "unknown problem parsing YAML content"
+	}
+	failf("%s%s", where, msg)
+}
+
+func (p *parser) anchor(n *node, anchor []byte) {
+	if anchor != nil {
+		p.doc.anchors[string(anchor)] = n
+	}
+}
+
+func (p *parser) parse() *node {
+	switch p.event.typ {
+	case yaml_SCALAR_EVENT:
+		return p.scalar()
+	case yaml_ALIAS_EVENT:
+		return p.alias()
+	case yaml_MAPPING_START_EVENT:
+		return p.mapping()
+	case yaml_SEQUENCE_START_EVENT:
+		return p.sequence()
+	case yaml_DOCUMENT_START_EVENT:
+		return p.document()
+	case yaml_STREAM_END_EVENT:
+		// Happens when attempting to decode an empty buffer.
+		return nil
+	default:
+		panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
+	}
+	panic("unreachable")
+}
+
+func (p *parser) node(kind int) *node {
+	return &node{
+		kind:   kind,
+		line:   p.event.start_mark.line,
+		column: p.event.start_mark.column,
+	}
+}
+
+func (p *parser) document() *node {
+	n := p.node(documentNode)
+	n.anchors = make(map[string]*node)
+	p.doc = n
+	p.skip()
+	n.children = append(n.children, p.parse())
+	if p.event.typ != yaml_DOCUMENT_END_EVENT {
+		panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
+	}
+	p.skip()
+	return n
+}
+
+func (p *parser) alias() *node {
+	n := p.node(aliasNode)
+	n.value = string(p.event.anchor)
+	p.skip()
+	return n
+}
+
+func (p *parser) scalar() *node {
+	n := p.node(scalarNode)
+	n.value = string(p.event.value)
+	n.tag = string(p.event.tag)
+	n.implicit = p.event.implicit
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	return n
+}
+
+func (p *parser) sequence() *node {
+	n := p.node(sequenceNode)
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	for p.event.typ != yaml_SEQUENCE_END_EVENT {
+		n.children = append(n.children, p.parse())
+	}
+	p.skip()
+	return n
+}
+
+func (p *parser) mapping() *node {
+	n := p.node(mappingNode)
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	for p.event.typ != yaml_MAPPING_END_EVENT {
+		n.children = append(n.children, p.parse(), p.parse())
+	}
+	p.skip()
+	return n
+}
+
+// ----------------------------------------------------------------------------
+// Decoder, unmarshals a node into a provided value.
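+//
+// [Go] Editor's sketch, not part of the upstream API: the exported entry
+// points pair a parser with a decoder roughly along these lines (panic-based
+// error handling via handleErr omitted; see yaml.go for the real wiring):
+//
+//	p := newParser(data)
+//	defer p.destroy()
+//	d := newDecoder()
+//	if n := p.parse(); n != nil {
+//		// out is assumed to be a non-nil pointer to the target value.
+//		d.unmarshal(n, reflect.ValueOf(out).Elem())
+//	}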
+ +type decoder struct { + doc *node + aliases map[string]bool + mapType reflect.Type + terrors []string +} + +var ( + mapItemType = reflect.TypeOf(MapItem{}) + durationType = reflect.TypeOf(time.Duration(0)) + defaultMapType = reflect.TypeOf(map[interface{}]interface{}{}) + ifaceType = defaultMapType.Elem() +) + +func newDecoder() *decoder { + d := &decoder{mapType: defaultMapType} + d.aliases = make(map[string]bool) + return d +} + +func (d *decoder) terror(n *node, tag string, out reflect.Value) { + if n.tag != "" { + tag = n.tag + } + value := n.value + if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG { + if len(value) > 10 { + value = " `" + value[:7] + "...`" + } else { + value = " `" + value + "`" + } + } + d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type())) +} + +func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { + terrlen := len(d.terrors) + err := u.UnmarshalYAML(func(v interface{}) (err error) { + defer handleErr(&err) + d.unmarshal(n, reflect.ValueOf(v)) + if len(d.terrors) > terrlen { + issues := d.terrors[terrlen:] + d.terrors = d.terrors[:terrlen] + return &TypeError{issues} + } + return nil + }) + if e, ok := err.(*TypeError); ok { + d.terrors = append(d.terrors, e.Errors...) + return false + } + if err != nil { + fail(err) + } + return true +} + +// d.prepare initializes and dereferences pointers and calls UnmarshalYAML +// if a value is found to implement it. +// It returns the initialized and dereferenced out value, whether +// unmarshalling was already done by UnmarshalYAML, and if so whether +// its types unmarshalled appropriately. +// +// If n holds a null value, prepare returns before doing anything. +func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { + if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "") { + return out, false, false + } + again := true + for again { + again = false + if out.Kind() == reflect.Ptr { + if out.IsNil() { + out.Set(reflect.New(out.Type().Elem())) + } + out = out.Elem() + again = true + } + if out.CanAddr() { + if u, ok := out.Addr().Interface().(Unmarshaler); ok { + good = d.callUnmarshaler(n, u) + return out, true, good + } + } + } + return out, false, false +} + +func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { + switch n.kind { + case documentNode: + return d.document(n, out) + case aliasNode: + return d.alias(n, out) + } + out, unmarshaled, good := d.prepare(n, out) + if unmarshaled { + return good + } + switch n.kind { + case scalarNode: + good = d.scalar(n, out) + case mappingNode: + good = d.mapping(n, out) + case sequenceNode: + good = d.sequence(n, out) + default: + panic("internal error: unknown node kind: " + strconv.Itoa(n.kind)) + } + return good +} + +func (d *decoder) document(n *node, out reflect.Value) (good bool) { + if len(n.children) == 1 { + d.doc = n + d.unmarshal(n.children[0], out) + return true + } + return false +} + +func (d *decoder) alias(n *node, out reflect.Value) (good bool) { + an, ok := d.doc.anchors[n.value] + if !ok { + failf("unknown anchor '%s' referenced", n.value) + } + if d.aliases[n.value] { + failf("anchor '%s' value contains itself", n.value) + } + d.aliases[n.value] = true + good = d.unmarshal(an, out) + delete(d.aliases, n.value) + return good +} + +var zeroValue reflect.Value + +func resetMap(out reflect.Value) { + for _, k := range out.MapKeys() { + out.SetMapIndex(k, 
zeroValue)
+	}
+}
+
+func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
+	var tag string
+	var resolved interface{}
+	if n.tag == "" && !n.implicit {
+		tag = yaml_STR_TAG
+		resolved = n.value
+	} else {
+		tag, resolved = resolve(n.tag, n.value)
+		if tag == yaml_BINARY_TAG {
+			data, err := base64.StdEncoding.DecodeString(resolved.(string))
+			if err != nil {
+				failf("!!binary value contains invalid base64 data")
+			}
+			resolved = string(data)
+		}
+	}
+	if resolved == nil {
+		if out.Kind() == reflect.Map && !out.CanAddr() {
+			resetMap(out)
+		} else {
+			out.Set(reflect.Zero(out.Type()))
+		}
+		return true
+	}
+	if s, ok := resolved.(string); ok && out.CanAddr() {
+		if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
+			err := u.UnmarshalText([]byte(s))
+			if err != nil {
+				fail(err)
+			}
+			return true
+		}
+	}
+	switch out.Kind() {
+	case reflect.String:
+		if tag == yaml_BINARY_TAG {
+			out.SetString(resolved.(string))
+			good = true
+		} else if resolved != nil {
+			out.SetString(n.value)
+			good = true
+		}
+	case reflect.Interface:
+		if resolved == nil {
+			out.Set(reflect.Zero(out.Type()))
+		} else {
+			out.Set(reflect.ValueOf(resolved))
+		}
+		good = true
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		switch resolved := resolved.(type) {
+		case int:
+			if !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case int64:
+			if !out.OverflowInt(resolved) {
+				out.SetInt(resolved)
+				good = true
+			}
+		case uint64:
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case float64:
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case string:
+			if out.Type() == durationType {
+				d, err := time.ParseDuration(resolved)
+				if err == nil {
+					out.SetInt(int64(d))
+					good = true
+				}
+			}
+		}
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+		switch resolved := resolved.(type) {
+		case int:
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case int64:
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case uint64:
+			if !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case float64:
+			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		}
+	case reflect.Bool:
+		switch resolved := resolved.(type) {
+		case bool:
+			out.SetBool(resolved)
+			good = true
+		}
+	case reflect.Float32, reflect.Float64:
+		switch resolved := resolved.(type) {
+		case int:
+			out.SetFloat(float64(resolved))
+			good = true
+		case int64:
+			out.SetFloat(float64(resolved))
+			good = true
+		case uint64:
+			out.SetFloat(float64(resolved))
+			good = true
+		case float64:
+			out.SetFloat(resolved)
+			good = true
+		}
+	case reflect.Ptr:
+		if out.Type().Elem() == reflect.TypeOf(resolved) {
+			// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
+ elem := reflect.New(out.Type().Elem()) + elem.Elem().Set(reflect.ValueOf(resolved)) + out.Set(elem) + good = true + } + } + if !good { + d.terror(n, tag, out) + } + return good +} + +func settableValueOf(i interface{}) reflect.Value { + v := reflect.ValueOf(i) + sv := reflect.New(v.Type()).Elem() + sv.Set(v) + return sv +} + +func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { + l := len(n.children) + + var iface reflect.Value + switch out.Kind() { + case reflect.Slice: + out.Set(reflect.MakeSlice(out.Type(), l, l)) + case reflect.Interface: + // No type hints. Will have to use a generic sequence. + iface = out + out = settableValueOf(make([]interface{}, l)) + default: + d.terror(n, yaml_SEQ_TAG, out) + return false + } + et := out.Type().Elem() + + j := 0 + for i := 0; i < l; i++ { + e := reflect.New(et).Elem() + if ok := d.unmarshal(n.children[i], e); ok { + out.Index(j).Set(e) + j++ + } + } + out.Set(out.Slice(0, j)) + if iface.IsValid() { + iface.Set(out) + } + return true +} + +func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { + switch out.Kind() { + case reflect.Struct: + return d.mappingStruct(n, out) + case reflect.Slice: + return d.mappingSlice(n, out) + case reflect.Map: + // okay + case reflect.Interface: + if d.mapType.Kind() == reflect.Map { + iface := out + out = reflect.MakeMap(d.mapType) + iface.Set(out) + } else { + slicev := reflect.New(d.mapType).Elem() + if !d.mappingSlice(n, slicev) { + return false + } + out.Set(slicev) + return true + } + default: + d.terror(n, yaml_MAP_TAG, out) + return false + } + outt := out.Type() + kt := outt.Key() + et := outt.Elem() + + mapType := d.mapType + if outt.Key() == ifaceType && outt.Elem() == ifaceType { + d.mapType = outt + } + + if out.IsNil() { + out.Set(reflect.MakeMap(outt)) + } + l := len(n.children) + for i := 0; i < l; i += 2 { + if isMerge(n.children[i]) { + d.merge(n.children[i+1], out) + continue + } + k := reflect.New(kt).Elem() + if d.unmarshal(n.children[i], k) { + kkind := k.Kind() + if kkind == reflect.Interface { + kkind = k.Elem().Kind() + } + if kkind == reflect.Map || kkind == reflect.Slice { + failf("invalid map key: %#v", k.Interface()) + } + e := reflect.New(et).Elem() + if d.unmarshal(n.children[i+1], e) { + out.SetMapIndex(k, e) + } + } + } + d.mapType = mapType + return true +} + +func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) { + outt := out.Type() + if outt.Elem() != mapItemType { + d.terror(n, yaml_MAP_TAG, out) + return false + } + + mapType := d.mapType + d.mapType = outt + + var slice []MapItem + var l = len(n.children) + for i := 0; i < l; i += 2 { + if isMerge(n.children[i]) { + d.merge(n.children[i+1], out) + continue + } + item := MapItem{} + k := reflect.ValueOf(&item.Key).Elem() + if d.unmarshal(n.children[i], k) { + v := reflect.ValueOf(&item.Value).Elem() + if d.unmarshal(n.children[i+1], v) { + slice = append(slice, item) + } + } + } + out.Set(reflect.ValueOf(slice)) + d.mapType = mapType + return true +} + +func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { + sinfo, err := getStructInfo(out.Type()) + if err != nil { + panic(err) + } + name := settableValueOf("") + l := len(n.children) + + var inlineMap reflect.Value + var elemType reflect.Type + if sinfo.InlineMap != -1 { + inlineMap = out.Field(sinfo.InlineMap) + inlineMap.Set(reflect.New(inlineMap.Type()).Elem()) + elemType = inlineMap.Type().Elem() + } + + for i := 0; i < l; i += 2 { + ni := n.children[i] + if isMerge(ni) { + d.merge(n.children[i+1], out) 
+ continue + } + if !d.unmarshal(ni, name) { + continue + } + if info, ok := sinfo.FieldsMap[name.String()]; ok { + var field reflect.Value + if info.Inline == nil { + field = out.Field(info.Num) + } else { + field = out.FieldByIndex(info.Inline) + } + d.unmarshal(n.children[i+1], field) + } else if sinfo.InlineMap != -1 { + if inlineMap.IsNil() { + inlineMap.Set(reflect.MakeMap(inlineMap.Type())) + } + value := reflect.New(elemType).Elem() + d.unmarshal(n.children[i+1], value) + inlineMap.SetMapIndex(name, value) + } + } + return true +} + +func failWantMap() { + failf("map merge requires map or sequence of maps as the value") +} + +func (d *decoder) merge(n *node, out reflect.Value) { + switch n.kind { + case mappingNode: + d.unmarshal(n, out) + case aliasNode: + an, ok := d.doc.anchors[n.value] + if ok && an.kind != mappingNode { + failWantMap() + } + d.unmarshal(n, out) + case sequenceNode: + // Step backwards as earlier nodes take precedence. + for i := len(n.children) - 1; i >= 0; i-- { + ni := n.children[i] + if ni.kind == aliasNode { + an, ok := d.doc.anchors[ni.value] + if ok && an.kind != mappingNode { + failWantMap() + } + } else if ni.kind != mappingNode { + failWantMap() + } + d.unmarshal(ni, out) + } + default: + failWantMap() + } +} + +func isMerge(n *node) bool { + return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) +} diff --git a/vendor/gopkg.in/yaml.v2/decode_test.go b/vendor/gopkg.in/yaml.v2/decode_test.go new file mode 100644 index 0000000..c159760 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/decode_test.go @@ -0,0 +1,988 @@ +package yaml_test + +import ( + "errors" + . "gopkg.in/check.v1" + "gopkg.in/yaml.v2" + "math" + "net" + "reflect" + "strings" + "time" +) + +var unmarshalIntTest = 123 + +var unmarshalTests = []struct { + data string + value interface{} +}{ + { + "", + &struct{}{}, + }, { + "{}", &struct{}{}, + }, { + "v: hi", + map[string]string{"v": "hi"}, + }, { + "v: hi", map[string]interface{}{"v": "hi"}, + }, { + "v: true", + map[string]string{"v": "true"}, + }, { + "v: true", + map[string]interface{}{"v": true}, + }, { + "v: 10", + map[string]interface{}{"v": 10}, + }, { + "v: 0b10", + map[string]interface{}{"v": 2}, + }, { + "v: 0xA", + map[string]interface{}{"v": 10}, + }, { + "v: 4294967296", + map[string]int64{"v": 4294967296}, + }, { + "v: 0.1", + map[string]interface{}{"v": 0.1}, + }, { + "v: .1", + map[string]interface{}{"v": 0.1}, + }, { + "v: .Inf", + map[string]interface{}{"v": math.Inf(+1)}, + }, { + "v: -.Inf", + map[string]interface{}{"v": math.Inf(-1)}, + }, { + "v: -10", + map[string]interface{}{"v": -10}, + }, { + "v: -.1", + map[string]interface{}{"v": -0.1}, + }, + + // Simple values. + { + "123", + &unmarshalIntTest, + }, + + // Floats from spec + { + "canonical: 6.8523e+5", + map[string]interface{}{"canonical": 6.8523e+5}, + }, { + "expo: 685.230_15e+03", + map[string]interface{}{"expo": 685.23015e+03}, + }, { + "fixed: 685_230.15", + map[string]interface{}{"fixed": 685230.15}, + }, { + "neginf: -.inf", + map[string]interface{}{"neginf": math.Inf(-1)}, + }, { + "fixed: 685_230.15", + map[string]float64{"fixed": 685230.15}, + }, + //{"sexa: 190:20:30.15", map[string]interface{}{"sexa": 0}}, // Unsupported + //{"notanum: .NaN", map[string]interface{}{"notanum": math.NaN()}}, // Equality of NaN fails. 
+ + // Bools from spec + { + "canonical: y", + map[string]interface{}{"canonical": true}, + }, { + "answer: NO", + map[string]interface{}{"answer": false}, + }, { + "logical: True", + map[string]interface{}{"logical": true}, + }, { + "option: on", + map[string]interface{}{"option": true}, + }, { + "option: on", + map[string]bool{"option": true}, + }, + // Ints from spec + { + "canonical: 685230", + map[string]interface{}{"canonical": 685230}, + }, { + "decimal: +685_230", + map[string]interface{}{"decimal": 685230}, + }, { + "octal: 02472256", + map[string]interface{}{"octal": 685230}, + }, { + "hexa: 0x_0A_74_AE", + map[string]interface{}{"hexa": 685230}, + }, { + "bin: 0b1010_0111_0100_1010_1110", + map[string]interface{}{"bin": 685230}, + }, { + "bin: -0b101010", + map[string]interface{}{"bin": -42}, + }, { + "decimal: +685_230", + map[string]int{"decimal": 685230}, + }, + + //{"sexa: 190:20:30", map[string]interface{}{"sexa": 0}}, // Unsupported + + // Nulls from spec + { + "empty:", + map[string]interface{}{"empty": nil}, + }, { + "canonical: ~", + map[string]interface{}{"canonical": nil}, + }, { + "english: null", + map[string]interface{}{"english": nil}, + }, { + "~: null key", + map[interface{}]string{nil: "null key"}, + }, { + "empty:", + map[string]*bool{"empty": nil}, + }, + + // Flow sequence + { + "seq: [A,B]", + map[string]interface{}{"seq": []interface{}{"A", "B"}}, + }, { + "seq: [A,B,C,]", + map[string][]string{"seq": []string{"A", "B", "C"}}, + }, { + "seq: [A,1,C]", + map[string][]string{"seq": []string{"A", "1", "C"}}, + }, { + "seq: [A,1,C]", + map[string][]int{"seq": []int{1}}, + }, { + "seq: [A,1,C]", + map[string]interface{}{"seq": []interface{}{"A", 1, "C"}}, + }, + // Block sequence + { + "seq:\n - A\n - B", + map[string]interface{}{"seq": []interface{}{"A", "B"}}, + }, { + "seq:\n - A\n - B\n - C", + map[string][]string{"seq": []string{"A", "B", "C"}}, + }, { + "seq:\n - A\n - 1\n - C", + map[string][]string{"seq": []string{"A", "1", "C"}}, + }, { + "seq:\n - A\n - 1\n - C", + map[string][]int{"seq": []int{1}}, + }, { + "seq:\n - A\n - 1\n - C", + map[string]interface{}{"seq": []interface{}{"A", 1, "C"}}, + }, + + // Literal block scalar + { + "scalar: | # Comment\n\n literal\n\n \ttext\n\n", + map[string]string{"scalar": "\nliteral\n\n\ttext\n"}, + }, + + // Folded block scalar + { + "scalar: > # Comment\n\n folded\n line\n \n next\n line\n * one\n * two\n\n last\n line\n\n", + map[string]string{"scalar": "\nfolded line\nnext line\n * one\n * two\n\nlast line\n"}, + }, + + // Map inside interface with no type hints. + { + "a: {b: c}", + map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, + }, + + // Structs and type conversions. 
+ { + "hello: world", + &struct{ Hello string }{"world"}, + }, { + "a: {b: c}", + &struct{ A struct{ B string } }{struct{ B string }{"c"}}, + }, { + "a: {b: c}", + &struct{ A *struct{ B string } }{&struct{ B string }{"c"}}, + }, { + "a: {b: c}", + &struct{ A map[string]string }{map[string]string{"b": "c"}}, + }, { + "a: {b: c}", + &struct{ A *map[string]string }{&map[string]string{"b": "c"}}, + }, { + "a:", + &struct{ A map[string]string }{}, + }, { + "a: 1", + &struct{ A int }{1}, + }, { + "a: 1", + &struct{ A float64 }{1}, + }, { + "a: 1.0", + &struct{ A int }{1}, + }, { + "a: 1.0", + &struct{ A uint }{1}, + }, { + "a: [1, 2]", + &struct{ A []int }{[]int{1, 2}}, + }, { + "a: 1", + &struct{ B int }{0}, + }, { + "a: 1", + &struct { + B int "a" + }{1}, + }, { + "a: y", + &struct{ A bool }{true}, + }, + + // Some cross type conversions + { + "v: 42", + map[string]uint{"v": 42}, + }, { + "v: -42", + map[string]uint{}, + }, { + "v: 4294967296", + map[string]uint64{"v": 4294967296}, + }, { + "v: -4294967296", + map[string]uint64{}, + }, + + // int + { + "int_max: 2147483647", + map[string]int{"int_max": math.MaxInt32}, + }, + { + "int_min: -2147483648", + map[string]int{"int_min": math.MinInt32}, + }, + { + "int_overflow: 9223372036854775808", // math.MaxInt64 + 1 + map[string]int{}, + }, + + // int64 + { + "int64_max: 9223372036854775807", + map[string]int64{"int64_max": math.MaxInt64}, + }, + { + "int64_max_base2: 0b111111111111111111111111111111111111111111111111111111111111111", + map[string]int64{"int64_max_base2": math.MaxInt64}, + }, + { + "int64_min: -9223372036854775808", + map[string]int64{"int64_min": math.MinInt64}, + }, + { + "int64_neg_base2: -0b111111111111111111111111111111111111111111111111111111111111111", + map[string]int64{"int64_neg_base2": -math.MaxInt64}, + }, + { + "int64_overflow: 9223372036854775808", // math.MaxInt64 + 1 + map[string]int64{}, + }, + + // uint + { + "uint_min: 0", + map[string]uint{"uint_min": 0}, + }, + { + "uint_max: 4294967295", + map[string]uint{"uint_max": math.MaxUint32}, + }, + { + "uint_underflow: -1", + map[string]uint{}, + }, + + // uint64 + { + "uint64_min: 0", + map[string]uint{"uint64_min": 0}, + }, + { + "uint64_max: 18446744073709551615", + map[string]uint64{"uint64_max": math.MaxUint64}, + }, + { + "uint64_max_base2: 0b1111111111111111111111111111111111111111111111111111111111111111", + map[string]uint64{"uint64_max_base2": math.MaxUint64}, + }, + { + "uint64_maxint64: 9223372036854775807", + map[string]uint64{"uint64_maxint64": math.MaxInt64}, + }, + { + "uint64_underflow: -1", + map[string]uint64{}, + }, + + // float32 + { + "float32_max: 3.40282346638528859811704183484516925440e+38", + map[string]float32{"float32_max": math.MaxFloat32}, + }, + { + "float32_nonzero: 1.401298464324817070923729583289916131280e-45", + map[string]float32{"float32_nonzero": math.SmallestNonzeroFloat32}, + }, + { + "float32_maxuint64: 18446744073709551615", + map[string]float32{"float32_maxuint64": float32(math.MaxUint64)}, + }, + { + "float32_maxuint64+1: 18446744073709551616", + map[string]float32{"float32_maxuint64+1": float32(math.MaxUint64 + 1)}, + }, + + // float64 + { + "float64_max: 1.797693134862315708145274237317043567981e+308", + map[string]float64{"float64_max": math.MaxFloat64}, + }, + { + "float64_nonzero: 4.940656458412465441765687928682213723651e-324", + map[string]float64{"float64_nonzero": math.SmallestNonzeroFloat64}, + }, + { + "float64_maxuint64: 18446744073709551615", + map[string]float64{"float64_maxuint64": float64(math.MaxUint64)}, 
+ }, + { + "float64_maxuint64+1: 18446744073709551616", + map[string]float64{"float64_maxuint64+1": float64(math.MaxUint64 + 1)}, + }, + + // Overflow cases. + { + "v: 4294967297", + map[string]int32{}, + }, { + "v: 128", + map[string]int8{}, + }, + + // Quoted values. + { + "'1': '\"2\"'", + map[interface{}]interface{}{"1": "\"2\""}, + }, { + "v:\n- A\n- 'B\n\n C'\n", + map[string][]string{"v": []string{"A", "B\nC"}}, + }, + + // Explicit tags. + { + "v: !!float '1.1'", + map[string]interface{}{"v": 1.1}, + }, { + "v: !!null ''", + map[string]interface{}{"v": nil}, + }, { + "%TAG !y! tag:yaml.org,2002:\n---\nv: !y!int '1'", + map[string]interface{}{"v": 1}, + }, + + // Anchors and aliases. + { + "a: &x 1\nb: &y 2\nc: *x\nd: *y\n", + &struct{ A, B, C, D int }{1, 2, 1, 2}, + }, { + "a: &a {c: 1}\nb: *a", + &struct { + A, B struct { + C int + } + }{struct{ C int }{1}, struct{ C int }{1}}, + }, { + "a: &a [1, 2]\nb: *a", + &struct{ B []int }{[]int{1, 2}}, + }, { + "b: *a\na: &a {c: 1}", + &struct { + A, B struct { + C int + } + }{struct{ C int }{1}, struct{ C int }{1}}, + }, + + // Bug #1133337 + { + "foo: ''", + map[string]*string{"foo": new(string)}, + }, { + "foo: null", + map[string]string{"foo": ""}, + }, { + "foo: null", + map[string]interface{}{"foo": nil}, + }, + + // Ignored field + { + "a: 1\nb: 2\n", + &struct { + A int + B int "-" + }{1, 0}, + }, + + // Bug #1191981 + { + "" + + "%YAML 1.1\n" + + "--- !!str\n" + + `"Generic line break (no glyph)\n\` + "\n" + + ` Generic line break (glyphed)\n\` + "\n" + + ` Line separator\u2028\` + "\n" + + ` Paragraph separator\u2029"` + "\n", + "" + + "Generic line break (no glyph)\n" + + "Generic line break (glyphed)\n" + + "Line separator\u2028Paragraph separator\u2029", + }, + + // Struct inlining + { + "a: 1\nb: 2\nc: 3\n", + &struct { + A int + C inlineB `yaml:",inline"` + }{1, inlineB{2, inlineC{3}}}, + }, + + // Map inlining + { + "a: 1\nb: 2\nc: 3\n", + &struct { + A int + C map[string]int `yaml:",inline"` + }{1, map[string]int{"b": 2, "c": 3}}, + }, + + // bug 1243827 + { + "a: -b_c", + map[string]interface{}{"a": "-b_c"}, + }, + { + "a: +b_c", + map[string]interface{}{"a": "+b_c"}, + }, + { + "a: 50cent_of_dollar", + map[string]interface{}{"a": "50cent_of_dollar"}, + }, + + // Duration + { + "a: 3s", + map[string]time.Duration{"a": 3 * time.Second}, + }, + + // Issue #24. + { + "a: ", + map[string]string{"a": ""}, + }, + + // Base 60 floats are obsolete and unsupported. + { + "a: 1:1\n", + map[string]string{"a": "1:1"}, + }, + + // Binary data. + { + "a: !!binary gIGC\n", + map[string]string{"a": "\x80\x81\x82"}, + }, { + "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", + map[string]string{"a": strings.Repeat("\x90", 54)}, + }, { + "a: !!binary |\n " + strings.Repeat("A", 70) + "\n ==\n", + map[string]string{"a": strings.Repeat("\x00", 52)}, + }, + + // Ordered maps. + { + "{b: 2, a: 1, d: 4, c: 3, sub: {e: 5}}", + &yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}}, + }, + + // Issue #39. + { + "a:\n b:\n c: d\n", + map[string]struct{ B interface{} }{"a": {map[interface{}]interface{}{"c": "d"}}}, + }, + + // Custom map type. + { + "a: {b: c}", + M{"a": M{"b": "c"}}, + }, + + // Support encoding.TextUnmarshaler. + { + "a: 1.2.3.4\n", + map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)}, + }, + { + "a: 2015-02-24T18:19:39Z\n", + map[string]time.Time{"a": time.Unix(1424801979, 0)}, + }, + + // Encode empty lists as zero-length slices. 
+ { + "a: []", + &struct{ A []int }{[]int{}}, + }, + + // UTF-16-LE + { + "\xff\xfe\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00\n\x00", + M{"ñoño": "very yes"}, + }, + // UTF-16-LE with surrogate. + { + "\xff\xfe\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00 \x00=\xd8\xd4\xdf\n\x00", + M{"ñoño": "very yes 🟔"}, + }, + + // UTF-16-BE + { + "\xfe\xff\x00\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00\n", + M{"ñoño": "very yes"}, + }, + // UTF-16-BE with surrogate. + { + "\xfe\xff\x00\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00 \xd8=\xdf\xd4\x00\n", + M{"ñoño": "very yes 🟔"}, + }, +} + +type M map[interface{}]interface{} + +type inlineB struct { + B int + inlineC `yaml:",inline"` +} + +type inlineC struct { + C int +} + +func (s *S) TestUnmarshal(c *C) { + for _, item := range unmarshalTests { + t := reflect.ValueOf(item.value).Type() + var value interface{} + switch t.Kind() { + case reflect.Map: + value = reflect.MakeMap(t).Interface() + case reflect.String: + value = reflect.New(t).Interface() + case reflect.Ptr: + value = reflect.New(t.Elem()).Interface() + default: + c.Fatalf("missing case for %s", t) + } + err := yaml.Unmarshal([]byte(item.data), value) + if _, ok := err.(*yaml.TypeError); !ok { + c.Assert(err, IsNil) + } + if t.Kind() == reflect.String { + c.Assert(*value.(*string), Equals, item.value) + } else { + c.Assert(value, DeepEquals, item.value) + } + } +} + +func (s *S) TestUnmarshalNaN(c *C) { + value := map[string]interface{}{} + err := yaml.Unmarshal([]byte("notanum: .NaN"), &value) + c.Assert(err, IsNil) + c.Assert(math.IsNaN(value["notanum"].(float64)), Equals, true) +} + +var unmarshalErrorTests = []struct { + data, error string +}{ + {"v: !!float 'error'", "yaml: cannot decode !!str `error` as a !!float"}, + {"v: [A,", "yaml: line 1: did not find expected node content"}, + {"v:\n- [A,", "yaml: line 2: did not find expected node content"}, + {"a: *b\n", "yaml: unknown anchor 'b' referenced"}, + {"a: &a\n b: *a\n", "yaml: anchor 'a' value contains itself"}, + {"value: -", "yaml: block sequence entries are not allowed in this context"}, + {"a: !!binary ==", "yaml: !!binary value contains invalid base64 data"}, + {"{[.]}", `yaml: invalid map key: \[\]interface \{\}\{"\."\}`}, + {"{{.}}", `yaml: invalid map key: map\[interface\ \{\}\]interface \{\}\{".":interface \{\}\(nil\)\}`}, +} + +func (s *S) TestUnmarshalErrors(c *C) { + for _, item := range unmarshalErrorTests { + var value interface{} + err := yaml.Unmarshal([]byte(item.data), &value) + c.Assert(err, ErrorMatches, item.error, Commentf("Partial unmarshal: %#v", value)) + } +} + +var unmarshalerTests = []struct { + data, tag string + value interface{} +}{ + {"_: {hi: there}", "!!map", map[interface{}]interface{}{"hi": "there"}}, + {"_: [1,A]", "!!seq", []interface{}{1, "A"}}, + {"_: 10", "!!int", 10}, + {"_: null", "!!null", nil}, + {`_: BAR!`, "!!str", "BAR!"}, + {`_: "BAR!"`, "!!str", "BAR!"}, + {"_: !!foo 'BAR!'", "!!foo", "BAR!"}, +} + +var unmarshalerResult = map[int]error{} + +type unmarshalerType struct { + value interface{} +} + +func (o *unmarshalerType) UnmarshalYAML(unmarshal func(v interface{}) error) error { + if err := unmarshal(&o.value); err != nil { + return err + } + if i, ok := o.value.(int); ok { + if result, ok := unmarshalerResult[i]; ok { + return result + } + } + return nil +} + +type unmarshalerPointer struct { + Field *unmarshalerType "_" +} + +type unmarshalerValue struct 
{ + Field unmarshalerType "_" +} + +func (s *S) TestUnmarshalerPointerField(c *C) { + for _, item := range unmarshalerTests { + obj := &unmarshalerPointer{} + err := yaml.Unmarshal([]byte(item.data), obj) + c.Assert(err, IsNil) + if item.value == nil { + c.Assert(obj.Field, IsNil) + } else { + c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) + c.Assert(obj.Field.value, DeepEquals, item.value) + } + } +} + +func (s *S) TestUnmarshalerValueField(c *C) { + for _, item := range unmarshalerTests { + obj := &unmarshalerValue{} + err := yaml.Unmarshal([]byte(item.data), obj) + c.Assert(err, IsNil) + c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) + c.Assert(obj.Field.value, DeepEquals, item.value) + } +} + +func (s *S) TestUnmarshalerWholeDocument(c *C) { + obj := &unmarshalerType{} + err := yaml.Unmarshal([]byte(unmarshalerTests[0].data), obj) + c.Assert(err, IsNil) + value, ok := obj.value.(map[interface{}]interface{}) + c.Assert(ok, Equals, true, Commentf("value: %#v", obj.value)) + c.Assert(value["_"], DeepEquals, unmarshalerTests[0].value) +} + +func (s *S) TestUnmarshalerTypeError(c *C) { + unmarshalerResult[2] = &yaml.TypeError{[]string{"foo"}} + unmarshalerResult[4] = &yaml.TypeError{[]string{"bar"}} + defer func() { + delete(unmarshalerResult, 2) + delete(unmarshalerResult, 4) + }() + + type T struct { + Before int + After int + M map[string]*unmarshalerType + } + var v T + data := `{before: A, m: {abc: 1, def: 2, ghi: 3, jkl: 4}, after: B}` + err := yaml.Unmarshal([]byte(data), &v) + c.Assert(err, ErrorMatches, ""+ + "yaml: unmarshal errors:\n"+ + " line 1: cannot unmarshal !!str `A` into int\n"+ + " foo\n"+ + " bar\n"+ + " line 1: cannot unmarshal !!str `B` into int") + c.Assert(v.M["abc"], NotNil) + c.Assert(v.M["def"], IsNil) + c.Assert(v.M["ghi"], NotNil) + c.Assert(v.M["jkl"], IsNil) + + c.Assert(v.M["abc"].value, Equals, 1) + c.Assert(v.M["ghi"].value, Equals, 3) +} + +type proxyTypeError struct{} + +func (v *proxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error { + var s string + var a int32 + var b int64 + if err := unmarshal(&s); err != nil { + panic(err) + } + if s == "a" { + if err := unmarshal(&b); err == nil { + panic("should have failed") + } + return unmarshal(&a) + } + if err := unmarshal(&a); err == nil { + panic("should have failed") + } + return unmarshal(&b) +} + +func (s *S) TestUnmarshalerTypeErrorProxying(c *C) { + type T struct { + Before int + After int + M map[string]*proxyTypeError + } + var v T + data := `{before: A, m: {abc: a, def: b}, after: B}` + err := yaml.Unmarshal([]byte(data), &v) + c.Assert(err, ErrorMatches, ""+ + "yaml: unmarshal errors:\n"+ + " line 1: cannot unmarshal !!str `A` into int\n"+ + " line 1: cannot unmarshal !!str `a` into int32\n"+ + " line 1: cannot unmarshal !!str `b` into int64\n"+ + " line 1: cannot unmarshal !!str `B` into int") +} + +type failingUnmarshaler struct{} + +var failingErr = errors.New("failingErr") + +func (ft *failingUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { + return failingErr +} + +func (s *S) TestUnmarshalerError(c *C) { + err := yaml.Unmarshal([]byte("a: b"), &failingUnmarshaler{}) + c.Assert(err, Equals, failingErr) +} + +type sliceUnmarshaler []int + +func (su *sliceUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { + var slice []int + err := unmarshal(&slice) + if err == nil { + *su = slice + return nil + } + + var intVal int + err = unmarshal(&intVal) + if err == nil { + *su = 
[]int{intVal} + return nil + } + + return err +} + +func (s *S) TestUnmarshalerRetry(c *C) { + var su sliceUnmarshaler + err := yaml.Unmarshal([]byte("[1, 2, 3]"), &su) + c.Assert(err, IsNil) + c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1, 2, 3})) + + err = yaml.Unmarshal([]byte("1"), &su) + c.Assert(err, IsNil) + c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1})) +} + +// From http://yaml.org/type/merge.html +var mergeTests = ` +anchors: + list: + - &CENTER { "x": 1, "y": 2 } + - &LEFT { "x": 0, "y": 2 } + - &BIG { "r": 10 } + - &SMALL { "r": 1 } + +# All the following maps are equal: + +plain: + # Explicit keys + "x": 1 + "y": 2 + "r": 10 + label: center/big + +mergeOne: + # Merge one map + << : *CENTER + "r": 10 + label: center/big + +mergeMultiple: + # Merge multiple maps + << : [ *CENTER, *BIG ] + label: center/big + +override: + # Override + << : [ *BIG, *LEFT, *SMALL ] + "x": 1 + label: center/big + +shortTag: + # Explicit short merge tag + !!merge "<<" : [ *CENTER, *BIG ] + label: center/big + +longTag: + # Explicit merge long tag + ! "<<" : [ *CENTER, *BIG ] + label: center/big + +inlineMap: + # Inlined map + << : {"x": 1, "y": 2, "r": 10} + label: center/big + +inlineSequenceMap: + # Inlined map in sequence + << : [ *CENTER, {"r": 10} ] + label: center/big +` + +func (s *S) TestMerge(c *C) { + var want = map[interface{}]interface{}{ + "x": 1, + "y": 2, + "r": 10, + "label": "center/big", + } + + var m map[interface{}]interface{} + err := yaml.Unmarshal([]byte(mergeTests), &m) + c.Assert(err, IsNil) + for name, test := range m { + if name == "anchors" { + continue + } + c.Assert(test, DeepEquals, want, Commentf("test %q failed", name)) + } +} + +func (s *S) TestMergeStruct(c *C) { + type Data struct { + X, Y, R int + Label string + } + want := Data{1, 2, 10, "center/big"} + + var m map[string]Data + err := yaml.Unmarshal([]byte(mergeTests), &m) + c.Assert(err, IsNil) + for name, test := range m { + if name == "anchors" { + continue + } + c.Assert(test, Equals, want, Commentf("test %q failed", name)) + } +} + +var unmarshalNullTests = []func() interface{}{ + func() interface{} { var v interface{}; v = "v"; return &v }, + func() interface{} { var s = "s"; return &s }, + func() interface{} { var s = "s"; sptr := &s; return &sptr }, + func() interface{} { var i = 1; return &i }, + func() interface{} { var i = 1; iptr := &i; return &iptr }, + func() interface{} { m := map[string]int{"s": 1}; return &m }, + func() interface{} { m := map[string]int{"s": 1}; return m }, +} + +func (s *S) TestUnmarshalNull(c *C) { + for _, test := range unmarshalNullTests { + item := test() + zero := reflect.Zero(reflect.TypeOf(item).Elem()).Interface() + err := yaml.Unmarshal([]byte("null"), item) + c.Assert(err, IsNil) + if reflect.TypeOf(item).Kind() == reflect.Map { + c.Assert(reflect.ValueOf(item).Interface(), DeepEquals, reflect.MakeMap(reflect.TypeOf(item)).Interface()) + } else { + c.Assert(reflect.ValueOf(item).Elem().Interface(), DeepEquals, zero) + } + } +} + +func (s *S) TestUnmarshalSliceOnPreset(c *C) { + // Issue #48. 
+ v := struct{ A []int }{[]int{1}} + yaml.Unmarshal([]byte("a: [2]"), &v) + c.Assert(v.A, DeepEquals, []int{2}) +} + +//var data []byte +//func init() { +// var err error +// data, err = ioutil.ReadFile("/tmp/file.yaml") +// if err != nil { +// panic(err) +// } +//} +// +//func (s *S) BenchmarkUnmarshal(c *C) { +// var err error +// for i := 0; i < c.N; i++ { +// var v map[string]interface{} +// err = yaml.Unmarshal(data, &v) +// } +// if err != nil { +// panic(err) +// } +//} +// +//func (s *S) BenchmarkMarshal(c *C) { +// var v map[string]interface{} +// yaml.Unmarshal(data, &v) +// c.ResetTimer() +// for i := 0; i < c.N; i++ { +// yaml.Marshal(&v) +// } +//} diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go new file mode 100644 index 0000000..2befd55 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/emitterc.go @@ -0,0 +1,1685 @@ +package yaml + +import ( + "bytes" +) + +// Flush the buffer if needed. +func flush(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) { + return yaml_emitter_flush(emitter) + } + return true +} + +// Put a character to the output buffer. +func put(emitter *yaml_emitter_t, value byte) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + emitter.buffer[emitter.buffer_pos] = value + emitter.buffer_pos++ + emitter.column++ + return true +} + +// Put a line break to the output buffer. +func put_break(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + switch emitter.line_break { + case yaml_CR_BREAK: + emitter.buffer[emitter.buffer_pos] = '\r' + emitter.buffer_pos += 1 + case yaml_LN_BREAK: + emitter.buffer[emitter.buffer_pos] = '\n' + emitter.buffer_pos += 1 + case yaml_CRLN_BREAK: + emitter.buffer[emitter.buffer_pos+0] = '\r' + emitter.buffer[emitter.buffer_pos+1] = '\n' + emitter.buffer_pos += 2 + default: + panic("unknown line break setting") + } + emitter.column = 0 + emitter.line++ + return true +} + +// Copy a character from a string into buffer. +func write(emitter *yaml_emitter_t, s []byte, i *int) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + p := emitter.buffer_pos + w := width(s[*i]) + switch w { + case 4: + emitter.buffer[p+3] = s[*i+3] + fallthrough + case 3: + emitter.buffer[p+2] = s[*i+2] + fallthrough + case 2: + emitter.buffer[p+1] = s[*i+1] + fallthrough + case 1: + emitter.buffer[p+0] = s[*i+0] + default: + panic("unknown character width") + } + emitter.column++ + emitter.buffer_pos += w + *i += w + return true +} + +// Write a whole string into buffer. +func write_all(emitter *yaml_emitter_t, s []byte) bool { + for i := 0; i < len(s); { + if !write(emitter, s, &i) { + return false + } + } + return true +} + +// Copy a line break character from a string into buffer. +func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { + if s[*i] == '\n' { + if !put_break(emitter) { + return false + } + *i++ + } else { + if !write(emitter, s, i) { + return false + } + emitter.column = 0 + emitter.line++ + } + return true +} + +// Set an emitter error and return false. +func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_EMITTER_ERROR + emitter.problem = problem + return false +} + +// Emit an event. 
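+//
+// [Go] Editor's note: the event is queued rather than written immediately;
+// queued events are drained through the state machine only once enough
+// lookahead has accumulated (see yaml_emitter_need_more_events below), and
+// each drained event is analyzed, dispatched, and then deleted.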
+func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.events = append(emitter.events, *event) + for !yaml_emitter_need_more_events(emitter) { + event := &emitter.events[emitter.events_head] + if !yaml_emitter_analyze_event(emitter, event) { + return false + } + if !yaml_emitter_state_machine(emitter, event) { + return false + } + yaml_event_delete(event) + emitter.events_head++ + } + return true +} + +// Check if we need to accumulate more events before emitting. +// +// We accumulate extra +// - 1 event for DOCUMENT-START +// - 2 events for SEQUENCE-START +// - 3 events for MAPPING-START +// +func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { + if emitter.events_head == len(emitter.events) { + return true + } + var accumulate int + switch emitter.events[emitter.events_head].typ { + case yaml_DOCUMENT_START_EVENT: + accumulate = 1 + break + case yaml_SEQUENCE_START_EVENT: + accumulate = 2 + break + case yaml_MAPPING_START_EVENT: + accumulate = 3 + break + default: + return false + } + if len(emitter.events)-emitter.events_head > accumulate { + return false + } + var level int + for i := emitter.events_head; i < len(emitter.events); i++ { + switch emitter.events[i].typ { + case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: + level++ + case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: + level-- + } + if level == 0 { + return false + } + } + return true +} + +// Append a directive to the directives stack. +func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { + for i := 0; i < len(emitter.tag_directives); i++ { + if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") + } + } + + // [Go] Do we actually need to copy this given garbage collection + // and the lack of deallocating destructors? + tag_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(tag_copy.handle, value.handle) + copy(tag_copy.prefix, value.prefix) + emitter.tag_directives = append(emitter.tag_directives, tag_copy) + return true +} + +// Increase the indentation level. +func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { + emitter.indents = append(emitter.indents, emitter.indent) + if emitter.indent < 0 { + if flow { + emitter.indent = emitter.best_indent + } else { + emitter.indent = 0 + } + } else if !indentless { + emitter.indent += emitter.best_indent + } + return true +} + +// State dispatcher. 
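+//
+// [Go] Editor's note: the emitter is effectively a pushdown automaton:
+// emitter.state holds the current state, and emitter.states acts as the
+// return stack that nested sequences and mappings push onto and pop from.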
+func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { + switch emitter.state { + default: + case yaml_EMIT_STREAM_START_STATE: + return yaml_emitter_emit_stream_start(emitter, event) + + case yaml_EMIT_FIRST_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, true) + + case yaml_EMIT_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, false) + + case yaml_EMIT_DOCUMENT_CONTENT_STATE: + return yaml_emitter_emit_document_content(emitter, event) + + case yaml_EMIT_DOCUMENT_END_STATE: + return yaml_emitter_emit_document_end(emitter, event) + + case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, true) + + case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false) + + case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, false) + + case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, true) + + case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, false) + + case yaml_EMIT_END_STATE: + return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") + } + panic("invalid emitter state") +} + +// Expect STREAM-START. +func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_STREAM_START_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") + } + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = event.encoding + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = yaml_UTF8_ENCODING + } + } + if emitter.best_indent < 2 || emitter.best_indent > 9 { + emitter.best_indent = 2 + } + if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { + emitter.best_width = 80 + } + if emitter.best_width < 0 { + emitter.best_width = 1<<31 - 1 + } + if emitter.line_break == yaml_ANY_BREAK { + emitter.line_break = yaml_LN_BREAK + } + + emitter.indent = -1 + emitter.line = 0 + emitter.column = 0 + emitter.whitespace = true + emitter.indention = true + + if emitter.encoding != yaml_UTF8_ENCODING { + if !yaml_emitter_write_bom(emitter) { + return false + } + } + emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE + return true +} + +// Expect DOCUMENT-START or STREAM-END. 
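+//
+// [Go] Editor's note: as an illustration, an explicit (non-implicit) document
+// start emits any %YAML/%TAG directives first and then the "---" marker,
+// e.g. (hypothetical tag handle and prefix):
+//
+//	%YAML 1.1
+//	%TAG !e! tag:example.com,2016:
+//	---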
+func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + + if event.typ == yaml_DOCUMENT_START_EVENT { + + if event.version_directive != nil { + if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { + return false + } + } + + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { + return false + } + if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { + return false + } + } + + for i := 0; i < len(default_tag_directives); i++ { + tag_directive := &default_tag_directives[i] + if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { + return false + } + } + + implicit := event.implicit + if !first || emitter.canonical { + implicit = false + } + + if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if event.version_directive != nil { + implicit = false + if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if len(event.tag_directives) > 0 { + implicit = false + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { + return false + } + if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + if yaml_emitter_check_empty_document(emitter) { + implicit = false + } + if !implicit { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { + return false + } + if emitter.canonical { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE + return true + } + + if event.typ == yaml_STREAM_END_EVENT { + if emitter.open_ended { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_END_STATE + return true + } + + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") +} + +// Expect the root node. +func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) + return yaml_emitter_emit_node(emitter, event, true, false, false, false) +} + +// Expect DOCUMENT-END. +func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_DOCUMENT_END_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !event.implicit { + // [Go] Allocate the slice elsewhere. 
+ if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_DOCUMENT_START_STATE + emitter.tag_directives = emitter.tag_directives[:0] + return true +} + +// Expect a flow item node. +func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_SEQUENCE_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +// Expect a flow key node. +func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_MAPPING_END_EVENT { + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + + if !first { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a flow value node. 
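+//
+// [Go] Editor's note: "simple" means the preceding key was short enough to be
+// written inline (see yaml_emitter_check_simple_key), so only ":" is needed
+// here; the explicit form instead writes the pair as "? key : value".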
+func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a block item node. +func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { + return false + } + } + if event.typ == yaml_SEQUENCE_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) + return yaml_emitter_emit_node(emitter, event, false, true, false, false) +} + +// Expect a block key node. +func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, false) { + return false + } + } + if event.typ == yaml_MAPPING_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a block value node. +func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { + return false + } + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a node. 
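+//
+// [Go] Editor's note: this records the surrounding context flags and then
+// dispatches on the event type: ALIAS, SCALAR, SEQUENCE-START, and
+// MAPPING-START each get their own handler; anything else is an error.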
+func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, + root bool, sequence bool, mapping bool, simple_key bool) bool { + + emitter.root_context = root + emitter.sequence_context = sequence + emitter.mapping_context = mapping + emitter.simple_key_context = simple_key + + switch event.typ { + case yaml_ALIAS_EVENT: + return yaml_emitter_emit_alias(emitter, event) + case yaml_SCALAR_EVENT: + return yaml_emitter_emit_scalar(emitter, event) + case yaml_SEQUENCE_START_EVENT: + return yaml_emitter_emit_sequence_start(emitter, event) + case yaml_MAPPING_START_EVENT: + return yaml_emitter_emit_mapping_start(emitter, event) + default: + return yaml_emitter_set_emitter_error(emitter, + "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS") + } + return false +} + +// Expect ALIAS. +func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SCALAR. +func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_select_scalar_style(emitter, event) { + return false + } + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + if !yaml_emitter_process_scalar(emitter) { + return false + } + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SEQUENCE-START. +func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || + yaml_emitter_check_empty_sequence(emitter) { + emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE + } + return true +} + +// Expect MAPPING-START. +func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || + yaml_emitter_check_empty_mapping(emitter) { + emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE + } + return true +} + +// Check if the document content is an empty scalar. +func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { + return false // [Go] Huh? +} + +// Check if the next events represent an empty sequence. +func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && + emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT +} + +// Check if the next events represent an empty mapping. 
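+// Requires at least one look-ahead event beyond the MAPPING-START in the queue.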
+func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool {
+	if len(emitter.events)-emitter.events_head < 2 {
+		return false
+	}
+	return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT &&
+		emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT
+}
+
+// Check if the next node can be expressed as a simple key.
+func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool {
+	length := 0
+	switch emitter.events[emitter.events_head].typ {
+	case yaml_ALIAS_EVENT:
+		length += len(emitter.anchor_data.anchor)
+	case yaml_SCALAR_EVENT:
+		if emitter.scalar_data.multiline {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix) +
+			len(emitter.scalar_data.value)
+	case yaml_SEQUENCE_START_EVENT:
+		if !yaml_emitter_check_empty_sequence(emitter) {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix)
+	case yaml_MAPPING_START_EVENT:
+		if !yaml_emitter_check_empty_mapping(emitter) {
+			return false
+		}
+		length += len(emitter.anchor_data.anchor) +
+			len(emitter.tag_data.handle) +
+			len(emitter.tag_data.suffix)
+	default:
+		return false
+	}
+	return length <= 128
+}
+
+// Determine an acceptable scalar style.
+func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {
+
+	no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
+	if no_tag && !event.implicit && !event.quoted_implicit {
+		return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified")
+	}
+
+	style := event.scalar_style()
+	if style == yaml_ANY_SCALAR_STYLE {
+		style = yaml_PLAIN_SCALAR_STYLE
+	}
+	if emitter.canonical {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+	if emitter.simple_key_context && emitter.scalar_data.multiline {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+
+	if style == yaml_PLAIN_SCALAR_STYLE {
+		if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed ||
+			emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+		if no_tag && !event.implicit {
+			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
+		}
+	}
+	if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
+		if !emitter.scalar_data.single_quoted_allowed {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+	if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
+		if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		}
+	}
+
+	if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE {
+		emitter.tag_data.handle = []byte{'!'}
+	}
+	emitter.scalar_data.style = style
+	return true
+}
+
+// Write an anchor.
+func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
+	if emitter.anchor_data.anchor == nil {
+		return true
+	}
+	c := []byte{'&'}
+	if emitter.anchor_data.alias {
+		c[0] = '*'
+	}
+	if !yaml_emitter_write_indicator(emitter, c, true, false, false) {
+		return false
+	}
+	return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor)
+}
+
+// Write a tag.
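+// Shorthand tags are written as handle plus suffix; a tag with no handle is
+// emitted verbatim as !<suffix>.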
+func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { + if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { + return true + } + if len(emitter.tag_data.handle) > 0 { + if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { + return false + } + if len(emitter.tag_data.suffix) > 0 { + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + } + } else { + // [Go] Allocate these slices elsewhere. + if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { + return false + } + } + return true +} + +// Write a scalar. +func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { + switch emitter.scalar_data.style { + case yaml_PLAIN_SCALAR_STYLE: + return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_SINGLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_DOUBLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_LITERAL_SCALAR_STYLE: + return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) + + case yaml_FOLDED_SCALAR_STYLE: + return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) + } + panic("unknown scalar style") +} + +// Check if a %YAML directive is valid. +func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { + if version_directive.major != 1 || version_directive.minor != 1 { + return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") + } + return true +} + +// Check if a %TAG directive is valid. +func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { + handle := tag_directive.handle + prefix := tag_directive.prefix + if len(handle) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") + } + if handle[0] != '!' { + return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") + } + if handle[len(handle)-1] != '!' { + return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") + } + for i := 1; i < len(handle)-1; i += width(handle[i]) { + if !is_alpha(handle, i) { + return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") + } + } + if len(prefix) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") + } + return true +} + +// Check if an anchor is valid. 
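+// Anchor and alias names must be non-empty and alphanumerical.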
+func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { + if len(anchor) == 0 { + problem := "anchor value must not be empty" + if alias { + problem = "alias value must not be empty" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + for i := 0; i < len(anchor); i += width(anchor[i]) { + if !is_alpha(anchor, i) { + problem := "anchor value must contain alphanumerical characters only" + if alias { + problem = "alias value must contain alphanumerical characters only" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + } + emitter.anchor_data.anchor = anchor + emitter.anchor_data.alias = alias + return true +} + +// Check if a tag is valid. +func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { + if len(tag) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") + } + for i := 0; i < len(emitter.tag_directives); i++ { + tag_directive := &emitter.tag_directives[i] + if bytes.HasPrefix(tag, tag_directive.prefix) { + emitter.tag_data.handle = tag_directive.handle + emitter.tag_data.suffix = tag[len(tag_directive.prefix):] + return true + } + } + emitter.tag_data.suffix = tag + return true +} + +// Check if a scalar is valid. +func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { + var ( + block_indicators = false + flow_indicators = false + line_breaks = false + special_characters = false + + leading_space = false + leading_break = false + trailing_space = false + trailing_break = false + break_space = false + space_break = false + + preceeded_by_whitespace = false + followed_by_whitespace = false + previous_space = false + previous_break = false + ) + + emitter.scalar_data.value = value + + if len(value) == 0 { + emitter.scalar_data.multiline = false + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = false + return true + } + + if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' 
&& value[2] == '.')) { + block_indicators = true + flow_indicators = true + } + + preceeded_by_whitespace = true + for i, w := 0, 0; i < len(value); i += w { + w = width(value[i]) + followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) + + if i == 0 { + switch value[i] { + case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': + flow_indicators = true + block_indicators = true + case '?', ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '-': + if followed_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } else { + switch value[i] { + case ',', '?', '[', ']', '{', '}': + flow_indicators = true + case ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '#': + if preceeded_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } + + if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { + special_characters = true + } + if is_space(value, i) { + if i == 0 { + leading_space = true + } + if i+width(value[i]) == len(value) { + trailing_space = true + } + if previous_break { + break_space = true + } + previous_space = true + previous_break = false + } else if is_break(value, i) { + line_breaks = true + if i == 0 { + leading_break = true + } + if i+width(value[i]) == len(value) { + trailing_break = true + } + if previous_space { + space_break = true + } + previous_space = false + previous_break = true + } else { + previous_space = false + previous_break = false + } + + // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. + preceeded_by_whitespace = is_blankz(value, i) + } + + emitter.scalar_data.multiline = line_breaks + emitter.scalar_data.flow_plain_allowed = true + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = true + + if leading_space || leading_break || trailing_space || trailing_break { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if trailing_space { + emitter.scalar_data.block_allowed = false + } + if break_space { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + } + if space_break || special_characters { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + emitter.scalar_data.block_allowed = false + } + if line_breaks { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if flow_indicators { + emitter.scalar_data.flow_plain_allowed = false + } + if block_indicators { + emitter.scalar_data.block_plain_allowed = false + } + return true +} + +// Check if the event data is valid. 
+func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + emitter.anchor_data.anchor = nil + emitter.tag_data.handle = nil + emitter.tag_data.suffix = nil + emitter.scalar_data.value = nil + + switch event.typ { + case yaml_ALIAS_EVENT: + if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { + return false + } + + case yaml_SCALAR_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + if !yaml_emitter_analyze_scalar(emitter, event.value) { + return false + } + + case yaml_SEQUENCE_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + + case yaml_MAPPING_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + } + return true +} + +// Write the BOM character. +func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { + if !flush(emitter) { + return false + } + pos := emitter.buffer_pos + emitter.buffer[pos+0] = '\xEF' + emitter.buffer[pos+1] = '\xBB' + emitter.buffer[pos+2] = '\xBF' + emitter.buffer_pos += 3 + return true +} + +func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { + indent := emitter.indent + if indent < 0 { + indent = 0 + } + if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { + if !put_break(emitter) { + return false + } + } + for emitter.column < indent { + if !put(emitter, ' ') { + return false + } + } + emitter.whitespace = true + emitter.indention = true + return true +} + +func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, indicator) { + return false + } + emitter.whitespace = is_whitespace + emitter.indention = (emitter.indention && is_indention) + emitter.open_ended = false + return true +} + +func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + for i := 0; i < len(value); { + var must_write bool + switch value[i] { + case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': + must_write = true + default: + must_write = is_alpha(value, i) + } + if must_write { + if !write(emitter, value, &i) { + return false + } + } 
else { + w := width(value[i]) + for k := 0; k < w; k++ { + octet := value[i] + i++ + if !put(emitter, '%') { + return false + } + + c := octet >> 4 + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + + c = octet & 0x0f + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + } + } + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + + emitter.whitespace = false + emitter.indention = false + if emitter.root_context { + emitter.open_ended = true + } + + return true +} + +func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { + return false + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if value[i] == '\'' { + if !put(emitter, '\'') { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + spaces := false + if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { + return false + } + + for i := 0; i < len(value); { + if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || + is_bom(value, i) || is_break(value, i) || + value[i] == '"' || value[i] == '\\' { + + octet := value[i] + + var w int + var v rune + switch { + case octet&0x80 == 0x00: + w, v = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, v = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, v = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, v = 4, 
rune(octet&0x07) + } + for k := 1; k < w; k++ { + octet = value[i+k] + v = (v << 6) + (rune(octet) & 0x3F) + } + i += w + + if !put(emitter, '\\') { + return false + } + + var ok bool + switch v { + case 0x00: + ok = put(emitter, '0') + case 0x07: + ok = put(emitter, 'a') + case 0x08: + ok = put(emitter, 'b') + case 0x09: + ok = put(emitter, 't') + case 0x0A: + ok = put(emitter, 'n') + case 0x0b: + ok = put(emitter, 'v') + case 0x0c: + ok = put(emitter, 'f') + case 0x0d: + ok = put(emitter, 'r') + case 0x1b: + ok = put(emitter, 'e') + case 0x22: + ok = put(emitter, '"') + case 0x5c: + ok = put(emitter, '\\') + case 0x85: + ok = put(emitter, 'N') + case 0xA0: + ok = put(emitter, '_') + case 0x2028: + ok = put(emitter, 'L') + case 0x2029: + ok = put(emitter, 'P') + default: + if v <= 0xFF { + ok = put(emitter, 'x') + w = 2 + } else if v <= 0xFFFF { + ok = put(emitter, 'u') + w = 4 + } else { + ok = put(emitter, 'U') + w = 8 + } + for k := (w - 1) * 4; ok && k >= 0; k -= 4 { + digit := byte((v >> uint(k)) & 0x0F) + if digit < 10 { + ok = put(emitter, digit+'0') + } else { + ok = put(emitter, digit+'A'-10) + } + } + } + if !ok { + return false + } + spaces = false + } else if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if is_space(value, i+1) { + if !put(emitter, '\\') { + return false + } + } + i += width(value[i]) + } else if !write(emitter, value, &i) { + return false + } + spaces = true + } else { + if !write(emitter, value, &i) { + return false + } + spaces = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { + if is_space(value, 0) || is_break(value, 0) { + indent_hint := []byte{'0' + byte(emitter.best_indent)} + if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { + return false + } + } + + emitter.open_ended = false + + var chomp_hint [1]byte + if len(value) == 0 { + chomp_hint[0] = '-' + } else { + i := len(value) - 1 + for value[i]&0xC0 == 0x80 { + i-- + } + if !is_break(value, i) { + chomp_hint[0] = '-' + } else if i == 0 { + chomp_hint[0] = '+' + emitter.open_ended = true + } else { + i-- + for value[i]&0xC0 == 0x80 { + i-- + } + if is_break(value, i) { + chomp_hint[0] = '+' + emitter.open_ended = true + } + } + } + if chomp_hint[0] != 0 { + if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { + return false + } + } + return true +} + +func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { + if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + if !put_break(emitter) { + return false + } + emitter.indention = true + emitter.whitespace = true + breaks := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + + return true +} + +func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { + if 
!yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + + if !put_break(emitter) { + return false + } + emitter.indention = true + emitter.whitespace = true + + breaks := true + leading_spaces := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !breaks && !leading_spaces && value[i] == '\n' { + k := 0 + for is_break(value, k) { + k += width(value[k]) + } + if !is_blankz(value, k) { + if !put_break(emitter) { + return false + } + } + } + if !write_break(emitter, value, &i) { + return false + } + emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + leading_spaces = is_blank(value, i) + } + if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + emitter.indention = false + breaks = false + } + } + return true +} diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go new file mode 100644 index 0000000..84f8499 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/encode.go @@ -0,0 +1,306 @@ +package yaml + +import ( + "encoding" + "fmt" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "time" +) + +type encoder struct { + emitter yaml_emitter_t + event yaml_event_t + out []byte + flow bool +} + +func newEncoder() (e *encoder) { + e = &encoder{} + e.must(yaml_emitter_initialize(&e.emitter)) + yaml_emitter_set_output_string(&e.emitter, &e.out) + yaml_emitter_set_unicode(&e.emitter, true) + e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)) + e.emit() + e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true)) + e.emit() + return e +} + +func (e *encoder) finish() { + e.must(yaml_document_end_event_initialize(&e.event, true)) + e.emit() + e.emitter.open_ended = false + e.must(yaml_stream_end_event_initialize(&e.event)) + e.emit() +} + +func (e *encoder) destroy() { + yaml_emitter_delete(&e.emitter) +} + +func (e *encoder) emit() { + // This will internally delete the e.event value. 
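+	// Emit failures for DOCUMENT-END and STREAM-END events are deliberately
+	// ignored by the check below; any other failure aborts via e.must(false).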
+ if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT { + e.must(false) + } +} + +func (e *encoder) must(ok bool) { + if !ok { + msg := e.emitter.problem + if msg == "" { + msg = "unknown problem generating YAML content" + } + failf("%s", msg) + } +} + +func (e *encoder) marshal(tag string, in reflect.Value) { + if !in.IsValid() { + e.nilv() + return + } + iface := in.Interface() + if m, ok := iface.(Marshaler); ok { + v, err := m.MarshalYAML() + if err != nil { + fail(err) + } + if v == nil { + e.nilv() + return + } + in = reflect.ValueOf(v) + } else if m, ok := iface.(encoding.TextMarshaler); ok { + text, err := m.MarshalText() + if err != nil { + fail(err) + } + in = reflect.ValueOf(string(text)) + } + switch in.Kind() { + case reflect.Interface: + if in.IsNil() { + e.nilv() + } else { + e.marshal(tag, in.Elem()) + } + case reflect.Map: + e.mapv(tag, in) + case reflect.Ptr: + if in.IsNil() { + e.nilv() + } else { + e.marshal(tag, in.Elem()) + } + case reflect.Struct: + e.structv(tag, in) + case reflect.Slice: + if in.Type().Elem() == mapItemType { + e.itemsv(tag, in) + } else { + e.slicev(tag, in) + } + case reflect.String: + e.stringv(tag, in) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if in.Type() == durationType { + e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String())) + } else { + e.intv(tag, in) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + e.uintv(tag, in) + case reflect.Float32, reflect.Float64: + e.floatv(tag, in) + case reflect.Bool: + e.boolv(tag, in) + default: + panic("cannot marshal type: " + in.Type().String()) + } +} + +func (e *encoder) mapv(tag string, in reflect.Value) { + e.mappingv(tag, func() { + keys := keyList(in.MapKeys()) + sort.Sort(keys) + for _, k := range keys { + e.marshal("", k) + e.marshal("", in.MapIndex(k)) + } + }) +} + +func (e *encoder) itemsv(tag string, in reflect.Value) { + e.mappingv(tag, func() { + slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem) + for _, item := range slice { + e.marshal("", reflect.ValueOf(item.Key)) + e.marshal("", reflect.ValueOf(item.Value)) + } + }) +} + +func (e *encoder) structv(tag string, in reflect.Value) { + sinfo, err := getStructInfo(in.Type()) + if err != nil { + panic(err) + } + e.mappingv(tag, func() { + for _, info := range sinfo.FieldsList { + var value reflect.Value + if info.Inline == nil { + value = in.Field(info.Num) + } else { + value = in.FieldByIndex(info.Inline) + } + if info.OmitEmpty && isZero(value) { + continue + } + e.marshal("", reflect.ValueOf(info.Key)) + e.flow = info.Flow + e.marshal("", value) + } + if sinfo.InlineMap >= 0 { + m := in.Field(sinfo.InlineMap) + if m.Len() > 0 { + e.flow = false + keys := keyList(m.MapKeys()) + sort.Sort(keys) + for _, k := range keys { + if _, found := sinfo.FieldsMap[k.String()]; found { + panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String())) + } + e.marshal("", k) + e.flow = false + e.marshal("", m.MapIndex(k)) + } + } + } + }) +} + +func (e *encoder) mappingv(tag string, f func()) { + implicit := tag == "" + style := yaml_BLOCK_MAPPING_STYLE + if e.flow { + e.flow = false + style = yaml_FLOW_MAPPING_STYLE + } + e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) + e.emit() + f() + e.must(yaml_mapping_end_event_initialize(&e.event)) + e.emit() +} + +func (e *encoder) slicev(tag 
string, in reflect.Value) {
+	implicit := tag == ""
+	style := yaml_BLOCK_SEQUENCE_STYLE
+	if e.flow {
+		e.flow = false
+		style = yaml_FLOW_SEQUENCE_STYLE
+	}
+	e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
+	e.emit()
+	n := in.Len()
+	for i := 0; i < n; i++ {
+		e.marshal("", in.Index(i))
+	}
+	e.must(yaml_sequence_end_event_initialize(&e.event))
+	e.emit()
+}
+
+// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
+//
+// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
+// in YAML 1.2 and by this package, but these should be marshalled quoted for
+// the time being for compatibility with other parsers.
+func isBase60Float(s string) (result bool) {
+	// Fast path.
+	if s == "" {
+		return false
+	}
+	c := s[0]
+	if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
+		return false
+	}
+	// Do the full match.
+	return base60float.MatchString(s)
+}
+
+// From http://yaml.org/type/float.html, except the regular expression there
+// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
+var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
+
+func (e *encoder) stringv(tag string, in reflect.Value) {
+	var style yaml_scalar_style_t
+	s := in.String()
+	rtag, rs := resolve("", s)
+	if rtag == yaml_BINARY_TAG {
+		if tag == "" || tag == yaml_STR_TAG {
+			tag = rtag
+			s = rs.(string)
+		} else if tag == yaml_BINARY_TAG {
+			failf("explicitly tagged !!binary data must be base64-encoded")
+		} else {
+			failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
+		}
+	}
+	if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	} else if strings.Contains(s, "\n") {
+		style = yaml_LITERAL_SCALAR_STYLE
+	} else {
+		style = yaml_PLAIN_SCALAR_STYLE
+	}
+	e.emitScalar(s, "", tag, style)
+}
+
+func (e *encoder) boolv(tag string, in reflect.Value) {
+	var s string
+	if in.Bool() {
+		s = "true"
+	} else {
+		s = "false"
+	}
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) intv(tag string, in reflect.Value) {
+	s := strconv.FormatInt(in.Int(), 10)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) uintv(tag string, in reflect.Value) {
+	s := strconv.FormatUint(in.Uint(), 10)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) floatv(tag string, in reflect.Value) {
+	// FIXME: Handle 64 bits here.
+	s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
+	switch s {
+	case "+Inf":
+		s = ".inf"
+	case "-Inf":
+		s = "-.inf"
+	case "NaN":
+		s = ".nan"
+	}
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) nilv() {
+	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
+}
+
+func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
+	implicit := tag == ""
+	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
+	e.emit()
+}
diff --git a/vendor/gopkg.in/yaml.v2/encode_test.go b/vendor/gopkg.in/yaml.v2/encode_test.go
new file mode 100644
index 0000000..84099bd
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/encode_test.go
@@ -0,0 +1,501 @@
+package yaml_test
+
+import (
+	"fmt"
+	"math"
+	"strconv"
+	"strings"
+	"time"
+
+	.
"gopkg.in/check.v1" + "gopkg.in/yaml.v2" + "net" + "os" +) + +var marshalIntTest = 123 + +var marshalTests = []struct { + value interface{} + data string +}{ + { + nil, + "null\n", + }, { + &struct{}{}, + "{}\n", + }, { + map[string]string{"v": "hi"}, + "v: hi\n", + }, { + map[string]interface{}{"v": "hi"}, + "v: hi\n", + }, { + map[string]string{"v": "true"}, + "v: \"true\"\n", + }, { + map[string]string{"v": "false"}, + "v: \"false\"\n", + }, { + map[string]interface{}{"v": true}, + "v: true\n", + }, { + map[string]interface{}{"v": false}, + "v: false\n", + }, { + map[string]interface{}{"v": 10}, + "v: 10\n", + }, { + map[string]interface{}{"v": -10}, + "v: -10\n", + }, { + map[string]uint{"v": 42}, + "v: 42\n", + }, { + map[string]interface{}{"v": int64(4294967296)}, + "v: 4294967296\n", + }, { + map[string]int64{"v": int64(4294967296)}, + "v: 4294967296\n", + }, { + map[string]uint64{"v": 4294967296}, + "v: 4294967296\n", + }, { + map[string]interface{}{"v": "10"}, + "v: \"10\"\n", + }, { + map[string]interface{}{"v": 0.1}, + "v: 0.1\n", + }, { + map[string]interface{}{"v": float64(0.1)}, + "v: 0.1\n", + }, { + map[string]interface{}{"v": -0.1}, + "v: -0.1\n", + }, { + map[string]interface{}{"v": math.Inf(+1)}, + "v: .inf\n", + }, { + map[string]interface{}{"v": math.Inf(-1)}, + "v: -.inf\n", + }, { + map[string]interface{}{"v": math.NaN()}, + "v: .nan\n", + }, { + map[string]interface{}{"v": nil}, + "v: null\n", + }, { + map[string]interface{}{"v": ""}, + "v: \"\"\n", + }, { + map[string][]string{"v": []string{"A", "B"}}, + "v:\n- A\n- B\n", + }, { + map[string][]string{"v": []string{"A", "B\nC"}}, + "v:\n- A\n- |-\n B\n C\n", + }, { + map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}}, + "v:\n- A\n- 1\n- B:\n - 2\n - 3\n", + }, { + map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, + "a:\n b: c\n", + }, { + map[string]interface{}{"a": "-"}, + "a: '-'\n", + }, + + // Simple values. 
+ { + &marshalIntTest, + "123\n", + }, + + // Structures + { + &struct{ Hello string }{"world"}, + "hello: world\n", + }, { + &struct { + A struct { + B string + } + }{struct{ B string }{"c"}}, + "a:\n b: c\n", + }, { + &struct { + A *struct { + B string + } + }{&struct{ B string }{"c"}}, + "a:\n b: c\n", + }, { + &struct { + A *struct { + B string + } + }{}, + "a: null\n", + }, { + &struct{ A int }{1}, + "a: 1\n", + }, { + &struct{ A []int }{[]int{1, 2}}, + "a:\n- 1\n- 2\n", + }, { + &struct { + B int "a" + }{1}, + "a: 1\n", + }, { + &struct{ A bool }{true}, + "a: true\n", + }, + + // Conditional flag + { + &struct { + A int "a,omitempty" + B int "b,omitempty" + }{1, 0}, + "a: 1\n", + }, { + &struct { + A int "a,omitempty" + B int "b,omitempty" + }{0, 0}, + "{}\n", + }, { + &struct { + A *struct{ X, y int } "a,omitempty,flow" + }{&struct{ X, y int }{1, 2}}, + "a: {x: 1}\n", + }, { + &struct { + A *struct{ X, y int } "a,omitempty,flow" + }{nil}, + "{}\n", + }, { + &struct { + A *struct{ X, y int } "a,omitempty,flow" + }{&struct{ X, y int }{}}, + "a: {x: 0}\n", + }, { + &struct { + A struct{ X, y int } "a,omitempty,flow" + }{struct{ X, y int }{1, 2}}, + "a: {x: 1}\n", + }, { + &struct { + A struct{ X, y int } "a,omitempty,flow" + }{struct{ X, y int }{0, 1}}, + "{}\n", + }, { + &struct { + A float64 "a,omitempty" + B float64 "b,omitempty" + }{1, 0}, + "a: 1\n", + }, + + // Flow flag + { + &struct { + A []int "a,flow" + }{[]int{1, 2}}, + "a: [1, 2]\n", + }, { + &struct { + A map[string]string "a,flow" + }{map[string]string{"b": "c", "d": "e"}}, + "a: {b: c, d: e}\n", + }, { + &struct { + A struct { + B, D string + } "a,flow" + }{struct{ B, D string }{"c", "e"}}, + "a: {b: c, d: e}\n", + }, + + // Unexported field + { + &struct { + u int + A int + }{0, 1}, + "a: 1\n", + }, + + // Ignored field + { + &struct { + A int + B int "-" + }{1, 2}, + "a: 1\n", + }, + + // Struct inlining + { + &struct { + A int + C inlineB `yaml:",inline"` + }{1, inlineB{2, inlineC{3}}}, + "a: 1\nb: 2\nc: 3\n", + }, + + // Map inlining + { + &struct { + A int + C map[string]int `yaml:",inline"` + }{1, map[string]int{"b": 2, "c": 3}}, + "a: 1\nb: 2\nc: 3\n", + }, + + // Duration + { + map[string]time.Duration{"a": 3 * time.Second}, + "a: 3s\n", + }, + + // Issue #24: bug in map merging logic. + { + map[string]string{"a": ""}, + "a: \n", + }, + + // Issue #34: marshal unsupported base 60 floats quoted for compatibility + // with old YAML 1.1 parsers. + { + map[string]string{"a": "1:1"}, + "a: \"1:1\"\n", + }, + + // Binary data. + { + map[string]string{"a": "\x00"}, + "a: \"\\0\"\n", + }, { + map[string]string{"a": "\x80\x81\x82"}, + "a: !!binary gIGC\n", + }, { + map[string]string{"a": strings.Repeat("\x90", 54)}, + "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", + }, + + // Ordered maps. + { + &yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}}, + "b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n", + }, + + // Encode unicode as utf-8 rather than in escaped form. + { + map[string]string{"a": "你好"}, + "a: 你好\n", + }, + + // Support encoding.TextMarshaler. + { + map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)}, + "a: 1.2.3.4\n", + }, + { + map[string]time.Time{"a": time.Unix(1424801979, 0)}, + "a: 2015-02-24T18:19:39Z\n", + }, + + // Ensure strings containing ": " are quoted (reported as PR #43, but not reproducible). 
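+	// (Plain style would be ambiguous here, as ": " acts as a mapping indicator.)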
+ { + map[string]string{"a": "b: c"}, + "a: 'b: c'\n", + }, + + // Containing hash mark ('#') in string should be quoted + { + map[string]string{"a": "Hello #comment"}, + "a: 'Hello #comment'\n", + }, + { + map[string]string{"a": "你好 #comment"}, + "a: '你好 #comment'\n", + }, +} + +func (s *S) TestMarshal(c *C) { + defer os.Setenv("TZ", os.Getenv("TZ")) + os.Setenv("TZ", "UTC") + for _, item := range marshalTests { + data, err := yaml.Marshal(item.value) + c.Assert(err, IsNil) + c.Assert(string(data), Equals, item.data) + } +} + +var marshalErrorTests = []struct { + value interface{} + error string + panic string +}{{ + value: &struct { + B int + inlineB ",inline" + }{1, inlineB{2, inlineC{3}}}, + panic: `Duplicated key 'b' in struct struct \{ B int; .*`, +}, { + value: &struct { + A int + B map[string]int ",inline" + }{1, map[string]int{"a": 2}}, + panic: `Can't have key "a" in inlined map; conflicts with struct field`, +}} + +func (s *S) TestMarshalErrors(c *C) { + for _, item := range marshalErrorTests { + if item.panic != "" { + c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic) + } else { + _, err := yaml.Marshal(item.value) + c.Assert(err, ErrorMatches, item.error) + } + } +} + +func (s *S) TestMarshalTypeCache(c *C) { + var data []byte + var err error + func() { + type T struct{ A int } + data, err = yaml.Marshal(&T{}) + c.Assert(err, IsNil) + }() + func() { + type T struct{ B int } + data, err = yaml.Marshal(&T{}) + c.Assert(err, IsNil) + }() + c.Assert(string(data), Equals, "b: 0\n") +} + +var marshalerTests = []struct { + data string + value interface{} +}{ + {"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}}, + {"_:\n- 1\n- A\n", []interface{}{1, "A"}}, + {"_: 10\n", 10}, + {"_: null\n", nil}, + {"_: BAR!\n", "BAR!"}, +} + +type marshalerType struct { + value interface{} +} + +func (o marshalerType) MarshalText() ([]byte, error) { + panic("MarshalText called on type with MarshalYAML") +} + +func (o marshalerType) MarshalYAML() (interface{}, error) { + return o.value, nil +} + +type marshalerValue struct { + Field marshalerType "_" +} + +func (s *S) TestMarshaler(c *C) { + for _, item := range marshalerTests { + obj := &marshalerValue{} + obj.Field.value = item.value + data, err := yaml.Marshal(obj) + c.Assert(err, IsNil) + c.Assert(string(data), Equals, string(item.data)) + } +} + +func (s *S) TestMarshalerWholeDocument(c *C) { + obj := &marshalerType{} + obj.value = map[string]string{"hello": "world!"} + data, err := yaml.Marshal(obj) + c.Assert(err, IsNil) + c.Assert(string(data), Equals, "hello: world!\n") +} + +type failingMarshaler struct{} + +func (ft *failingMarshaler) MarshalYAML() (interface{}, error) { + return nil, failingErr +} + +func (s *S) TestMarshalerError(c *C) { + _, err := yaml.Marshal(&failingMarshaler{}) + c.Assert(err, Equals, failingErr) +} + +func (s *S) TestSortedOutput(c *C) { + order := []interface{}{ + false, + true, + 1, + uint(1), + 1.0, + 1.1, + 1.2, + 2, + uint(2), + 2.0, + 2.1, + "", + ".1", + ".2", + ".a", + "1", + "2", + "a!10", + "a/2", + "a/10", + "a~10", + "ab/1", + "b/1", + "b/01", + "b/2", + "b/02", + "b/3", + "b/03", + "b1", + "b01", + "b3", + "c2.10", + "c10.2", + "d1", + "d12", + "d12a", + } + m := make(map[interface{}]int) + for _, k := range order { + m[k] = 1 + } + data, err := yaml.Marshal(m) + c.Assert(err, IsNil) + out := "\n" + string(data) + last := 0 + for i, k := range order { + repr := fmt.Sprint(k) + if s, ok := k.(string); ok { + if _, err = strconv.ParseFloat(repr, 32); s == "" || err == 
nil { + repr = `"` + repr + `"` + } + } + index := strings.Index(out, "\n"+repr+":") + if index == -1 { + c.Fatalf("%#v is not in the output: %#v", k, out) + } + if index < last { + c.Fatalf("%#v was generated before %#v: %q", k, order[i-1], out) + } + last = index + } +} diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go new file mode 100644 index 0000000..0a7037a --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/parserc.go @@ -0,0 +1,1096 @@ +package yaml + +import ( + "bytes" +) + +// The parser implements the following grammar: +// +// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +// implicit_document ::= block_node DOCUMENT-END* +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// block_node_or_indentless_sequence ::= +// ALIAS +// | properties (block_content | indentless_block_sequence)? +// | block_content +// | indentless_block_sequence +// block_node ::= ALIAS +// | properties block_content? +// | block_content +// flow_node ::= ALIAS +// | properties flow_content? +// | flow_content +// properties ::= TAG ANCHOR? | ANCHOR TAG? +// block_content ::= block_collection | flow_collection | SCALAR +// flow_content ::= flow_collection | SCALAR +// block_collection ::= block_sequence | block_mapping +// flow_collection ::= flow_sequence | flow_mapping +// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// block_mapping ::= BLOCK-MAPPING_START +// ((KEY block_node_or_indentless_sequence?)? +// (VALUE block_node_or_indentless_sequence?)?)* +// BLOCK-END +// flow_sequence ::= FLOW-SEQUENCE-START +// (flow_sequence_entry FLOW-ENTRY)* +// flow_sequence_entry? +// FLOW-SEQUENCE-END +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// flow_mapping ::= FLOW-MAPPING-START +// (flow_mapping_entry FLOW-ENTRY)* +// flow_mapping_entry? +// FLOW-MAPPING-END +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + +// Peek the next token in the token queue. +func peek_token(parser *yaml_parser_t) *yaml_token_t { + if parser.token_available || yaml_parser_fetch_more_tokens(parser) { + return &parser.tokens[parser.tokens_head] + } + return nil +} + +// Remove the next token from the queue (must be called after peek_token). +func skip_token(parser *yaml_parser_t) { + parser.token_available = false + parser.tokens_parsed++ + parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN + parser.tokens_head++ +} + +// Get the next event. +func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { + // Erase the event object. + *event = yaml_event_t{} + + // No events after the end of the stream or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { + return true + } + + // Generate the next event. + return yaml_parser_state_machine(parser, event) +} + +// Set parser error. 
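+// Records the problem and its position on the parser and returns false, so
+// callers can simply 'return yaml_parser_set_parser_error(...)'.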
+func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +// State dispatcher. +func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { + //trace("yaml_parser_state_machine", "state:", parser.state.String()) + + switch parser.state { + case yaml_PARSE_STREAM_START_STATE: + return yaml_parser_parse_stream_start(parser, event) + + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, true) + + case yaml_PARSE_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, false) + + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return yaml_parser_parse_document_content(parser, event) + + case yaml_PARSE_DOCUMENT_END_STATE: + return yaml_parser_parse_document_end(parser, event) + + case yaml_PARSE_BLOCK_NODE_STATE: + return yaml_parser_parse_node(parser, event, true, false) + + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return yaml_parser_parse_node(parser, event, true, true) + + case yaml_PARSE_FLOW_NODE_STATE: + return yaml_parser_parse_node(parser, event, false, false) + + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, true) + + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, false) + + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_indentless_sequence_entry(parser, event) + + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, true) + + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, false) + + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return yaml_parser_parse_block_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, true) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, false) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) + + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, true) + + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, true) + + default: + panic("invalid parser state") + } + return false +} + +// Parse the production: +// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END
+//              ************
+func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+	if token.typ != yaml_STREAM_START_TOKEN {
+		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
+	}
+	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
+	*event = yaml_event_t{
+		typ:        yaml_STREAM_START_EVENT,
+		start_mark: token.start_mark,
+		end_mark:   token.end_mark,
+		encoding:   token.encoding,
+	}
+	skip_token(parser)
+	return true
+}
+
+// Parse the productions:
+// implicit_document    ::= block_node DOCUMENT-END*
+//                          *
+// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+//                          *************************
+func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {
+
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+
+	// Parse extra document end indicators.
+	if !implicit {
+		for token.typ == yaml_DOCUMENT_END_TOKEN {
+			skip_token(parser)
+			token = peek_token(parser)
+			if token == nil {
+				return false
+			}
+		}
+	}
+
+	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
+		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
+		token.typ != yaml_DOCUMENT_START_TOKEN &&
+		token.typ != yaml_STREAM_END_TOKEN {
+		// Parse an implicit document.
+		if !yaml_parser_process_directives(parser, nil, nil) {
+			return false
+		}
+		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+		parser.state = yaml_PARSE_BLOCK_NODE_STATE
+
+		*event = yaml_event_t{
+			typ:        yaml_DOCUMENT_START_EVENT,
+			start_mark: token.start_mark,
+			end_mark:   token.end_mark,
+		}
+
+	} else if token.typ != yaml_STREAM_END_TOKEN {
+		// Parse an explicit document.
+		var version_directive *yaml_version_directive_t
+		var tag_directives []yaml_tag_directive_t
+		start_mark := token.start_mark
+		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
+			return false
+		}
+		token = peek_token(parser)
+		if token == nil {
+			return false
+		}
+		if token.typ != yaml_DOCUMENT_START_TOKEN {
+			yaml_parser_set_parser_error(parser,
+				"did not find expected <document start>", token.start_mark)
+			return false
+		}
+		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
+		end_mark := token.end_mark
+
+		*event = yaml_event_t{
+			typ:               yaml_DOCUMENT_START_EVENT,
+			start_mark:        start_mark,
+			end_mark:          end_mark,
+			version_directive: version_directive,
+			tag_directives:    tag_directives,
+			implicit:          false,
+		}
+		skip_token(parser)
+
+	} else {
+		// Parse the stream end.
+		parser.state = yaml_PARSE_END_STATE
+		*event = yaml_event_t{
+			typ:        yaml_STREAM_END_EVENT,
+			start_mark: token.start_mark,
+			end_mark:   token.end_mark,
+		}
+		skip_token(parser)
+	}
+
+	return true
+}
+
+// Parse the productions:
+// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node?
DOCUMENT-END* +// *********** +// +func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || + token.typ == yaml_TAG_DIRECTIVE_TOKEN || + token.typ == yaml_DOCUMENT_START_TOKEN || + token.typ == yaml_DOCUMENT_END_TOKEN || + token.typ == yaml_STREAM_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + return yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } + return yaml_parser_parse_node(parser, event, true, false) +} + +// Parse the productions: +// implicit_document ::= block_node DOCUMENT-END* +// ************* +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// +func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + start_mark := token.start_mark + end_mark := token.start_mark + + implicit := true + if token.typ == yaml_DOCUMENT_END_TOKEN { + end_mark = token.end_mark + skip_token(parser) + implicit = false + } + + parser.tag_directives = parser.tag_directives[:0] + + parser.state = yaml_PARSE_DOCUMENT_START_STATE + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + start_mark: start_mark, + end_mark: end_mark, + implicit: implicit, + } + return true +} + +// Parse the productions: +// block_node_or_indentless_sequence ::= +// ALIAS +// ***** +// | properties (block_content | indentless_block_sequence)? +// ********** * +// | block_content | indentless_block_sequence +// * +// block_node ::= ALIAS +// ***** +// | properties block_content? +// ********** * +// | block_content +// * +// flow_node ::= ALIAS +// ***** +// | properties flow_content? +// ********** * +// | flow_content +// * +// properties ::= TAG ANCHOR? | ANCHOR TAG? 
+// ************************* +// block_content ::= block_collection | flow_collection | SCALAR +// ****** +// flow_content ::= flow_collection | SCALAR +// ****** +func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { + //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_ALIAS_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + *event = yaml_event_t{ + typ: yaml_ALIAS_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + anchor: token.value, + } + skip_token(parser) + return true + } + + start_mark := token.start_mark + end_mark := token.start_mark + + var tag_token bool + var tag_handle, tag_suffix, anchor []byte + var tag_mark yaml_mark_t + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + start_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } else if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + start_mark = token.start_mark + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } + + var tag []byte + if tag_token { + if len(tag_handle) == 0 { + tag = tag_suffix + tag_suffix = nil + } else { + for i := range parser.tag_directives { + if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { + tag = append([]byte(nil), parser.tag_directives[i].prefix...) + tag = append(tag, tag_suffix...) 
+ break + } + } + if len(tag) == 0 { + yaml_parser_set_parser_error_context(parser, + "while parsing a node", start_mark, + "found undefined tag handle", tag_mark) + return false + } + } + } + + implicit := len(tag) == 0 + if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + return true + } + if token.typ == yaml_SCALAR_TOKEN { + var plain_implicit, quoted_implicit bool + end_mark = token.end_mark + if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { + plain_implicit = true + } else if len(tag) == 0 { + quoted_implicit = true + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + value: token.value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(token.style), + } + skip_token(parser) + return true + } + if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { + // [Go] Some of the events below can be merged as they differ only on style. + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), + } + return true + } + if token.typ == yaml_FLOW_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + return true + } + if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + return true + } + if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), + } + return true + } + if len(anchor) > 0 || len(tag) > 0 { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + quoted_implicit: false, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true + } + + context := "while parsing a flow node" + if block { + context = "while parsing a block node" + } + yaml_parser_set_parser_error_context(parser, context, start_mark, + "did not find expected node content", token.start_mark) + return false +} + +// Parse the productions: +// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// ******************** *********** * ********* +// +func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } else { + parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } + if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block collection", context_mark, + "did not find expected '-' indicator", token.start_mark) +} + +// Parse the productions: +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// *********** * +func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && + token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? + } + return true +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// ******************* +// ((KEY block_node_or_indentless_sequence?)? 
+// *** * +// (VALUE block_node_or_indentless_sequence?)?)* +// +// BLOCK-END +// ********* +// +func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_KEY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block mapping", context_mark, + "did not find expected key", token.start_mark) +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// +// ((KEY block_node_or_indentless_sequence?)? +// +// (VALUE block_node_or_indentless_sequence?)?)* +// ***** * +// BLOCK-END +// +// +func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence ::= FLOW-SEQUENCE-START +// ******************* +// (flow_sequence_entry FLOW-ENTRY)* +// * ********** +// flow_sequence_entry? +// * +// FLOW-SEQUENCE-END +// ***************** +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// * +// +func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + if !first { + if token.typ == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow sequence", context_mark, + "did not find expected ',' or ']'", token.start_mark) + } + } + + if token.typ == yaml_KEY_TOKEN { + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + implicit: true, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + skip_token(parser) + return true + } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true +} + +// +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// *** * +// +func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_FLOW_ENTRY_TOKEN && + token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + mark := token.end_mark + skip_token(parser) + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// ***** * +// +func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + skip_token(parser) + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// * +// +func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? + } + return true +} + +// Parse the productions: +// flow_mapping ::= FLOW-MAPPING-START +// ****************** +// (flow_mapping_entry FLOW-ENTRY)* +// * ********** +// flow_mapping_entry? +// ****************** +// FLOW-MAPPING-END +// **************** +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// * *** * +// +func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ != yaml_FLOW_MAPPING_END_TOKEN { + if !first { + if token.typ == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow mapping", context_mark, + "did not find expected ',' or '}'", token.start_mark) + } + } + + if token.typ == yaml_KEY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_FLOW_ENTRY_TOKEN && + token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } else { + parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) + } + } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + return true +} + +// Parse the productions: +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// * ***** * +// +func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { + token := peek_token(parser) + if token == nil { + return false + } + if empty { + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) + } + if token.typ == yaml_VALUE_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Generate an empty scalar event. 
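+// Empty scalars stand in for nodes that the grammar allows but the input
+// omits, such as the missing value in "key:" (see the callers above).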
+func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: mark, + end_mark: mark, + value: nil, // Empty + implicit: true, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true +} + +var default_tag_directives = []yaml_tag_directive_t{ + {[]byte("!"), []byte("!")}, + {[]byte("!!"), []byte("tag:yaml.org,2002:")}, +} + +// Parse directives. +func yaml_parser_process_directives(parser *yaml_parser_t, + version_directive_ref **yaml_version_directive_t, + tag_directives_ref *[]yaml_tag_directive_t) bool { + + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + + token := peek_token(parser) + if token == nil { + return false + } + + for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { + if version_directive != nil { + yaml_parser_set_parser_error(parser, + "found duplicate %YAML directive", token.start_mark) + return false + } + if token.major != 1 || token.minor != 1 { + yaml_parser_set_parser_error(parser, + "found incompatible YAML document", token.start_mark) + return false + } + version_directive = &yaml_version_directive_t{ + major: token.major, + minor: token.minor, + } + } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { + value := yaml_tag_directive_t{ + handle: token.value, + prefix: token.prefix, + } + if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { + return false + } + tag_directives = append(tag_directives, value) + } + + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + + for i := range default_tag_directives { + if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { + return false + } + } + + if version_directive_ref != nil { + *version_directive_ref = version_directive + } + if tag_directives_ref != nil { + *tag_directives_ref = tag_directives + } + return true +} + +// Append a tag directive to the directives stack. +func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { + for i := range parser.tag_directives { + if bytes.Equal(value.handle, parser.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) + } + } + + // [Go] I suspect the copy is unnecessary. This was likely done + // because there was no way to track ownership of the data. + value_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(value_copy.handle, value.handle) + copy(value_copy.prefix, value.prefix) + parser.tag_directives = append(parser.tag_directives, value_copy) + return true +} diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go new file mode 100644 index 0000000..f450791 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/readerc.go @@ -0,0 +1,394 @@ +package yaml + +import ( + "io" +) + +// Set the reader error and return 0. +func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { + parser.error = yaml_READER_ERROR + parser.problem = problem + parser.problem_offset = offset + parser.problem_value = value + return false +} + +// Byte order marks. 
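+// These sequences are matched against the first bytes of the stream by
+// yaml_parser_determine_encoding below; when none is present, UTF-8 is
+// assumed.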
+const (
+	bom_UTF8    = "\xef\xbb\xbf"
+	bom_UTF16LE = "\xff\xfe"
+	bom_UTF16BE = "\xfe\xff"
+)
+
+// Determine the input stream encoding by checking the BOM symbol. If no BOM is
+// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
+func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
+	// Ensure that we have enough bytes in the raw buffer.
+	for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
+		if !yaml_parser_update_raw_buffer(parser) {
+			return false
+		}
+	}
+
+	// Determine the encoding.
+	buf := parser.raw_buffer
+	pos := parser.raw_buffer_pos
+	avail := len(buf) - pos
+	if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
+		parser.encoding = yaml_UTF16LE_ENCODING
+		parser.raw_buffer_pos += 2
+		parser.offset += 2
+	} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
+		parser.encoding = yaml_UTF16BE_ENCODING
+		parser.raw_buffer_pos += 2
+		parser.offset += 2
+	} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
+		parser.encoding = yaml_UTF8_ENCODING
+		parser.raw_buffer_pos += 3
+		parser.offset += 3
+	} else {
+		parser.encoding = yaml_UTF8_ENCODING
+	}
+	return true
+}
+
+// Update the raw buffer.
+func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
+	size_read := 0
+
+	// Return if the raw buffer is full.
+	if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
+		return true
+	}
+
+	// Return on EOF.
+	if parser.eof {
+		return true
+	}
+
+	// Move the remaining bytes in the raw buffer to the beginning.
+	if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
+		copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
+	}
+	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
+	parser.raw_buffer_pos = 0
+
+	// Call the read handler to fill the buffer.
+	size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
+	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
+	if err == io.EOF {
+		parser.eof = true
+	} else if err != nil {
+		return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
+	}
+	return true
+}
+
+// Ensure that the buffer contains at least `length` characters.
+// Return true on success, false on failure.
+//
+// The length is supposed to be significantly less than the buffer size.
+func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
+	if parser.read_handler == nil {
+		panic("read handler must be set")
+	}
+
+	// If the EOF flag is set and the raw buffer is empty, do nothing.
+	if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
+		return true
+	}
+
+	// Return if the buffer contains enough characters.
+	if parser.unread >= length {
+		return true
+	}
+
+	// Determine the input encoding if it is not known yet.
+	if parser.encoding == yaml_ANY_ENCODING {
+		if !yaml_parser_determine_encoding(parser) {
+			return false
+		}
+	}
+
+	// Move the unread characters to the beginning of the buffer.
+	buffer_len := len(parser.buffer)
+	if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
+		copy(parser.buffer, parser.buffer[parser.buffer_pos:])
+		buffer_len -= parser.buffer_pos
+		parser.buffer_pos = 0
+	} else if parser.buffer_pos == buffer_len {
+		buffer_len = 0
+		parser.buffer_pos = 0
+	}
+
+	// Open the whole buffer for writing, and cut it before returning.
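+	// (Re-slicing to capacity exposes the spare room so decoded characters
+	// can be written directly at buffer_len; the buffer is trimmed back to
+	// the bytes actually filled just before returning.)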
+ parser.buffer = parser.buffer[:cap(parser.buffer)] + + // Fill the buffer until it has enough characters. + first := true + for parser.unread < length { + + // Fill the raw buffer if necessary. + if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { + if !yaml_parser_update_raw_buffer(parser) { + parser.buffer = parser.buffer[:buffer_len] + return false + } + } + first = false + + // Decode the raw buffer. + inner: + for parser.raw_buffer_pos != len(parser.raw_buffer) { + var value rune + var width int + + raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos + + // Decode the next character. + switch parser.encoding { + case yaml_UTF8_ENCODING: + // Decode a UTF-8 character. Check RFC 3629 + // (http://www.ietf.org/rfc/rfc3629.txt) for more details. + // + // The following table (taken from the RFC) is used for + // decoding. + // + // Char. number range | UTF-8 octet sequence + // (hexadecimal) | (binary) + // --------------------+------------------------------------ + // 0000 0000-0000 007F | 0xxxxxxx + // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx + // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx + // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + // + // Additionally, the characters in the range 0xD800-0xDFFF + // are prohibited as they are reserved for use with UTF-16 + // surrogate pairs. + + // Determine the length of the UTF-8 sequence. + octet := parser.raw_buffer[parser.raw_buffer_pos] + switch { + case octet&0x80 == 0x00: + width = 1 + case octet&0xE0 == 0xC0: + width = 2 + case octet&0xF0 == 0xE0: + width = 3 + case octet&0xF8 == 0xF0: + width = 4 + default: + // The leading octet is invalid. + return yaml_parser_set_reader_error(parser, + "invalid leading UTF-8 octet", + parser.offset, int(octet)) + } + + // Check if the raw buffer contains an incomplete character. + if width > raw_unread { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-8 octet sequence", + parser.offset, -1) + } + break inner + } + + // Decode the leading octet. + switch { + case octet&0x80 == 0x00: + value = rune(octet & 0x7F) + case octet&0xE0 == 0xC0: + value = rune(octet & 0x1F) + case octet&0xF0 == 0xE0: + value = rune(octet & 0x0F) + case octet&0xF8 == 0xF0: + value = rune(octet & 0x07) + default: + value = 0 + } + + // Check and decode the trailing octets. + for k := 1; k < width; k++ { + octet = parser.raw_buffer[parser.raw_buffer_pos+k] + + // Check if the octet is valid. + if (octet & 0xC0) != 0x80 { + return yaml_parser_set_reader_error(parser, + "invalid trailing UTF-8 octet", + parser.offset+k, int(octet)) + } + + // Decode the octet. + value = (value << 6) + rune(octet&0x3F) + } + + // Check the length of the sequence against the value. + switch { + case width == 1: + case width == 2 && value >= 0x80: + case width == 3 && value >= 0x800: + case width == 4 && value >= 0x10000: + default: + return yaml_parser_set_reader_error(parser, + "invalid length of a UTF-8 sequence", + parser.offset, -1) + } + + // Check the range of the value. + if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { + return yaml_parser_set_reader_error(parser, + "invalid Unicode character", + parser.offset, int(value)) + } + + case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: + var low, high int + if parser.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + low, high = 1, 0 + } + + // The UTF-16 encoding is not as simple as one might + // naively think. Check RFC 2781 + // (http://www.ietf.org/rfc/rfc2781.txt). 
+ // + // Normally, two subsequent bytes describe a Unicode + // character. However a special technique (called a + // surrogate pair) is used for specifying character + // values larger than 0xFFFF. + // + // A surrogate pair consists of two pseudo-characters: + // high surrogate area (0xD800-0xDBFF) + // low surrogate area (0xDC00-0xDFFF) + // + // The following formulas are used for decoding + // and encoding characters using surrogate pairs: + // + // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) + // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) + // W1 = 110110yyyyyyyyyy + // W2 = 110111xxxxxxxxxx + // + // where U is the character value, W1 is the high surrogate + // area, W2 is the low surrogate area. + + // Check for incomplete UTF-16 character. + if raw_unread < 2 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 character", + parser.offset, -1) + } + break inner + } + + // Get the character. + value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) + + // Check for unexpected low surrogate area. + if value&0xFC00 == 0xDC00 { + return yaml_parser_set_reader_error(parser, + "unexpected low surrogate area", + parser.offset, int(value)) + } + + // Check for a high surrogate area. + if value&0xFC00 == 0xD800 { + width = 4 + + // Check for incomplete surrogate pair. + if raw_unread < 4 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 surrogate pair", + parser.offset, -1) + } + break inner + } + + // Get the next character. + value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) + + // Check for a low surrogate area. + if value2&0xFC00 != 0xDC00 { + return yaml_parser_set_reader_error(parser, + "expected low surrogate area", + parser.offset+2, int(value2)) + } + + // Generate the value of the surrogate pair. + value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) + } else { + width = 2 + } + + default: + panic("impossible") + } + + // Check if the character is in the allowed range: + // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) + // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) + // | [#x10000-#x10FFFF] (32 bit) + switch { + case value == 0x09: + case value == 0x0A: + case value == 0x0D: + case value >= 0x20 && value <= 0x7E: + case value == 0x85: + case value >= 0xA0 && value <= 0xD7FF: + case value >= 0xE000 && value <= 0xFFFD: + case value >= 0x10000 && value <= 0x10FFFF: + default: + return yaml_parser_set_reader_error(parser, + "control characters are not allowed", + parser.offset, int(value)) + } + + // Move the raw pointers. + parser.raw_buffer_pos += width + parser.offset += width + + // Finally put the character into the buffer. + if value <= 0x7F { + // 0000 0000-0000 007F . 0xxxxxxx + parser.buffer[buffer_len+0] = byte(value) + buffer_len += 1 + } else if value <= 0x7FF { + // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) + parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) + buffer_len += 2 + } else if value <= 0xFFFF { + // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) + buffer_len += 3 + } else { + // 0001 0000-0010 FFFF . 
11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) + buffer_len += 4 + } + + parser.unread++ + } + + // On EOF, put NUL into the buffer and return. + if parser.eof { + parser.buffer[buffer_len] = 0 + buffer_len++ + parser.unread++ + break + } + } + parser.buffer = parser.buffer[:buffer_len] + return true +} diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go new file mode 100644 index 0000000..93a8632 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/resolve.go @@ -0,0 +1,203 @@ +package yaml + +import ( + "encoding/base64" + "math" + "strconv" + "strings" + "unicode/utf8" +) + +type resolveMapItem struct { + value interface{} + tag string +} + +var resolveTable = make([]byte, 256) +var resolveMap = make(map[string]resolveMapItem) + +func init() { + t := resolveTable + t[int('+')] = 'S' // Sign + t[int('-')] = 'S' + for _, c := range "0123456789" { + t[int(c)] = 'D' // Digit + } + for _, c := range "yYnNtTfFoO~" { + t[int(c)] = 'M' // In map + } + t[int('.')] = '.' // Float (potentially in map) + + var resolveMapList = []struct { + v interface{} + tag string + l []string + }{ + {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, + {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, + {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, + {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, + {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, + {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, + {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, + {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, + {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, + {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, + {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, + {"<<", yaml_MERGE_TAG, []string{"<<"}}, + } + + m := resolveMap + for _, item := range resolveMapList { + for _, s := range item.l { + m[s] = resolveMapItem{item.v, item.tag} + } + } +} + +const longTagPrefix = "tag:yaml.org,2002:" + +func shortTag(tag string) string { + // TODO This can easily be made faster and produce less garbage. + if strings.HasPrefix(tag, longTagPrefix) { + return "!!" + tag[len(longTagPrefix):] + } + return tag +} + +func longTag(tag string) string { + if strings.HasPrefix(tag, "!!") { + return longTagPrefix + tag[2:] + } + return tag +} + +func resolvableTag(tag string) bool { + switch tag { + case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG: + return true + } + return false +} + +func resolve(tag string, in string) (rtag string, out interface{}) { + if !resolvableTag(tag) { + return tag, in + } + + defer func() { + switch tag { + case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: + return + } + failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) + }() + + // Any data is accepted as a !!str or !!binary. + // Otherwise, the prefix is enough of a hint about what it might be. + hint := byte('N') + if in != "" { + hint = resolveTable[in[0]] + } + if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { + // Handle things we can lookup in a map. 
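+		// For example, plain "true", "null", and ".inf" resolve directly
+		// to their typed values via resolveMap, built in init above.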
+		if item, ok := resolveMap[in]; ok {
+			return item.tag, item.value
+		}
+
+		// Base 60 floats are a bad idea, were dropped in YAML 1.2, and
+		// are purposefully unsupported here. They're still quoted on
+		// the way out for compatibility with other parsers, though.
+
+		switch hint {
+		case 'M':
+			// We've already checked the map above.
+
+		case '.':
+			// Not in the map, so maybe a normal float.
+			floatv, err := strconv.ParseFloat(in, 64)
+			if err == nil {
+				return yaml_FLOAT_TAG, floatv
+			}
+
+		case 'D', 'S':
+			// Int, float, or timestamp.
+			plain := strings.Replace(in, "_", "", -1)
+			intv, err := strconv.ParseInt(plain, 0, 64)
+			if err == nil {
+				if intv == int64(int(intv)) {
+					return yaml_INT_TAG, int(intv)
+				} else {
+					return yaml_INT_TAG, intv
+				}
+			}
+			uintv, err := strconv.ParseUint(plain, 0, 64)
+			if err == nil {
+				return yaml_INT_TAG, uintv
+			}
+			floatv, err := strconv.ParseFloat(plain, 64)
+			if err == nil {
+				return yaml_FLOAT_TAG, floatv
+			}
+			if strings.HasPrefix(plain, "0b") {
+				intv, err := strconv.ParseInt(plain[2:], 2, 64)
+				if err == nil {
+					if intv == int64(int(intv)) {
+						return yaml_INT_TAG, int(intv)
+					} else {
+						return yaml_INT_TAG, intv
+					}
+				}
+				uintv, err := strconv.ParseUint(plain[2:], 2, 64)
+				if err == nil {
+					return yaml_INT_TAG, uintv
+				}
+			} else if strings.HasPrefix(plain, "-0b") {
+				intv, err := strconv.ParseInt(plain[3:], 2, 64)
+				if err == nil {
+					if intv == int64(int(intv)) {
+						return yaml_INT_TAG, -int(intv)
+					} else {
+						return yaml_INT_TAG, -intv
+					}
+				}
+			}
+			// XXX Handle timestamps here.
+
+		default:
+			panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
+		}
+	}
+	if tag == yaml_BINARY_TAG {
+		return yaml_BINARY_TAG, in
+	}
+	if utf8.ValidString(in) {
+		return yaml_STR_TAG, in
+	}
+	return yaml_BINARY_TAG, encodeBase64(in)
+}
+
+// encodeBase64 encodes s as base64 that is broken up into multiple lines
+// as appropriate for the resulting length.
+func encodeBase64(s string) string {
+	const lineLen = 70
+	encLen := base64.StdEncoding.EncodedLen(len(s))
+	lines := encLen/lineLen + 1
+	buf := make([]byte, encLen*2+lines)
+	in := buf[0:encLen]
+	out := buf[encLen:]
+	base64.StdEncoding.Encode(in, []byte(s))
+	k := 0
+	for i := 0; i < len(in); i += lineLen {
+		j := i + lineLen
+		if j > len(in) {
+			j = len(in)
+		}
+		k += copy(out[k:], in[i:j])
+		if lines > 1 {
+			out[k] = '\n'
+			k++
+		}
+	}
+	return string(out[:k])
+}
diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go
new file mode 100644
index 0000000..2580800
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/scannerc.go
@@ -0,0 +1,2710 @@
+package yaml
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// Introduction
+// ************
+//
+// The following notes assume that you are familiar with the YAML specification
+// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in
+// some cases we are less restrictive than it requires.
+//
+// The process of transforming a YAML stream into a sequence of events is
+// divided into two steps: Scanning and Parsing.
+//
+// The Scanner transforms the input stream into a sequence of tokens, while the
+// Parser transforms the sequence of tokens produced by the Scanner into a
+// sequence of parsing events.
+//
+// The Scanner is rather clever and complicated. The Parser, on the contrary,
+// is a straightforward implementation of a recursive descent parser (or
+// LL(1) parser, as it is usually called).
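+//
+// As a rough usage sketch (not part of this file, and assuming a parser value
+// that has already been initialized with an input source), the token stream
+// produced by the Scanner can be drained through yaml_parser_scan, which is
+// defined further below:
+//
+//      var token yaml_token_t
+//      for {
+//          if !yaml_parser_scan(parser, &token) {
+//              break // scanner error; details are in parser.problem
+//          }
+//          if token.typ == yaml_STREAM_END_TOKEN {
+//              break
+//          }
+//          // ... hand the token to the Parser ...
+//      }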
+//
+// Actually there are two issues of Scanning that might be called "clever";
+// the rest is quite straightforward. The issues are "block collection start"
+// and "simple keys". Both issues are explained below in detail.
+//
+// Here the Scanning step is explained and implemented. We start with the list
+// of all the tokens produced by the Scanner together with short descriptions.
+//
+// Now, tokens:
+//
+//      STREAM-START(encoding)          # The stream start.
+//      STREAM-END                      # The stream end.
+//      VERSION-DIRECTIVE(major,minor)  # The '%YAML' directive.
+//      TAG-DIRECTIVE(handle,prefix)    # The '%TAG' directive.
+//      DOCUMENT-START                  # '---'
+//      DOCUMENT-END                    # '...'
+//      BLOCK-SEQUENCE-START            # Indentation increase denoting a block
+//      BLOCK-MAPPING-START             # sequence or a block mapping.
+//      BLOCK-END                       # Indentation decrease.
+//      FLOW-SEQUENCE-START             # '['
+//      FLOW-SEQUENCE-END               # ']'
+//      FLOW-MAPPING-START              # '{'
+//      FLOW-MAPPING-END                # '}'
+//      BLOCK-ENTRY                     # '-'
+//      FLOW-ENTRY                      # ','
+//      KEY                             # '?' or nothing (simple keys).
+//      VALUE                           # ':'
+//      ALIAS(anchor)                   # '*anchor'
+//      ANCHOR(anchor)                  # '&anchor'
+//      TAG(handle,suffix)              # '!handle!suffix'
+//      SCALAR(value,style)             # A scalar.
+//
+// The following two tokens are "virtual" tokens denoting the beginning and the
+// end of the stream:
+//
+//      STREAM-START(encoding)
+//      STREAM-END
+//
+// We pass the information about the input stream encoding with the
+// STREAM-START token.
+//
+// The next two tokens are responsible for tags:
+//
+//      VERSION-DIRECTIVE(major,minor)
+//      TAG-DIRECTIVE(handle,prefix)
+//
+// Example:
+//
+//      %YAML 1.1
+//      %TAG ! !foo
+//      %TAG !yaml! tag:yaml.org,2002:
+//      ---
+//
+// The corresponding sequence of tokens:
+//
+//      STREAM-START(utf-8)
+//      VERSION-DIRECTIVE(1,1)
+//      TAG-DIRECTIVE("!","!foo")
+//      TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:")
+//      DOCUMENT-START
+//      STREAM-END
+//
+// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole
+// line.
+//
+// The document start and end indicators are represented by:
+//
+//      DOCUMENT-START
+//      DOCUMENT-END
+//
+// Note that if a YAML stream contains an implicit document (without '---'
+// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be
+// produced.
+//
+// In the following examples, we present whole documents together with the
+// produced tokens.
+//
+// 1. An implicit document:
+//
+//      'a scalar'
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      SCALAR("a scalar",single-quoted)
+//      STREAM-END
+//
+// 2. An explicit document:
+//
+//      ---
+//      'a scalar'
+//      ...
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      DOCUMENT-START
+//      SCALAR("a scalar",single-quoted)
+//      DOCUMENT-END
+//      STREAM-END
+//
+// 3. Several documents in a stream:
+//
+//      'a scalar'
+//      ---
+//      'another scalar'
+//      ---
+//      'yet another scalar'
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      SCALAR("a scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("another scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("yet another scalar",single-quoted)
+//      STREAM-END
+//
+// We have already introduced the SCALAR token above. The following tokens are
+// used to describe aliases, anchors, tags, and scalars:
+//
+//      ALIAS(anchor)
+//      ANCHOR(anchor)
+//      TAG(handle,suffix)
+//      SCALAR(value,style)
+//
+// The following series of examples illustrates the usage of these tokens:
+//
+// 1. A recursive sequence:
+//
+//      &A [ *A ]
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      ANCHOR("A")
+//      FLOW-SEQUENCE-START
+//      ALIAS("A")
+//      FLOW-SEQUENCE-END
+//      STREAM-END
+//
+// 2. A tagged scalar:
+//
+//      !!float "3.14"  # A good approximation.
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      TAG("!!","float")
+//      SCALAR("3.14",double-quoted)
+//      STREAM-END
+//
+// 3. Various scalar styles:
+//
+//      --- # Implicit empty plain scalars do not produce tokens.
+//      --- a plain scalar
+//      --- 'a single-quoted scalar'
+//      --- "a double-quoted scalar"
+//      --- |-
+//        a literal scalar
+//      --- >-
+//        a folded
+//        scalar
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      DOCUMENT-START
+//      DOCUMENT-START
+//      SCALAR("a plain scalar",plain)
+//      DOCUMENT-START
+//      SCALAR("a single-quoted scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("a double-quoted scalar",double-quoted)
+//      DOCUMENT-START
+//      SCALAR("a literal scalar",literal)
+//      DOCUMENT-START
+//      SCALAR("a folded scalar",folded)
+//      STREAM-END
+//
+// Now it's time to review collection-related tokens. We will start with
+// flow collections:
+//
+//      FLOW-SEQUENCE-START
+//      FLOW-SEQUENCE-END
+//      FLOW-MAPPING-START
+//      FLOW-MAPPING-END
+//      FLOW-ENTRY
+//      KEY
+//      VALUE
+//
+// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and
+// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}'
+// respectively. FLOW-ENTRY represents the ',' indicator. Finally, the
+// indicators '?' and ':', which are used for denoting mapping keys and values,
+// are represented by the KEY and VALUE tokens.
+//
+// The following examples show flow collections:
+//
+// 1. A flow sequence:
+//
+//      [item 1, item 2, item 3]
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      FLOW-SEQUENCE-START
+//      SCALAR("item 1",plain)
+//      FLOW-ENTRY
+//      SCALAR("item 2",plain)
+//      FLOW-ENTRY
+//      SCALAR("item 3",plain)
+//      FLOW-SEQUENCE-END
+//      STREAM-END
+//
+// 2. A flow mapping:
+//
+//      {
+//          a simple key: a value,  # Note that the KEY token is produced.
+//          ? a complex key: another value,
+//      }
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      FLOW-MAPPING-START
+//      KEY
+//      SCALAR("a simple key",plain)
+//      VALUE
+//      SCALAR("a value",plain)
+//      FLOW-ENTRY
+//      KEY
+//      SCALAR("a complex key",plain)
+//      VALUE
+//      SCALAR("another value",plain)
+//      FLOW-ENTRY
+//      FLOW-MAPPING-END
+//      STREAM-END
+//
+// A simple key is a key which is not denoted by the '?' indicator. Note that
+// the Scanner still produces the KEY token whenever it encounters a simple key.
+//
+// For scanning block collections, the following tokens are used (note that we
+// repeat KEY and VALUE here):
+//
+//      BLOCK-SEQUENCE-START
+//      BLOCK-MAPPING-START
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      KEY
+//      VALUE
+//
+// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote the
+// indentation increase that precedes a block collection (cf. the INDENT token
+// in Python). The token BLOCK-END denotes the indentation decrease that ends a
+// block collection (cf. the DEDENT token in Python). However, YAML has some
+// syntax peculiarities that make detection of these tokens more complex.
+//
+// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators
+// '-', '?', and ':' respectively.
+//
+// The following examples show how the tokens BLOCK-SEQUENCE-START,
+// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner:
+//
+// 1. Block sequences:
+//
+//      - item 1
+//      - item 2
+//      -
+//        - item 3.1
+//        - item 3.2
+//      -
+//        key 1: value 1
+//        key 2: value 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-ENTRY
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 3.1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 3.2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// 2. Block mappings:
+//
+//      a simple key: a value   # The KEY token is produced here.
+//      ? a complex key
+//      : another value
+//      a mapping:
+//        key 1: value 1
+//        key 2: value 2
+//      a sequence:
+//        - item 1
+//        - item 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("a simple key",plain)
+//      VALUE
+//      SCALAR("a value",plain)
+//      KEY
+//      SCALAR("a complex key",plain)
+//      VALUE
+//      SCALAR("another value",plain)
+//      KEY
+//      SCALAR("a mapping",plain)
+//      VALUE
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      KEY
+//      SCALAR("a sequence",plain)
+//      VALUE
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// YAML does not always require a new block collection to start on a new line.
+// If the current line contains only '-', '?', and ':' indicators, a new block
+// collection may start at the current line. The following examples
+// illustrate this case:
+//
+// 1. Collections in a sequence:
+//
+//      - - item 1
+//        - item 2
+//      - key 1: value 1
+//        key 2: value 2
+//      - ? complex key
+//        : complex value
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("complex key")
+//      VALUE
+//      SCALAR("complex value")
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// 2. Collections in a mapping:
+//
+//      ? a sequence
+//      : - item 1
+//        - item 2
+//      ? a mapping
+//      : key 1: value 1
+//        key 2: value 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("a sequence",plain)
+//      VALUE
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      KEY
+//      SCALAR("a mapping",plain)
+//      VALUE
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// YAML also permits non-indented sequences if they are included in a block
+// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced:
+//
+//      key:
+//      - item 1    # BLOCK-SEQUENCE-START is NOT produced here.
+// - item 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-MAPPING-START +// KEY +// SCALAR("key",plain) +// VALUE +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// + +// Ensure that the buffer contains the required number of characters. +// Return true on success, false on failure (reader error or memory error). +func cache(parser *yaml_parser_t, length int) bool { + // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) + return parser.unread >= length || yaml_parser_update_buffer(parser, length) +} + +// Advance the buffer pointer. +func skip(parser *yaml_parser_t) { + parser.mark.index++ + parser.mark.column++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) +} + +func skip_line(parser *yaml_parser_t) { + if is_crlf(parser.buffer, parser.buffer_pos) { + parser.mark.index += 2 + parser.mark.column = 0 + parser.mark.line++ + parser.unread -= 2 + parser.buffer_pos += 2 + } else if is_break(parser.buffer, parser.buffer_pos) { + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) + } +} + +// Copy a character to a string buffer and advance pointers. +func read(parser *yaml_parser_t, s []byte) []byte { + w := width(parser.buffer[parser.buffer_pos]) + if w == 0 { + panic("invalid character sequence") + } + if len(s) == 0 { + s = make([]byte, 0, 32) + } + if w == 1 && len(s)+w <= cap(s) { + s = s[:len(s)+1] + s[len(s)-1] = parser.buffer[parser.buffer_pos] + parser.buffer_pos++ + } else { + s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) + parser.buffer_pos += w + } + parser.mark.index++ + parser.mark.column++ + parser.unread-- + return s +} + +// Copy a line break character to a string buffer and advance pointers. +func read_line(parser *yaml_parser_t, s []byte) []byte { + buf := parser.buffer + pos := parser.buffer_pos + switch { + case buf[pos] == '\r' && buf[pos+1] == '\n': + // CR LF . LF + s = append(s, '\n') + parser.buffer_pos += 2 + parser.mark.index++ + parser.unread-- + case buf[pos] == '\r' || buf[pos] == '\n': + // CR|LF . LF + s = append(s, '\n') + parser.buffer_pos += 1 + case buf[pos] == '\xC2' && buf[pos+1] == '\x85': + // NEL . LF + s = append(s, '\n') + parser.buffer_pos += 2 + case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): + // LS|PS . LS|PS + s = append(s, buf[parser.buffer_pos:pos+3]...) + parser.buffer_pos += 3 + default: + return s + } + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + return s +} + +// Get the next token. +func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { + // Erase the token object. + *token = yaml_token_t{} // [Go] Is this necessary? + + // No tokens after STREAM-END or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR { + return true + } + + // Ensure that the tokens queue contains enough tokens. + if !parser.token_available { + if !yaml_parser_fetch_more_tokens(parser) { + return false + } + } + + // Fetch the next token from the queue. + *token = parser.tokens[parser.tokens_head] + parser.tokens_head++ + parser.tokens_parsed++ + parser.token_available = false + + if token.typ == yaml_STREAM_END_TOKEN { + parser.stream_end_produced = true + } + return true +} + +// Set the scanner error and return false. 
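+// Both the context (what was being scanned, with its mark) and the specific
+// problem at the current mark are recorded for the error report.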
+func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool {
+	parser.error = yaml_SCANNER_ERROR
+	parser.context = context
+	parser.context_mark = context_mark
+	parser.problem = problem
+	parser.problem_mark = parser.mark
+	return false
+}
+
+func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool {
+	context := "while parsing a tag"
+	if directive {
+		context = "while parsing a %TAG directive"
+	}
+	return yaml_parser_set_scanner_error(parser, context, context_mark, problem)
+}
+
+func trace(args ...interface{}) func() {
+	pargs := append([]interface{}{"+++"}, args...)
+	fmt.Println(pargs...)
+	pargs = append([]interface{}{"---"}, args...)
+	return func() { fmt.Println(pargs...) }
+}
+
+// Ensure that the tokens queue contains at least one token which can be
+// returned to the Parser.
+func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
+	// While we need more tokens to fetch, do it.
+	for {
+		// Check if we really need to fetch more tokens.
+		need_more_tokens := false
+
+		if parser.tokens_head == len(parser.tokens) {
+			// Queue is empty.
+			need_more_tokens = true
+		} else {
+			// Check if any potential simple key may occupy the head position.
+			if !yaml_parser_stale_simple_keys(parser) {
+				return false
+			}
+
+			for i := range parser.simple_keys {
+				simple_key := &parser.simple_keys[i]
+				if simple_key.possible && simple_key.token_number == parser.tokens_parsed {
+					need_more_tokens = true
+					break
+				}
+			}
+		}
+
+		// We are finished.
+		if !need_more_tokens {
+			break
+		}
+		// Fetch the next token.
+		if !yaml_parser_fetch_next_token(parser) {
+			return false
+		}
+	}
+
+	parser.token_available = true
+	return true
+}
+
+// The dispatcher for token fetchers.
+func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
+	// Ensure that the buffer is initialized.
+	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+		return false
+	}
+
+	// Check if we just started scanning. Fetch STREAM-START then.
+	if !parser.stream_start_produced {
+		return yaml_parser_fetch_stream_start(parser)
+	}
+
+	// Eat whitespace and comments until we reach the next token.
+	if !yaml_parser_scan_to_next_token(parser) {
+		return false
+	}
+
+	// Remove obsolete potential simple keys.
+	if !yaml_parser_stale_simple_keys(parser) {
+		return false
+	}
+
+	// Check the indentation level against the current column.
+	if !yaml_parser_unroll_indent(parser, parser.mark.column) {
+		return false
+	}
+
+	// Ensure that the buffer contains at least 4 characters. 4 is the length
+	// of the longest indicators ('--- ' and '... ').
+	if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
+		return false
+	}
+
+	// Is it the end of the stream?
+	if is_z(parser.buffer, parser.buffer_pos) {
+		return yaml_parser_fetch_stream_end(parser)
+	}
+
+	// Is it a directive?
+	if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' {
+		return yaml_parser_fetch_directive(parser)
+	}
+
+	buf := parser.buffer
+	pos := parser.buffer_pos
+
+	// Is it the document start indicator?
+	if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) {
+		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN)
+	}
+
+	// Is it the document end indicator?
+	if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.'
&& is_blankz(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) + } + + // Is it the flow sequence start indicator? + if buf[pos] == '[' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) + } + + // Is it the flow mapping start indicator? + if parser.buffer[parser.buffer_pos] == '{' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) + } + + // Is it the flow sequence end indicator? + if parser.buffer[parser.buffer_pos] == ']' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_SEQUENCE_END_TOKEN) + } + + // Is it the flow mapping end indicator? + if parser.buffer[parser.buffer_pos] == '}' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_MAPPING_END_TOKEN) + } + + // Is it the flow entry indicator? + if parser.buffer[parser.buffer_pos] == ',' { + return yaml_parser_fetch_flow_entry(parser) + } + + // Is it the block entry indicator? + if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { + return yaml_parser_fetch_block_entry(parser) + } + + // Is it the key indicator? + if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_key(parser) + } + + // Is it the value indicator? + if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_value(parser) + } + + // Is it an alias? + if parser.buffer[parser.buffer_pos] == '*' { + return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) + } + + // Is it an anchor? + if parser.buffer[parser.buffer_pos] == '&' { + return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) + } + + // Is it a tag? + if parser.buffer[parser.buffer_pos] == '!' { + return yaml_parser_fetch_tag(parser) + } + + // Is it a literal scalar? + if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, true) + } + + // Is it a folded scalar? + if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, false) + } + + // Is it a single-quoted scalar? + if parser.buffer[parser.buffer_pos] == '\'' { + return yaml_parser_fetch_flow_scalar(parser, true) + } + + // Is it a double-quoted scalar? + if parser.buffer[parser.buffer_pos] == '"' { + return yaml_parser_fetch_flow_scalar(parser, false) + } + + // Is it a plain scalar? + // + // A plain scalar may start with any non-blank characters except + // + // '-', '?', ':', ',', '[', ']', '{', '}', + // '#', '&', '*', '!', '|', '>', '\'', '\"', + // '%', '@', '`'. + // + // In the block context (and, for the '-' indicator, in the flow context + // too), it may also start with the characters + // + // '-', '?', ':' + // + // if it is followed by a non-space character. + // + // The last rule is more restrictive than the specification requires. + // [Go] Make this logic more reasonable. + //switch parser.buffer[parser.buffer_pos] { + //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': + //} + if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || + parser.buffer[parser.buffer_pos] == '?' 
|| parser.buffer[parser.buffer_pos] == ':' || + parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || + parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || + parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || + parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || + parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || + parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || + (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level == 0 && + (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && + !is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_plain_scalar(parser) + } + + // If we could not determine the token type by this point, it is an error. + return yaml_parser_set_scanner_error(parser, + "while scanning for the next token", parser.mark, + "found a character that cannot start any token") +} + +// Check the list of potential simple keys and remove the positions that +// cannot contain simple keys anymore. +func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { + // Check for a potential simple key for each flow level. + for i := range parser.simple_keys { + simple_key := &parser.simple_keys[i] + + // The specification requires that a simple key + // + // - is limited to a single line, + // - is shorter than 1024 characters. + if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) { + + // Check if the potential simple key to be removed is required. + if simple_key.required { + return yaml_parser_set_scanner_error(parser, + "while scanning a simple key", simple_key.mark, + "could not find expected ':'") + } + simple_key.possible = false + } + } + return true +} + +// Check if a simple key may start at the current position and add it if +// needed. +func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { + // A simple key is required at the current position if the scanner is in + // the block context and the current column coincides with the indentation + // level. + + required := parser.flow_level == 0 && parser.indent == parser.mark.column + + // A simple key is required only when it is the first token in the current + // line. Therefore it is always allowed. But we add a check anyway. + if required && !parser.simple_key_allowed { + panic("should not happen") + } + + // + // If the current position may start a simple key, save it. + // + if parser.simple_key_allowed { + simple_key := yaml_simple_key_t{ + possible: true, + required: required, + token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), + } + simple_key.mark = parser.mark + + if !yaml_parser_remove_simple_key(parser) { + return false + } + parser.simple_keys[len(parser.simple_keys)-1] = simple_key + } + return true +} + +// Remove a potential simple key at the current flow level. +func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { + i := len(parser.simple_keys) - 1 + if parser.simple_keys[i].possible { + // If the key is required, it is an error.
+ if parser.simple_keys[i].required { + return yaml_parser_set_scanner_error(parser, + "while scanning a simple key", parser.simple_keys[i].mark, + "could not find expected ':'") + } + } + // Mark the key as no longer possible; it stays on the stack. + parser.simple_keys[i].possible = false + return true +} + +// Increase the flow level and resize the simple key list if needed. +func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { + // Reset the simple key on the next level. + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + // Increase the flow level. + parser.flow_level++ + return true +} + +// Decrease the flow level. +func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { + if parser.flow_level > 0 { + parser.flow_level-- + parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] + } + return true +} + +// Push the current indentation level to the stack and set the new level if +// the current column is greater than the indentation level. In this case, +// append or insert the specified token into the token queue. +func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { + // In the flow context, do nothing. + if parser.flow_level > 0 { + return true + } + + if parser.indent < column { + // Push the current indentation level to the stack and set the new + // indentation level. + parser.indents = append(parser.indents, parser.indent) + parser.indent = column + + // Create a token and insert it into the queue. + token := yaml_token_t{ + typ: typ, + start_mark: mark, + end_mark: mark, + } + if number > -1 { + number -= parser.tokens_parsed + } + yaml_insert_token(parser, number, &token) + } + return true +} + +// Pop indentation levels from the indents stack until the current level +// becomes less than or equal to the column. For each indentation level, +// append the BLOCK-END token. +func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { + // In the flow context, do nothing. + if parser.flow_level > 0 { + return true + } + + // Loop through the indentation levels in the stack. + for parser.indent > column { + // Create a token and append it to the queue. + token := yaml_token_t{ + typ: yaml_BLOCK_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + yaml_insert_token(parser, -1, &token) + + // Pop the indentation level. + parser.indent = parser.indents[len(parser.indents)-1] + parser.indents = parser.indents[:len(parser.indents)-1] + } + return true +} + +// Initialize the scanner and produce the STREAM-START token. +func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { + + // Set the initial indentation. + parser.indent = -1 + + // Initialize the simple key stack. + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + // A simple key is allowed at the beginning of the stream. + parser.simple_key_allowed = true + + // We have started. + parser.stream_start_produced = true + + // Create the STREAM-START token and append it to the queue. + token := yaml_token_t{ + typ: yaml_STREAM_START_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + encoding: parser.encoding, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the STREAM-END token and shut down the scanner. +func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { + + // Force new line. + if parser.mark.column != 0 { + parser.mark.column = 0 + parser.mark.line++ + } + + // Reset the indentation level.
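+ // Unrolling to column -1 pops every remaining indentation level, emitting + // a BLOCK-END token for each block collection still open at the end of + // the stream.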
+ if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Create the STREAM-END token and append it to the queue. + token := yaml_token_t{ + typ: yaml_STREAM_END_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. +func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. + token := yaml_token_t{} + if !yaml_parser_scan_directive(parser, &token) { + return false + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the DOCUMENT-START or DOCUMENT-END token. +func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Consume the token. + start_mark := parser.mark + + skip(parser) + skip(parser) + skip(parser) + + end_mark := parser.mark + + // Create the DOCUMENT-START or DOCUMENT-END token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. +func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // The indicators '[' and '{' may start a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // Increase the flow level. + if !yaml_parser_increase_flow_level(parser) { + return false + } + + // A simple key may follow the indicators '[' and '{'. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. +func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // Reset any potential simple key on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Decrease the flow level. + if !yaml_parser_decrease_flow_level(parser) { + return false + } + + // No simple keys after the indicators ']' and '}'. + parser.simple_key_allowed = false + + // Consume the token. + + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. + token := yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + } + // Append the token to the queue. + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the FLOW-ENTRY token.
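+// For example, scanning "[a, b]" yields FLOW-SEQUENCE-START, SCALAR(a), +// FLOW-ENTRY, SCALAR(b) and FLOW-SEQUENCE-END: each ',' separator inside a +// flow collection becomes one FLOW-ENTRY token.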
+func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { + // Reset any potential simple keys on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after ','. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the FLOW-ENTRY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_FLOW_ENTRY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the BLOCK-ENTRY token. +func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { + // Check if the scanner is in the block context. + if parser.flow_level == 0 { + // Check if we are allowed to start a new entry. + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "block sequence entries are not allowed in this context") + } + // Add the BLOCK-SEQUENCE-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { + return false + } + } else { + // It is an error for the '-' indicator to occur in the flow context, + // but we let the Parser detect and report it, because the Parser + // is able to point to the context. + } + + // Reset any potential simple keys on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after '-'. + parser.simple_key_allowed = true + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the BLOCK-ENTRY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_BLOCK_ENTRY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the KEY token. +func yaml_parser_fetch_key(parser *yaml_parser_t) bool { + + // In the block context, additional checks are required. + if parser.flow_level == 0 { + // Check if we are allowed to start a new key (not necessarily simple). + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping keys are not allowed in this context") + } + // Add the BLOCK-MAPPING-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + // Reset any potential simple keys on the current flow level. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // Simple keys are allowed after '?' in the block context. + parser.simple_key_allowed = parser.flow_level == 0 + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the KEY token and append it to the queue. + token := yaml_token_t{ + typ: yaml_KEY_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the VALUE token. +func yaml_parser_fetch_value(parser *yaml_parser_t) bool { + + simple_key := &parser.simple_keys[len(parser.simple_keys)-1] + + // Have we found a simple key? + if simple_key.possible { + // Create the KEY token and insert it into the queue.
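+ // The scanner could not know that "foo" in "foo: bar" was a key until + // this ':' is seen, so the KEY token is inserted retroactively at the + // queue position recorded earlier by yaml_parser_save_simple_key.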
+ token := yaml_token_t{ + typ: yaml_KEY_TOKEN, + start_mark: simple_key.mark, + end_mark: simple_key.mark, + } + yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) + + // In the block context, we may need to add the BLOCK-MAPPING-START token. + if !yaml_parser_roll_indent(parser, simple_key.mark.column, + simple_key.token_number, + yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { + return false + } + + // Remove the simple key. + simple_key.possible = false + + // A simple key cannot follow another simple key. + parser.simple_key_allowed = false + + } else { + // The ':' indicator follows a complex key. + + // In the block context, extra checks are required. + if parser.flow_level == 0 { + + // Check if we are allowed to start a complex value. + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping values are not allowed in this context") + } + + // Add the BLOCK-MAPPING-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + // Simple keys after ':' are allowed in the block context. + parser.simple_key_allowed = parser.flow_level == 0 + } + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the VALUE token and append it to the queue. + token := yaml_token_t{ + typ: yaml_VALUE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the ALIAS or ANCHOR token. +func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // An anchor or an alias could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow an anchor or an alias. + parser.simple_key_allowed = false + + // Create the ALIAS or ANCHOR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_anchor(parser, &token, typ) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the TAG token. +func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { + // A tag could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a tag. + parser.simple_key_allowed = false + + // Create the TAG token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_tag(parser, &token) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. +func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { + // Remove any potential simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // A simple key may follow a block scalar. + parser.simple_key_allowed = true + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_block_scalar(parser, &token, literal) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. +func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { + // A quoted scalar could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a flow scalar. + parser.simple_key_allowed = false + + // Create the SCALAR token and append it to the queue.
+ var token yaml_token_t + if !yaml_parser_scan_flow_scalar(parser, &token, single) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,plain) token. +func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { + // A plain scalar could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a plain scalar. + parser.simple_key_allowed = false + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_plain_scalar(parser, &token) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Eat whitespaces and comments until the next token is found. +func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { + + // Loop until the next token is found. + for { + // Allow the BOM mark to start a line. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { + skip(parser) + } + + // Eat whitespaces. + // Tabs are allowed: + // - in the flow context + // - in the block context, but not at the beginning of the line or + // after '-', '?', or ':' (complex value). + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Eat a comment until a line break. + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // If it is a line break, eat it. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + + // In the block context, a new line may start a simple key. + if parser.flow_level == 0 { + parser.simple_key_allowed = true + } + } else { + break // We have found a token. + } + } + + return true +} + +// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// +func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { + // Eat '%'. + start_mark := parser.mark + skip(parser) + + // Scan the directive name. + var name []byte + if !yaml_parser_scan_directive_name(parser, start_mark, &name) { + return false + } + + // Is it a YAML directive? + if bytes.Equal(name, []byte("YAML")) { + // Scan the VERSION directive value. + var major, minor int8 + if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { + return false + } + end_mark := parser.mark + + // Create a VERSION-DIRECTIVE token. + *token = yaml_token_t{ + typ: yaml_VERSION_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + major: major, + minor: minor, + } + + // Is it a TAG directive? + } else if bytes.Equal(name, []byte("TAG")) { + // Scan the TAG directive value. + var handle, prefix []byte + if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { + return false + } + end_mark := parser.mark + + // Create a TAG-DIRECTIVE token.
+ *token = yaml_token_t{ + typ: yaml_TAG_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + prefix: prefix, + } + + // Unknown directive. + } else { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unknown directive name") + return false + } + + // Eat the rest of the line including any comments. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. + if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + return true +} + +// Scan the directive name. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^ +// +func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { + // Consume the directive name. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + var s []byte + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the name is empty. + if len(s) == 0 { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "could not find expected directive name") + return false + } + + // Check for a blank character after the name. + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unexpected non-alphabetical character") + return false + } + *name = s + return true +} + +// Scan the value of VERSION-DIRECTIVE. +// +// Scope: +// %YAML 1.1 # a comment \n +// ^^^^^^ +func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { + // Eat whitespaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the major version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { + return false + } + + // Eat '.'. + if parser.buffer[parser.buffer_pos] != '.' { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected digit or '.' character") + } + + skip(parser) + + // Consume the minor version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { + return false + } + return true +} + +const max_number_length = 2 + +// Scan the version number of VERSION-DIRECTIVE.
+// +// Scope: +// %YAML 1.1 # a comment \n +// ^ +// %YAML 1.1 # a comment \n +// ^ +func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { + + // Repeat while the next character is a digit. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var value, length int8 + for is_digit(parser.buffer, parser.buffer_pos) { + // Check if the number is too long. + length++ + if length > max_number_length { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "found an extremely long version number") + } + value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the number was present. + if length == 0 { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected version number") + } + *number = value + return true +} + +// Scan the value of a TAG-DIRECTIVE token. +// +// Scope: +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// +func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { + var handle_value, prefix_value []byte + + // Eat whitespaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a handle. + if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { + return false + } + + // Expect a whitespace. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blank(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace") + return false + } + + // Eat whitespaces. + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a prefix. + if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { + return false + } + + // Expect a whitespace or line break. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace or line break") + return false + } + + *handle = handle_value + *prefix = prefix_value + return true +} + +func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { + var s []byte + + // Eat the indicator character. + start_mark := parser.mark + skip(parser) + + // Consume the value. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + end_mark := parser.mark + + /* + * Check if length of the anchor is greater than 0 and it is followed by + * a whitespace character or one of the indicators: + * + * '?', ':', ',', ']', '}', '%', '@', '`'. + */ + + if len(s) == 0 || + !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?'
|| + parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || + parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || + parser.buffer[parser.buffer_pos] == '`') { + context := "while scanning an alias" + if typ == yaml_ANCHOR_TOKEN { + context = "while scanning an anchor" + } + yaml_parser_set_scanner_error(parser, context, start_mark, + "did not find expected alphabetic or numeric character") + return false + } + + // Create a token. + *token = yaml_token_t{ + typ: typ, + start_mark: start_mark, + end_mark: end_mark, + value: s, + } + + return true +} + +/* + * Scan a TAG token. + */ + +func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { + var handle, suffix []byte + + start_mark := parser.mark + + // Check if the tag is in the canonical form. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + if parser.buffer[parser.buffer_pos+1] == '<' { + // Keep the handle as '' + + // Eat '!<' + skip(parser) + skip(parser) + + // Consume the tag value. + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + + // Check for '>' and eat it. + if parser.buffer[parser.buffer_pos] != '>' { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find the expected '>'") + return false + } + + skip(parser) + } else { + // The tag has either the '!suffix' or the '!handle!suffix' form. + + // First, try to scan a handle. + if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { + return false + } + + // Check if it is, indeed, a handle. + if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { + // Scan the suffix now. + if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { + return false + } + } else { + // It wasn't a handle after all. Scan the rest of the tag. + if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { + return false + } + + // Set the handle to '!'. + handle = []byte{'!'} + + // A special case: the '!' tag. Set the handle to '' and the + // suffix to '!'. + if len(suffix) == 0 { + handle, suffix = suffix, handle + } + } + } + + // Check the character which ends the tag. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a tag", + start_mark, "did not find expected whitespace or line break") + return false + } + + end_mark := parser.mark + + // Create a token. + *token = yaml_token_t{ + typ: yaml_TAG_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + suffix: suffix, + } + return true +} + +// Scan a tag handle. +func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { + // Check the initial '!' character. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.buffer[parser.buffer_pos] != '!' { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + + var s []byte + + // Copy the '!' character. + s = read(parser, s) + + // Copy all subsequent alphabetical and numerical characters.
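+ // For a handle such as "!yaml!", this loop consumes "yaml"; the trailing + // '!' is copied by the check further below.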
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the trailing character is '!' and copy it. + if parser.buffer[parser.buffer_pos] == '!' { + s = read(parser, s) + } else { + // It's either the '!' tag or not really a tag handle. If it's a %TAG + // directive, it's an error. If it's a tag token, it must be a part of the URI. + if directive && !(s[0] == '!' && s[1] == 0) { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected '!'") + return false + } + } + + *handle = s + return true +} + +// Scan a tag URI. +func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { + //size_t length = head ? strlen((char *)head) : 0 + var s []byte + + // Copy the head if needed. + // + // Note that we don't copy the leading '!' character. + if len(head) > 1 { + s = append(s, head[1:]...) + } + + // Scan the tag. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // The set of characters that may appear in a URI is as follows: + // + // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', + // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', + // '%'. + // [Go] Convert this into more reasonable logic. + for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || + parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || + parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || + parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || + parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || + parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || + parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || + parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || + parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || + parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || + parser.buffer[parser.buffer_pos] == '%' { + // Check if it is a URI-escape sequence. + if parser.buffer[parser.buffer_pos] == '%' { + if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { + return false + } + } else { + s = read(parser, s) + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the tag is non-empty. + if len(s) == 0 { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected tag URI") + return false + } + *uri = s + return true +} + +// Decode a URI-escape sequence corresponding to a single UTF-8 character. +func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { + + // Decode the required number of characters. + w := 1024 + for w > 0 { + // Check for a URI-escaped octet.
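+ // For example, "%C3%A9" encodes the octets 0xC3 0xA9, the UTF-8 form of + // U+00E9: the leading octet determines the sequence width w, and every + // trailing octet must match the 10xxxxxx pattern checked below.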
+ if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { + return false + } + + if !(parser.buffer[parser.buffer_pos] == '%' && + is_hex(parser.buffer, parser.buffer_pos+1) && + is_hex(parser.buffer, parser.buffer_pos+2)) { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find URI escaped octet") + } + + // Get the octet. + octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) + + // If it is the leading octet, determine the length of the UTF-8 sequence. + if w == 1024 { + w = width(octet) + if w == 0 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect leading UTF-8 octet") + } + } else { + // Check if the trailing octet is correct. + if octet&0xC0 != 0x80 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect trailing UTF-8 octet") + } + } + + // Copy the octet and move the pointers. + *s = append(*s, octet) + skip(parser) + skip(parser) + skip(parser) + w-- + } + return true +} + +// Scan a block scalar. +func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { + // Eat the indicator '|' or '>'. + start_mark := parser.mark + skip(parser) + + // Scan the additional block scalar indicators. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check for a chomping indicator. + var chomping, increment int + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + // Set the chomping method and eat the indicator. + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + + // Check for an indentation indicator. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if is_digit(parser.buffer, parser.buffer_pos) { + // Check that the indentation is greater than 0. + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + + // Get the indentation level and eat the indicator. + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + } + + } else if is_digit(parser.buffer, parser.buffer_pos) { + // Do the same as above, but in the opposite order. + + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + } + } + + // Eat whitespaces and comments to the end of the line. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.buffer[parser.buffer_pos] == '#' { + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. 
+ if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + end_mark := parser.mark + + // Set the indentation level if it was specified. + var indent int + if increment > 0 { + if parser.indent >= 0 { + indent = parser.indent + increment + } else { + indent = increment + } + } + + // Scan the leading line breaks and determine the indentation level if needed. + var s, leading_break, trailing_breaks []byte + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + + // Scan the block scalar content. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var leading_blank, trailing_blank bool + for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { + // We are at the beginning of a non-empty line. + + // Is it a trailing whitespace? + trailing_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Check if we need to fold the leading line break. + if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { + // Do we need to join the lines by space? + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } + } else { + s = append(s, leading_break...) + } + leading_break = leading_break[:0] + + // Append the remaining line breaks. + s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + + // Is it a leading whitespace? + leading_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Consume the current line. + for !is_breakz(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + leading_break = read_line(parser, leading_break) + + // Eat the following indentation spaces and line breaks. + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + } + + // Chomp the tail. + if chomping != -1 { + s = append(s, leading_break...) + } + if chomping == 1 { + s = append(s, trailing_breaks...) + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_LITERAL_SCALAR_STYLE, + } + if !literal { + token.style = yaml_FOLDED_SCALAR_STYLE + } + return true +} + +// Scan indentation spaces and line breaks for a block scalar. Determine the +// indentation level if needed. +func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { + *end_mark = parser.mark + + // Eat the indentation spaces and line breaks. + max_indent := 0 + for { + // Eat the indentation spaces. 
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.mark.column > max_indent { + max_indent = parser.mark.column + } + + // Check for a tab character messing up the indentation. + if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { + return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found a tab character where an indentation space is expected") + } + + // Have we found a non-empty line? + if !is_break(parser.buffer, parser.buffer_pos) { + break + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + // [Go] Should really be returning breaks instead. + *breaks = read_line(parser, *breaks) + *end_mark = parser.mark + } + + // Determine the indentation level if needed. + if *indent == 0 { + *indent = max_indent + if *indent < parser.indent+1 { + *indent = parser.indent + 1 + } + if *indent < 1 { + *indent = 1 + } + } + return true +} + +// Scan a quoted scalar. +func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { + // Eat the left quote. + start_mark := parser.mark + skip(parser) + + // Consume the content of the quoted scalar. + var s, leading_break, trailing_breaks, whitespaces []byte + for { + // Check that there are no document indicators at the beginning of the line. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected document indicator") + return false + } + + // Check for EOF. + if is_z(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected end of stream") + return false + } + + // Consume non-blank characters. + leading_blanks := false + for !is_blankz(parser.buffer, parser.buffer_pos) { + if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { + // It is an escaped single quote. + s = append(s, '\'') + skip(parser) + skip(parser) + + } else if single && parser.buffer[parser.buffer_pos] == '\'' { + // It is a right single quote. + break + } else if !single && parser.buffer[parser.buffer_pos] == '"' { + // It is a right double quote. + break + + } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { + // It is an escaped line break. + if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { + return false + } + skip(parser) + skip_line(parser) + leading_blanks = true + break + + } else if !single && parser.buffer[parser.buffer_pos] == '\\' { + // It is an escape sequence. + code_length := 0 + + // Check the escape character.
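+ // Single-character escapes are appended directly; for '\x', '\u' and + // '\U' only the hex digit count (2, 4 or 8) is recorded in code_length + // here, and the digits are decoded into a UTF-8 sequence further below. + // For example, "\u00E9" produces the bytes 0xC3 0xA9.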
+ switch parser.buffer[parser.buffer_pos+1] { + case '0': + s = append(s, 0) + case 'a': + s = append(s, '\x07') + case 'b': + s = append(s, '\x08') + case 't', '\t': + s = append(s, '\x09') + case 'n': + s = append(s, '\x0A') + case 'v': + s = append(s, '\x0B') + case 'f': + s = append(s, '\x0C') + case 'r': + s = append(s, '\x0D') + case 'e': + s = append(s, '\x1B') + case ' ': + s = append(s, '\x20') + case '"': + s = append(s, '"') + case '\'': + s = append(s, '\'') + case '\\': + s = append(s, '\\') + case 'N': // NEL (#x85) + s = append(s, '\xC2') + s = append(s, '\x85') + case '_': // #xA0 + s = append(s, '\xC2') + s = append(s, '\xA0') + case 'L': // LS (#x2028) + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA8') + case 'P': // PS (#x2029) + s = append(s, '\xE2') + s = append(s, '\x80') + s = append(s, '\xA9') + case 'x': + code_length = 2 + case 'u': + code_length = 4 + case 'U': + code_length = 8 + default: + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found unknown escape character") + return false + } + + skip(parser) + skip(parser) + + // Consume an arbitrary escape code. + if code_length > 0 { + var value int + + // Scan the character value. + if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { + return false + } + for k := 0; k < code_length; k++ { + if !is_hex(parser.buffer, parser.buffer_pos+k) { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "did not find expected hexadecimal number") + return false + } + value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) + } + + // Check the value and write the character. + if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { + yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", + start_mark, "found invalid Unicode character escape code") + return false + } + if value <= 0x7F { + s = append(s, byte(value)) + } else if value <= 0x7FF { + s = append(s, byte(0xC0+(value>>6))) + s = append(s, byte(0x80+(value&0x3F))) + } else if value <= 0xFFFF { + s = append(s, byte(0xE0+(value>>12))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } else { + s = append(s, byte(0xF0+(value>>18))) + s = append(s, byte(0x80+((value>>12)&0x3F))) + s = append(s, byte(0x80+((value>>6)&0x3F))) + s = append(s, byte(0x80+(value&0x3F))) + } + + // Advance the pointer. + for k := 0; k < code_length; k++ { + skip(parser) + } + } + } else { + // It is a non-escaped non-blank character. + s = read(parser, s) + } + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + // Check if we are at the end of the scalar. + if single { + if parser.buffer[parser.buffer_pos] == '\'' { + break + } + } else { + if parser.buffer[parser.buffer_pos] == '"' { + break + } + } + + // Consume blank characters. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break.
+ if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Join the whitespaces or fold line breaks. + if leading_blanks { + // Do we need to fold line breaks? + if len(leading_break) > 0 && leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + // Eat the right quote. + skip(parser) + end_mark := parser.mark + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_SINGLE_QUOTED_SCALAR_STYLE, + } + if !single { + token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + return true +} + +// Scan a plain scalar. +func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { + + var s, leading_break, trailing_breaks, whitespaces []byte + var leading_blanks bool + var indent = parser.indent + 1 + + start_mark := parser.mark + end_mark := parser.mark + + // Consume the content of the plain scalar. + for { + // Check for a document indicator. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + break + } + + // Check for a comment. + if parser.buffer[parser.buffer_pos] == '#' { + break + } + + // Consume non-blank characters. + for !is_blankz(parser.buffer, parser.buffer_pos) { + + // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13". + if parser.flow_level > 0 && + parser.buffer[parser.buffer_pos] == ':' && + !is_blankz(parser.buffer, parser.buffer_pos+1) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found unexpected ':'") + return false + } + + // Check for indicators that may end a plain scalar. + if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level > 0 && + (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' || + parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}')) { + break + } + + // Check if we need to join whitespaces and breaks. + if leading_blanks || len(whitespaces) > 0 { + if leading_blanks { + // Do we need to fold line breaks? + if leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + leading_blanks = false + } else { + s = append(s, whitespaces...) 
+ whitespaces = whitespaces[:0] + } + } + + // Copy the character. + s = read(parser, s) + + end_mark = parser.mark + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + // Is it the end? + if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { + break + } + + // Consume blank characters. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + + // Check for tab characters that abuse indentation. + if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found a tab character that violates indentation") + return false + } + + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break. + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check indentation level. + if parser.flow_level == 0 && parser.mark.column < indent { + break + } + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_PLAIN_SCALAR_STYLE, + } + + // Note that we change the 'simple_key_allowed' flag.
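+ // If the scalar ended after consuming line breaks (leading_blanks is set), + // the scanner now sits at the start of a fresh line, where a new simple + // key may begin.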
+ if leading_blanks { + parser.simple_key_allowed = true + } + return true +} diff --git a/vendor/gopkg.in/yaml.v2/sorter.go b/vendor/gopkg.in/yaml.v2/sorter.go new file mode 100644 index 0000000..5958822 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/sorter.go @@ -0,0 +1,104 @@ +package yaml + +import ( + "reflect" + "unicode" +) + +type keyList []reflect.Value + +func (l keyList) Len() int { return len(l) } +func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l keyList) Less(i, j int) bool { + a := l[i] + b := l[j] + ak := a.Kind() + bk := b.Kind() + for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { + a = a.Elem() + ak = a.Kind() + } + for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { + b = b.Elem() + bk = b.Kind() + } + af, aok := keyFloat(a) + bf, bok := keyFloat(b) + if aok && bok { + if af != bf { + return af < bf + } + if ak != bk { + return ak < bk + } + return numLess(a, b) + } + if ak != reflect.String || bk != reflect.String { + return ak < bk + } + ar, br := []rune(a.String()), []rune(b.String()) + for i := 0; i < len(ar) && i < len(br); i++ { + if ar[i] == br[i] { + continue + } + al := unicode.IsLetter(ar[i]) + bl := unicode.IsLetter(br[i]) + if al && bl { + return ar[i] < br[i] + } + if al || bl { + return bl + } + var ai, bi int + var an, bn int64 + for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { + an = an*10 + int64(ar[ai]-'0') + } + for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { + bn = bn*10 + int64(br[bi]-'0') + } + if an != bn { + return an < bn + } + if ai != bi { + return ai < bi + } + return ar[i] < br[i] + } + return len(ar) < len(br) +} + +// keyFloat returns a float value for v if it is a number/bool +// and whether it is a number/bool or not. +func keyFloat(v reflect.Value) (f float64, ok bool) { + switch v.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(v.Int()), true + case reflect.Float32, reflect.Float64: + return v.Float(), true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return float64(v.Uint()), true + case reflect.Bool: + if v.Bool() { + return 1, true + } + return 0, true + } + return 0, false +} + +// numLess returns whether a < b. +// a and b must necessarily have the same kind. +func numLess(a, b reflect.Value) bool { + switch a.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return a.Int() < b.Int() + case reflect.Float32, reflect.Float64: + return a.Float() < b.Float() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return a.Uint() < b.Uint() + case reflect.Bool: + return !a.Bool() && b.Bool() + } + panic("not a number") +} diff --git a/vendor/gopkg.in/yaml.v2/suite_test.go b/vendor/gopkg.in/yaml.v2/suite_test.go new file mode 100644 index 0000000..c5cf1ed --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/suite_test.go @@ -0,0 +1,12 @@ +package yaml_test + +import ( + . "gopkg.in/check.v1" + "testing" +) + +func Test(t *testing.T) { TestingT(t) } + +type S struct{} + +var _ = Suite(&S{}) diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go new file mode 100644 index 0000000..190362f --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/writerc.go @@ -0,0 +1,89 @@ +package yaml + +// Set the writer error and return false. 
+func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_WRITER_ERROR + emitter.problem = problem + return false +} + +// Flush the output buffer. +func yaml_emitter_flush(emitter *yaml_emitter_t) bool { + if emitter.write_handler == nil { + panic("write handler not set") + } + + // Check if the buffer is empty. + if emitter.buffer_pos == 0 { + return true + } + + // If the output encoding is UTF-8, we don't need to recode the buffer. + if emitter.encoding == yaml_UTF8_ENCODING { + if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + emitter.buffer_pos = 0 + return true + } + + // Recode the buffer into the raw buffer. + var low, high int + if emitter.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + high, low = 1, 0 + } + + pos := 0 + for pos < emitter.buffer_pos { + // See the "reader.c" code for more details on UTF-8 encoding. Note + // that we assume that the buffer contains a valid UTF-8 sequence. + + // Read the next UTF-8 character. + octet := emitter.buffer[pos] + + var w int + var value rune + switch { + case octet&0x80 == 0x00: + w, value = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, value = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, value = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, value = 4, rune(octet&0x07) + } + for k := 1; k < w; k++ { + octet = emitter.buffer[pos+k] + value = (value << 6) + (rune(octet) & 0x3F) + } + pos += w + + // Write the character. + if value < 0x10000 { + var b [2]byte + b[high] = byte(value >> 8) + b[low] = byte(value & 0xFF) + emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1]) + } else { + // Write the character using a surrogate pair (check "reader.c"). + var b [4]byte + value -= 0x10000 + b[high] = byte(0xD8 + (value >> 18)) + b[low] = byte((value >> 10) & 0xFF) + b[high+2] = byte(0xDC + ((value >> 8) & 0xFF)) + b[low+2] = byte(value & 0xFF) + emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3]) + } + } + + // Write the raw buffer. + if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + emitter.buffer_pos = 0 + emitter.raw_buffer = emitter.raw_buffer[:0] + return true +} diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go new file mode 100644 index 0000000..36d6b88 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/yaml.go @@ -0,0 +1,346 @@ +// Package yaml implements YAML support for the Go language. +// +// Source code and other details for the project are available at GitHub: +// +// https://github.com/go-yaml/yaml +// +package yaml + +import ( + "errors" + "fmt" + "reflect" + "strings" + "sync" +) + +// MapSlice encodes and decodes as a YAML map. +// The order of keys is preserved when encoding and decoding. +type MapSlice []MapItem + +// MapItem is an item in a MapSlice. +type MapItem struct { + Key, Value interface{} +} + +// The Unmarshaler interface may be implemented by types to customize their +// behavior when being unmarshaled from a YAML document. The UnmarshalYAML +// method receives a function that may be called to unmarshal the original +// YAML value into a field or variable. It is safe to call the unmarshal +// function parameter more than once if necessary. 
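+// +// A minimal illustrative sketch (not part of the upstream documentation): a +// wrapper type that decodes a duration from a YAML string scalar: +// +// type Duration struct{ time.Duration } +// +// func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error { +// var s string +// if err := unmarshal(&s); err != nil { +// return err +// } +// v, err := time.ParseDuration(s) +// if err != nil { +// return err +// } +// d.Duration = v +// return nil +// }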
+type Unmarshaler interface {
+	UnmarshalYAML(unmarshal func(interface{}) error) error
+}
+
+// The Marshaler interface may be implemented by types to customize their
+// behavior when being marshaled into a YAML document. The returned value
+// is marshaled in place of the original value implementing Marshaler.
+//
+// If an error is returned by MarshalYAML, the marshaling procedure stops
+// and returns with the provided error.
+type Marshaler interface {
+	MarshalYAML() (interface{}, error)
+}
+
+// Unmarshal decodes the first document found within the in byte slice
+// and assigns decoded values into the out value.
+//
+// Maps and pointers (to a struct, string, int, etc) are accepted as out
+// values. If an internal pointer within a struct is not initialized,
+// the yaml package will initialize it if necessary for unmarshalling
+// the provided data. The out parameter must not be nil.
+//
+// The type of the decoded values should be compatible with the respective
+// values in out. If one or more values cannot be decoded due to a type
+// mismatch, decoding continues partially until the end of the YAML
+// content, and a *yaml.TypeError is returned with details for all
+// missed values.
+//
+// Struct fields are only unmarshalled if they are exported (have an
+// upper case first letter), and are unmarshalled using the field name
+// lowercased as the default key. Custom keys may be defined via the
+// "yaml" name in the field tag: the content preceding the first comma
+// is used as the key, and the following comma-separated options are
+// used to tweak the marshalling process (see Marshal).
+// Conflicting names result in a runtime error.
+//
+// For example:
+//
+//     type T struct {
+//         F int `yaml:"a,omitempty"`
+//         B int
+//     }
+//     var t T
+//     yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
+//
+// See the documentation of Marshal for the format of tags and a list of
+// supported tag options.
+//
+func Unmarshal(in []byte, out interface{}) (err error) {
+	defer handleErr(&err)
+	d := newDecoder()
+	p := newParser(in)
+	defer p.destroy()
+	node := p.parse()
+	if node != nil {
+		v := reflect.ValueOf(out)
+		if v.Kind() == reflect.Ptr && !v.IsNil() {
+			v = v.Elem()
+		}
+		d.unmarshal(node, v)
+	}
+	if len(d.terrors) > 0 {
+		return &TypeError{d.terrors}
+	}
+	return nil
+}
+
+// Marshal serializes the value provided into a YAML document. The structure
+// of the generated document will reflect the structure of the value itself.
+// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
+//
+// Struct fields are only marshalled if they are exported (have an upper case
+// first letter), and are marshalled using the field name lowercased as the
+// default key. Custom keys may be defined via the "yaml" name in the field
+// tag: the content preceding the first comma is used as the key, and the
+// following comma-separated options are used to tweak the marshalling process.
+// Conflicting names result in a runtime error.
+//
+// The field tag format accepted is:
+//
+//     `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// The following flags are currently supported:
+//
+//     omitempty    Only include the field if it's not set to the zero
+//                  value for the type or to empty slices or maps.
+//                  Does not apply to zero valued structs.
+//
+//     flow         Marshal using a flow style (useful for structs,
+//                  sequences and maps).
+//
+//     inline       Inline the field, which must be a struct or a map,
+//                  causing all of its fields or keys to be processed as if
+//                  they were part of the outer struct. For maps, keys must
+//                  not conflict with the yaml keys of other struct fields.
+//
+// In addition, if the key is "-", the field is ignored.
+//
+// For example:
+//
+//     type T struct {
+//         F int "a,omitempty"
+//         B int
+//     }
+//     yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
+//     yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
+//
+func Marshal(in interface{}) (out []byte, err error) {
+	defer handleErr(&err)
+	e := newEncoder()
+	defer e.destroy()
+	e.marshal("", reflect.ValueOf(in))
+	e.finish()
+	out = e.out
+	return
+}
+
+func handleErr(err *error) {
+	if v := recover(); v != nil {
+		if e, ok := v.(yamlError); ok {
+			*err = e.err
+		} else {
+			panic(v)
+		}
+	}
+}
+
+type yamlError struct {
+	err error
+}
+
+func fail(err error) {
+	panic(yamlError{err})
+}
+
+func failf(format string, args ...interface{}) {
+	panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
+}
+
+// A TypeError is returned by Unmarshal when one or more fields in
+// the YAML document cannot be properly decoded into the requested
+// types. When this error is returned, the value is still
+// unmarshaled partially.
+type TypeError struct {
+	Errors []string
+}
+
+func (e *TypeError) Error() string {
+	return fmt.Sprintf("yaml: unmarshal errors:\n  %s", strings.Join(e.Errors, "\n  "))
+}
+
+// --------------------------------------------------------------------------
+// Maintain a mapping of keys to structure field indexes
+
+// The code in this section was copied from mgo/bson.
+
+// structInfo holds details for the serialization of fields of
+// a given struct.
+type structInfo struct {
+	FieldsMap  map[string]fieldInfo
+	FieldsList []fieldInfo
+
+	// InlineMap is the number of the field in the struct that
+	// contains an ,inline map, or -1 if there's none.
+	InlineMap int
+}
+
+type fieldInfo struct {
+	Key       string
+	Num       int
+	OmitEmpty bool
+	Flow      bool
+
+	// Inline holds the field index if the field is part of an inlined struct.
+ Inline []int +} + +var structMap = make(map[reflect.Type]*structInfo) +var fieldMapMutex sync.RWMutex + +func getStructInfo(st reflect.Type) (*structInfo, error) { + fieldMapMutex.RLock() + sinfo, found := structMap[st] + fieldMapMutex.RUnlock() + if found { + return sinfo, nil + } + + n := st.NumField() + fieldsMap := make(map[string]fieldInfo) + fieldsList := make([]fieldInfo, 0, n) + inlineMap := -1 + for i := 0; i != n; i++ { + field := st.Field(i) + if field.PkgPath != "" && !field.Anonymous { + continue // Private field + } + + info := fieldInfo{Num: i} + + tag := field.Tag.Get("yaml") + if tag == "" && strings.Index(string(field.Tag), ":") < 0 { + tag = string(field.Tag) + } + if tag == "-" { + continue + } + + inline := false + fields := strings.Split(tag, ",") + if len(fields) > 1 { + for _, flag := range fields[1:] { + switch flag { + case "omitempty": + info.OmitEmpty = true + case "flow": + info.Flow = true + case "inline": + inline = true + default: + return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) + } + } + tag = fields[0] + } + + if inline { + switch field.Type.Kind() { + case reflect.Map: + if inlineMap >= 0 { + return nil, errors.New("Multiple ,inline maps in struct " + st.String()) + } + if field.Type.Key() != reflect.TypeOf("") { + return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) + } + inlineMap = info.Num + case reflect.Struct: + sinfo, err := getStructInfo(field.Type) + if err != nil { + return nil, err + } + for _, finfo := range sinfo.FieldsList { + if _, found := fieldsMap[finfo.Key]; found { + msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + if finfo.Inline == nil { + finfo.Inline = []int{i, finfo.Num} + } else { + finfo.Inline = append([]int{i}, finfo.Inline...) 
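+							// Note (editor's gloss, not original source): in
+							// either branch, Inline now holds the reflect
+							// field-index path from the outer struct down to
+							// the promoted field, outermost index first, so
+							// nested inlining yields paths like []int{i, j, k}
+							// for successive Field() traversal while decoding.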
+ } + fieldsMap[finfo.Key] = finfo + fieldsList = append(fieldsList, finfo) + } + default: + //return nil, errors.New("Option ,inline needs a struct value or map field") + return nil, errors.New("Option ,inline needs a struct value field") + } + continue + } + + if tag != "" { + info.Key = tag + } else { + info.Key = strings.ToLower(field.Name) + } + + if _, found = fieldsMap[info.Key]; found { + msg := "Duplicated key '" + info.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + + fieldsList = append(fieldsList, info) + fieldsMap[info.Key] = info + } + + sinfo = &structInfo{fieldsMap, fieldsList, inlineMap} + + fieldMapMutex.Lock() + structMap[st] = sinfo + fieldMapMutex.Unlock() + return sinfo, nil +} + +func isZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.String: + return len(v.String()) == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflect.Slice: + return v.Len() == 0 + case reflect.Map: + return v.Len() == 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Struct: + vt := v.Type() + for i := v.NumField() - 1; i >= 0; i-- { + if vt.Field(i).PkgPath != "" { + continue // Private field + } + if !isZero(v.Field(i)) { + return false + } + } + return true + } + return false +} diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go new file mode 100644 index 0000000..d60a6b6 --- /dev/null +++ b/vendor/gopkg.in/yaml.v2/yamlh.go @@ -0,0 +1,716 @@ +package yaml + +import ( + "io" +) + +// The version directive data. +type yaml_version_directive_t struct { + major int8 // The major version number. + minor int8 // The minor version number. +} + +// The tag directive data. +type yaml_tag_directive_t struct { + handle []byte // The tag handle. + prefix []byte // The tag prefix. +} + +type yaml_encoding_t int + +// The stream encoding. +const ( + // Let the parser choose the encoding. + yaml_ANY_ENCODING yaml_encoding_t = iota + + yaml_UTF8_ENCODING // The default UTF-8 encoding. + yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. + yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. +) + +type yaml_break_t int + +// Line break types. +const ( + // Let the parser choose the break type. + yaml_ANY_BREAK yaml_break_t = iota + + yaml_CR_BREAK // Use CR for line breaks (Mac style). + yaml_LN_BREAK // Use LN for line breaks (Unix style). + yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). +) + +type yaml_error_type_t int + +// Many bad things could happen with the parser and emitter. +const ( + // No error is produced. + yaml_NO_ERROR yaml_error_type_t = iota + + yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. + yaml_READER_ERROR // Cannot read or decode the input stream. + yaml_SCANNER_ERROR // Cannot scan the input stream. + yaml_PARSER_ERROR // Cannot parse the input stream. + yaml_COMPOSER_ERROR // Cannot compose a YAML document. + yaml_WRITER_ERROR // Cannot write to the output stream. + yaml_EMITTER_ERROR // Cannot emit a YAML stream. +) + +// The pointer position. +type yaml_mark_t struct { + index int // The position index. + line int // The position line. + column int // The position column. 
+}
+
+// Node Styles
+
+type yaml_style_t int8
+
+type yaml_scalar_style_t yaml_style_t
+
+// Scalar styles.
+const (
+	// Let the emitter choose the style.
+	yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
+
+	yaml_PLAIN_SCALAR_STYLE         // The plain scalar style.
+	yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
+	yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
+	yaml_LITERAL_SCALAR_STYLE       // The literal scalar style.
+	yaml_FOLDED_SCALAR_STYLE        // The folded scalar style.
+)
+
+type yaml_sequence_style_t yaml_style_t
+
+// Sequence styles.
+const (
+	// Let the emitter choose the style.
+	yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
+
+	yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
+	yaml_FLOW_SEQUENCE_STYLE  // The flow sequence style.
+)
+
+type yaml_mapping_style_t yaml_style_t
+
+// Mapping styles.
+const (
+	// Let the emitter choose the style.
+	yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
+
+	yaml_BLOCK_MAPPING_STYLE // The block mapping style.
+	yaml_FLOW_MAPPING_STYLE  // The flow mapping style.
+)
+
+// Tokens
+
+type yaml_token_type_t int
+
+// Token types.
+const (
+	// An empty token.
+	yaml_NO_TOKEN yaml_token_type_t = iota
+
+	yaml_STREAM_START_TOKEN // A STREAM-START token.
+	yaml_STREAM_END_TOKEN   // A STREAM-END token.
+
+	yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
+	yaml_TAG_DIRECTIVE_TOKEN     // A TAG-DIRECTIVE token.
+	yaml_DOCUMENT_START_TOKEN    // A DOCUMENT-START token.
+	yaml_DOCUMENT_END_TOKEN      // A DOCUMENT-END token.
+
+	yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
+	yaml_BLOCK_MAPPING_START_TOKEN  // A BLOCK-MAPPING-START token.
+	yaml_BLOCK_END_TOKEN            // A BLOCK-END token.
+
+	yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
+	yaml_FLOW_SEQUENCE_END_TOKEN   // A FLOW-SEQUENCE-END token.
+	yaml_FLOW_MAPPING_START_TOKEN  // A FLOW-MAPPING-START token.
+	yaml_FLOW_MAPPING_END_TOKEN    // A FLOW-MAPPING-END token.
+
+	yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
+	yaml_FLOW_ENTRY_TOKEN  // A FLOW-ENTRY token.
+	yaml_KEY_TOKEN         // A KEY token.
+	yaml_VALUE_TOKEN       // A VALUE token.
+
+	yaml_ALIAS_TOKEN  // An ALIAS token.
+	yaml_ANCHOR_TOKEN // An ANCHOR token.
+	yaml_TAG_TOKEN    // A TAG token.
+	yaml_SCALAR_TOKEN // A SCALAR token.
+)
+
+func (tt yaml_token_type_t) String() string {
+	switch tt {
+	case yaml_NO_TOKEN:
+		return "yaml_NO_TOKEN"
+	case yaml_STREAM_START_TOKEN:
+		return "yaml_STREAM_START_TOKEN"
+	case yaml_STREAM_END_TOKEN:
+		return "yaml_STREAM_END_TOKEN"
+	case yaml_VERSION_DIRECTIVE_TOKEN:
+		return "yaml_VERSION_DIRECTIVE_TOKEN"
+	case yaml_TAG_DIRECTIVE_TOKEN:
+		return "yaml_TAG_DIRECTIVE_TOKEN"
+	case yaml_DOCUMENT_START_TOKEN:
+		return "yaml_DOCUMENT_START_TOKEN"
+	case yaml_DOCUMENT_END_TOKEN:
+		return "yaml_DOCUMENT_END_TOKEN"
+	case yaml_BLOCK_SEQUENCE_START_TOKEN:
+		return "yaml_BLOCK_SEQUENCE_START_TOKEN"
+	case yaml_BLOCK_MAPPING_START_TOKEN:
+		return "yaml_BLOCK_MAPPING_START_TOKEN"
+	case yaml_BLOCK_END_TOKEN:
+		return "yaml_BLOCK_END_TOKEN"
+	case yaml_FLOW_SEQUENCE_START_TOKEN:
+		return "yaml_FLOW_SEQUENCE_START_TOKEN"
+	case yaml_FLOW_SEQUENCE_END_TOKEN:
+		return "yaml_FLOW_SEQUENCE_END_TOKEN"
+	case yaml_FLOW_MAPPING_START_TOKEN:
+		return "yaml_FLOW_MAPPING_START_TOKEN"
+	case yaml_FLOW_MAPPING_END_TOKEN:
+		return "yaml_FLOW_MAPPING_END_TOKEN"
+	case yaml_BLOCK_ENTRY_TOKEN:
+		return "yaml_BLOCK_ENTRY_TOKEN"
+	case yaml_FLOW_ENTRY_TOKEN:
+		return "yaml_FLOW_ENTRY_TOKEN"
+	case yaml_KEY_TOKEN:
+		return "yaml_KEY_TOKEN"
+	case yaml_VALUE_TOKEN:
+		return "yaml_VALUE_TOKEN"
+	case yaml_ALIAS_TOKEN:
+		return "yaml_ALIAS_TOKEN"
+	case yaml_ANCHOR_TOKEN:
+		return "yaml_ANCHOR_TOKEN"
+	case yaml_TAG_TOKEN:
+		return "yaml_TAG_TOKEN"
+	case yaml_SCALAR_TOKEN:
+		return "yaml_SCALAR_TOKEN"
+	}
+	return "<unknown token>"
+}
+
+// The token structure.
+type yaml_token_t struct {
+	// The token type.
+	typ yaml_token_type_t
+
+	// The start/end of the token.
+	start_mark, end_mark yaml_mark_t
+
+	// The stream encoding (for yaml_STREAM_START_TOKEN).
+	encoding yaml_encoding_t
+
+	// The alias/anchor/scalar value or tag/tag directive handle
+	// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
+	value []byte
+
+	// The tag suffix (for yaml_TAG_TOKEN).
+	suffix []byte
+
+	// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
+	prefix []byte
+
+	// The scalar style (for yaml_SCALAR_TOKEN).
+	style yaml_scalar_style_t
+
+	// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
+	major, minor int8
+}
+
+// Events
+
+type yaml_event_type_t int8
+
+// Event types.
+const (
+	// An empty event.
+	yaml_NO_EVENT yaml_event_type_t = iota
+
+	yaml_STREAM_START_EVENT   // A STREAM-START event.
+	yaml_STREAM_END_EVENT     // A STREAM-END event.
+	yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
+	yaml_DOCUMENT_END_EVENT   // A DOCUMENT-END event.
+	yaml_ALIAS_EVENT          // An ALIAS event.
+	yaml_SCALAR_EVENT         // A SCALAR event.
+	yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
+	yaml_SEQUENCE_END_EVENT   // A SEQUENCE-END event.
+	yaml_MAPPING_START_EVENT  // A MAPPING-START event.
+	yaml_MAPPING_END_EVENT    // A MAPPING-END event.
+)
+
+// The event structure.
+type yaml_event_t struct {
+
+	// The event type.
+	typ yaml_event_type_t
+
+	// The start and end of the event.
+	start_mark, end_mark yaml_mark_t
+
+	// The document encoding (for yaml_STREAM_START_EVENT).
+	encoding yaml_encoding_t
+
+	// The version directive (for yaml_DOCUMENT_START_EVENT).
+	version_directive *yaml_version_directive_t
+
+	// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
+	tag_directives []yaml_tag_directive_t
+
+	// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
+ anchor []byte + + // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + tag []byte + + // The scalar value (for yaml_SCALAR_EVENT). + value []byte + + // Is the document start/end indicator implicit, or the tag optional? + // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). + implicit bool + + // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). + quoted_implicit bool + + // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + style yaml_style_t +} + +func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } +func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } +func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } + +// Nodes + +const ( + yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. + yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. + yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. + yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. + yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. + yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. + + yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. + yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. + + // Not in original libyaml. + yaml_BINARY_TAG = "tag:yaml.org,2002:binary" + yaml_MERGE_TAG = "tag:yaml.org,2002:merge" + + yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. + yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. + yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. +) + +type yaml_node_type_t int + +// Node types. +const ( + // An empty node. + yaml_NO_NODE yaml_node_type_t = iota + + yaml_SCALAR_NODE // A scalar node. + yaml_SEQUENCE_NODE // A sequence node. + yaml_MAPPING_NODE // A mapping node. +) + +// An element of a sequence node. +type yaml_node_item_t int + +// An element of a mapping node. +type yaml_node_pair_t struct { + key int // The key of the element. + value int // The value of the element. +} + +// The node structure. +type yaml_node_t struct { + typ yaml_node_type_t // The node type. + tag []byte // The node tag. + + // The node data. + + // The scalar parameters (for yaml_SCALAR_NODE). + scalar struct { + value []byte // The scalar value. + length int // The length of the scalar value. + style yaml_scalar_style_t // The scalar style. + } + + // The sequence parameters (for YAML_SEQUENCE_NODE). + sequence struct { + items_data []yaml_node_item_t // The stack of sequence items. + style yaml_sequence_style_t // The sequence style. + } + + // The mapping parameters (for yaml_MAPPING_NODE). + mapping struct { + pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). + pairs_start *yaml_node_pair_t // The beginning of the stack. + pairs_end *yaml_node_pair_t // The end of the stack. + pairs_top *yaml_node_pair_t // The top of the stack. + style yaml_mapping_style_t // The mapping style. + } + + start_mark yaml_mark_t // The beginning of the node. + end_mark yaml_mark_t // The end of the node. + +} + +// The document structure. 
+type yaml_document_t struct {
+
+	// The document nodes.
+	nodes []yaml_node_t
+
+	// The version directive.
+	version_directive *yaml_version_directive_t
+
+	// The list of tag directives.
+	tag_directives_data  []yaml_tag_directive_t
+	tag_directives_start int // The beginning of the tag directives list.
+	tag_directives_end   int // The end of the tag directives list.
+
+	start_implicit int // Is the document start indicator implicit?
+	end_implicit   int // Is the document end indicator implicit?
+
+	// The start/end of the document.
+	start_mark, end_mark yaml_mark_t
+}
+
+// The prototype of a read handler.
+//
+// The read handler is called when the parser needs to read more bytes from the
+// source. The handler should write not more than size bytes to the buffer.
+// The number of written bytes should be set to the size_read variable.
+//
+// [in,out]   data        A pointer to an application data specified by
+//                        yaml_parser_set_input().
+// [out]      buffer      The buffer to write the data from the source.
+// [in]       size        The size of the buffer.
+// [out]      size_read   The actual number of bytes read from the source.
+//
+// On success, the handler should return 1.  If the handler failed,
+// the returned value should be 0. On EOF, the handler should set the
+// size_read to 0 and return 1.
+type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
+
+// This structure holds information about a potential simple key.
+type yaml_simple_key_t struct {
+	possible     bool        // Is a simple key possible?
+	required     bool        // Is a simple key required?
+	token_number int         // The number of the token.
+	mark         yaml_mark_t // The position mark.
+}
+
+// The states of the parser.
+type yaml_parser_state_t int
+
+const (
+	yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
+
+	yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE           // Expect the beginning of an implicit document.
+	yaml_PARSE_DOCUMENT_START_STATE                    // Expect DOCUMENT-START.
+	yaml_PARSE_DOCUMENT_CONTENT_STATE                  // Expect the content of a document.
+	yaml_PARSE_DOCUMENT_END_STATE                      // Expect DOCUMENT-END.
+	yaml_PARSE_BLOCK_NODE_STATE                        // Expect a block node.
+	yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
+	yaml_PARSE_FLOW_NODE_STATE                         // Expect a flow node.
+	yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE        // Expect the first entry of a block sequence.
+	yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE              // Expect an entry of a block sequence.
+	yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE         // Expect an entry of an indentless sequence.
+	yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE           // Expect the first key of a block mapping.
+	yaml_PARSE_BLOCK_MAPPING_KEY_STATE                 // Expect a block mapping key.
+	yaml_PARSE_BLOCK_MAPPING_VALUE_STATE               // Expect a block mapping value.
+	yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE         // Expect the first entry of a flow sequence.
+	yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE               // Expect an entry of a flow sequence.
+	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE   // Expect a key of an ordered mapping.
+	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
+	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE   // Expect the end of an ordered mapping entry.
+	yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE            // Expect the first key of a flow mapping.
+	yaml_PARSE_FLOW_MAPPING_KEY_STATE                  // Expect a key of a flow mapping.
+	yaml_PARSE_FLOW_MAPPING_VALUE_STATE                // Expect a value of a flow mapping.
+	yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE          // Expect an empty value of a flow mapping.
+	yaml_PARSE_END_STATE // Expect nothing.
+)
+
+func (ps yaml_parser_state_t) String() string {
+	switch ps {
+	case yaml_PARSE_STREAM_START_STATE:
+		return "yaml_PARSE_STREAM_START_STATE"
+	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
+		return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
+	case yaml_PARSE_DOCUMENT_START_STATE:
+		return "yaml_PARSE_DOCUMENT_START_STATE"
+	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
+		return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
+	case yaml_PARSE_DOCUMENT_END_STATE:
+		return "yaml_PARSE_DOCUMENT_END_STATE"
+	case yaml_PARSE_BLOCK_NODE_STATE:
+		return "yaml_PARSE_BLOCK_NODE_STATE"
+	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
+		return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
+	case yaml_PARSE_FLOW_NODE_STATE:
+		return "yaml_PARSE_FLOW_NODE_STATE"
+	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
+		return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
+	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
+		return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
+	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
+		return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
+	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
+		return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
+	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
+		return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
+	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
+		return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
+	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
+		return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
+	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
+		return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
+	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
+		return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
+	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
+		return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
+	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
+		return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
+	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
+		return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
+	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
+		return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
+	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
+		return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
+	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
+		return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
+	case yaml_PARSE_END_STATE:
+		return "yaml_PARSE_END_STATE"
+	}
+	return "<unknown parser state>"
+}
+
+// This structure holds aliases data.
+type yaml_alias_data_t struct {
+	anchor []byte      // The anchor.
+	index  int         // The node id.
+	mark   yaml_mark_t // The anchor mark.
+}
+
+// The parser structure.
+//
+// All members are internal. Manage the structure using the
+// yaml_parser_ family of functions.
+type yaml_parser_t struct {
+
+	// Error handling
+
+	error yaml_error_type_t // Error type.
+
+	problem string // Error description.
+
+	// The byte about which the problem occurred.
+	problem_offset int
+	problem_value  int
+	problem_mark   yaml_mark_t
+
+	// The error context.
+	context      string
+	context_mark yaml_mark_t
+
+	// Reader stuff
+
+	read_handler yaml_read_handler_t // Read handler.
+
+	input_file io.Reader // File input data.
+	input      []byte    // String input data.
+	input_pos  int
+
+	eof bool // EOF flag
+
+	buffer     []byte // The working buffer.
+	buffer_pos int    // The current position of the buffer.
+
+	unread int // The number of unread characters in the buffer.
+
+	raw_buffer     []byte // The raw buffer.
+	raw_buffer_pos int    // The current position of the buffer.
+
+	encoding yaml_encoding_t // The input encoding.
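+
+	// (Editor's gloss, hedged) The reader decodes raw_buffer, the raw
+	// undecoded input bytes, into buffer as UTF-8; unread counts the
+	// decoded characters the scanner has not yet consumed.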
+ + offset int // The offset of the current position (in bytes). + mark yaml_mark_t // The mark of the current position. + + // Scanner stuff + + stream_start_produced bool // Have we started to scan the input stream? + stream_end_produced bool // Have we reached the end of the input stream? + + flow_level int // The number of unclosed '[' and '{' indicators. + + tokens []yaml_token_t // The tokens queue. + tokens_head int // The head of the tokens queue. + tokens_parsed int // The number of tokens fetched from the queue. + token_available bool // Does the tokens queue contain a token ready for dequeueing. + + indent int // The current indentation level. + indents []int // The indentation levels stack. + + simple_key_allowed bool // May a simple key occur at the current position? + simple_keys []yaml_simple_key_t // The stack of simple keys. + + // Parser stuff + + state yaml_parser_state_t // The current parser state. + states []yaml_parser_state_t // The parser states stack. + marks []yaml_mark_t // The stack of marks. + tag_directives []yaml_tag_directive_t // The list of TAG directives. + + // Dumper stuff + + aliases []yaml_alias_data_t // The alias data. + + document *yaml_document_t // The currently parsed document. +} + +// Emitter Definitions + +// The prototype of a write handler. +// +// The write handler is called when the emitter needs to flush the accumulated +// characters to the output. The handler should write @a size bytes of the +// @a buffer to the output. +// +// @param[in,out] data A pointer to an application data specified by +// yaml_emitter_set_output(). +// @param[in] buffer The buffer with bytes to be written. +// @param[in] size The size of the buffer. +// +// @returns On success, the handler should return @c 1. If the handler failed, +// the returned value should be @c 0. +// +type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error + +type yaml_emitter_state_t int + +// The emitter states. +const ( + // Expect STREAM-START. + yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota + + yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. + yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. + yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. + yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. + yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. + yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. + yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. + yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. + yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. + yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. + yaml_EMIT_END_STATE // Expect nothing. +) + +// The emitter structure. +// +// All members are internal. Manage the structure using the @c yaml_emitter_ +// family of functions. 
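+//
+// As an illustrative sketch (editor's note, not part of the original
+// documentation): write_handler typically just forwards the flushed bytes
+// to output_file, or appends them to *output_buffer, along the lines of:
+//
+//	func(emitter *yaml_emitter_t, buffer []byte) error {
+//		_, err := emitter.output_file.Write(buffer)
+//		return err
+//	}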
+type yaml_emitter_t struct {
+
+	// Error handling
+
+	error   yaml_error_type_t // Error type.
+	problem string            // Error description.
+
+	// Writer stuff
+
+	write_handler yaml_write_handler_t // Write handler.
+
+	output_buffer *[]byte   // String output data.
+	output_file   io.Writer // File output data.
+
+	buffer     []byte // The working buffer.
+	buffer_pos int    // The current position of the buffer.
+
+	raw_buffer     []byte // The raw buffer.
+	raw_buffer_pos int    // The current position of the buffer.
+
+	encoding yaml_encoding_t // The stream encoding.
+
+	// Emitter stuff
+
+	canonical   bool         // If the output is in the canonical style?
+	best_indent int          // The number of indentation spaces.
+	best_width  int          // The preferred width of the output lines.
+	unicode     bool         // Allow unescaped non-ASCII characters?
+	line_break  yaml_break_t // The preferred line break.
+
+	state  yaml_emitter_state_t   // The current emitter state.
+	states []yaml_emitter_state_t // The stack of states.
+
+	events      []yaml_event_t // The event queue.
+	events_head int            // The head of the event queue.
+
+	indents []int // The stack of indentation levels.
+
+	tag_directives []yaml_tag_directive_t // The list of tag directives.
+
+	indent int // The current indentation level.
+
+	flow_level int // The current flow level.
+
+	root_context       bool // Is it the document root context?
+	sequence_context   bool // Is it a sequence context?
+	mapping_context    bool // Is it a mapping context?
+	simple_key_context bool // Is it a simple mapping key context?
+
+	line       int  // The current line.
+	column     int  // The current column.
+	whitespace bool // If the last character was a whitespace?
+	indention  bool // If the last character was an indentation character (' ', '-', '?', ':')?
+	open_ended bool // If an explicit document end is required?
+
+	// Anchor analysis.
+	anchor_data struct {
+		anchor []byte // The anchor value.
+		alias  bool   // Is it an alias?
+	}
+
+	// Tag analysis.
+	tag_data struct {
+		handle []byte // The tag handle.
+		suffix []byte // The tag suffix.
+	}
+
+	// Scalar analysis.
+	scalar_data struct {
+		value                 []byte              // The scalar value.
+		multiline             bool                // Does the scalar contain line breaks?
+		flow_plain_allowed    bool                // Can the scalar be expressed in the flow plain style?
+		block_plain_allowed   bool                // Can the scalar be expressed in the block plain style?
+		single_quoted_allowed bool                // Can the scalar be expressed in the single quoted style?
+		block_allowed         bool                // Can the scalar be expressed in the literal or folded styles?
+		style                 yaml_scalar_style_t // The output style.
+	}
+
+	// Dumper stuff
+
+	opened bool // If the stream was already opened?
+	closed bool // If the stream was already closed?
+
+	// The information associated with the document nodes.
+	anchors *struct {
+		references int  // The number of references.
+		anchor     int  // The anchor id.
+		serialized bool // If the node has been emitted?
+	}
+
+	last_anchor_id int // The last assigned anchor id.
+
+	document *yaml_document_t // The currently emitted document.
+}
diff --git a/vendor/gopkg.in/yaml.v2/yamlprivateh.go b/vendor/gopkg.in/yaml.v2/yamlprivateh.go
new file mode 100644
index 0000000..8110ce3
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/yamlprivateh.go
@@ -0,0 +1,173 @@
+package yaml
+
+const (
+	// The size of the input raw buffer.
+	input_raw_buffer_size = 512
+
+	// The size of the input buffer.
+	// It should be possible to decode the whole raw buffer.
+	input_buffer_size = input_raw_buffer_size * 3
+
+	// The size of the output buffer.
+ output_buffer_size = 128 + + // The size of the output raw buffer. + // It should be possible to encode the whole output buffer. + output_raw_buffer_size = (output_buffer_size*2 + 2) + + // The size of other stacks and queues. + initial_stack_size = 16 + initial_queue_size = 16 + initial_string_size = 16 +) + +// Check if the character at the specified position is an alphabetical +// character, a digit, '_', or '-'. +func is_alpha(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' +} + +// Check if the character at the specified position is a digit. +func is_digit(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' +} + +// Get the value of a digit. +func as_digit(b []byte, i int) int { + return int(b[i]) - '0' +} + +// Check if the character at the specified position is a hex-digit. +func is_hex(b []byte, i int) bool { + return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' +} + +// Get the value of a hex-digit. +func as_hex(b []byte, i int) int { + bi := b[i] + if bi >= 'A' && bi <= 'F' { + return int(bi) - 'A' + 10 + } + if bi >= 'a' && bi <= 'f' { + return int(bi) - 'a' + 10 + } + return int(bi) - '0' +} + +// Check if the character is ASCII. +func is_ascii(b []byte, i int) bool { + return b[i] <= 0x7F +} + +// Check if the character at the start of the buffer can be printed unescaped. +func is_printable(b []byte, i int) bool { + return ((b[i] == 0x0A) || // . == #x0A + (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E + (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF + (b[i] > 0xC2 && b[i] < 0xED) || + (b[i] == 0xED && b[i+1] < 0xA0) || + (b[i] == 0xEE) || + (b[i] == 0xEF && // #xE000 <= . <= #xFFFD + !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF + !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) +} + +// Check if the character at the specified position is NUL. +func is_z(b []byte, i int) bool { + return b[i] == 0x00 +} + +// Check if the beginning of the buffer is a BOM. +func is_bom(b []byte, i int) bool { + return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF +} + +// Check if the character at the specified position is space. +func is_space(b []byte, i int) bool { + return b[i] == ' ' +} + +// Check if the character at the specified position is tab. +func is_tab(b []byte, i int) bool { + return b[i] == '\t' +} + +// Check if the character at the specified position is blank (space or tab). +func is_blank(b []byte, i int) bool { + //return is_space(b, i) || is_tab(b, i) + return b[i] == ' ' || b[i] == '\t' +} + +// Check if the character at the specified position is a line break. +func is_break(b []byte, i int) bool { + return (b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) +} + +func is_crlf(b []byte, i int) bool { + return b[i] == '\r' && b[i+1] == '\n' +} + +// Check if the character is a line break or NUL. 
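+// For example (editor's note): is_breakz([]byte("\r\n"), 0) and
+// is_breakz([]byte{0}, 0) both report true, while is_breakz([]byte("x"), 0)
+// does not.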
+func is_breakz(b []byte, i int) bool { + //return is_break(b, i) || is_z(b, i) + return ( // is_break: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + // is_z: + b[i] == 0) +} + +// Check if the character is a line break, space, or NUL. +func is_spacez(b []byte, i int) bool { + //return is_space(b, i) || is_breakz(b, i) + return ( // is_space: + b[i] == ' ' || + // is_breakz: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + b[i] == 0) +} + +// Check if the character is a line break, space, tab, or NUL. +func is_blankz(b []byte, i int) bool { + //return is_blank(b, i) || is_breakz(b, i) + return ( // is_blank: + b[i] == ' ' || b[i] == '\t' || + // is_breakz: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + b[i] == 0) +} + +// Determine the width of the character. +func width(b byte) int { + // Don't replace these by a switch without first + // confirming that it is being inlined. + if b&0x80 == 0x00 { + return 1 + } + if b&0xE0 == 0xC0 { + return 2 + } + if b&0xF0 == 0xE0 { + return 3 + } + if b&0xF8 == 0xF0 { + return 4 + } + return 0 + +}
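+
+// Width examples (editor's illustration, not part of the original source):
+// the result depends only on the leading byte's high bits, e.g.
+//
+//	width('a')  == 1 // 0xxxxxxx: ASCII
+//	width(0xC3) == 2 // 110xxxxx: e.g. first byte of 'é' (0xC3 0xA9)
+//	width(0xE2) == 3 // 1110xxxx: e.g. first byte of '€' (0xE2 0x82 0xAC)
+//	width(0xF0) == 4 // 11110xxx: e.g. first byte of '😀' (0xF0 0x9F 0x98 0x80)
+//	width(0x82) == 0 // 10xxxxxx: continuation bytes are not leading bytes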