", st)} // should not happen
+}
+
+// Returns the index in the struct for the named field, as well as the parsed tag properties.
+func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) {
+ i, ok := sprops.decoderOrigNames[name]
+ if ok {
+ return i, sprops.Prop[i], true
+ }
+ return -1, nil, false
+}
+
+// Consume a ':' from the input stream (if the next token is a colon),
+// returning an error if a colon is needed but not present.
+func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError {
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value != ":" {
+ // Colon is optional when the field is a group or message.
+ needColon := true
+ switch props.Wire {
+ case "group":
+ needColon = false
+ case "bytes":
+ // A "bytes" field is either a message, a string, or a repeated field;
+ // those three become *T, *string and []T respectively, so we can check for
+ // this field being a pointer to a non-string.
+ if typ.Kind() == reflect.Ptr {
+ // *T or *string
+ if typ.Elem().Kind() == reflect.String {
+ break
+ }
+ } else if typ.Kind() == reflect.Slice {
+ // []T or []*T
+ if typ.Elem().Kind() != reflect.Ptr {
+ break
+ }
+ } else if typ.Kind() == reflect.String {
+ // The proto3 exception is for a string field,
+ // which requires a colon.
+ break
+ }
+ needColon = false
+ }
+ if needColon {
+ return p.errorf("expected ':', found %q", tok.value)
+ }
+ p.back()
+ }
+ return nil
+}
+
+func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
+ st := sv.Type()
+ sprops := GetProperties(st)
+ reqCount := sprops.reqCount
+ var reqFieldErr error
+ fieldSet := make(map[string]bool)
+ // A struct is a sequence of "name: value", terminated by one of
+ // '>' or '}', or the end of the input. A name may also be
+ // "[extension]" or "[type/url]".
+ //
+ // The whole struct can also be an expanded Any message, like:
+ // [type/url] < ... struct contents ... >
+ for {
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value == terminator {
+ break
+ }
+ if tok.value == "[" {
+ // Looks like an extension or an Any.
+ //
+ // TODO: Check whether we need to handle
+ // namespace rooted names (e.g. ".something.Foo").
+ extName, err := p.consumeExtName()
+ if err != nil {
+ return err
+ }
+
+ if s := strings.LastIndex(extName, "/"); s >= 0 {
+ // If it contains a slash, it's an Any type URL.
+ messageName := extName[s+1:]
+ mt := MessageType(messageName)
+ if mt == nil {
+ return p.errorf("unrecognized message %q in google.protobuf.Any", messageName)
+ }
+ tok = p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ // consume an optional colon
+ if tok.value == ":" {
+ tok = p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ }
+ var terminator string
+ switch tok.value {
+ case "<":
+ terminator = ">"
+ case "{":
+ terminator = "}"
+ default:
+ return p.errorf("expected '{' or '<', found %q", tok.value)
+ }
+ v := reflect.New(mt.Elem())
+ if pe := p.readStruct(v.Elem(), terminator); pe != nil {
+ return pe
+ }
+ b, err := Marshal(v.Interface().(Message))
+ if err != nil {
+ return p.errorf("failed to marshal message of type %q: %v", messageName, err)
+ }
+ if fieldSet["type_url"] {
+ return p.errorf(anyRepeatedlyUnpacked, "type_url")
+ }
+ if fieldSet["value"] {
+ return p.errorf(anyRepeatedlyUnpacked, "value")
+ }
+ sv.FieldByName("TypeUrl").SetString(extName)
+ sv.FieldByName("Value").SetBytes(b)
+ fieldSet["type_url"] = true
+ fieldSet["value"] = true
+ continue
+ }
+
+ var desc *ExtensionDesc
+ // This could be faster, but it's functional.
+ // TODO: Do something smarter than a linear scan.
+ for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) {
+ if d.Name == extName {
+ desc = d
+ break
+ }
+ }
+ if desc == nil {
+ return p.errorf("unrecognized extension %q", extName)
+ }
+
+ props := &Properties{}
+ props.Parse(desc.Tag)
+
+ typ := reflect.TypeOf(desc.ExtensionType)
+ if err := p.checkForColon(props, typ); err != nil {
+ return err
+ }
+
+ rep := desc.repeated()
+
+ // Read the extension structure, and set it in
+ // the value we're constructing.
+ var ext reflect.Value
+ if !rep {
+ ext = reflect.New(typ).Elem()
+ } else {
+ ext = reflect.New(typ.Elem()).Elem()
+ }
+ if err := p.readAny(ext, props); err != nil {
+ if _, ok := err.(*RequiredNotSetError); !ok {
+ return err
+ }
+ reqFieldErr = err
+ }
+ ep := sv.Addr().Interface().(Message)
+ if !rep {
+ SetExtension(ep, desc, ext.Interface())
+ } else {
+ old, err := GetExtension(ep, desc)
+ var sl reflect.Value
+ if err == nil {
+ sl = reflect.ValueOf(old) // existing slice
+ } else {
+ sl = reflect.MakeSlice(typ, 0, 1)
+ }
+ sl = reflect.Append(sl, ext)
+ SetExtension(ep, desc, sl.Interface())
+ }
+ if err := p.consumeOptionalSeparator(); err != nil {
+ return err
+ }
+ continue
+ }
+
+ // This is a normal, non-extension field.
+ name := tok.value
+ var dst reflect.Value
+ fi, props, ok := structFieldByName(sprops, name)
+ if ok {
+ dst = sv.Field(fi)
+ } else if oop, ok := sprops.OneofTypes[name]; ok {
+ // It is a oneof.
+ props = oop.Prop
+ nv := reflect.New(oop.Type.Elem())
+ dst = nv.Elem().Field(0)
+ field := sv.Field(oop.Field)
+ if !field.IsNil() {
+ return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name)
+ }
+ field.Set(nv)
+ }
+ if !dst.IsValid() {
+ return p.errorf("unknown field name %q in %v", name, st)
+ }
+
+ if dst.Kind() == reflect.Map {
+ // Consume any colon.
+ if err := p.checkForColon(props, dst.Type()); err != nil {
+ return err
+ }
+
+ // Construct the map if it doesn't already exist.
+ if dst.IsNil() {
+ dst.Set(reflect.MakeMap(dst.Type()))
+ }
+ key := reflect.New(dst.Type().Key()).Elem()
+ val := reflect.New(dst.Type().Elem()).Elem()
+
+ // The map entry should be this sequence of tokens:
+ // < key : KEY value : VALUE >
+ // However, implementations may omit key or value, and technically
+ // we should support them in any order. See b/28924776 for a time
+ // this went wrong.
+
+ tok := p.next()
+ var terminator string
+ switch tok.value {
+ case "<":
+ terminator = ">"
+ case "{":
+ terminator = "}"
+ default:
+ return p.errorf("expected '{' or '<', found %q", tok.value)
+ }
+ for {
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value == terminator {
+ break
+ }
+ switch tok.value {
+ case "key":
+ if err := p.consumeToken(":"); err != nil {
+ return err
+ }
+ if err := p.readAny(key, props.MapKeyProp); err != nil {
+ return err
+ }
+ if err := p.consumeOptionalSeparator(); err != nil {
+ return err
+ }
+ case "value":
+ if err := p.checkForColon(props.MapValProp, dst.Type().Elem()); err != nil {
+ return err
+ }
+ if err := p.readAny(val, props.MapValProp); err != nil {
+ return err
+ }
+ if err := p.consumeOptionalSeparator(); err != nil {
+ return err
+ }
+ default:
+ p.back()
+ return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value)
+ }
+ }
+
+ dst.SetMapIndex(key, val)
+ continue
+ }
+
+ // Check that it's not already set if it's not a repeated field.
+ if !props.Repeated && fieldSet[name] {
+ return p.errorf("non-repeated field %q was repeated", name)
+ }
+
+ if err := p.checkForColon(props, dst.Type()); err != nil {
+ return err
+ }
+
+ // Parse into the field.
+ fieldSet[name] = true
+ if err := p.readAny(dst, props); err != nil {
+ if _, ok := err.(*RequiredNotSetError); !ok {
+ return err
+ }
+ reqFieldErr = err
+ }
+ if props.Required {
+ reqCount--
+ }
+
+ if err := p.consumeOptionalSeparator(); err != nil {
+ return err
+ }
+
+ }
+
+ if reqCount > 0 {
+ return p.missingRequiredFieldError(sv)
+ }
+ return reqFieldErr
+}
+
+// consumeExtName consumes an extension name or an expanded Any type URL and
+// the following ']'. It returns the name or URL consumed.
+func (p *textParser) consumeExtName() (string, error) {
+ tok := p.next()
+ if tok.err != nil {
+ return "", tok.err
+ }
+
+ // If extension name or type url is quoted, it's a single token.
+ if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] {
+ name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0]))
+ if err != nil {
+ return "", err
+ }
+ return name, p.consumeToken("]")
+ }
+
+ // Consume everything up to "]"
+ var parts []string
+ for tok.value != "]" {
+ parts = append(parts, tok.value)
+ tok = p.next()
+ if tok.err != nil {
+ return "", p.errorf("unrecognized type_url or extension name: %s", tok.err)
+ }
+ if p.done && tok.value != "]" {
+ return "", p.errorf("unclosed type_url or extension name")
+ }
+ }
+ return strings.Join(parts, ""), nil
+}
+
+// consumeOptionalSeparator consumes an optional semicolon or comma.
+// It is used in readStruct to provide backward compatibility.
+func (p *textParser) consumeOptionalSeparator() error {
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value != ";" && tok.value != "," {
+ p.back()
+ }
+ return nil
+}
+
+func (p *textParser) readAny(v reflect.Value, props *Properties) error {
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value == "" {
+ return p.errorf("unexpected EOF")
+ }
+
+ switch fv := v; fv.Kind() {
+ case reflect.Slice:
+ at := v.Type()
+ if at.Elem().Kind() == reflect.Uint8 {
+ // Special case for []byte
+ if tok.value[0] != '"' && tok.value[0] != '\'' {
+ // Deliberately written out here, as the error after
+ // this switch statement would write "invalid []byte: ...",
+ // which is not as user-friendly.
+ return p.errorf("invalid string: %v", tok.value)
+ }
+ bytes := []byte(tok.unquoted)
+ fv.Set(reflect.ValueOf(bytes))
+ return nil
+ }
+ // Repeated field.
+ if tok.value == "[" {
+ // Repeated field with list notation, like [1,2,3].
+ for {
+ fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
+ err := p.readAny(fv.Index(fv.Len()-1), props)
+ if err != nil {
+ return err
+ }
+ tok := p.next()
+ if tok.err != nil {
+ return tok.err
+ }
+ if tok.value == "]" {
+ break
+ }
+ if tok.value != "," {
+ return p.errorf("Expected ']' or ',' found %q", tok.value)
+ }
+ }
+ return nil
+ }
+ // One value of the repeated field.
+ p.back()
+ fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
+ return p.readAny(fv.Index(fv.Len()-1), props)
+ case reflect.Bool:
+ // true/1/t/True or false/f/0/False.
+ switch tok.value {
+ case "true", "1", "t", "True":
+ fv.SetBool(true)
+ return nil
+ case "false", "0", "f", "False":
+ fv.SetBool(false)
+ return nil
+ }
+ case reflect.Float32, reflect.Float64:
+ v := tok.value
+ // Ignore 'f' for compatibility with output generated by C++, but don't
+ // remove 'f' when the value is "-inf" or "inf".
+ if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" {
+ v = v[:len(v)-1]
+ }
+ if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil {
+ fv.SetFloat(f)
+ return nil
+ }
+ case reflect.Int32:
+ if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil {
+ fv.SetInt(x)
+ return nil
+ }
+
+ if len(props.Enum) == 0 {
+ break
+ }
+ m, ok := enumValueMaps[props.Enum]
+ if !ok {
+ break
+ }
+ x, ok := m[tok.value]
+ if !ok {
+ break
+ }
+ fv.SetInt(int64(x))
+ return nil
+ case reflect.Int64:
+ if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil {
+ fv.SetInt(x)
+ return nil
+ }
+
+ case reflect.Ptr:
+ // A basic field (indirected through pointer), or a repeated message/group
+ p.back()
+ fv.Set(reflect.New(fv.Type().Elem()))
+ return p.readAny(fv.Elem(), props)
+ case reflect.String:
+ if tok.value[0] == '"' || tok.value[0] == '\'' {
+ fv.SetString(tok.unquoted)
+ return nil
+ }
+ case reflect.Struct:
+ var terminator string
+ switch tok.value {
+ case "{":
+ terminator = "}"
+ case "<":
+ terminator = ">"
+ default:
+ return p.errorf("expected '{' or '<', found %q", tok.value)
+ }
+ // TODO: Handle nested messages which implement encoding.TextUnmarshaler.
+ return p.readStruct(fv, terminator)
+ case reflect.Uint32:
+ if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
+ fv.SetUint(uint64(x))
+ return nil
+ }
+ case reflect.Uint64:
+ if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil {
+ fv.SetUint(x)
+ return nil
+ }
+ }
+ return p.errorf("invalid %v: %v", v.Type(), tok.value)
+}
+
+// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb
+// before starting to unmarshal, so any existing data in pb is always removed.
+// If a required field is not set and no other error occurs,
+// UnmarshalText returns *RequiredNotSetError.
+func UnmarshalText(s string, pb Message) error {
+ if um, ok := pb.(encoding.TextUnmarshaler); ok {
+ return um.UnmarshalText([]byte(s))
+ }
+ pb.Reset()
+ v := reflect.ValueOf(pb)
+ return newTextParser(s).readStruct(v.Elem(), "")
+}
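
For orientation, here is a minimal usage sketch of the text parser vendored above, assuming it is the text-format parser from the github.com/golang/protobuf proto package; the example.Person message type, its generated getters, and the example.com import path are hypothetical stand-ins for any generated proto type.

	package main

	import (
		"fmt"
		"log"

		"github.com/golang/protobuf/proto"

		example "example.com/gen/example" // hypothetical generated package
	)

	func main() {
		// Text-format input, as produced by proto.MarshalText or proto.CompactTextString.
		in := `name: "Ada" id: 42`

		// UnmarshalText resets the message before parsing, so any existing data is discarded.
		msg := &example.Person{} // hypothetical generated message type
		if err := proto.UnmarshalText(in, msg); err != nil {
			log.Fatalf("parse failed: %v", err)
		}
		fmt.Println(msg.GetName(), msg.GetId())
	}

As documented above, UnmarshalText defers to encoding.TextUnmarshaler when the message implements it, and returns *RequiredNotSetError when a required field is missing but the rest of the input parsed cleanly.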
diff --git a/vendor/github.com/kyma-project/kyma/LICENSE b/vendor/github.com/kyma-project/kyma/LICENSE
deleted file mode 100644
index be36d55..0000000
--- a/vendor/github.com/kyma-project/kyma/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright (c) 2018 SAP SE or an SAP affiliate company. All rights reserved.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/kyma-project/kyma/NOTICE.md b/vendor/github.com/kyma-project/kyma/NOTICE.md
deleted file mode 100644
index ccf38a4..0000000
--- a/vendor/github.com/kyma-project/kyma/NOTICE.md
+++ /dev/null
@@ -1 +0,0 @@
-Copyright (c) 2018 SAP SE or an SAP affiliate company. All rights reserved.
diff --git a/vendor/github.com/kyma-project/kyma/components/binding-usage-controller/pkg/signal/signal.go b/vendor/github.com/kyma-project/kyma/components/binding-usage-controller/pkg/signal/signal.go
deleted file mode 100644
index 9200f8f..0000000
--- a/vendor/github.com/kyma-project/kyma/components/binding-usage-controller/pkg/signal/signal.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package signal
-
-import (
- "os"
- "os/signal"
- "syscall"
-)
-
-// SetupChannel registered for SIGTERM and Interrupt. A stop channel is returned
-// which is closed on one of these signals. If a second signal is caught, the program
-// is terminated with exit code 1.
-func SetupChannel() (stopCh <-chan struct{}) {
- stop := make(chan struct{})
- c := make(chan os.Signal, 2)
- signal.Notify(c, os.Interrupt, syscall.SIGTERM)
- go func() {
- <-c
- close(stop)
- <-c
- os.Exit(1) // second signal. Exit directly.
- }()
-
- return stop
-}
diff --git a/vendor/github.com/kyma-project/kyma/components/ui-api-layer/LICENSE b/vendor/github.com/kyma-project/kyma/components/ui-api-layer/LICENSE
deleted file mode 100644
index f6cd2bc..0000000
--- a/vendor/github.com/kyma-project/kyma/components/ui-api-layer/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/kyma-project/kyma/docs/LICENSE b/vendor/github.com/kyma-project/kyma/docs/LICENSE
deleted file mode 100644
index be36d55..0000000
--- a/vendor/github.com/kyma-project/kyma/docs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright (c) 2018 SAP SE or an SAP affiliate company. All rights reserved.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/kyma-project/kyma/tools/failery/LICENSE b/vendor/github.com/kyma-project/kyma/tools/failery/LICENSE
deleted file mode 100644
index 940c0bd..0000000
--- a/vendor/github.com/kyma-project/kyma/tools/failery/LICENSE
+++ /dev/null
@@ -1,31 +0,0 @@
-Copyright (c) 2014, Opinionated Architecture.
-All rights reserved.
-
-Copyright (c) 2018 SAP SE or an SAP affiliate company.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
-* Neither the name of the {organization} nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
diff --git a/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/pmezard/go-difflib/LICENSE
new file mode 100644
index 0000000..c67dad6
--- /dev/null
+++ b/vendor/github.com/pmezard/go-difflib/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2013, Patrick Mezard
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+ The names of its contributors may not be used to endorse or promote
+products derived from this software without specific prior written
+permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go
new file mode 100644
index 0000000..003e99f
--- /dev/null
+++ b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go
@@ -0,0 +1,772 @@
+// Package difflib is a partial port of Python difflib module.
+//
+// It provides tools to compare sequences of strings and generate textual diffs.
+//
+// The following class and functions have been ported:
+//
+// - SequenceMatcher
+//
+// - unified_diff
+//
+// - context_diff
+//
+// Getting unified diffs was the main goal of the port. Keep in mind that this
+// code is mostly suitable for producing text differences in a human-friendly
+// way; there is no guarantee that the generated diffs are consumable by patch(1).
+package difflib
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+)
+
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
+func max(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+func calculateRatio(matches, length int) float64 {
+ if length > 0 {
+ return 2.0 * float64(matches) / float64(length)
+ }
+ return 1.0
+}
+
+type Match struct {
+ A int
+ B int
+ Size int
+}
+
+type OpCode struct {
+ Tag byte
+ I1 int
+ I2 int
+ J1 int
+ J2 int
+}
+
+// SequenceMatcher compares sequences of strings. The basic
+// algorithm predates, and is a little fancier than, an algorithm
+// published in the late 1980's by Ratcliff and Obershelp under the
+// hyperbolic name "gestalt pattern matching". The basic idea is to find
+// the longest contiguous matching subsequence that contains no "junk"
+// elements (R-O doesn't address junk). The same idea is then applied
+// recursively to the pieces of the sequences to the left and to the right
+// of the matching subsequence. This does not yield minimal edit
+// sequences, but does tend to yield matches that "look right" to people.
+//
+// SequenceMatcher tries to compute a "human-friendly diff" between two
+// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+// longest *contiguous* & junk-free matching subsequence. That's what
+// catches peoples' eyes. The Windows(tm) windiff has another interesting
+// notion, pairing up elements that appear uniquely in each sequence.
+// That, and the method here, appear to yield more intuitive difference
+// reports than does diff. This method appears to be the least vulnerable
+// to synching up on blocks of "junk lines", though (like blank lines in
+// ordinary text files, or maybe "<P>" lines in HTML files). That may be
+// because this is the only method of the three that has a *concept* of
+// "junk".
+//
+// Timing: Basic R-O is cubic time worst case and quadratic time expected
+// case. SequenceMatcher is quadratic time for the worst case and has
+// expected-case behavior dependent in a complicated way on how many
+// elements the sequences have in common; best case time is linear.
+type SequenceMatcher struct {
+ a []string
+ b []string
+ b2j map[string][]int
+ IsJunk func(string) bool
+ autoJunk bool
+ bJunk map[string]struct{}
+ matchingBlocks []Match
+ fullBCount map[string]int
+ bPopular map[string]struct{}
+ opCodes []OpCode
+}
+
+func NewMatcher(a, b []string) *SequenceMatcher {
+ m := SequenceMatcher{autoJunk: true}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+func NewMatcherWithJunk(a, b []string, autoJunk bool,
+ isJunk func(string) bool) *SequenceMatcher {
+
+ m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+// Set two sequences to be compared.
+func (m *SequenceMatcher) SetSeqs(a, b []string) {
+ m.SetSeq1(a)
+ m.SetSeq2(b)
+}
+
+// Set the first sequence to be compared. The second sequence to be compared is
+// not changed.
+//
+// SequenceMatcher computes and caches detailed information about the second
+// sequence, so if you want to compare one sequence S against many sequences,
+// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other
+// sequences.
+//
+// See also SetSeqs() and SetSeq2().
+func (m *SequenceMatcher) SetSeq1(a []string) {
+ if &a == &m.a {
+ return
+ }
+ m.a = a
+ m.matchingBlocks = nil
+ m.opCodes = nil
+}
+
+// Set the second sequence to be compared. The first sequence to be compared is
+// not changed.
+func (m *SequenceMatcher) SetSeq2(b []string) {
+ if &b == &m.b {
+ return
+ }
+ m.b = b
+ m.matchingBlocks = nil
+ m.opCodes = nil
+ m.fullBCount = nil
+ m.chainB()
+}
+
+func (m *SequenceMatcher) chainB() {
+ // Populate line -> index mapping
+ b2j := map[string][]int{}
+ for i, s := range m.b {
+ indices := b2j[s]
+ indices = append(indices, i)
+ b2j[s] = indices
+ }
+
+ // Purge junk elements
+ m.bJunk = map[string]struct{}{}
+ if m.IsJunk != nil {
+ junk := m.bJunk
+ for s, _ := range b2j {
+ if m.IsJunk(s) {
+ junk[s] = struct{}{}
+ }
+ }
+ for s, _ := range junk {
+ delete(b2j, s)
+ }
+ }
+
+ // Purge remaining popular elements
+ popular := map[string]struct{}{}
+ n := len(m.b)
+ if m.autoJunk && n >= 200 {
+ ntest := n/100 + 1
+ for s, indices := range b2j {
+ if len(indices) > ntest {
+ popular[s] = struct{}{}
+ }
+ }
+ for s, _ := range popular {
+ delete(b2j, s)
+ }
+ }
+ m.bPopular = popular
+ m.b2j = b2j
+}
+
+func (m *SequenceMatcher) isBJunk(s string) bool {
+ _, ok := m.bJunk[s]
+ return ok
+}
+
+// Find longest matching block in a[alo:ahi] and b[blo:bhi].
+//
+// If IsJunk is not defined:
+//
+// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+// alo <= i <= i+k <= ahi
+// blo <= j <= j+k <= bhi
+// and for all (i',j',k') meeting those conditions,
+// k >= k'
+// i <= i'
+// and if i == i', j <= j'
+//
+// In other words, of all maximal matching blocks, return one that
+// starts earliest in a, and of all those maximal matching blocks that
+// start earliest in a, return the one that starts earliest in b.
+//
+// If IsJunk is defined, first the longest matching block is
+// determined as above, but with the additional restriction that no
+// junk element appears in the block. Then that block is extended as
+// far as possible by matching (only) junk elements on both sides. So
+// the resulting block never matches on junk except as identical junk
+// happens to be adjacent to an "interesting" match.
+//
+// If no blocks match, return (alo, blo, 0).
+func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
+ // CAUTION: stripping common prefix or suffix would be incorrect.
+ // E.g.,
+ // ab
+ // acab
+ // Longest matching block is "ab", but if common prefix is
+ // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
+ // strip, so ends up claiming that ab is changed to acab by
+ // inserting "ca" in the middle. That's minimal but unintuitive:
+ // "it's obvious" that someone inserted "ac" at the front.
+ // Windiff ends up at the same place as diff, but by pairing up
+ // the unique 'b's and then matching the first two 'a's.
+ besti, bestj, bestsize := alo, blo, 0
+
+ // find longest junk-free match
+ // during an iteration of the loop, j2len[j] = length of longest
+ // junk-free match ending with a[i-1] and b[j]
+ j2len := map[int]int{}
+ for i := alo; i != ahi; i++ {
+ // look at all instances of a[i] in b; note that because
+ // b2j has no junk keys, the loop is skipped if a[i] is junk
+ newj2len := map[int]int{}
+ for _, j := range m.b2j[m.a[i]] {
+ // a[i] matches b[j]
+ if j < blo {
+ continue
+ }
+ if j >= bhi {
+ break
+ }
+ k := j2len[j-1] + 1
+ newj2len[j] = k
+ if k > bestsize {
+ besti, bestj, bestsize = i-k+1, j-k+1, k
+ }
+ }
+ j2len = newj2len
+ }
+
+ // Extend the best by non-junk elements on each end. In particular,
+ // "popular" non-junk elements aren't in b2j, which greatly speeds
+ // the inner loop above, but also means "the best" match so far
+ // doesn't contain any junk *or* popular non-junk elements.
+ for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ !m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ // Now that we have a wholly interesting match (albeit possibly
+ // empty!), we may as well suck up the matching junk on each
+ // side of it too. Can't think of a good reason not to, and it
+ // saves post-processing the (possibly considerable) expense of
+ // figuring out what to do with it. In the case of an empty
+ // interesting match, this is clearly the right thing to do,
+ // because no other kind of match is possible in the regions.
+ for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ return Match{A: besti, B: bestj, Size: bestsize}
+}
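+
+// longestCommonBlock is a hypothetical wrapper shown for illustration: the
+// block returned for the full ranges is the anchor that GetMatchingBlocks
+// recurses around, following the contract documented above.
+func longestCommonBlock(a, b []string) Match {
+ m := NewMatcher(a, b)
+ return m.findLongestMatch(0, len(a), 0, len(b))
+}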
+
+// GetMatchingBlocks returns a list of triples describing matching
+// subsequences.
+//
+// Each triple is of the form (i, j, n), and means that
+// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
+// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are
+// adjacent triples in the list, and the second is not the last triple in the
+// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe
+// adjacent equal blocks.
+//
+// The last triple is a dummy, (len(a), len(b), 0), and is the only
+// triple with n==0.
+func (m *SequenceMatcher) GetMatchingBlocks() []Match {
+ if m.matchingBlocks != nil {
+ return m.matchingBlocks
+ }
+
+ var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
+ matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
+ match := m.findLongestMatch(alo, ahi, blo, bhi)
+ i, j, k := match.A, match.B, match.Size
+ if match.Size > 0 {
+ if alo < i && blo < j {
+ matched = matchBlocks(alo, i, blo, j, matched)
+ }
+ matched = append(matched, match)
+ if i+k < ahi && j+k < bhi {
+ matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
+ }
+ }
+ return matched
+ }
+ matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)
+
+ // It's possible that we have adjacent equal blocks in the
+ // matching_blocks list now.
+ nonAdjacent := []Match{}
+ i1, j1, k1 := 0, 0, 0
+ for _, b := range matched {
+ // Is this block adjacent to i1, j1, k1?
+ i2, j2, k2 := b.A, b.B, b.Size
+ if i1+k1 == i2 && j1+k1 == j2 {
+ // Yes, so collapse them -- this just increases the length of
+ // the first block by the length of the second, and the first
+ // block so lengthened remains the block to compare against.
+ k1 += k2
+ } else {
+ // Not adjacent. Remember the first block (k1==0 means it's
+ // the dummy we started with), and make the second block the
+ // new block to compare against.
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+ i1, j1, k1 = i2, j2, k2
+ }
+ }
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+
+ nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
+ m.matchingBlocks = nonAdjacent
+ return m.matchingBlocks
+}
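+
+// commonLines is a usage sketch (hypothetical, not upstream API): it collects
+// the lines covered by the matching blocks; the trailing (len(a), len(b), 0)
+// sentinel contributes nothing.
+func commonLines(a, b []string) []string {
+ var out []string
+ for _, blk := range NewMatcher(a, b).GetMatchingBlocks() {
+ out = append(out, a[blk.A:blk.A+blk.Size]...)
+ }
+ return out
+}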
+
+// GetOpCodes returns a list of 5-tuples describing how to turn a into b.
+//
+// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
+// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+// tuple preceding it, and likewise for j1 == the previous j2.
+//
+// The tags are characters, with these meanings:
+//
+// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2]
+//
+// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case.
+//
+// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case.
+//
+// 'e' (equal): a[i1:i2] == b[j1:j2]
+func (m *SequenceMatcher) GetOpCodes() []OpCode {
+ if m.opCodes != nil {
+ return m.opCodes
+ }
+ i, j := 0, 0
+ matching := m.GetMatchingBlocks()
+ opCodes := make([]OpCode, 0, len(matching))
+ for _, m := range matching {
+ // invariant: we've pumped out correct diffs to change
+ // a[:i] into b[:j], and the next matching block is
+ // a[ai:ai+size] == b[bj:bj+size]. So we need to pump
+ // out a diff to change a[i:ai] into b[j:bj], pump out
+ // the matching block, and move (i,j) beyond the match
+ ai, bj, size := m.A, m.B, m.Size
+ tag := byte(0)
+ if i < ai && j < bj {
+ tag = 'r'
+ } else if i < ai {
+ tag = 'd'
+ } else if j < bj {
+ tag = 'i'
+ }
+ if tag > 0 {
+ opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
+ }
+ i, j = ai+size, bj+size
+ // the list of matching blocks is terminated by a
+ // sentinel with size 0
+ if size > 0 {
+ opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
+ }
+ }
+ m.opCodes = opCodes
+ return m.opCodes
+}
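+
+// renderOpCodes is an illustrative sketch (not upstream API) of consuming the
+// opcodes documented above, turning the r/d/i/e tags into a crude line delta.
+func renderOpCodes(a, b []string) []string {
+ var out []string
+ for _, op := range NewMatcher(a, b).GetOpCodes() {
+ if op.Tag == 'e' {
+ out = append(out, a[op.I1:op.I2]...)
+ continue
+ }
+ if op.Tag == 'r' || op.Tag == 'd' {
+ for _, line := range a[op.I1:op.I2] {
+ out = append(out, "-"+line)
+ }
+ }
+ if op.Tag == 'r' || op.Tag == 'i' {
+ for _, line := range b[op.J1:op.J2] {
+ out = append(out, "+"+line)
+ }
+ }
+ }
+ return out
+}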
+
+// GetGroupedOpCodes isolates change clusters by eliminating ranges with no
+// changes.
+//
+// Return a list of groups, each with up to n lines of context.
+// Each group is in the same format as returned by GetOpCodes().
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
+ if n < 0 {
+ n = 3
+ }
+ codes := m.GetOpCodes()
+ if len(codes) == 0 {
+ codes = []OpCode{{'e', 0, 1, 0, 1}}
+ }
+ // Fixup leading and trailing groups if they show no changes.
+ if codes[0].Tag == 'e' {
+ c := codes[0]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
+ }
+ if codes[len(codes)-1].Tag == 'e' {
+ c := codes[len(codes)-1]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
+ }
+ nn := n + n
+ groups := [][]OpCode{}
+ group := []OpCode{}
+ for _, c := range codes {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ // End the current group and start a new one whenever
+ // there is a large range with no changes.
+ if c.Tag == 'e' && i2-i1 > nn {
+ group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
+ j1, min(j2, j1+n)})
+ groups = append(groups, group)
+ group = []OpCode{}
+ i1, j1 = max(i1, i2-n), max(j1, j2-n)
+ }
+ group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
+ }
+ if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
+ groups = append(groups, group)
+ }
+ return groups
+}
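+
+// hunkCount is a hypothetical illustration: each group returned by
+// GetGroupedOpCodes corresponds to one hunk of a unified or context diff
+// carrying the requested amount of context.
+func hunkCount(a, b []string, context int) int {
+ return len(NewMatcher(a, b).GetGroupedOpCodes(context))
+}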
+
+// Ratio returns a measure of the sequences' similarity (float in [0,1]).
+//
+// Where T is the total number of elements in both sequences, and
+// M is the number of matches, this is 2.0*M / T.
+// Note that this is 1 if the sequences are identical, and 0 if
+// they have nothing in common.
+//
+// .Ratio() is expensive to compute if you haven't already computed
+// .GetMatchingBlocks() or .GetOpCodes(), in which case you may
+// want to try .QuickRatio() or .RealQuickRatio() first to get an
+// upper bound.
+func (m *SequenceMatcher) Ratio() float64 {
+ matches := 0
+ for _, m := range m.GetMatchingBlocks() {
+ matches += m.Size
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// QuickRatio returns an upper bound on Ratio() relatively quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute.
+func (m *SequenceMatcher) QuickRatio() float64 {
+ // viewing a and b as multisets, set matches to the cardinality
+ // of their intersection; this counts the number of matches
+ // without regard to order, so is clearly an upper bound
+ if m.fullBCount == nil {
+ m.fullBCount = map[string]int{}
+ for _, s := range m.b {
+ m.fullBCount[s] = m.fullBCount[s] + 1
+ }
+ }
+
+ // avail[x] is the number of times x appears in 'b' less the
+ // number of times we've seen it in 'a' so far ... kinda
+ avail := map[string]int{}
+ matches := 0
+ for _, s := range m.a {
+ n, ok := avail[s]
+ if !ok {
+ n = m.fullBCount[s]
+ }
+ avail[s] = n - 1
+ if n > 0 {
+ matches += 1
+ }
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// RealQuickRatio returns an upper bound on Ratio() very quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute than either .Ratio() or .QuickRatio().
+func (m *SequenceMatcher) RealQuickRatio() float64 {
+ la, lb := len(m.a), len(m.b)
+ return calculateRatio(min(la, lb), la+lb)
+}
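+
+// ratioAtLeast is a sketch (hypothetical helper) of how the cheaper bounds are
+// meant to be used: RealQuickRatio() >= QuickRatio() >= Ratio(), so the cheap
+// checks can reject poor matches before paying for the full computation.
+func ratioAtLeast(m *SequenceMatcher, cutoff float64) bool {
+ if m.RealQuickRatio() < cutoff {
+ return false
+ }
+ if m.QuickRatio() < cutoff {
+ return false
+ }
+ return m.Ratio() >= cutoff
+}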
+
+// Convert range to the "ed" format
+func formatRangeUnified(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ return fmt.Sprintf("%d,%d", beginning, length)
+}
+
+// Unified diff parameters
+type UnifiedDiff struct {
+ A []string // First sequence lines
+ FromFile string // First file name
+ FromDate string // First file time
+ B []string // Second sequence lines
+ ToFile string // Second file name
+ ToDate string // Second file time
+ Eol string // Headers end of line, defaults to LF
+ Context int // Number of context lines
+}
+
+// WriteUnifiedDiff compares two sequences of lines and writes the delta as a
+// unified diff.
+//
+// Unified diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by diff.Context.
+//
+// By default, the diff control lines (those with ---, +++, or @@) are
+// created with a trailing newline. This is helpful so that inputs
+// created with SplitLines() result in diffs whose lines all carry their
+// own trailing newline.
+//
+// For inputs that do not have trailing newlines, set diff.Eol to ""
+// so that the output will be uniformly newline free.
+//
+// The unidiff format normally has a header for filenames and modification
+// times. Any or all of these may be specified using strings for
+// diff.FromFile, diff.ToFile, diff.FromDate, and diff.ToDate.
+// The modification times are normally expressed in the ISO 8601 format.
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ wf := func(format string, args ...interface{}) error {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ return err
+ }
+ ws := func(s string) error {
+ _, err := buf.WriteString(s)
+ return err
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ first, last := g[0], g[len(g)-1]
+ range1 := formatRangeUnified(first.I1, last.I2)
+ range2 := formatRangeUnified(first.J1, last.J2)
+ if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
+ return err
+ }
+ for _, c := range g {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ if c.Tag == 'e' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws(" " + line); err != nil {
+ return err
+ }
+ }
+ continue
+ }
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws("-" + line); err != nil {
+ return err
+ }
+ }
+ }
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, line := range diff.B[j1:j2] {
+ if err := ws("+" + line); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
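+
+// exampleUnifiedDiff is a minimal usage sketch; the file names and contents
+// are hypothetical. It builds a UnifiedDiff from newline-terminated lines and
+// streams the result to w.
+func exampleUnifiedDiff(w io.Writer) error {
+ diff := UnifiedDiff{
+ A:        SplitLines("one\ntwo\nthree\n"),
+ B:        SplitLines("one\ntwo\nfour\n"),
+ FromFile: "before.txt",
+ ToFile:   "after.txt",
+ Context:  3,
+ }
+ return WriteUnifiedDiff(w, diff)
+}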
+
+// GetUnifiedDiffString is like WriteUnifiedDiff but returns the diff as a string.
+func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteUnifiedDiff(w, diff)
+ return w.String(), err
+}
+
+// Convert range to the "ed" format.
+func formatRangeContext(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ if length <= 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
+}
+
+type ContextDiff UnifiedDiff
+
+// WriteContextDiff compares two sequences of lines and writes the delta as a
+// context diff.
+//
+// Context diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by diff.Context.
+//
+// By default, the diff control lines (those with *** or ---) are
+// created with a trailing newline.
+//
+// For inputs that do not have trailing newlines, set the diff.Eol
+// argument to "" so that the output will be uniformly newline free.
+//
+// The context diff format normally has a header for filenames and
+// modification times. Any or all of these may be specified using
+// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate.
+// The modification times are normally expressed in the ISO 8601 format.
+// If not specified, the strings default to blanks.
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ var diffErr error
+ wf := func(format string, args ...interface{}) {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+ ws := func(s string) {
+ _, err := buf.WriteString(s)
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ prefix := map[byte]string{
+ 'i': "+ ",
+ 'd': "- ",
+ 'r': "! ",
+ 'e': " ",
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
+ }
+ }
+
+ first, last := g[0], g[len(g)-1]
+ ws("***************" + diff.Eol)
+
+ range1 := formatRangeContext(first.I1, last.I2)
+ wf("*** %s ****%s", range1, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, cc := range g {
+ if cc.Tag == 'i' {
+ continue
+ }
+ for _, line := range diff.A[cc.I1:cc.I2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+
+ range2 := formatRangeContext(first.J1, last.J2)
+ wf("--- %s ----%s", range2, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, cc := range g {
+ if cc.Tag == 'd' {
+ continue
+ }
+ for _, line := range diff.B[cc.J1:cc.J2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+ }
+ return diffErr
+}
+
+// GetContextDiffString is like WriteContextDiff but returns the diff as a string.
+func GetContextDiffString(diff ContextDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteContextDiff(w, diff)
+ return w.String(), err
+}
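+
+// exampleContextDiff is a hypothetical illustration: the context-diff entry
+// points mirror the unified-diff ones and differ only in the emitted format.
+func exampleContextDiff() (string, error) {
+ diff := ContextDiff{
+ A:        SplitLines("one\ntwo\nthree\n"),
+ B:        SplitLines("one\ntwo\nfour\n"),
+ FromFile: "before.txt",
+ ToFile:   "after.txt",
+ Context:  3,
+ }
+ return GetContextDiffString(diff)
+}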
+
+// SplitLines splits a string on "\n" while preserving the newlines; a final
+// "\n" is appended to the last element so that every line ends with one. The
+// output can be used as input for the UnifiedDiff and ContextDiff structures.
+func SplitLines(s string) []string {
+ lines := strings.SplitAfter(s, "\n")
+ lines[len(lines)-1] += "\n"
+ return lines
+}
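+
+// For example (illustration only), SplitLines("one\ntwo") returns
+// []string{"one\n", "two\n"}: every element, including the last, ends in a
+// newline, which is what the diff writers above expect.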
diff --git a/vendor/github.com/spf13/pflag/.gitignore b/vendor/github.com/spf13/pflag/.gitignore
deleted file mode 100644
index c3da290..0000000
--- a/vendor/github.com/spf13/pflag/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-.idea/*
-
diff --git a/vendor/github.com/spf13/pflag/.travis.yml b/vendor/github.com/spf13/pflag/.travis.yml
deleted file mode 100644
index f8a63b3..0000000
--- a/vendor/github.com/spf13/pflag/.travis.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-sudo: false
-
-language: go
-
-go:
- - 1.7.3
- - 1.8.1
- - tip
-
-matrix:
- allow_failures:
- - go: tip
-
-install:
- - go get github.com/golang/lint/golint
- - export PATH=$GOPATH/bin:$PATH
- - go install ./...
-
-script:
- - verify/all.sh -v
- - go test ./...
diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md
deleted file mode 100644
index b052414..0000000
--- a/vendor/github.com/spf13/pflag/README.md
+++ /dev/null
@@ -1,296 +0,0 @@
-[![Build Status](https://travis-ci.org/spf13/pflag.svg?branch=master)](https://travis-ci.org/spf13/pflag)
-[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/pflag)](https://goreportcard.com/report/github.com/spf13/pflag)
-[![GoDoc](https://godoc.org/github.com/spf13/pflag?status.svg)](https://godoc.org/github.com/spf13/pflag)
-
-## Description
-
-pflag is a drop-in replacement for Go's flag package, implementing
-POSIX/GNU-style --flags.
-
-pflag is compatible with the [GNU extensions to the POSIX recommendations
-for command-line options][1]. For a more precise description, see the
-"Command-line flag syntax" section below.
-
-[1]: http://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html
-
-pflag is available under the same style of BSD license as the Go language,
-which can be found in the LICENSE file.
-
-## Installation
-
-pflag is available using the standard `go get` command.
-
-Install by running:
-
- go get github.com/spf13/pflag
-
-Run tests by running:
-
- go test github.com/spf13/pflag
-
-## Usage
-
-pflag is a drop-in replacement of Go's native flag package. If you import
-pflag under the name "flag" then all code should continue to function
-with no changes.
-
-``` go
-import flag "github.com/spf13/pflag"
-```
-
-There is one exception to this: if you directly instantiate the Flag struct
-there is one more field "Shorthand" that you will need to set.
-Most code never instantiates this struct directly, and instead uses
-functions such as String(), BoolVar(), and Var(), and is therefore
-unaffected.
-
-Define flags using flag.String(), Bool(), Int(), etc.
-
-This declares an integer flag, -flagname, stored in the pointer ip, with type *int.
-
-``` go
-var ip *int = flag.Int("flagname", 1234, "help message for flagname")
-```
-
-If you like, you can bind the flag to a variable using the Var() functions.
-
-``` go
-var flagvar int
-func init() {
- flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname")
-}
-```
-
-Or you can create custom flags that satisfy the Value interface (with
-pointer receivers) and couple them to flag parsing by
-
-``` go
-flag.Var(&flagVal, "name", "help message for flagname")
-```
-
-For such flags, the default value is just the initial value of the variable.
-
-After all flags are defined, call
-
-``` go
-flag.Parse()
-```
-
-to parse the command line into the defined flags.
-
-Flags may then be used directly. If you're using the flags themselves,
-they are all pointers; if you bind to variables, they're values.
-
-``` go
-fmt.Println("ip has value ", *ip)
-fmt.Println("flagvar has value ", flagvar)
-```
-
-There are helpers function to get values later if you have the FlagSet but
-it was difficult to keep up with all of the flag pointers in your code.
-If you have a pflag.FlagSet with a flag called 'flagname' of type int you
-can use GetInt() to get the int value. But notice that 'flagname' must exist
-and it must be an int. GetString("flagname") will fail.
-
-``` go
-i, err := flagset.GetInt("flagname")
-```
-
-After parsing, the arguments after the flag are available as the
-slice flag.Args() or individually as flag.Arg(i).
-The arguments are indexed from 0 through flag.NArg()-1.
-
-The pflag package also defines some new functions that are not in flag,
-that give one-letter shorthands for flags. You can use these by appending
-'P' to the name of any function that defines a flag.
-
-``` go
-var ip = flag.IntP("flagname", "f", 1234, "help message")
-var flagvar bool
-func init() {
- flag.BoolVarP(&flagvar, "boolname", "b", true, "help message")
-}
-flag.VarP(&flagVal, "varname", "v", "help message")
-```
-
-Shorthand letters can be used with single dashes on the command line.
-Boolean shorthand flags can be combined with other shorthand flags.
-
-The default set of command-line flags is controlled by
-top-level functions. The FlagSet type allows one to define
-independent sets of flags, such as to implement subcommands
-in a command-line interface. The methods of FlagSet are
-analogous to the top-level functions for the command-line
-flag set.
-
-## Setting no option default values for flags
-
-After you create a flag it is possible to set the pflag.NoOptDefVal for
-the given flag. Doing this changes the meaning of the flag slightly. If
-a flag has a NoOptDefVal and the flag is set on the command line without
-an option the flag will be set to the NoOptDefVal. For example given:
-
-``` go
-var ip = flag.IntP("flagname", "f", 1234, "help message")
-flag.Lookup("flagname").NoOptDefVal = "4321"
-```
-
-Would result in something like
-
-| Parsed Arguments | Resulting Value |
-| ------------- | ------------- |
-| --flagname=1357 | ip=1357 |
-| --flagname | ip=4321 |
-| [nothing] | ip=1234 |
-
-## Command line flag syntax
-
-```
---flag // boolean flags, or flags with no option default values
---flag x // only on flags without a default value
---flag=x
-```
-
-Unlike the flag package, a single dash before an option means something
-different than a double dash. Single dashes signify a series of shorthand
-letters for flags. All but the last shorthand letter must be boolean flags
-or a flag with a default value
-
-```
-// boolean or flags where the 'no option default value' is set
--f
--f=true
--abc
-but
--b true is INVALID
-
-// non-boolean and flags without a 'no option default value'
--n 1234
--n=1234
--n1234
-
-// mixed
--abcs "hello"
--absd="hello"
--abcs1234
-```
-
-Flag parsing stops after the terminator "--". Unlike the flag package,
-flags can be interspersed with arguments anywhere on the command line
-before this terminator.
-
-Integer flags accept 1234, 0664, 0x1234 and may be negative.
-Boolean flags (in their long form) accept 1, 0, t, f, true, false,
-TRUE, FALSE, True, False.
-Duration flags accept any input valid for time.ParseDuration.
-
-## Mutating or "Normalizing" Flag names
-
-It is possible to set a custom flag name 'normalization function.' It allows flag names to be mutated both when created in the code and when used on the command line to some 'normalized' form. The 'normalized' form is used for comparison. Two examples of using the custom normalization func follow.
-
-**Example #1**: You want -, _, and . in flags to compare the same. aka --my-flag == --my_flag == --my.flag
-
-``` go
-func wordSepNormalizeFunc(f *pflag.FlagSet, name string) pflag.NormalizedName {
- from := []string{"-", "_"}
- to := "."
- for _, sep := range from {
- name = strings.Replace(name, sep, to, -1)
- }
- return pflag.NormalizedName(name)
-}
-
-myFlagSet.SetNormalizeFunc(wordSepNormalizeFunc)
-```
-
-**Example #2**: You want to alias two flags. aka --old-flag-name == --new-flag-name
-
-``` go
-func aliasNormalizeFunc(f *pflag.FlagSet, name string) pflag.NormalizedName {
- switch name {
- case "old-flag-name":
- name = "new-flag-name"
- break
- }
- return pflag.NormalizedName(name)
-}
-
-myFlagSet.SetNormalizeFunc(aliasNormalizeFunc)
-```
-
-## Deprecating a flag or its shorthand
-It is possible to deprecate a flag, or just its shorthand. Deprecating a flag/shorthand hides it from help text and prints a usage message when the deprecated flag/shorthand is used.
-
-**Example #1**: You want to deprecate a flag named "badflag" as well as inform the users what flag they should use instead.
-```go
-// deprecate a flag by specifying its name and a usage message
-flags.MarkDeprecated("badflag", "please use --good-flag instead")
-```
-This hides "badflag" from help text, and prints `Flag --badflag has been deprecated, please use --good-flag instead` when "badflag" is used.
-
-**Example #2**: You want to keep a flag name "noshorthandflag" but deprecate its shortname "n".
-```go
-// deprecate a flag shorthand by specifying its flag name and a usage message
-flags.MarkShorthandDeprecated("noshorthandflag", "please use --noshorthandflag only")
-```
-This hides the shortname "n" from help text, and prints `Flag shorthand -n has been deprecated, please use --noshorthandflag only` when the shorthand "n" is used.
-
-Note that usage message is essential here, and it should not be empty.
-
-## Hidden flags
-It is possible to mark a flag as hidden, meaning it will still function as normal, however will not show up in usage/help text.
-
-**Example**: You have a flag named "secretFlag" that you need for internal use only and don't want it showing up in help text, or for its usage text to be available.
-```go
-// hide a flag by specifying its name
-flags.MarkHidden("secretFlag")
-```
-
-## Disable sorting of flags
-`pflag` allows you to disable sorting of flags for help and usage message.
-
-**Example**:
-```go
-flags.BoolP("verbose", "v", false, "verbose output")
-flags.String("coolflag", "yeaah", "it's really cool flag")
-flags.Int("usefulflag", 777, "sometimes it's very useful")
-flags.SortFlags = false
-flags.PrintDefaults()
-```
-**Output**:
-```
- -v, --verbose verbose output
- --coolflag string it's really cool flag (default "yeaah")
- --usefulflag int sometimes it's very useful (default 777)
-```
-
-
-## Supporting Go flags when using pflag
-In order to support flags defined using Go's `flag` package, they must be added to the `pflag` flagset. This is usually necessary
-to support flags defined by third-party dependencies (e.g. `golang/glog`).
-
-**Example**: You want to add the Go flags to the `CommandLine` flagset
-```go
-import (
- goflag "flag"
- flag "github.com/spf13/pflag"
-)
-
-var ip *int = flag.Int("flagname", 1234, "help message for flagname")
-
-func main() {
- flag.CommandLine.AddGoFlagSet(goflag.CommandLine)
- flag.Parse()
-}
-```
-
-## More info
-
-You can see the full reference documentation of the pflag package
-[at godoc.org][3], or through go's standard documentation system by
-running `godoc -http=:6060` and browsing to
-[http://localhost:6060/pkg/github.com/spf13/pflag][2] after
-installation.
-
-[2]: http://localhost:6060/pkg/github.com/spf13/pflag
-[3]: http://godoc.org/github.com/spf13/pflag
diff --git a/vendor/github.com/spf13/pflag/bool.go b/vendor/github.com/spf13/pflag/bool.go
deleted file mode 100644
index c4c5c0b..0000000
--- a/vendor/github.com/spf13/pflag/bool.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package pflag
-
-import "strconv"
-
-// optional interface to indicate boolean flags that can be
-// supplied without "=value" text
-type boolFlag interface {
- Value
- IsBoolFlag() bool
-}
-
-// -- bool Value
-type boolValue bool
-
-func newBoolValue(val bool, p *bool) *boolValue {
- *p = val
- return (*boolValue)(p)
-}
-
-func (b *boolValue) Set(s string) error {
- v, err := strconv.ParseBool(s)
- *b = boolValue(v)
- return err
-}
-
-func (b *boolValue) Type() string {
- return "bool"
-}
-
-func (b *boolValue) String() string { return strconv.FormatBool(bool(*b)) }
-
-func (b *boolValue) IsBoolFlag() bool { return true }
-
-func boolConv(sval string) (interface{}, error) {
- return strconv.ParseBool(sval)
-}
-
-// GetBool return the bool value of a flag with the given name
-func (f *FlagSet) GetBool(name string) (bool, error) {
- val, err := f.getFlagType(name, "bool", boolConv)
- if err != nil {
- return false, err
- }
- return val.(bool), nil
-}
-
-// BoolVar defines a bool flag with specified name, default value, and usage string.
-// The argument p points to a bool variable in which to store the value of the flag.
-func (f *FlagSet) BoolVar(p *bool, name string, value bool, usage string) {
- f.BoolVarP(p, name, "", value, usage)
-}
-
-// BoolVarP is like BoolVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BoolVarP(p *bool, name, shorthand string, value bool, usage string) {
- flag := f.VarPF(newBoolValue(value, p), name, shorthand, usage)
- flag.NoOptDefVal = "true"
-}
-
-// BoolVar defines a bool flag with specified name, default value, and usage string.
-// The argument p points to a bool variable in which to store the value of the flag.
-func BoolVar(p *bool, name string, value bool, usage string) {
- BoolVarP(p, name, "", value, usage)
-}
-
-// BoolVarP is like BoolVar, but accepts a shorthand letter that can be used after a single dash.
-func BoolVarP(p *bool, name, shorthand string, value bool, usage string) {
- flag := CommandLine.VarPF(newBoolValue(value, p), name, shorthand, usage)
- flag.NoOptDefVal = "true"
-}
-
-// Bool defines a bool flag with specified name, default value, and usage string.
-// The return value is the address of a bool variable that stores the value of the flag.
-func (f *FlagSet) Bool(name string, value bool, usage string) *bool {
- return f.BoolP(name, "", value, usage)
-}
-
-// BoolP is like Bool, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BoolP(name, shorthand string, value bool, usage string) *bool {
- p := new(bool)
- f.BoolVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Bool defines a bool flag with specified name, default value, and usage string.
-// The return value is the address of a bool variable that stores the value of the flag.
-func Bool(name string, value bool, usage string) *bool {
- return BoolP(name, "", value, usage)
-}
-
-// BoolP is like Bool, but accepts a shorthand letter that can be used after a single dash.
-func BoolP(name, shorthand string, value bool, usage string) *bool {
- b := CommandLine.BoolP(name, shorthand, value, usage)
- return b
-}
diff --git a/vendor/github.com/spf13/pflag/bool_slice.go b/vendor/github.com/spf13/pflag/bool_slice.go
deleted file mode 100644
index 5af02f1..0000000
--- a/vendor/github.com/spf13/pflag/bool_slice.go
+++ /dev/null
@@ -1,147 +0,0 @@
-package pflag
-
-import (
- "io"
- "strconv"
- "strings"
-)
-
-// -- boolSlice Value
-type boolSliceValue struct {
- value *[]bool
- changed bool
-}
-
-func newBoolSliceValue(val []bool, p *[]bool) *boolSliceValue {
- bsv := new(boolSliceValue)
- bsv.value = p
- *bsv.value = val
- return bsv
-}
-
-// Set converts, and assigns, the comma-separated boolean argument string representation as the []bool value of this flag.
-// If Set is called on a flag that already has a []bool assigned, the newly converted values will be appended.
-func (s *boolSliceValue) Set(val string) error {
-
- // remove all quote characters
- rmQuote := strings.NewReplacer(`"`, "", `'`, "", "`", "")
-
- // read flag arguments with CSV parser
- boolStrSlice, err := readAsCSV(rmQuote.Replace(val))
- if err != nil && err != io.EOF {
- return err
- }
-
- // parse boolean values into slice
- out := make([]bool, 0, len(boolStrSlice))
- for _, boolStr := range boolStrSlice {
- b, err := strconv.ParseBool(strings.TrimSpace(boolStr))
- if err != nil {
- return err
- }
- out = append(out, b)
- }
-
- if !s.changed {
- *s.value = out
- } else {
- *s.value = append(*s.value, out...)
- }
-
- s.changed = true
-
- return nil
-}
-
-// Type returns a string that uniquely represents this flag's type.
-func (s *boolSliceValue) Type() string {
- return "boolSlice"
-}
-
-// String defines a "native" format for this boolean slice flag value.
-func (s *boolSliceValue) String() string {
-
- boolStrSlice := make([]string, len(*s.value))
- for i, b := range *s.value {
- boolStrSlice[i] = strconv.FormatBool(b)
- }
-
- out, _ := writeAsCSV(boolStrSlice)
-
- return "[" + out + "]"
-}
-
-func boolSliceConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // Empty string would cause a slice with one (empty) entry
- if len(val) == 0 {
- return []bool{}, nil
- }
- ss := strings.Split(val, ",")
- out := make([]bool, len(ss))
- for i, t := range ss {
- var err error
- out[i], err = strconv.ParseBool(t)
- if err != nil {
- return nil, err
- }
- }
- return out, nil
-}
-
-// GetBoolSlice returns the []bool value of a flag with the given name.
-func (f *FlagSet) GetBoolSlice(name string) ([]bool, error) {
- val, err := f.getFlagType(name, "boolSlice", boolSliceConv)
- if err != nil {
- return []bool{}, err
- }
- return val.([]bool), nil
-}
-
-// BoolSliceVar defines a boolSlice flag with specified name, default value, and usage string.
-// The argument p points to a []bool variable in which to store the value of the flag.
-func (f *FlagSet) BoolSliceVar(p *[]bool, name string, value []bool, usage string) {
- f.VarP(newBoolSliceValue(value, p), name, "", usage)
-}
-
-// BoolSliceVarP is like BoolSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BoolSliceVarP(p *[]bool, name, shorthand string, value []bool, usage string) {
- f.VarP(newBoolSliceValue(value, p), name, shorthand, usage)
-}
-
-// BoolSliceVar defines a []bool flag with specified name, default value, and usage string.
-// The argument p points to a []bool variable in which to store the value of the flag.
-func BoolSliceVar(p *[]bool, name string, value []bool, usage string) {
- CommandLine.VarP(newBoolSliceValue(value, p), name, "", usage)
-}
-
-// BoolSliceVarP is like BoolSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func BoolSliceVarP(p *[]bool, name, shorthand string, value []bool, usage string) {
- CommandLine.VarP(newBoolSliceValue(value, p), name, shorthand, usage)
-}
-
-// BoolSlice defines a []bool flag with specified name, default value, and usage string.
-// The return value is the address of a []bool variable that stores the value of the flag.
-func (f *FlagSet) BoolSlice(name string, value []bool, usage string) *[]bool {
- p := []bool{}
- f.BoolSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// BoolSliceP is like BoolSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BoolSliceP(name, shorthand string, value []bool, usage string) *[]bool {
- p := []bool{}
- f.BoolSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// BoolSlice defines a []bool flag with specified name, default value, and usage string.
-// The return value is the address of a []bool variable that stores the value of the flag.
-func BoolSlice(name string, value []bool, usage string) *[]bool {
- return CommandLine.BoolSliceP(name, "", value, usage)
-}
-
-// BoolSliceP is like BoolSlice, but accepts a shorthand letter that can be used after a single dash.
-func BoolSliceP(name, shorthand string, value []bool, usage string) *[]bool {
- return CommandLine.BoolSliceP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/bytes.go b/vendor/github.com/spf13/pflag/bytes.go
deleted file mode 100644
index 67d5304..0000000
--- a/vendor/github.com/spf13/pflag/bytes.go
+++ /dev/null
@@ -1,209 +0,0 @@
-package pflag
-
-import (
- "encoding/base64"
- "encoding/hex"
- "fmt"
- "strings"
-)
-
-// BytesHex adapts []byte for use as a flag. Value of flag is HEX encoded
-type bytesHexValue []byte
-
-// String implements pflag.Value.String.
-func (bytesHex bytesHexValue) String() string {
- return fmt.Sprintf("%X", []byte(bytesHex))
-}
-
-// Set implements pflag.Value.Set.
-func (bytesHex *bytesHexValue) Set(value string) error {
- bin, err := hex.DecodeString(strings.TrimSpace(value))
-
- if err != nil {
- return err
- }
-
- *bytesHex = bin
-
- return nil
-}
-
-// Type implements pflag.Value.Type.
-func (*bytesHexValue) Type() string {
- return "bytesHex"
-}
-
-func newBytesHexValue(val []byte, p *[]byte) *bytesHexValue {
- *p = val
- return (*bytesHexValue)(p)
-}
-
-func bytesHexConv(sval string) (interface{}, error) {
-
- bin, err := hex.DecodeString(sval)
-
- if err == nil {
- return bin, nil
- }
-
- return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err)
-}
-
-// GetBytesHex return the []byte value of a flag with the given name
-func (f *FlagSet) GetBytesHex(name string) ([]byte, error) {
- val, err := f.getFlagType(name, "bytesHex", bytesHexConv)
-
- if err != nil {
- return []byte{}, err
- }
-
- return val.([]byte), nil
-}
-
-// BytesHexVar defines an []byte flag with specified name, default value, and usage string.
-// The argument p points to an []byte variable in which to store the value of the flag.
-func (f *FlagSet) BytesHexVar(p *[]byte, name string, value []byte, usage string) {
- f.VarP(newBytesHexValue(value, p), name, "", usage)
-}
-
-// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) {
- f.VarP(newBytesHexValue(value, p), name, shorthand, usage)
-}
-
-// BytesHexVar defines an []byte flag with specified name, default value, and usage string.
-// The argument p points to an []byte variable in which to store the value of the flag.
-func BytesHexVar(p *[]byte, name string, value []byte, usage string) {
- CommandLine.VarP(newBytesHexValue(value, p), name, "", usage)
-}
-
-// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash.
-func BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) {
- CommandLine.VarP(newBytesHexValue(value, p), name, shorthand, usage)
-}
-
-// BytesHex defines an []byte flag with specified name, default value, and usage string.
-// The return value is the address of an []byte variable that stores the value of the flag.
-func (f *FlagSet) BytesHex(name string, value []byte, usage string) *[]byte {
- p := new([]byte)
- f.BytesHexVarP(p, name, "", value, usage)
- return p
-}
-
-// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BytesHexP(name, shorthand string, value []byte, usage string) *[]byte {
- p := new([]byte)
- f.BytesHexVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// BytesHex defines an []byte flag with specified name, default value, and usage string.
-// The return value is the address of an []byte variable that stores the value of the flag.
-func BytesHex(name string, value []byte, usage string) *[]byte {
- return CommandLine.BytesHexP(name, "", value, usage)
-}
-
-// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash.
-func BytesHexP(name, shorthand string, value []byte, usage string) *[]byte {
- return CommandLine.BytesHexP(name, shorthand, value, usage)
-}
-
-// BytesBase64 adapts []byte for use as a flag. Value of flag is Base64 encoded
-type bytesBase64Value []byte
-
-// String implements pflag.Value.String.
-func (bytesBase64 bytesBase64Value) String() string {
- return base64.StdEncoding.EncodeToString([]byte(bytesBase64))
-}
-
-// Set implements pflag.Value.Set.
-func (bytesBase64 *bytesBase64Value) Set(value string) error {
- bin, err := base64.StdEncoding.DecodeString(strings.TrimSpace(value))
-
- if err != nil {
- return err
- }
-
- *bytesBase64 = bin
-
- return nil
-}
-
-// Type implements pflag.Value.Type.
-func (*bytesBase64Value) Type() string {
- return "bytesBase64"
-}
-
-func newBytesBase64Value(val []byte, p *[]byte) *bytesBase64Value {
- *p = val
- return (*bytesBase64Value)(p)
-}
-
-func bytesBase64ValueConv(sval string) (interface{}, error) {
-
- bin, err := base64.StdEncoding.DecodeString(sval)
- if err == nil {
- return bin, nil
- }
-
- return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err)
-}
-
-// GetBytesBase64 return the []byte value of a flag with the given name
-func (f *FlagSet) GetBytesBase64(name string) ([]byte, error) {
- val, err := f.getFlagType(name, "bytesBase64", bytesBase64ValueConv)
-
- if err != nil {
- return []byte{}, err
- }
-
- return val.([]byte), nil
-}
-
-// BytesBase64Var defines an []byte flag with specified name, default value, and usage string.
-// The argument p points to an []byte variable in which to store the value of the flag.
-func (f *FlagSet) BytesBase64Var(p *[]byte, name string, value []byte, usage string) {
- f.VarP(newBytesBase64Value(value, p), name, "", usage)
-}
-
-// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) {
- f.VarP(newBytesBase64Value(value, p), name, shorthand, usage)
-}
-
-// BytesBase64Var defines an []byte flag with specified name, default value, and usage string.
-// The argument p points to an []byte variable in which to store the value of the flag.
-func BytesBase64Var(p *[]byte, name string, value []byte, usage string) {
- CommandLine.VarP(newBytesBase64Value(value, p), name, "", usage)
-}
-
-// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash.
-func BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) {
- CommandLine.VarP(newBytesBase64Value(value, p), name, shorthand, usage)
-}
-
-// BytesBase64 defines an []byte flag with specified name, default value, and usage string.
-// The return value is the address of an []byte variable that stores the value of the flag.
-func (f *FlagSet) BytesBase64(name string, value []byte, usage string) *[]byte {
- p := new([]byte)
- f.BytesBase64VarP(p, name, "", value, usage)
- return p
-}
-
-// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte {
- p := new([]byte)
- f.BytesBase64VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// BytesBase64 defines an []byte flag with specified name, default value, and usage string.
-// The return value is the address of an []byte variable that stores the value of the flag.
-func BytesBase64(name string, value []byte, usage string) *[]byte {
- return CommandLine.BytesBase64P(name, "", value, usage)
-}
-
-// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash.
-func BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte {
- return CommandLine.BytesBase64P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go
deleted file mode 100644
index aa126e4..0000000
--- a/vendor/github.com/spf13/pflag/count.go
+++ /dev/null
@@ -1,96 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- count Value
-type countValue int
-
-func newCountValue(val int, p *int) *countValue {
- *p = val
- return (*countValue)(p)
-}
-
-func (i *countValue) Set(s string) error {
- // "+1" means that no specific value was passed, so increment
- if s == "+1" {
- *i = countValue(*i + 1)
- return nil
- }
- v, err := strconv.ParseInt(s, 0, 0)
- *i = countValue(v)
- return err
-}
-
-func (i *countValue) Type() string {
- return "count"
-}
-
-func (i *countValue) String() string { return strconv.Itoa(int(*i)) }
-
-func countConv(sval string) (interface{}, error) {
- i, err := strconv.Atoi(sval)
- if err != nil {
- return nil, err
- }
- return i, nil
-}
-
-// GetCount return the int value of a flag with the given name
-func (f *FlagSet) GetCount(name string) (int, error) {
- val, err := f.getFlagType(name, "count", countConv)
- if err != nil {
- return 0, err
- }
- return val.(int), nil
-}
-
-// CountVar defines a count flag with specified name, default value, and usage string.
-// The argument p points to an int variable in which to store the value of the flag.
-// A count flag will add 1 to its value evey time it is found on the command line
-func (f *FlagSet) CountVar(p *int, name string, usage string) {
- f.CountVarP(p, name, "", usage)
-}
-
-// CountVarP is like CountVar only take a shorthand for the flag name.
-func (f *FlagSet) CountVarP(p *int, name, shorthand string, usage string) {
- flag := f.VarPF(newCountValue(0, p), name, shorthand, usage)
- flag.NoOptDefVal = "+1"
-}
-
-// CountVar like CountVar only the flag is placed on the CommandLine instead of a given flag set
-func CountVar(p *int, name string, usage string) {
- CommandLine.CountVar(p, name, usage)
-}
-
-// CountVarP is like CountVar only take a shorthand for the flag name.
-func CountVarP(p *int, name, shorthand string, usage string) {
- CommandLine.CountVarP(p, name, shorthand, usage)
-}
-
-// Count defines a count flag with specified name, default value, and usage string.
-// The return value is the address of an int variable that stores the value of the flag.
-// A count flag will add 1 to its value evey time it is found on the command line
-func (f *FlagSet) Count(name string, usage string) *int {
- p := new(int)
- f.CountVarP(p, name, "", usage)
- return p
-}
-
-// CountP is like Count only takes a shorthand for the flag name.
-func (f *FlagSet) CountP(name, shorthand string, usage string) *int {
- p := new(int)
- f.CountVarP(p, name, shorthand, usage)
- return p
-}
-
-// Count defines a count flag with specified name, default value, and usage string.
-// The return value is the address of an int variable that stores the value of the flag.
-// A count flag will add 1 to its value evey time it is found on the command line
-func Count(name string, usage string) *int {
- return CommandLine.CountP(name, "", usage)
-}
-
-// CountP is like Count only takes a shorthand for the flag name.
-func CountP(name, shorthand string, usage string) *int {
- return CommandLine.CountP(name, shorthand, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/duration.go b/vendor/github.com/spf13/pflag/duration.go
deleted file mode 100644
index e9debef..0000000
--- a/vendor/github.com/spf13/pflag/duration.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package pflag
-
-import (
- "time"
-)
-
-// -- time.Duration Value
-type durationValue time.Duration
-
-func newDurationValue(val time.Duration, p *time.Duration) *durationValue {
- *p = val
- return (*durationValue)(p)
-}
-
-func (d *durationValue) Set(s string) error {
- v, err := time.ParseDuration(s)
- *d = durationValue(v)
- return err
-}
-
-func (d *durationValue) Type() string {
- return "duration"
-}
-
-func (d *durationValue) String() string { return (*time.Duration)(d).String() }
-
-func durationConv(sval string) (interface{}, error) {
- return time.ParseDuration(sval)
-}
-
-// GetDuration return the duration value of a flag with the given name
-func (f *FlagSet) GetDuration(name string) (time.Duration, error) {
- val, err := f.getFlagType(name, "duration", durationConv)
- if err != nil {
- return 0, err
- }
- return val.(time.Duration), nil
-}
-
-// DurationVar defines a time.Duration flag with specified name, default value, and usage string.
-// The argument p points to a time.Duration variable in which to store the value of the flag.
-func (f *FlagSet) DurationVar(p *time.Duration, name string, value time.Duration, usage string) {
- f.VarP(newDurationValue(value, p), name, "", usage)
-}
-
-// DurationVarP is like DurationVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) {
- f.VarP(newDurationValue(value, p), name, shorthand, usage)
-}
-
-// DurationVar defines a time.Duration flag with specified name, default value, and usage string.
-// The argument p points to a time.Duration variable in which to store the value of the flag.
-func DurationVar(p *time.Duration, name string, value time.Duration, usage string) {
- CommandLine.VarP(newDurationValue(value, p), name, "", usage)
-}
-
-// DurationVarP is like DurationVar, but accepts a shorthand letter that can be used after a single dash.
-func DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) {
- CommandLine.VarP(newDurationValue(value, p), name, shorthand, usage)
-}
-
-// Duration defines a time.Duration flag with specified name, default value, and usage string.
-// The return value is the address of a time.Duration variable that stores the value of the flag.
-func (f *FlagSet) Duration(name string, value time.Duration, usage string) *time.Duration {
- p := new(time.Duration)
- f.DurationVarP(p, name, "", value, usage)
- return p
-}
-
-// DurationP is like Duration, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration {
- p := new(time.Duration)
- f.DurationVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Duration defines a time.Duration flag with specified name, default value, and usage string.
-// The return value is the address of a time.Duration variable that stores the value of the flag.
-func Duration(name string, value time.Duration, usage string) *time.Duration {
- return CommandLine.DurationP(name, "", value, usage)
-}
-
-// DurationP is like Duration, but accepts a shorthand letter that can be used after a single dash.
-func DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration {
- return CommandLine.DurationP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/duration_slice.go b/vendor/github.com/spf13/pflag/duration_slice.go
deleted file mode 100644
index 52c6b6d..0000000
--- a/vendor/github.com/spf13/pflag/duration_slice.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "strings"
- "time"
-)
-
-// -- durationSlice Value
-type durationSliceValue struct {
- value *[]time.Duration
- changed bool
-}
-
-func newDurationSliceValue(val []time.Duration, p *[]time.Duration) *durationSliceValue {
- dsv := new(durationSliceValue)
- dsv.value = p
- *dsv.value = val
- return dsv
-}
-
-func (s *durationSliceValue) Set(val string) error {
- ss := strings.Split(val, ",")
- out := make([]time.Duration, len(ss))
- for i, d := range ss {
- var err error
- out[i], err = time.ParseDuration(d)
- if err != nil {
- return err
- }
-
- }
- if !s.changed {
- *s.value = out
- } else {
- *s.value = append(*s.value, out...)
- }
- s.changed = true
- return nil
-}
-
-func (s *durationSliceValue) Type() string {
- return "durationSlice"
-}
-
-func (s *durationSliceValue) String() string {
- out := make([]string, len(*s.value))
- for i, d := range *s.value {
- out[i] = fmt.Sprintf("%s", d)
- }
- return "[" + strings.Join(out, ",") + "]"
-}
-
-func durationSliceConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // Empty string would cause a slice with one (empty) entry
- if len(val) == 0 {
- return []time.Duration{}, nil
- }
- ss := strings.Split(val, ",")
- out := make([]time.Duration, len(ss))
- for i, d := range ss {
- var err error
- out[i], err = time.ParseDuration(d)
- if err != nil {
- return nil, err
- }
-
- }
- return out, nil
-}
-
-// GetDurationSlice returns the []time.Duration value of a flag with the given name
-func (f *FlagSet) GetDurationSlice(name string) ([]time.Duration, error) {
- val, err := f.getFlagType(name, "durationSlice", durationSliceConv)
- if err != nil {
- return []time.Duration{}, err
- }
- return val.([]time.Duration), nil
-}
-
-// DurationSliceVar defines a durationSlice flag with specified name, default value, and usage string.
-// The argument p points to a []time.Duration variable in which to store the value of the flag.
-func (f *FlagSet) DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) {
- f.VarP(newDurationSliceValue(value, p), name, "", usage)
-}
-
-// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) {
- f.VarP(newDurationSliceValue(value, p), name, shorthand, usage)
-}
-
-// DurationSliceVar defines a duration[] flag with specified name, default value, and usage string.
-// The argument p points to a duration[] variable in which to store the value of the flag.
-func DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) {
- CommandLine.VarP(newDurationSliceValue(value, p), name, "", usage)
-}
-
-// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) {
- CommandLine.VarP(newDurationSliceValue(value, p), name, shorthand, usage)
-}
-
-// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string.
-// The return value is the address of a []time.Duration variable that stores the value of the flag.
-func (f *FlagSet) DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration {
- p := []time.Duration{}
- f.DurationSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration {
- p := []time.Duration{}
- f.DurationSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string.
-// The return value is the address of a []time.Duration variable that stores the value of the flag.
-func DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration {
- return CommandLine.DurationSliceP(name, "", value, usage)
-}
-
-// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash.
-func DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration {
- return CommandLine.DurationSliceP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go
deleted file mode 100644
index 9beeda8..0000000
--- a/vendor/github.com/spf13/pflag/flag.go
+++ /dev/null
@@ -1,1227 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-/*
-Package pflag is a drop-in replacement for Go's flag package, implementing
-POSIX/GNU-style --flags.
-
-pflag is compatible with the GNU extensions to the POSIX recommendations
-for command-line options. See
-http://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html
-
-Usage:
-
-pflag is a drop-in replacement of Go's native flag package. If you import
-pflag under the name "flag" then all code should continue to function
-with no changes.
-
- import flag "github.com/spf13/pflag"
-
-There is one exception to this: if you directly instantiate the Flag struct
-there is one more field "Shorthand" that you will need to set.
-Most code never instantiates this struct directly, and instead uses
-functions such as String(), BoolVar(), and Var(), and is therefore
-unaffected.
-
-Define flags using flag.String(), Bool(), Int(), etc.
-
-This declares an integer flag, -flagname, stored in the pointer ip, with type *int.
- var ip = flag.Int("flagname", 1234, "help message for flagname")
-If you like, you can bind the flag to a variable using the Var() functions.
- var flagvar int
- func init() {
- flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname")
- }
-Or you can create custom flags that satisfy the Value interface (with
-pointer receivers) and couple them to flag parsing by
- flag.Var(&flagVal, "name", "help message for flagname")
-For such flags, the default value is just the initial value of the variable.
-
-After all flags are defined, call
- flag.Parse()
-to parse the command line into the defined flags.
-
-Flags may then be used directly. If you're using the flags themselves,
-they are all pointers; if you bind to variables, they're values.
- fmt.Println("ip has value ", *ip)
- fmt.Println("flagvar has value ", flagvar)
-
-After parsing, the arguments after the flag are available as the
-slice flag.Args() or individually as flag.Arg(i).
-The arguments are indexed from 0 through flag.NArg()-1.
-
-The pflag package also defines some new functions that are not in flag,
-that give one-letter shorthands for flags. You can use these by appending
-'P' to the name of any function that defines a flag.
- var ip = flag.IntP("flagname", "f", 1234, "help message")
- var flagvar bool
- func init() {
- flag.BoolVarP(&flagvar, "boolname", "b", true, "help message")
- }
- flag.VarP(&flagVal, "varname", "v", "help message")
-Shorthand letters can be used with single dashes on the command line.
-Boolean shorthand flags can be combined with other shorthand flags.
-
-Command line flag syntax:
- --flag // boolean flags, or flags with a no-option default value
- --flag=x
-
-Unlike the flag package, a single dash before an option means something
-different than a double dash. Single dashes signify a series of shorthand
-letters for flags. All but the last shorthand letter must be boolean flags.
- // boolean flags
- -f
- -abc
- // non-boolean flags
- -n 1234
- -Ifile
- // mixed
- -abcs "hello"
- -abcn1234
-
-Flag parsing stops after the terminator "--". Unlike the flag package,
-flags can be interspersed with arguments anywhere on the command line
-before this terminator.
-
-Integer flags accept 1234, 0664, 0x1234 and may be negative.
-Boolean flags (in their long form) accept 1, 0, t, f, true, false,
-TRUE, FALSE, True, False.
-Duration flags accept any input valid for time.ParseDuration.
-
-The default set of command-line flags is controlled by
-top-level functions. The FlagSet type allows one to define
-independent sets of flags, such as to implement subcommands
-in a command-line interface. The methods of FlagSet are
-analogous to the top-level functions for the command-line
-flag set.
-*/
-package pflag
-
-import (
- "bytes"
- "errors"
- goflag "flag"
- "fmt"
- "io"
- "os"
- "sort"
- "strings"
-)
-
-// ErrHelp is the error returned if the flag -help is invoked but no such flag is defined.
-var ErrHelp = errors.New("pflag: help requested")
-
-// ErrorHandling defines how to handle flag parsing errors.
-type ErrorHandling int
-
-const (
- // ContinueOnError will return an err from Parse() if an error is found
- ContinueOnError ErrorHandling = iota
- // ExitOnError will call os.Exit(2) if an error is found when parsing
- ExitOnError
- // PanicOnError will panic() if an error is found when parsing flags
- PanicOnError
-)
-
-// ParseErrorsWhitelist defines the parsing errors that can be ignored
-type ParseErrorsWhitelist struct {
- // UnknownFlags will ignore unknown flags errors and continue parsing rest of the flags
- UnknownFlags bool
-}
-
-// NormalizedName is a flag name that has been normalized according to rules
-// for the FlagSet (e.g. making '-' and '_' equivalent).
-type NormalizedName string
-
-// A FlagSet represents a set of defined flags.
-type FlagSet struct {
- // Usage is the function called when an error occurs while parsing flags.
- // The field is a function (not a method) that may be changed to point to
- // a custom error handler.
- Usage func()
-
- // SortFlags indicates whether flags should be sorted in
- // help/usage messages.
- SortFlags bool
-
- // ParseErrorsWhitelist is used to configure a whitelist of errors
- ParseErrorsWhitelist ParseErrorsWhitelist
-
- name string
- parsed bool
- actual map[NormalizedName]*Flag
- orderedActual []*Flag
- sortedActual []*Flag
- formal map[NormalizedName]*Flag
- orderedFormal []*Flag
- sortedFormal []*Flag
- shorthands map[byte]*Flag
- args []string // arguments after flags
- argsLenAtDash int // len(args) when a '--' was located when parsing, or -1 if no --
- errorHandling ErrorHandling
- output io.Writer // nil means stderr; use out() accessor
- interspersed bool // allow interspersed option/non-option args
- normalizeNameFunc func(f *FlagSet, name string) NormalizedName
-
- addedGoFlagSets []*goflag.FlagSet
-}
-
-// A Flag represents the state of a flag.
-type Flag struct {
- Name string // name as it appears on command line
- Shorthand string // one-letter abbreviated flag
- Usage string // help message
- Value Value // value as set
- DefValue string // default value (as text); for usage message
- Changed bool // If the user set the value (or if left to default)
- NoOptDefVal string // default value (as text); if the flag is on the command line without any options
- Deprecated string // If this flag is deprecated, this string holds the deprecation message naming what to use instead
- Hidden bool // used by cobra.Command to allow flags to be hidden from help/usage text
- ShorthandDeprecated string // If the shorthand of this flag is deprecated, this string holds the deprecation message naming what to use instead
- Annotations map[string][]string // used by cobra.Command bash autocompletion code
-}
-
-// Value is the interface to the dynamic value stored in a flag.
-// (The default value is represented as a string.)
-type Value interface {
- String() string
- Set(string) error
- Type() string
-}
-
-// sortFlags returns the flags as a slice in lexicographical sorted order.
-func sortFlags(flags map[NormalizedName]*Flag) []*Flag {
- list := make(sort.StringSlice, len(flags))
- i := 0
- for k := range flags {
- list[i] = string(k)
- i++
- }
- list.Sort()
- result := make([]*Flag, len(list))
- for i, name := range list {
- result[i] = flags[NormalizedName(name)]
- }
- return result
-}
-
-// SetNormalizeFunc allows you to add a function which can translate flag names.
-// Flags added to the FlagSet will be translated and then when anything tries to
-// look up the flag that will also be translated. So it would be possible to create
-// a flag named "getURL" and have it translated to "geturl". A user could then pass
-// "--getUrl" which may also be translated to "geturl" and everything will work.
-func (f *FlagSet) SetNormalizeFunc(n func(f *FlagSet, name string) NormalizedName) {
- f.normalizeNameFunc = n
- f.sortedFormal = f.sortedFormal[:0]
- for fname, flag := range f.formal {
- nname := f.normalizeFlagName(flag.Name)
- if fname == nname {
- continue
- }
- flag.Name = string(nname)
- delete(f.formal, fname)
- f.formal[nname] = flag
- if _, set := f.actual[fname]; set {
- delete(f.actual, fname)
- f.actual[nname] = flag
- }
- }
-}
-
-// GetNormalizeFunc returns the previously set NormalizeFunc, or a function
-// that performs no translation if none has been set.
-func (f *FlagSet) GetNormalizeFunc() func(f *FlagSet, name string) NormalizedName {
- if f.normalizeNameFunc != nil {
- return f.normalizeNameFunc
- }
- return func(f *FlagSet, name string) NormalizedName { return NormalizedName(name) }
-}
-
-func (f *FlagSet) normalizeFlagName(name string) NormalizedName {
- n := f.GetNormalizeFunc()
- return n(f, name)
-}
-
-func (f *FlagSet) out() io.Writer {
- if f.output == nil {
- return os.Stderr
- }
- return f.output
-}
-
-// SetOutput sets the destination for usage and error messages.
-// If output is nil, os.Stderr is used.
-func (f *FlagSet) SetOutput(output io.Writer) {
- f.output = output
-}
-
-// VisitAll visits the flags in lexicographical order or
-// in primordial order if f.SortFlags is false, calling fn for each.
-// It visits all flags, even those not set.
-func (f *FlagSet) VisitAll(fn func(*Flag)) {
- if len(f.formal) == 0 {
- return
- }
-
- var flags []*Flag
- if f.SortFlags {
- if len(f.formal) != len(f.sortedFormal) {
- f.sortedFormal = sortFlags(f.formal)
- }
- flags = f.sortedFormal
- } else {
- flags = f.orderedFormal
- }
-
- for _, flag := range flags {
- fn(flag)
- }
-}
-
-// HasFlags returns a bool to indicate if the FlagSet has any flags defined.
-func (f *FlagSet) HasFlags() bool {
- return len(f.formal) > 0
-}
-
-// HasAvailableFlags returns a bool to indicate if the FlagSet has any flags
-// that are not hidden.
-func (f *FlagSet) HasAvailableFlags() bool {
- for _, flag := range f.formal {
- if !flag.Hidden {
- return true
- }
- }
- return false
-}
-
-// VisitAll visits the command-line flags in lexicographical order or
-// in primordial order if f.SortFlags is false, calling fn for each.
-// It visits all flags, even those not set.
-func VisitAll(fn func(*Flag)) {
- CommandLine.VisitAll(fn)
-}
-
-// Visit visits the flags in lexicographical order or
-// in primordial order if f.SortFlags is false, calling fn for each.
-// It visits only those flags that have been set.
-func (f *FlagSet) Visit(fn func(*Flag)) {
- if len(f.actual) == 0 {
- return
- }
-
- var flags []*Flag
- if f.SortFlags {
- if len(f.actual) != len(f.sortedActual) {
- f.sortedActual = sortFlags(f.actual)
- }
- flags = f.sortedActual
- } else {
- flags = f.orderedActual
- }
-
- for _, flag := range flags {
- fn(flag)
- }
-}
-
-// Visit visits the command-line flags in lexicographical order or
-// in primordial order if f.SortFlags is false, calling fn for each.
-// It visits only those flags that have been set.
-func Visit(fn func(*Flag)) {
- CommandLine.Visit(fn)
-}
-
-// Lookup returns the Flag structure of the named flag, returning nil if none exists.
-func (f *FlagSet) Lookup(name string) *Flag {
- return f.lookup(f.normalizeFlagName(name))
-}
-
-// ShorthandLookup returns the Flag structure of the shorthand flag,
-// returning nil if none exists.
-// It panics if len(name) > 1.
-func (f *FlagSet) ShorthandLookup(name string) *Flag {
- if name == "" {
- return nil
- }
- if len(name) > 1 {
- msg := fmt.Sprintf("can not look up shorthand which is more than one ASCII character: %q", name)
- fmt.Fprintf(f.out(), msg)
- panic(msg)
- }
- c := name[0]
- return f.shorthands[c]
-}
-
-// lookup returns the Flag structure of the named flag, returning nil if none exists.
-func (f *FlagSet) lookup(name NormalizedName) *Flag {
- return f.formal[name]
-}
-
-// func to return a given type for a given flag name
-func (f *FlagSet) getFlagType(name string, ftype string, convFunc func(sval string) (interface{}, error)) (interface{}, error) {
- flag := f.Lookup(name)
- if flag == nil {
- err := fmt.Errorf("flag accessed but not defined: %s", name)
- return nil, err
- }
-
- if flag.Value.Type() != ftype {
- err := fmt.Errorf("trying to get %s value of flag of type %s", ftype, flag.Value.Type())
- return nil, err
- }
-
- sval := flag.Value.String()
- result, err := convFunc(sval)
- if err != nil {
- return nil, err
- }
- return result, nil
-}
-
-// ArgsLenAtDash will return the length of f.Args at the moment when a -- was
-// found during arg parsing. This allows your program to know which args were
-// before the -- and which came after.
-func (f *FlagSet) ArgsLenAtDash() int {
- return f.argsLenAtDash
-}
-
-// MarkDeprecated indicates that a flag is deprecated in your program. It will
-// continue to function but will not show up in help or usage messages. Using
-// this flag will also print the given usageMessage.
-func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error {
- flag := f.Lookup(name)
- if flag == nil {
- return fmt.Errorf("flag %q does not exist", name)
- }
- if usageMessage == "" {
- return fmt.Errorf("deprecated message for flag %q must be set", name)
- }
- flag.Deprecated = usageMessage
- flag.Hidden = true
- return nil
-}
-
-// MarkShorthandDeprecated will mark the shorthand of a flag deprecated in your
-// program. It will continue to function but will not show up in help or usage
-// messages. Using this flag will also print the given usageMessage.
-func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) error {
- flag := f.Lookup(name)
- if flag == nil {
- return fmt.Errorf("flag %q does not exist", name)
- }
- if usageMessage == "" {
- return fmt.Errorf("deprecated message for flag %q must be set", name)
- }
- flag.ShorthandDeprecated = usageMessage
- return nil
-}
-
-// MarkHidden sets a flag to 'hidden' in your program. It will continue to
-// function but will not show up in help or usage messages.
-func (f *FlagSet) MarkHidden(name string) error {
- flag := f.Lookup(name)
- if flag == nil {
- return fmt.Errorf("flag %q does not exist", name)
- }
- flag.Hidden = true
- return nil
-}
-
-// Lookup returns the Flag structure of the named command-line flag,
-// returning nil if none exists.
-func Lookup(name string) *Flag {
- return CommandLine.Lookup(name)
-}
-
-// ShorthandLookup returns the Flag structure of the shorthand flag,
-// returning nil if none exists.
-func ShorthandLookup(name string) *Flag {
- return CommandLine.ShorthandLookup(name)
-}
-
-// Set sets the value of the named flag.
-func (f *FlagSet) Set(name, value string) error {
- normalName := f.normalizeFlagName(name)
- flag, ok := f.formal[normalName]
- if !ok {
- return fmt.Errorf("no such flag -%v", name)
- }
-
- err := flag.Value.Set(value)
- if err != nil {
- var flagName string
- if flag.Shorthand != "" && flag.ShorthandDeprecated == "" {
- flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name)
- } else {
- flagName = fmt.Sprintf("--%s", flag.Name)
- }
- return fmt.Errorf("invalid argument %q for %q flag: %v", value, flagName, err)
- }
-
- if !flag.Changed {
- if f.actual == nil {
- f.actual = make(map[NormalizedName]*Flag)
- }
- f.actual[normalName] = flag
- f.orderedActual = append(f.orderedActual, flag)
-
- flag.Changed = true
- }
-
- if flag.Deprecated != "" {
- fmt.Fprintf(f.out(), "Flag --%s has been deprecated, %s\n", flag.Name, flag.Deprecated)
- }
- return nil
-}
-
-// SetAnnotation allows one to set arbitrary annotations on a flag in the FlagSet.
-// This is sometimes used by spf13/cobra programs which want to generate additional
-// bash completion information.
-func (f *FlagSet) SetAnnotation(name, key string, values []string) error {
- normalName := f.normalizeFlagName(name)
- flag, ok := f.formal[normalName]
- if !ok {
- return fmt.Errorf("no such flag -%v", name)
- }
- if flag.Annotations == nil {
- flag.Annotations = map[string][]string{}
- }
- flag.Annotations[key] = values
- return nil
-}
-
-// Changed returns true if the flag was explicitly set during Parse() and false
-// otherwise
-func (f *FlagSet) Changed(name string) bool {
- flag := f.Lookup(name)
- // If a flag doesn't exist, it wasn't changed....
- if flag == nil {
- return false
- }
- return flag.Changed
-}
-
-// Set sets the value of the named command-line flag.
-func Set(name, value string) error {
- return CommandLine.Set(name, value)
-}
-
-// PrintDefaults prints, to standard error unless configured
-// otherwise, the default values of all defined flags in the set.
-func (f *FlagSet) PrintDefaults() {
- usages := f.FlagUsages()
- fmt.Fprint(f.out(), usages)
-}
-
-// defaultIsZeroValue returns true if the default value for this flag represents
-// a zero value.
-func (f *Flag) defaultIsZeroValue() bool {
- switch f.Value.(type) {
- case boolFlag:
- return f.DefValue == "false"
- case *durationValue:
- // Beginning in Go 1.7, duration zero values are "0s"
- return f.DefValue == "0" || f.DefValue == "0s"
- case *intValue, *int8Value, *int32Value, *int64Value, *uintValue, *uint8Value, *uint16Value, *uint32Value, *uint64Value, *countValue, *float32Value, *float64Value:
- return f.DefValue == "0"
- case *stringValue:
- return f.DefValue == ""
- case *ipValue, *ipMaskValue, *ipNetValue:
- return f.DefValue == ""
- case *intSliceValue, *stringSliceValue, *stringArrayValue:
- return f.DefValue == "[]"
- default:
- switch f.Value.String() {
- case "false":
- return true
- case "":
- return true
- case "":
- return true
- case "0":
- return true
- }
- return false
- }
-}
-
-// UnquoteUsage extracts a back-quoted name from the usage
-// string for a flag and returns it and the un-quoted usage.
-// Given "a `name` to show" it returns ("name", "a name to show").
-// If there are no back quotes, the name is an educated guess of the
-// type of the flag's value, or the empty string if the flag is boolean.
-func UnquoteUsage(flag *Flag) (name string, usage string) {
- // Look for a back-quoted name, but avoid the strings package.
- usage = flag.Usage
- for i := 0; i < len(usage); i++ {
- if usage[i] == '`' {
- for j := i + 1; j < len(usage); j++ {
- if usage[j] == '`' {
- name = usage[i+1 : j]
- usage = usage[:i] + name + usage[j+1:]
- return name, usage
- }
- }
- break // Only one back quote; use type name.
- }
- }
-
- name = flag.Value.Type()
- switch name {
- case "bool":
- name = ""
- case "float64":
- name = "float"
- case "int64":
- name = "int"
- case "uint64":
- name = "uint"
- case "stringSlice":
- name = "strings"
- case "intSlice":
- name = "ints"
- case "uintSlice":
- name = "uints"
- case "boolSlice":
- name = "bools"
- }
-
- return
-}
-
-// Splits the string `s` on whitespace into an initial substring up to
-// `i` runes in length and the remainder. Will go `slop` over `i` if
-// that encompasses the entire string (which allows the caller to
-// avoid short orphan words on the final line).
-func wrapN(i, slop int, s string) (string, string) {
- if i+slop > len(s) {
- return s, ""
- }
-
- w := strings.LastIndexAny(s[:i], " \t\n")
- if w <= 0 {
- return s, ""
- }
- nlPos := strings.LastIndex(s[:i], "\n")
- if nlPos > 0 && nlPos < w {
- return s[:nlPos], s[nlPos+1:]
- }
- return s[:w], s[w+1:]
-}
-
-// Wraps the string `s` to a maximum width `w` with leading indent
-// `i`. The first line is not indented (this is assumed to be done by
-// caller). Pass `w` == 0 to do no wrapping
-func wrap(i, w int, s string) string {
- if w == 0 {
- return strings.Replace(s, "\n", "\n"+strings.Repeat(" ", i), -1)
- }
-
- // space between indent i and end of line width w into which
- // we should wrap the text.
- wrap := w - i
-
- var r, l string
-
- // Not enough space for sensible wrapping. Wrap as a block on
- // the next line instead.
- if wrap < 24 {
- i = 16
- wrap = w - i
- r += "\n" + strings.Repeat(" ", i)
- }
- // If still not enough space then don't even try to wrap.
- if wrap < 24 {
- return strings.Replace(s, "\n", r, -1)
- }
-
- // Try to avoid short orphan words on the final line, by
- // allowing wrapN to go a bit over if that would fit in the
- // remainder of the line.
- slop := 5
- wrap = wrap - slop
-
- // Handle first line, which is indented by the caller (or the
- // special case above)
- l, s = wrapN(wrap, slop, s)
- r = r + strings.Replace(l, "\n", "\n"+strings.Repeat(" ", i), -1)
-
- // Now wrap the rest
- for s != "" {
- var t string
-
- t, s = wrapN(wrap, slop, s)
- r = r + "\n" + strings.Repeat(" ", i) + strings.Replace(t, "\n", "\n"+strings.Repeat(" ", i), -1)
- }
-
- return r
-
-}
-
-// FlagUsagesWrapped returns a string containing the usage information
-// for all flags in the FlagSet. Wrapped to `cols` columns (0 for no
-// wrapping)
-func (f *FlagSet) FlagUsagesWrapped(cols int) string {
- buf := new(bytes.Buffer)
-
- lines := make([]string, 0, len(f.formal))
-
- maxlen := 0
- f.VisitAll(func(flag *Flag) {
- if flag.Hidden {
- return
- }
-
- line := ""
- if flag.Shorthand != "" && flag.ShorthandDeprecated == "" {
- line = fmt.Sprintf(" -%s, --%s", flag.Shorthand, flag.Name)
- } else {
- line = fmt.Sprintf(" --%s", flag.Name)
- }
-
- varname, usage := UnquoteUsage(flag)
- if varname != "" {
- line += " " + varname
- }
- if flag.NoOptDefVal != "" {
- switch flag.Value.Type() {
- case "string":
- line += fmt.Sprintf("[=\"%s\"]", flag.NoOptDefVal)
- case "bool":
- if flag.NoOptDefVal != "true" {
- line += fmt.Sprintf("[=%s]", flag.NoOptDefVal)
- }
- case "count":
- if flag.NoOptDefVal != "+1" {
- line += fmt.Sprintf("[=%s]", flag.NoOptDefVal)
- }
- default:
- line += fmt.Sprintf("[=%s]", flag.NoOptDefVal)
- }
- }
-
- // This special character will be replaced with spacing once the
- // correct alignment is calculated
- line += "\x00"
- if len(line) > maxlen {
- maxlen = len(line)
- }
-
- line += usage
- if !flag.defaultIsZeroValue() {
- if flag.Value.Type() == "string" {
- line += fmt.Sprintf(" (default %q)", flag.DefValue)
- } else {
- line += fmt.Sprintf(" (default %s)", flag.DefValue)
- }
- }
- if len(flag.Deprecated) != 0 {
- line += fmt.Sprintf(" (DEPRECATED: %s)", flag.Deprecated)
- }
-
- lines = append(lines, line)
- })
-
- for _, line := range lines {
- sidx := strings.Index(line, "\x00")
- spacing := strings.Repeat(" ", maxlen-sidx)
- // maxlen + 2 comes from + 1 for the \x00 and + 1 for the (deliberate) off-by-one in maxlen-sidx
- fmt.Fprintln(buf, line[:sidx], spacing, wrap(maxlen+2, cols, line[sidx+1:]))
- }
-
- return buf.String()
-}
-
-// FlagUsages returns a string containing the usage information for all flags in
-// the FlagSet
-func (f *FlagSet) FlagUsages() string {
- return f.FlagUsagesWrapped(0)
-}
-
-// PrintDefaults prints to standard error the default values of all defined command-line flags.
-func PrintDefaults() {
- CommandLine.PrintDefaults()
-}
-
-// defaultUsage is the default function to print a usage message.
-func defaultUsage(f *FlagSet) {
- fmt.Fprintf(f.out(), "Usage of %s:\n", f.name)
- f.PrintDefaults()
-}
-
-// NOTE: Usage is not just defaultUsage(CommandLine)
-// because it serves (via godoc flag Usage) as the example
-// for how to write your own usage function.
-
-// Usage prints to standard error a usage message documenting all defined command-line flags.
-// The function is a variable that may be changed to point to a custom function.
-// By default it prints a simple header and calls PrintDefaults; for details about the
-// format of the output and how to control it, see the documentation for PrintDefaults.
-var Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
- PrintDefaults()
-}
-
-// NFlag returns the number of flags that have been set.
-func (f *FlagSet) NFlag() int { return len(f.actual) }
-
-// NFlag returns the number of command-line flags that have been set.
-func NFlag() int { return len(CommandLine.actual) }
-
-// Arg returns the i'th argument. Arg(0) is the first remaining argument
-// after flags have been processed.
-func (f *FlagSet) Arg(i int) string {
- if i < 0 || i >= len(f.args) {
- return ""
- }
- return f.args[i]
-}
-
-// Arg returns the i'th command-line argument. Arg(0) is the first remaining argument
-// after flags have been processed.
-func Arg(i int) string {
- return CommandLine.Arg(i)
-}
-
-// NArg is the number of arguments remaining after flags have been processed.
-func (f *FlagSet) NArg() int { return len(f.args) }
-
-// NArg is the number of arguments remaining after flags have been processed.
-func NArg() int { return len(CommandLine.args) }
-
-// Args returns the non-flag arguments.
-func (f *FlagSet) Args() []string { return f.args }
-
-// Args returns the non-flag command-line arguments.
-func Args() []string { return CommandLine.args }
-
-// Var defines a flag with the specified name and usage string. The type and
-// value of the flag are represented by the first argument, of type Value, which
-// typically holds a user-defined implementation of Value. For instance, the
-// caller could create a flag that turns a comma-separated string into a slice
-// of strings by giving the slice the methods of Value; in particular, Set would
-// decompose the comma-separated string into the slice.
-func (f *FlagSet) Var(value Value, name string, usage string) {
- f.VarP(value, name, "", usage)
-}
-
-// VarPF is like VarP, but returns the flag created
-func (f *FlagSet) VarPF(value Value, name, shorthand, usage string) *Flag {
- // Remember the default value as a string; it won't change.
- flag := &Flag{
- Name: name,
- Shorthand: shorthand,
- Usage: usage,
- Value: value,
- DefValue: value.String(),
- }
- f.AddFlag(flag)
- return flag
-}
-
-// VarP is like Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) VarP(value Value, name, shorthand, usage string) {
- f.VarPF(value, name, shorthand, usage)
-}
-
-// AddFlag will add the flag to the FlagSet
-func (f *FlagSet) AddFlag(flag *Flag) {
- normalizedFlagName := f.normalizeFlagName(flag.Name)
-
- _, alreadyThere := f.formal[normalizedFlagName]
- if alreadyThere {
- msg := fmt.Sprintf("%s flag redefined: %s", f.name, flag.Name)
- fmt.Fprintln(f.out(), msg)
- panic(msg) // Happens only if flags are declared with identical names
- }
- if f.formal == nil {
- f.formal = make(map[NormalizedName]*Flag)
- }
-
- flag.Name = string(normalizedFlagName)
- f.formal[normalizedFlagName] = flag
- f.orderedFormal = append(f.orderedFormal, flag)
-
- if flag.Shorthand == "" {
- return
- }
- if len(flag.Shorthand) > 1 {
- msg := fmt.Sprintf("%q shorthand is more than one ASCII character", flag.Shorthand)
- fmt.Fprintf(f.out(), msg)
- panic(msg)
- }
- if f.shorthands == nil {
- f.shorthands = make(map[byte]*Flag)
- }
- c := flag.Shorthand[0]
- used, alreadyThere := f.shorthands[c]
- if alreadyThere {
- msg := fmt.Sprintf("unable to redefine %q shorthand in %q flagset: it's already used for %q flag", c, f.name, used.Name)
- fmt.Fprintf(f.out(), msg)
- panic(msg)
- }
- f.shorthands[c] = flag
-}
-
-// AddFlagSet adds one FlagSet to another. If a flag is already present in f
-// the flag from newSet will be ignored.
-func (f *FlagSet) AddFlagSet(newSet *FlagSet) {
- if newSet == nil {
- return
- }
- newSet.VisitAll(func(flag *Flag) {
- if f.Lookup(flag.Name) == nil {
- f.AddFlag(flag)
- }
- })
-}
-
-// Var defines a flag with the specified name and usage string. The type and
-// value of the flag are represented by the first argument, of type Value, which
-// typically holds a user-defined implementation of Value. For instance, the
-// caller could create a flag that turns a comma-separated string into a slice
-// of strings by giving the slice the methods of Value; in particular, Set would
-// decompose the comma-separated string into the slice.
-func Var(value Value, name string, usage string) {
- CommandLine.VarP(value, name, "", usage)
-}
-
-// VarP is like Var, but accepts a shorthand letter that can be used after a single dash.
-func VarP(value Value, name, shorthand, usage string) {
- CommandLine.VarP(value, name, shorthand, usage)
-}
-
-// failf prints to standard error a formatted error and usage message and
-// returns the error.
-func (f *FlagSet) failf(format string, a ...interface{}) error {
- err := fmt.Errorf(format, a...)
- if f.errorHandling != ContinueOnError {
- fmt.Fprintln(f.out(), err)
- f.usage()
- }
- return err
-}
-
-// usage calls the Usage method for the flag set, or the usage function if
-// the flag set is CommandLine.
-func (f *FlagSet) usage() {
- if f == CommandLine {
- Usage()
- } else if f.Usage == nil {
- defaultUsage(f)
- } else {
- f.Usage()
- }
-}
-
-//--unknown (args will be empty)
-//--unknown --next-flag ... (args will be --next-flag ...)
-//--unknown arg ... (args will be arg ...)
-func stripUnknownFlagValue(args []string) []string {
- if len(args) == 0 {
- //--unknown
- return args
- }
-
- first := args[0]
- if len(first) > 0 && first[0] == '-' {
- //--unknown --next-flag ...
- return args
- }
-
- //--unknown arg ... (args will be arg ...)
- if len(args) > 1 {
- return args[1:]
- }
- return nil
-}
-
-func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []string, err error) {
- a = args
- name := s[2:]
- if len(name) == 0 || name[0] == '-' || name[0] == '=' {
- err = f.failf("bad flag syntax: %s", s)
- return
- }
-
- split := strings.SplitN(name, "=", 2)
- name = split[0]
- flag, exists := f.formal[f.normalizeFlagName(name)]
-
- if !exists {
- switch {
- case name == "help":
- f.usage()
- return a, ErrHelp
- case f.ParseErrorsWhitelist.UnknownFlags:
- // --unknown=unknownval arg ...
- // we do not want to lose arg in this case
- if len(split) >= 2 {
- return a, nil
- }
-
- return stripUnknownFlagValue(a), nil
- default:
- err = f.failf("unknown flag: --%s", name)
- return
- }
- }
-
- var value string
- if len(split) == 2 {
- // '--flag=arg'
- value = split[1]
- } else if flag.NoOptDefVal != "" {
- // '--flag' (arg was optional)
- value = flag.NoOptDefVal
- } else if len(a) > 0 {
- // '--flag arg'
- value = a[0]
- a = a[1:]
- } else {
- // '--flag' (arg was required)
- err = f.failf("flag needs an argument: %s", s)
- return
- }
-
- err = fn(flag, value)
- if err != nil {
- f.failf(err.Error())
- }
- return
-}
-
-func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parseFunc) (outShorts string, outArgs []string, err error) {
- outArgs = args
-
- if strings.HasPrefix(shorthands, "test.") {
- return
- }
-
- outShorts = shorthands[1:]
- c := shorthands[0]
-
- flag, exists := f.shorthands[c]
- if !exists {
- switch {
- case c == 'h':
- f.usage()
- err = ErrHelp
- return
- case f.ParseErrorsWhitelist.UnknownFlags:
- // '-f=arg arg ...'
- // we do not want to lose arg in this case
- if len(shorthands) > 2 && shorthands[1] == '=' {
- outShorts = ""
- return
- }
-
- outArgs = stripUnknownFlagValue(outArgs)
- return
- default:
- err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands)
- return
- }
- }
-
- var value string
- if len(shorthands) > 2 && shorthands[1] == '=' {
- // '-f=arg'
- value = shorthands[2:]
- outShorts = ""
- } else if flag.NoOptDefVal != "" {
- // '-f' (arg was optional)
- value = flag.NoOptDefVal
- } else if len(shorthands) > 1 {
- // '-farg'
- value = shorthands[1:]
- outShorts = ""
- } else if len(args) > 0 {
- // '-f arg'
- value = args[0]
- outArgs = args[1:]
- } else {
- // '-f' (arg was required)
- err = f.failf("flag needs an argument: %q in -%s", c, shorthands)
- return
- }
-
- if flag.ShorthandDeprecated != "" {
- fmt.Fprintf(f.out(), "Flag shorthand -%s has been deprecated, %s\n", flag.Shorthand, flag.ShorthandDeprecated)
- }
-
- err = fn(flag, value)
- if err != nil {
- f.failf(err.Error())
- }
- return
-}
-
-func (f *FlagSet) parseShortArg(s string, args []string, fn parseFunc) (a []string, err error) {
- a = args
- shorthands := s[1:]
-
- // "shorthands" can be a series of shorthand letters of flags (e.g. "-vvv").
- for len(shorthands) > 0 {
- shorthands, a, err = f.parseSingleShortArg(shorthands, args, fn)
- if err != nil {
- return
- }
- }
-
- return
-}
-
-func (f *FlagSet) parseArgs(args []string, fn parseFunc) (err error) {
- for len(args) > 0 {
- s := args[0]
- args = args[1:]
- if len(s) == 0 || s[0] != '-' || len(s) == 1 {
- if !f.interspersed {
- f.args = append(f.args, s)
- f.args = append(f.args, args...)
- return nil
- }
- f.args = append(f.args, s)
- continue
- }
-
- if s[1] == '-' {
- if len(s) == 2 { // "--" terminates the flags
- f.argsLenAtDash = len(f.args)
- f.args = append(f.args, args...)
- break
- }
- args, err = f.parseLongArg(s, args, fn)
- } else {
- args, err = f.parseShortArg(s, args, fn)
- }
- if err != nil {
- return
- }
- }
- return
-}
-
-// Parse parses flag definitions from the argument list, which should not
-// include the command name. Must be called after all flags in the FlagSet
-// are defined and before flags are accessed by the program.
-// The return value will be ErrHelp if -help was set but not defined.
-func (f *FlagSet) Parse(arguments []string) error {
- if f.addedGoFlagSets != nil {
- for _, goFlagSet := range f.addedGoFlagSets {
- goFlagSet.Parse(nil)
- }
- }
- f.parsed = true
-
- if len(arguments) < 0 {
- return nil
- }
-
- f.args = make([]string, 0, len(arguments))
-
- set := func(flag *Flag, value string) error {
- return f.Set(flag.Name, value)
- }
-
- err := f.parseArgs(arguments, set)
- if err != nil {
- switch f.errorHandling {
- case ContinueOnError:
- return err
- case ExitOnError:
- fmt.Println(err)
- os.Exit(2)
- case PanicOnError:
- panic(err)
- }
- }
- return nil
-}
-
-type parseFunc func(flag *Flag, value string) error
-
-// ParseAll parses flag definitions from the argument list, which should not
-// include the command name. The arguments for fn are flag and value. Must be
-// called after all flags in the FlagSet are defined and before flags are
-// accessed by the program. The return value will be ErrHelp if -help was set
-// but not defined.
-func (f *FlagSet) ParseAll(arguments []string, fn func(flag *Flag, value string) error) error {
- f.parsed = true
- f.args = make([]string, 0, len(arguments))
-
- err := f.parseArgs(arguments, fn)
- if err != nil {
- switch f.errorHandling {
- case ContinueOnError:
- return err
- case ExitOnError:
- os.Exit(2)
- case PanicOnError:
- panic(err)
- }
- }
- return nil
-}
-
-// Parsed reports whether f.Parse has been called.
-func (f *FlagSet) Parsed() bool {
- return f.parsed
-}
-
-// Parse parses the command-line flags from os.Args[1:]. Must be called
-// after all flags are defined and before flags are accessed by the program.
-func Parse() {
- // Ignore errors; CommandLine is set for ExitOnError.
- CommandLine.Parse(os.Args[1:])
-}
-
-// ParseAll parses the command-line flags from os.Args[1:] and calls fn for each.
-// The arguments for fn are flag and value. Must be called after all flags are
-// defined and before flags are accessed by the program.
-func ParseAll(fn func(flag *Flag, value string) error) {
- // Ignore errors; CommandLine is set for ExitOnError.
- CommandLine.ParseAll(os.Args[1:], fn)
-}
-
-// SetInterspersed sets whether to support interspersed option/non-option arguments.
-func SetInterspersed(interspersed bool) {
- CommandLine.SetInterspersed(interspersed)
-}
-
-// Parsed returns true if the command-line flags have been parsed.
-func Parsed() bool {
- return CommandLine.Parsed()
-}
-
-// CommandLine is the default set of command-line flags, parsed from os.Args.
-var CommandLine = NewFlagSet(os.Args[0], ExitOnError)
-
-// NewFlagSet returns a new, empty flag set with the specified name,
-// error handling property and SortFlags set to true.
-func NewFlagSet(name string, errorHandling ErrorHandling) *FlagSet {
- f := &FlagSet{
- name: name,
- errorHandling: errorHandling,
- argsLenAtDash: -1,
- interspersed: true,
- SortFlags: true,
- }
- return f
-}
-
-// SetInterspersed sets whether to support interspersed option/non-option arguments.
-func (f *FlagSet) SetInterspersed(interspersed bool) {
- f.interspersed = interspersed
-}
-
-// Init sets the name and error handling property for a flag set.
-// By default, the zero FlagSet uses an empty name and the
-// ContinueOnError error handling policy.
-func (f *FlagSet) Init(name string, errorHandling ErrorHandling) {
- f.name = name
- f.errorHandling = errorHandling
- f.argsLenAtDash = -1
-}
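The flag.go file removed above is the core of the vendored pflag API: FlagSet, long/shorthand parsing, name normalization, NoOptDefVal handling, and the Changed/Args accessors. A minimal usage sketch of that API follows; the flag names and argument values are illustrative only and are not taken from this repository.

    package main

    import (
        "fmt"
        "strings"

        flag "github.com/spf13/pflag"
    )

    func main() {
        fs := flag.NewFlagSet("example", flag.ContinueOnError)

        // Treat '_' and '-' as equivalent in flag names.
        fs.SetNormalizeFunc(func(f *flag.FlagSet, name string) flag.NormalizedName {
            return flag.NormalizedName(strings.Replace(name, "_", "-", -1))
        })

        // Illustrative flags: long names with one-letter shorthands.
        verbose := fs.BoolP("verbose", "v", false, "enable verbose output")
        retries := fs.IntP("max-retries", "r", 3, "maximum retry attempts")

        // "--max_retries" normalizes to "--max-retries"; "-v" uses the bool
        // flag's NoOptDefVal, so it needs no argument.
        if err := fs.Parse([]string{"--max_retries=5", "-v", "positional-arg"}); err != nil {
            fmt.Println("parse error:", err)
            return
        }

        fmt.Println(*verbose, *retries)        // true 5
        fmt.Println(fs.Changed("max-retries")) // true: set explicitly above
        fmt.Println(fs.Args())                 // [positional-arg]
    }
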
diff --git a/vendor/github.com/spf13/pflag/float32.go b/vendor/github.com/spf13/pflag/float32.go
deleted file mode 100644
index a243f81..0000000
--- a/vendor/github.com/spf13/pflag/float32.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- float32 Value
-type float32Value float32
-
-func newFloat32Value(val float32, p *float32) *float32Value {
- *p = val
- return (*float32Value)(p)
-}
-
-func (f *float32Value) Set(s string) error {
- v, err := strconv.ParseFloat(s, 32)
- *f = float32Value(v)
- return err
-}
-
-func (f *float32Value) Type() string {
- return "float32"
-}
-
-func (f *float32Value) String() string { return strconv.FormatFloat(float64(*f), 'g', -1, 32) }
-
-func float32Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseFloat(sval, 32)
- if err != nil {
- return 0, err
- }
- return float32(v), nil
-}
-
-// GetFloat32 returns the float32 value of a flag with the given name
-func (f *FlagSet) GetFloat32(name string) (float32, error) {
- val, err := f.getFlagType(name, "float32", float32Conv)
- if err != nil {
- return 0, err
- }
- return val.(float32), nil
-}
-
-// Float32Var defines a float32 flag with specified name, default value, and usage string.
-// The argument p points to a float32 variable in which to store the value of the flag.
-func (f *FlagSet) Float32Var(p *float32, name string, value float32, usage string) {
- f.VarP(newFloat32Value(value, p), name, "", usage)
-}
-
-// Float32VarP is like Float32Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Float32VarP(p *float32, name, shorthand string, value float32, usage string) {
- f.VarP(newFloat32Value(value, p), name, shorthand, usage)
-}
-
-// Float32Var defines a float32 flag with specified name, default value, and usage string.
-// The argument p points to a float32 variable in which to store the value of the flag.
-func Float32Var(p *float32, name string, value float32, usage string) {
- CommandLine.VarP(newFloat32Value(value, p), name, "", usage)
-}
-
-// Float32VarP is like Float32Var, but accepts a shorthand letter that can be used after a single dash.
-func Float32VarP(p *float32, name, shorthand string, value float32, usage string) {
- CommandLine.VarP(newFloat32Value(value, p), name, shorthand, usage)
-}
-
-// Float32 defines a float32 flag with specified name, default value, and usage string.
-// The return value is the address of a float32 variable that stores the value of the flag.
-func (f *FlagSet) Float32(name string, value float32, usage string) *float32 {
- p := new(float32)
- f.Float32VarP(p, name, "", value, usage)
- return p
-}
-
-// Float32P is like Float32, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Float32P(name, shorthand string, value float32, usage string) *float32 {
- p := new(float32)
- f.Float32VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Float32 defines a float32 flag with specified name, default value, and usage string.
-// The return value is the address of a float32 variable that stores the value of the flag.
-func Float32(name string, value float32, usage string) *float32 {
- return CommandLine.Float32P(name, "", value, usage)
-}
-
-// Float32P is like Float32, but accepts a shorthand letter that can be used after a single dash.
-func Float32P(name, shorthand string, value float32, usage string) *float32 {
- return CommandLine.Float32P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/float64.go b/vendor/github.com/spf13/pflag/float64.go
deleted file mode 100644
index 04b5492..0000000
--- a/vendor/github.com/spf13/pflag/float64.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- float64 Value
-type float64Value float64
-
-func newFloat64Value(val float64, p *float64) *float64Value {
- *p = val
- return (*float64Value)(p)
-}
-
-func (f *float64Value) Set(s string) error {
- v, err := strconv.ParseFloat(s, 64)
- *f = float64Value(v)
- return err
-}
-
-func (f *float64Value) Type() string {
- return "float64"
-}
-
-func (f *float64Value) String() string { return strconv.FormatFloat(float64(*f), 'g', -1, 64) }
-
-func float64Conv(sval string) (interface{}, error) {
- return strconv.ParseFloat(sval, 64)
-}
-
-// GetFloat64 returns the float64 value of a flag with the given name
-func (f *FlagSet) GetFloat64(name string) (float64, error) {
- val, err := f.getFlagType(name, "float64", float64Conv)
- if err != nil {
- return 0, err
- }
- return val.(float64), nil
-}
-
-// Float64Var defines a float64 flag with specified name, default value, and usage string.
-// The argument p points to a float64 variable in which to store the value of the flag.
-func (f *FlagSet) Float64Var(p *float64, name string, value float64, usage string) {
- f.VarP(newFloat64Value(value, p), name, "", usage)
-}
-
-// Float64VarP is like Float64Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Float64VarP(p *float64, name, shorthand string, value float64, usage string) {
- f.VarP(newFloat64Value(value, p), name, shorthand, usage)
-}
-
-// Float64Var defines a float64 flag with specified name, default value, and usage string.
-// The argument p points to a float64 variable in which to store the value of the flag.
-func Float64Var(p *float64, name string, value float64, usage string) {
- CommandLine.VarP(newFloat64Value(value, p), name, "", usage)
-}
-
-// Float64VarP is like Float64Var, but accepts a shorthand letter that can be used after a single dash.
-func Float64VarP(p *float64, name, shorthand string, value float64, usage string) {
- CommandLine.VarP(newFloat64Value(value, p), name, shorthand, usage)
-}
-
-// Float64 defines a float64 flag with specified name, default value, and usage string.
-// The return value is the address of a float64 variable that stores the value of the flag.
-func (f *FlagSet) Float64(name string, value float64, usage string) *float64 {
- p := new(float64)
- f.Float64VarP(p, name, "", value, usage)
- return p
-}
-
-// Float64P is like Float64, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Float64P(name, shorthand string, value float64, usage string) *float64 {
- p := new(float64)
- f.Float64VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Float64 defines a float64 flag with specified name, default value, and usage string.
-// The return value is the address of a float64 variable that stores the value of the flag.
-func Float64(name string, value float64, usage string) *float64 {
- return CommandLine.Float64P(name, "", value, usage)
-}
-
-// Float64P is like Float64, but accepts a shorthand letter that can be used after a single dash.
-func Float64P(name, shorthand string, value float64, usage string) *float64 {
- return CommandLine.Float64P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/golangflag.go b/vendor/github.com/spf13/pflag/golangflag.go
deleted file mode 100644
index d3dd72b..0000000
--- a/vendor/github.com/spf13/pflag/golangflag.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package pflag
-
-import (
- goflag "flag"
- "reflect"
- "strings"
-)
-
-// flagValueWrapper implements pflag.Value around a flag.Value. The main
-// difference here is the addition of the Type method that returns a string
-// name of the type. As this is generally unknown, we approximate that with
-// reflection.
-type flagValueWrapper struct {
- inner goflag.Value
- flagType string
-}
-
-// We are just copying the boolFlag interface out of goflag as that is what
-// they use to decide if a flag should get "true" when no arg is given.
-type goBoolFlag interface {
- goflag.Value
- IsBoolFlag() bool
-}
-
-func wrapFlagValue(v goflag.Value) Value {
- // If the flag.Value happens to also be a pflag.Value, just use it directly.
- if pv, ok := v.(Value); ok {
- return pv
- }
-
- pv := &flagValueWrapper{
- inner: v,
- }
-
- t := reflect.TypeOf(v)
- if t.Kind() == reflect.Interface || t.Kind() == reflect.Ptr {
- t = t.Elem()
- }
-
- pv.flagType = strings.TrimSuffix(t.Name(), "Value")
- return pv
-}
-
-func (v *flagValueWrapper) String() string {
- return v.inner.String()
-}
-
-func (v *flagValueWrapper) Set(s string) error {
- return v.inner.Set(s)
-}
-
-func (v *flagValueWrapper) Type() string {
- return v.flagType
-}
-
-// PFlagFromGoFlag will return a *pflag.Flag given a *flag.Flag
-// If the *flag.Flag.Name was a single character (ex: `v`) it will be accessible
-// with both `-v` and `--v` in flags. If the golang flag was more than a single
-// character (ex: `verbose`) it will only be accessible via `--verbose`
-func PFlagFromGoFlag(goflag *goflag.Flag) *Flag {
- // Remember the default value as a string; it won't change.
- flag := &Flag{
- Name: goflag.Name,
- Usage: goflag.Usage,
- Value: wrapFlagValue(goflag.Value),
- // Looks like golang flags don't set DefValue correctly :-(
- //DefValue: goflag.DefValue,
- DefValue: goflag.Value.String(),
- }
- // Ex: if the golang flag was -v, allow both -v and --v to work
- if len(flag.Name) == 1 {
- flag.Shorthand = flag.Name
- }
- if fv, ok := goflag.Value.(goBoolFlag); ok && fv.IsBoolFlag() {
- flag.NoOptDefVal = "true"
- }
- return flag
-}
-
-// AddGoFlag will add the given *flag.Flag to the pflag.FlagSet
-func (f *FlagSet) AddGoFlag(goflag *goflag.Flag) {
- if f.Lookup(goflag.Name) != nil {
- return
- }
- newflag := PFlagFromGoFlag(goflag)
- f.AddFlag(newflag)
-}
-
-// AddGoFlagSet will add the given *flag.FlagSet to the pflag.FlagSet
-func (f *FlagSet) AddGoFlagSet(newSet *goflag.FlagSet) {
- if newSet == nil {
- return
- }
- newSet.VisitAll(func(goflag *goflag.Flag) {
- f.AddGoFlag(goflag)
- })
- if f.addedGoFlagSets == nil {
- f.addedGoFlagSets = make([]*goflag.FlagSet, 0)
- }
- f.addedGoFlagSets = append(f.addedGoFlagSets, newSet)
-}
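golangflag.go, removed above, bridges flags registered through the standard library flag package into pflag: each stdlib flag is wrapped in a pflag Value, single-letter names also gain a shorthand, and bool flags keep their no-argument form. A minimal sketch of the usual pattern; the "gv" flag is purely illustrative, standing in for flags defined by an imported dependency.

    package main

    import (
        goflag "flag"
        "fmt"

        flag "github.com/spf13/pflag"
    )

    func main() {
        // Illustrative flag registered through the stdlib flag package.
        goVerbosity := goflag.Int("gv", 0, "verbosity (stdlib flag)")

        // Expose every stdlib flag on the pflag command line.
        flag.CommandLine.AddGoFlagSet(goflag.CommandLine)

        // Parsing through pflag also marks the stdlib flag set as parsed,
        // since AddGoFlagSet records it and Parse calls its Parse(nil).
        flag.CommandLine.Parse([]string{"--gv=2"})

        fmt.Println(*goVerbosity) // 2
    }
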
diff --git a/vendor/github.com/spf13/pflag/int.go b/vendor/github.com/spf13/pflag/int.go
deleted file mode 100644
index 1474b89..0000000
--- a/vendor/github.com/spf13/pflag/int.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- int Value
-type intValue int
-
-func newIntValue(val int, p *int) *intValue {
- *p = val
- return (*intValue)(p)
-}
-
-func (i *intValue) Set(s string) error {
- v, err := strconv.ParseInt(s, 0, 64)
- *i = intValue(v)
- return err
-}
-
-func (i *intValue) Type() string {
- return "int"
-}
-
-func (i *intValue) String() string { return strconv.Itoa(int(*i)) }
-
-func intConv(sval string) (interface{}, error) {
- return strconv.Atoi(sval)
-}
-
-// GetInt returns the int value of a flag with the given name
-func (f *FlagSet) GetInt(name string) (int, error) {
- val, err := f.getFlagType(name, "int", intConv)
- if err != nil {
- return 0, err
- }
- return val.(int), nil
-}
-
-// IntVar defines an int flag with specified name, default value, and usage string.
-// The argument p points to an int variable in which to store the value of the flag.
-func (f *FlagSet) IntVar(p *int, name string, value int, usage string) {
- f.VarP(newIntValue(value, p), name, "", usage)
-}
-
-// IntVarP is like IntVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IntVarP(p *int, name, shorthand string, value int, usage string) {
- f.VarP(newIntValue(value, p), name, shorthand, usage)
-}
-
-// IntVar defines an int flag with specified name, default value, and usage string.
-// The argument p points to an int variable in which to store the value of the flag.
-func IntVar(p *int, name string, value int, usage string) {
- CommandLine.VarP(newIntValue(value, p), name, "", usage)
-}
-
-// IntVarP is like IntVar, but accepts a shorthand letter that can be used after a single dash.
-func IntVarP(p *int, name, shorthand string, value int, usage string) {
- CommandLine.VarP(newIntValue(value, p), name, shorthand, usage)
-}
-
-// Int defines an int flag with specified name, default value, and usage string.
-// The return value is the address of an int variable that stores the value of the flag.
-func (f *FlagSet) Int(name string, value int, usage string) *int {
- p := new(int)
- f.IntVarP(p, name, "", value, usage)
- return p
-}
-
-// IntP is like Int, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IntP(name, shorthand string, value int, usage string) *int {
- p := new(int)
- f.IntVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Int defines an int flag with specified name, default value, and usage string.
-// The return value is the address of an int variable that stores the value of the flag.
-func Int(name string, value int, usage string) *int {
- return CommandLine.IntP(name, "", value, usage)
-}
-
-// IntP is like Int, but accepts a shorthand letter that can be used after a single dash.
-func IntP(name, shorthand string, value int, usage string) *int {
- return CommandLine.IntP(name, shorthand, value, usage)
-}
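int.go, removed above, follows the same template as the float32/float64 files above and the remaining intN files deleted below (int8, int16, int32, int64): an unexported Value wrapper plus Var/VarP, pointer-returning, and Get helpers. A small sketch of that accessor pattern, again with illustrative flag names.

    package main

    import (
        "fmt"

        flag "github.com/spf13/pflag"
    )

    func main() {
        fs := flag.NewFlagSet("example", flag.ContinueOnError)

        // Pointer-returning and Var-style definitions of int flags.
        workers := fs.IntP("workers", "w", 4, "number of workers")
        var timeoutSec int
        fs.IntVar(&timeoutSec, "timeout", 30, "timeout in seconds")

        _ = fs.Parse([]string{"-w", "8"})

        // GetInt looks the flag up by name and checks that Type() is "int",
        // which is handy when only the *FlagSet is passed around.
        w, err := fs.GetInt("workers")
        fmt.Println(*workers, w, timeoutSec, err) // 8 8 30 <nil>
    }
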
diff --git a/vendor/github.com/spf13/pflag/int16.go b/vendor/github.com/spf13/pflag/int16.go
deleted file mode 100644
index f1a01d0..0000000
--- a/vendor/github.com/spf13/pflag/int16.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- int16 Value
-type int16Value int16
-
-func newInt16Value(val int16, p *int16) *int16Value {
- *p = val
- return (*int16Value)(p)
-}
-
-func (i *int16Value) Set(s string) error {
- v, err := strconv.ParseInt(s, 0, 16)
- *i = int16Value(v)
- return err
-}
-
-func (i *int16Value) Type() string {
- return "int16"
-}
-
-func (i *int16Value) String() string { return strconv.FormatInt(int64(*i), 10) }
-
-func int16Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseInt(sval, 0, 16)
- if err != nil {
- return 0, err
- }
- return int16(v), nil
-}
-
-// GetInt16 returns the int16 value of a flag with the given name
-func (f *FlagSet) GetInt16(name string) (int16, error) {
- val, err := f.getFlagType(name, "int16", int16Conv)
- if err != nil {
- return 0, err
- }
- return val.(int16), nil
-}
-
-// Int16Var defines an int16 flag with specified name, default value, and usage string.
-// The argument p points to an int16 variable in which to store the value of the flag.
-func (f *FlagSet) Int16Var(p *int16, name string, value int16, usage string) {
- f.VarP(newInt16Value(value, p), name, "", usage)
-}
-
-// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int16VarP(p *int16, name, shorthand string, value int16, usage string) {
- f.VarP(newInt16Value(value, p), name, shorthand, usage)
-}
-
-// Int16Var defines an int16 flag with specified name, default value, and usage string.
-// The argument p points to an int16 variable in which to store the value of the flag.
-func Int16Var(p *int16, name string, value int16, usage string) {
- CommandLine.VarP(newInt16Value(value, p), name, "", usage)
-}
-
-// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash.
-func Int16VarP(p *int16, name, shorthand string, value int16, usage string) {
- CommandLine.VarP(newInt16Value(value, p), name, shorthand, usage)
-}
-
-// Int16 defines an int16 flag with specified name, default value, and usage string.
-// The return value is the address of an int16 variable that stores the value of the flag.
-func (f *FlagSet) Int16(name string, value int16, usage string) *int16 {
- p := new(int16)
- f.Int16VarP(p, name, "", value, usage)
- return p
-}
-
-// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int16P(name, shorthand string, value int16, usage string) *int16 {
- p := new(int16)
- f.Int16VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Int16 defines an int16 flag with specified name, default value, and usage string.
-// The return value is the address of an int16 variable that stores the value of the flag.
-func Int16(name string, value int16, usage string) *int16 {
- return CommandLine.Int16P(name, "", value, usage)
-}
-
-// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash.
-func Int16P(name, shorthand string, value int16, usage string) *int16 {
- return CommandLine.Int16P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/int32.go b/vendor/github.com/spf13/pflag/int32.go
deleted file mode 100644
index 9b95944..0000000
--- a/vendor/github.com/spf13/pflag/int32.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- int32 Value
-type int32Value int32
-
-func newInt32Value(val int32, p *int32) *int32Value {
- *p = val
- return (*int32Value)(p)
-}
-
-func (i *int32Value) Set(s string) error {
- v, err := strconv.ParseInt(s, 0, 32)
- *i = int32Value(v)
- return err
-}
-
-func (i *int32Value) Type() string {
- return "int32"
-}
-
-func (i *int32Value) String() string { return strconv.FormatInt(int64(*i), 10) }
-
-func int32Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseInt(sval, 0, 32)
- if err != nil {
- return 0, err
- }
- return int32(v), nil
-}
-
-// GetInt32 returns the int32 value of a flag with the given name
-func (f *FlagSet) GetInt32(name string) (int32, error) {
- val, err := f.getFlagType(name, "int32", int32Conv)
- if err != nil {
- return 0, err
- }
- return val.(int32), nil
-}
-
-// Int32Var defines an int32 flag with specified name, default value, and usage string.
-// The argument p points to an int32 variable in which to store the value of the flag.
-func (f *FlagSet) Int32Var(p *int32, name string, value int32, usage string) {
- f.VarP(newInt32Value(value, p), name, "", usage)
-}
-
-// Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int32VarP(p *int32, name, shorthand string, value int32, usage string) {
- f.VarP(newInt32Value(value, p), name, shorthand, usage)
-}
-
-// Int32Var defines an int32 flag with specified name, default value, and usage string.
-// The argument p points to an int32 variable in which to store the value of the flag.
-func Int32Var(p *int32, name string, value int32, usage string) {
- CommandLine.VarP(newInt32Value(value, p), name, "", usage)
-}
-
-// Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash.
-func Int32VarP(p *int32, name, shorthand string, value int32, usage string) {
- CommandLine.VarP(newInt32Value(value, p), name, shorthand, usage)
-}
-
-// Int32 defines an int32 flag with specified name, default value, and usage string.
-// The return value is the address of an int32 variable that stores the value of the flag.
-func (f *FlagSet) Int32(name string, value int32, usage string) *int32 {
- p := new(int32)
- f.Int32VarP(p, name, "", value, usage)
- return p
-}
-
-// Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int32P(name, shorthand string, value int32, usage string) *int32 {
- p := new(int32)
- f.Int32VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Int32 defines an int32 flag with specified name, default value, and usage string.
-// The return value is the address of an int32 variable that stores the value of the flag.
-func Int32(name string, value int32, usage string) *int32 {
- return CommandLine.Int32P(name, "", value, usage)
-}
-
-// Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash.
-func Int32P(name, shorthand string, value int32, usage string) *int32 {
- return CommandLine.Int32P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/int64.go b/vendor/github.com/spf13/pflag/int64.go
deleted file mode 100644
index 0026d78..0000000
--- a/vendor/github.com/spf13/pflag/int64.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- int64 Value
-type int64Value int64
-
-func newInt64Value(val int64, p *int64) *int64Value {
- *p = val
- return (*int64Value)(p)
-}
-
-func (i *int64Value) Set(s string) error {
- v, err := strconv.ParseInt(s, 0, 64)
- *i = int64Value(v)
- return err
-}
-
-func (i *int64Value) Type() string {
- return "int64"
-}
-
-func (i *int64Value) String() string { return strconv.FormatInt(int64(*i), 10) }
-
-func int64Conv(sval string) (interface{}, error) {
- return strconv.ParseInt(sval, 0, 64)
-}
-
-// GetInt64 returns the int64 value of a flag with the given name
-func (f *FlagSet) GetInt64(name string) (int64, error) {
- val, err := f.getFlagType(name, "int64", int64Conv)
- if err != nil {
- return 0, err
- }
- return val.(int64), nil
-}
-
-// Int64Var defines an int64 flag with specified name, default value, and usage string.
-// The argument p points to an int64 variable in which to store the value of the flag.
-func (f *FlagSet) Int64Var(p *int64, name string, value int64, usage string) {
- f.VarP(newInt64Value(value, p), name, "", usage)
-}
-
-// Int64VarP is like Int64Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int64VarP(p *int64, name, shorthand string, value int64, usage string) {
- f.VarP(newInt64Value(value, p), name, shorthand, usage)
-}
-
-// Int64Var defines an int64 flag with specified name, default value, and usage string.
-// The argument p points to an int64 variable in which to store the value of the flag.
-func Int64Var(p *int64, name string, value int64, usage string) {
- CommandLine.VarP(newInt64Value(value, p), name, "", usage)
-}
-
-// Int64VarP is like Int64Var, but accepts a shorthand letter that can be used after a single dash.
-func Int64VarP(p *int64, name, shorthand string, value int64, usage string) {
- CommandLine.VarP(newInt64Value(value, p), name, shorthand, usage)
-}
-
-// Int64 defines an int64 flag with specified name, default value, and usage string.
-// The return value is the address of an int64 variable that stores the value of the flag.
-func (f *FlagSet) Int64(name string, value int64, usage string) *int64 {
- p := new(int64)
- f.Int64VarP(p, name, "", value, usage)
- return p
-}
-
-// Int64P is like Int64, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int64P(name, shorthand string, value int64, usage string) *int64 {
- p := new(int64)
- f.Int64VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Int64 defines an int64 flag with specified name, default value, and usage string.
-// The return value is the address of an int64 variable that stores the value of the flag.
-func Int64(name string, value int64, usage string) *int64 {
- return CommandLine.Int64P(name, "", value, usage)
-}
-
-// Int64P is like Int64, but accepts a shorthand letter that can be used after a single dash.
-func Int64P(name, shorthand string, value int64, usage string) *int64 {
- return CommandLine.Int64P(name, shorthand, value, usage)
-}
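For reference, a minimal sketch of how the Int64 helpers removed above are typically used. The flag name, shorthand, and default below are illustrative, not taken from this repository, and the sketch assumes github.com/spf13/pflag is still resolvable as a regular module dependency rather than through the deleted vendor copy:

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	// "timeout"/"t" and the default of 30 are illustrative values.
	timeout := pflag.Int64P("timeout", "t", 30, "timeout in seconds")
	pflag.Parse()
	fmt.Println("timeout:", *timeout) // e.g. --timeout=60 or -t 60
}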
diff --git a/vendor/github.com/spf13/pflag/int8.go b/vendor/github.com/spf13/pflag/int8.go
deleted file mode 100644
index 4da9222..0000000
--- a/vendor/github.com/spf13/pflag/int8.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- int8 Value
-type int8Value int8
-
-func newInt8Value(val int8, p *int8) *int8Value {
- *p = val
- return (*int8Value)(p)
-}
-
-func (i *int8Value) Set(s string) error {
- v, err := strconv.ParseInt(s, 0, 8)
- *i = int8Value(v)
- return err
-}
-
-func (i *int8Value) Type() string {
- return "int8"
-}
-
-func (i *int8Value) String() string { return strconv.FormatInt(int64(*i), 10) }
-
-func int8Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseInt(sval, 0, 8)
- if err != nil {
- return 0, err
- }
- return int8(v), nil
-}
-
-// GetInt8 return the int8 value of a flag with the given name
-func (f *FlagSet) GetInt8(name string) (int8, error) {
- val, err := f.getFlagType(name, "int8", int8Conv)
- if err != nil {
- return 0, err
- }
- return val.(int8), nil
-}
-
-// Int8Var defines an int8 flag with specified name, default value, and usage string.
-// The argument p points to an int8 variable in which to store the value of the flag.
-func (f *FlagSet) Int8Var(p *int8, name string, value int8, usage string) {
- f.VarP(newInt8Value(value, p), name, "", usage)
-}
-
-// Int8VarP is like Int8Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int8VarP(p *int8, name, shorthand string, value int8, usage string) {
- f.VarP(newInt8Value(value, p), name, shorthand, usage)
-}
-
-// Int8Var defines an int8 flag with specified name, default value, and usage string.
-// The argument p points to an int8 variable in which to store the value of the flag.
-func Int8Var(p *int8, name string, value int8, usage string) {
- CommandLine.VarP(newInt8Value(value, p), name, "", usage)
-}
-
-// Int8VarP is like Int8Var, but accepts a shorthand letter that can be used after a single dash.
-func Int8VarP(p *int8, name, shorthand string, value int8, usage string) {
- CommandLine.VarP(newInt8Value(value, p), name, shorthand, usage)
-}
-
-// Int8 defines an int8 flag with specified name, default value, and usage string.
-// The return value is the address of an int8 variable that stores the value of the flag.
-func (f *FlagSet) Int8(name string, value int8, usage string) *int8 {
- p := new(int8)
- f.Int8VarP(p, name, "", value, usage)
- return p
-}
-
-// Int8P is like Int8, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Int8P(name, shorthand string, value int8, usage string) *int8 {
- p := new(int8)
- f.Int8VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Int8 defines an int8 flag with specified name, default value, and usage string.
-// The return value is the address of an int8 variable that stores the value of the flag.
-func Int8(name string, value int8, usage string) *int8 {
- return CommandLine.Int8P(name, "", value, usage)
-}
-
-// Int8P is like Int8, but accepts a shorthand letter that can be used after a single dash.
-func Int8P(name, shorthand string, value int8, usage string) *int8 {
- return CommandLine.Int8P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/int_slice.go b/vendor/github.com/spf13/pflag/int_slice.go
deleted file mode 100644
index 1e7c9ed..0000000
--- a/vendor/github.com/spf13/pflag/int_slice.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-// -- intSlice Value
-type intSliceValue struct {
- value *[]int
- changed bool
-}
-
-func newIntSliceValue(val []int, p *[]int) *intSliceValue {
- isv := new(intSliceValue)
- isv.value = p
- *isv.value = val
- return isv
-}
-
-func (s *intSliceValue) Set(val string) error {
- ss := strings.Split(val, ",")
- out := make([]int, len(ss))
- for i, d := range ss {
- var err error
- out[i], err = strconv.Atoi(d)
- if err != nil {
- return err
- }
-
- }
- if !s.changed {
- *s.value = out
- } else {
- *s.value = append(*s.value, out...)
- }
- s.changed = true
- return nil
-}
-
-func (s *intSliceValue) Type() string {
- return "intSlice"
-}
-
-func (s *intSliceValue) String() string {
- out := make([]string, len(*s.value))
- for i, d := range *s.value {
- out[i] = fmt.Sprintf("%d", d)
- }
- return "[" + strings.Join(out, ",") + "]"
-}
-
-func intSliceConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // Empty string would cause a slice with one (empty) entry
- if len(val) == 0 {
- return []int{}, nil
- }
- ss := strings.Split(val, ",")
- out := make([]int, len(ss))
- for i, d := range ss {
- var err error
- out[i], err = strconv.Atoi(d)
- if err != nil {
- return nil, err
- }
-
- }
- return out, nil
-}
-
-// GetIntSlice return the []int value of a flag with the given name
-func (f *FlagSet) GetIntSlice(name string) ([]int, error) {
- val, err := f.getFlagType(name, "intSlice", intSliceConv)
- if err != nil {
- return []int{}, err
- }
- return val.([]int), nil
-}
-
-// IntSliceVar defines an intSlice flag with specified name, default value, and usage string.
-// The argument p points to a []int variable in which to store the value of the flag.
-func (f *FlagSet) IntSliceVar(p *[]int, name string, value []int, usage string) {
- f.VarP(newIntSliceValue(value, p), name, "", usage)
-}
-
-// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) {
- f.VarP(newIntSliceValue(value, p), name, shorthand, usage)
-}
-
-// IntSliceVar defines a []int flag with specified name, default value, and usage string.
-// The argument p points to a []int variable in which to store the value of the flag.
-func IntSliceVar(p *[]int, name string, value []int, usage string) {
- CommandLine.VarP(newIntSliceValue(value, p), name, "", usage)
-}
-
-// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) {
- CommandLine.VarP(newIntSliceValue(value, p), name, shorthand, usage)
-}
-
-// IntSlice defines a []int flag with specified name, default value, and usage string.
-// The return value is the address of a []int variable that stores the value of the flag.
-func (f *FlagSet) IntSlice(name string, value []int, usage string) *[]int {
- p := []int{}
- f.IntSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IntSliceP(name, shorthand string, value []int, usage string) *[]int {
- p := []int{}
- f.IntSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// IntSlice defines a []int flag with specified name, default value, and usage string.
-// The return value is the address of a []int variable that stores the value of the flag.
-func IntSlice(name string, value []int, usage string) *[]int {
- return CommandLine.IntSliceP(name, "", value, usage)
-}
-
-// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash.
-func IntSliceP(name, shorthand string, value []int, usage string) *[]int {
- return CommandLine.IntSliceP(name, shorthand, value, usage)
-}
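The Set method above replaces the default on the first use of the flag and appends on later uses; a minimal sketch of that behaviour (the flag name and default are illustrative, not defined anywhere in this repository):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	// Illustrative flag name and default.
	ports := pflag.IntSlice("ports", []int{80}, "ports to listen on")
	pflag.Parse()
	// --ports=8080,8443 --ports=9090  =>  [8080 8443 9090]
	fmt.Println(*ports)
}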
diff --git a/vendor/github.com/spf13/pflag/ip.go b/vendor/github.com/spf13/pflag/ip.go
deleted file mode 100644
index 3d414ba..0000000
--- a/vendor/github.com/spf13/pflag/ip.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "net"
- "strings"
-)
-
-// -- net.IP value
-type ipValue net.IP
-
-func newIPValue(val net.IP, p *net.IP) *ipValue {
- *p = val
- return (*ipValue)(p)
-}
-
-func (i *ipValue) String() string { return net.IP(*i).String() }
-func (i *ipValue) Set(s string) error {
- ip := net.ParseIP(strings.TrimSpace(s))
- if ip == nil {
- return fmt.Errorf("failed to parse IP: %q", s)
- }
- *i = ipValue(ip)
- return nil
-}
-
-func (i *ipValue) Type() string {
- return "ip"
-}
-
-func ipConv(sval string) (interface{}, error) {
- ip := net.ParseIP(sval)
- if ip != nil {
- return ip, nil
- }
- return nil, fmt.Errorf("invalid string being converted to IP address: %s", sval)
-}
-
-// GetIP return the net.IP value of a flag with the given name
-func (f *FlagSet) GetIP(name string) (net.IP, error) {
- val, err := f.getFlagType(name, "ip", ipConv)
- if err != nil {
- return nil, err
- }
- return val.(net.IP), nil
-}
-
-// IPVar defines an net.IP flag with specified name, default value, and usage string.
-// The argument p points to an net.IP variable in which to store the value of the flag.
-func (f *FlagSet) IPVar(p *net.IP, name string, value net.IP, usage string) {
- f.VarP(newIPValue(value, p), name, "", usage)
-}
-
-// IPVarP is like IPVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) {
- f.VarP(newIPValue(value, p), name, shorthand, usage)
-}
-
-// IPVar defines an net.IP flag with specified name, default value, and usage string.
-// The argument p points to an net.IP variable in which to store the value of the flag.
-func IPVar(p *net.IP, name string, value net.IP, usage string) {
- CommandLine.VarP(newIPValue(value, p), name, "", usage)
-}
-
-// IPVarP is like IPVar, but accepts a shorthand letter that can be used after a single dash.
-func IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) {
- CommandLine.VarP(newIPValue(value, p), name, shorthand, usage)
-}
-
-// IP defines an net.IP flag with specified name, default value, and usage string.
-// The return value is the address of an net.IP variable that stores the value of the flag.
-func (f *FlagSet) IP(name string, value net.IP, usage string) *net.IP {
- p := new(net.IP)
- f.IPVarP(p, name, "", value, usage)
- return p
-}
-
-// IPP is like IP, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPP(name, shorthand string, value net.IP, usage string) *net.IP {
- p := new(net.IP)
- f.IPVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// IP defines an net.IP flag with specified name, default value, and usage string.
-// The return value is the address of an net.IP variable that stores the value of the flag.
-func IP(name string, value net.IP, usage string) *net.IP {
- return CommandLine.IPP(name, "", value, usage)
-}
-
-// IPP is like IP, but accepts a shorthand letter that can be used after a single dash.
-func IPP(name, shorthand string, value net.IP, usage string) *net.IP {
- return CommandLine.IPP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/ip_slice.go b/vendor/github.com/spf13/pflag/ip_slice.go
deleted file mode 100644
index 7dd196f..0000000
--- a/vendor/github.com/spf13/pflag/ip_slice.go
+++ /dev/null
@@ -1,148 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "io"
- "net"
- "strings"
-)
-
-// -- ipSlice Value
-type ipSliceValue struct {
- value *[]net.IP
- changed bool
-}
-
-func newIPSliceValue(val []net.IP, p *[]net.IP) *ipSliceValue {
- ipsv := new(ipSliceValue)
- ipsv.value = p
- *ipsv.value = val
- return ipsv
-}
-
-// Set converts, and assigns, the comma-separated IP argument string representation as the []net.IP value of this flag.
-// If Set is called on a flag that already has a []net.IP assigned, the newly converted values will be appended.
-func (s *ipSliceValue) Set(val string) error {
-
- // remove all quote characters
- rmQuote := strings.NewReplacer(`"`, "", `'`, "", "`", "")
-
- // read flag arguments with CSV parser
- ipStrSlice, err := readAsCSV(rmQuote.Replace(val))
- if err != nil && err != io.EOF {
- return err
- }
-
- // parse ip values into slice
- out := make([]net.IP, 0, len(ipStrSlice))
- for _, ipStr := range ipStrSlice {
- ip := net.ParseIP(strings.TrimSpace(ipStr))
- if ip == nil {
- return fmt.Errorf("invalid string being converted to IP address: %s", ipStr)
- }
- out = append(out, ip)
- }
-
- if !s.changed {
- *s.value = out
- } else {
- *s.value = append(*s.value, out...)
- }
-
- s.changed = true
-
- return nil
-}
-
-// Type returns a string that uniquely represents this flag's type.
-func (s *ipSliceValue) Type() string {
- return "ipSlice"
-}
-
-// String defines a "native" format for this net.IP slice flag value.
-func (s *ipSliceValue) String() string {
-
- ipStrSlice := make([]string, len(*s.value))
- for i, ip := range *s.value {
- ipStrSlice[i] = ip.String()
- }
-
- out, _ := writeAsCSV(ipStrSlice)
-
- return "[" + out + "]"
-}
-
-func ipSliceConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // Empty string would cause a slice with one (empty) entry
- if len(val) == 0 {
- return []net.IP{}, nil
- }
- ss := strings.Split(val, ",")
- out := make([]net.IP, len(ss))
- for i, sval := range ss {
- ip := net.ParseIP(strings.TrimSpace(sval))
- if ip == nil {
- return nil, fmt.Errorf("invalid string being converted to IP address: %s", sval)
- }
- out[i] = ip
- }
- return out, nil
-}
-
-// GetIPSlice returns the []net.IP value of a flag with the given name
-func (f *FlagSet) GetIPSlice(name string) ([]net.IP, error) {
- val, err := f.getFlagType(name, "ipSlice", ipSliceConv)
- if err != nil {
- return []net.IP{}, err
- }
- return val.([]net.IP), nil
-}
-
-// IPSliceVar defines an ipSlice flag with specified name, default value, and usage string.
-// The argument p points to a []net.IP variable in which to store the value of the flag.
-func (f *FlagSet) IPSliceVar(p *[]net.IP, name string, value []net.IP, usage string) {
- f.VarP(newIPSliceValue(value, p), name, "", usage)
-}
-
-// IPSliceVarP is like IPSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPSliceVarP(p *[]net.IP, name, shorthand string, value []net.IP, usage string) {
- f.VarP(newIPSliceValue(value, p), name, shorthand, usage)
-}
-
-// IPSliceVar defines a []net.IP flag with specified name, default value, and usage string.
-// The argument p points to a []net.IP variable in which to store the value of the flag.
-func IPSliceVar(p *[]net.IP, name string, value []net.IP, usage string) {
- CommandLine.VarP(newIPSliceValue(value, p), name, "", usage)
-}
-
-// IPSliceVarP is like IPSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func IPSliceVarP(p *[]net.IP, name, shorthand string, value []net.IP, usage string) {
- CommandLine.VarP(newIPSliceValue(value, p), name, shorthand, usage)
-}
-
-// IPSlice defines a []net.IP flag with specified name, default value, and usage string.
-// The return value is the address of a []net.IP variable that stores the value of that flag.
-func (f *FlagSet) IPSlice(name string, value []net.IP, usage string) *[]net.IP {
- p := []net.IP{}
- f.IPSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// IPSliceP is like IPSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPSliceP(name, shorthand string, value []net.IP, usage string) *[]net.IP {
- p := []net.IP{}
- f.IPSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// IPSlice defines a []net.IP flag with specified name, default value, and usage string.
-// The return value is the address of a []net.IP variable that stores the value of the flag.
-func IPSlice(name string, value []net.IP, usage string) *[]net.IP {
- return CommandLine.IPSliceP(name, "", value, usage)
-}
-
-// IPSliceP is like IPSlice, but accepts a shorthand letter that can be used after a single dash.
-func IPSliceP(name, shorthand string, value []net.IP, usage string) *[]net.IP {
- return CommandLine.IPSliceP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/ipmask.go b/vendor/github.com/spf13/pflag/ipmask.go
deleted file mode 100644
index 5bd44bd..0000000
--- a/vendor/github.com/spf13/pflag/ipmask.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "net"
- "strconv"
-)
-
-// -- net.IPMask value
-type ipMaskValue net.IPMask
-
-func newIPMaskValue(val net.IPMask, p *net.IPMask) *ipMaskValue {
- *p = val
- return (*ipMaskValue)(p)
-}
-
-func (i *ipMaskValue) String() string { return net.IPMask(*i).String() }
-func (i *ipMaskValue) Set(s string) error {
- ip := ParseIPv4Mask(s)
- if ip == nil {
- return fmt.Errorf("failed to parse IP mask: %q", s)
- }
- *i = ipMaskValue(ip)
- return nil
-}
-
-func (i *ipMaskValue) Type() string {
- return "ipMask"
-}
-
-// ParseIPv4Mask parses an IPv4 netmask written in IP form (e.g. 255.255.255.0).
-// This function should really belong to the net package.
-func ParseIPv4Mask(s string) net.IPMask {
- mask := net.ParseIP(s)
- if mask == nil {
- if len(s) != 8 {
- return nil
- }
- // net.IPMask.String() actually outputs things like ffffff00
- // so write a horrible parser for that as well :-(
- m := []int{}
- for i := 0; i < 4; i++ {
- b := "0x" + s[2*i:2*i+2]
- d, err := strconv.ParseInt(b, 0, 0)
- if err != nil {
- return nil
- }
- m = append(m, int(d))
- }
- s := fmt.Sprintf("%d.%d.%d.%d", m[0], m[1], m[2], m[3])
- mask = net.ParseIP(s)
- if mask == nil {
- return nil
- }
- }
- return net.IPv4Mask(mask[12], mask[13], mask[14], mask[15])
-}
-
-func parseIPv4Mask(sval string) (interface{}, error) {
- mask := ParseIPv4Mask(sval)
- if mask == nil {
- return nil, fmt.Errorf("unable to parse %s as net.IPMask", sval)
- }
- return mask, nil
-}
-
-// GetIPv4Mask return the net.IPv4Mask value of a flag with the given name
-func (f *FlagSet) GetIPv4Mask(name string) (net.IPMask, error) {
- val, err := f.getFlagType(name, "ipMask", parseIPv4Mask)
- if err != nil {
- return nil, err
- }
- return val.(net.IPMask), nil
-}
-
-// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string.
-// The argument p points to an net.IPMask variable in which to store the value of the flag.
-func (f *FlagSet) IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) {
- f.VarP(newIPMaskValue(value, p), name, "", usage)
-}
-
-// IPMaskVarP is like IPMaskVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) {
- f.VarP(newIPMaskValue(value, p), name, shorthand, usage)
-}
-
-// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string.
-// The argument p points to an net.IPMask variable in which to store the value of the flag.
-func IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) {
- CommandLine.VarP(newIPMaskValue(value, p), name, "", usage)
-}
-
-// IPMaskVarP is like IPMaskVar, but accepts a shorthand letter that can be used after a single dash.
-func IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) {
- CommandLine.VarP(newIPMaskValue(value, p), name, shorthand, usage)
-}
-
-// IPMask defines an net.IPMask flag with specified name, default value, and usage string.
-// The return value is the address of an net.IPMask variable that stores the value of the flag.
-func (f *FlagSet) IPMask(name string, value net.IPMask, usage string) *net.IPMask {
- p := new(net.IPMask)
- f.IPMaskVarP(p, name, "", value, usage)
- return p
-}
-
-// IPMaskP is like IPMask, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask {
- p := new(net.IPMask)
- f.IPMaskVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// IPMask defines an net.IPMask flag with specified name, default value, and usage string.
-// The return value is the address of an net.IPMask variable that stores the value of the flag.
-func IPMask(name string, value net.IPMask, usage string) *net.IPMask {
- return CommandLine.IPMaskP(name, "", value, usage)
-}
-
-// IPMaskP is like IPMask, but accepts a shorthand letter that can be used after a single dash.
-func IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask {
- return CommandLine.IPMaskP(name, shorthand, value, usage)
-}
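ParseIPv4Mask above accepts both a dotted-quad mask and the bare hex form that net.IPMask.String() produces; a small sketch of the two accepted spellings (the values are illustrative):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	// Both spellings decode to the same 4-byte mask and print as "ffffff00".
	fmt.Println(pflag.ParseIPv4Mask("255.255.255.0"))
	fmt.Println(pflag.ParseIPv4Mask("ffffff00"))
}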
diff --git a/vendor/github.com/spf13/pflag/ipnet.go b/vendor/github.com/spf13/pflag/ipnet.go
deleted file mode 100644
index e2c1b8b..0000000
--- a/vendor/github.com/spf13/pflag/ipnet.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "net"
- "strings"
-)
-
-// IPNet adapts net.IPNet for use as a flag.
-type ipNetValue net.IPNet
-
-func (ipnet ipNetValue) String() string {
- n := net.IPNet(ipnet)
- return n.String()
-}
-
-func (ipnet *ipNetValue) Set(value string) error {
- _, n, err := net.ParseCIDR(strings.TrimSpace(value))
- if err != nil {
- return err
- }
- *ipnet = ipNetValue(*n)
- return nil
-}
-
-func (*ipNetValue) Type() string {
- return "ipNet"
-}
-
-func newIPNetValue(val net.IPNet, p *net.IPNet) *ipNetValue {
- *p = val
- return (*ipNetValue)(p)
-}
-
-func ipNetConv(sval string) (interface{}, error) {
- _, n, err := net.ParseCIDR(strings.TrimSpace(sval))
- if err == nil {
- return *n, nil
- }
- return nil, fmt.Errorf("invalid string being converted to IPNet: %s", sval)
-}
-
-// GetIPNet return the net.IPNet value of a flag with the given name
-func (f *FlagSet) GetIPNet(name string) (net.IPNet, error) {
- val, err := f.getFlagType(name, "ipNet", ipNetConv)
- if err != nil {
- return net.IPNet{}, err
- }
- return val.(net.IPNet), nil
-}
-
-// IPNetVar defines an net.IPNet flag with specified name, default value, and usage string.
-// The argument p points to an net.IPNet variable in which to store the value of the flag.
-func (f *FlagSet) IPNetVar(p *net.IPNet, name string, value net.IPNet, usage string) {
- f.VarP(newIPNetValue(value, p), name, "", usage)
-}
-
-// IPNetVarP is like IPNetVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPNetVarP(p *net.IPNet, name, shorthand string, value net.IPNet, usage string) {
- f.VarP(newIPNetValue(value, p), name, shorthand, usage)
-}
-
-// IPNetVar defines an net.IPNet flag with specified name, default value, and usage string.
-// The argument p points to an net.IPNet variable in which to store the value of the flag.
-func IPNetVar(p *net.IPNet, name string, value net.IPNet, usage string) {
- CommandLine.VarP(newIPNetValue(value, p), name, "", usage)
-}
-
-// IPNetVarP is like IPNetVar, but accepts a shorthand letter that can be used after a single dash.
-func IPNetVarP(p *net.IPNet, name, shorthand string, value net.IPNet, usage string) {
- CommandLine.VarP(newIPNetValue(value, p), name, shorthand, usage)
-}
-
-// IPNet defines an net.IPNet flag with specified name, default value, and usage string.
-// The return value is the address of an net.IPNet variable that stores the value of the flag.
-func (f *FlagSet) IPNet(name string, value net.IPNet, usage string) *net.IPNet {
- p := new(net.IPNet)
- f.IPNetVarP(p, name, "", value, usage)
- return p
-}
-
-// IPNetP is like IPNet, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) IPNetP(name, shorthand string, value net.IPNet, usage string) *net.IPNet {
- p := new(net.IPNet)
- f.IPNetVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// IPNet defines an net.IPNet flag with specified name, default value, and usage string.
-// The return value is the address of an net.IPNet variable that stores the value of the flag.
-func IPNet(name string, value net.IPNet, usage string) *net.IPNet {
- return CommandLine.IPNetP(name, "", value, usage)
-}
-
-// IPNetP is like IPNet, but accepts a shorthand letter that can be used after a single dash.
-func IPNetP(name, shorthand string, value net.IPNet, usage string) *net.IPNet {
- return CommandLine.IPNetP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/string.go b/vendor/github.com/spf13/pflag/string.go
deleted file mode 100644
index 04e0a26..0000000
--- a/vendor/github.com/spf13/pflag/string.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package pflag
-
-// -- string Value
-type stringValue string
-
-func newStringValue(val string, p *string) *stringValue {
- *p = val
- return (*stringValue)(p)
-}
-
-func (s *stringValue) Set(val string) error {
- *s = stringValue(val)
- return nil
-}
-func (s *stringValue) Type() string {
- return "string"
-}
-
-func (s *stringValue) String() string { return string(*s) }
-
-func stringConv(sval string) (interface{}, error) {
- return sval, nil
-}
-
-// GetString return the string value of a flag with the given name
-func (f *FlagSet) GetString(name string) (string, error) {
- val, err := f.getFlagType(name, "string", stringConv)
- if err != nil {
- return "", err
- }
- return val.(string), nil
-}
-
-// StringVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a string variable in which to store the value of the flag.
-func (f *FlagSet) StringVar(p *string, name string, value string, usage string) {
- f.VarP(newStringValue(value, p), name, "", usage)
-}
-
-// StringVarP is like StringVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringVarP(p *string, name, shorthand string, value string, usage string) {
- f.VarP(newStringValue(value, p), name, shorthand, usage)
-}
-
-// StringVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a string variable in which to store the value of the flag.
-func StringVar(p *string, name string, value string, usage string) {
- CommandLine.VarP(newStringValue(value, p), name, "", usage)
-}
-
-// StringVarP is like StringVar, but accepts a shorthand letter that can be used after a single dash.
-func StringVarP(p *string, name, shorthand string, value string, usage string) {
- CommandLine.VarP(newStringValue(value, p), name, shorthand, usage)
-}
-
-// String defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a string variable that stores the value of the flag.
-func (f *FlagSet) String(name string, value string, usage string) *string {
- p := new(string)
- f.StringVarP(p, name, "", value, usage)
- return p
-}
-
-// StringP is like String, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringP(name, shorthand string, value string, usage string) *string {
- p := new(string)
- f.StringVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// String defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a string variable that stores the value of the flag.
-func String(name string, value string, usage string) *string {
- return CommandLine.StringP(name, "", value, usage)
-}
-
-// StringP is like String, but accepts a shorthand letter that can be used after a single dash.
-func StringP(name, shorthand string, value string, usage string) *string {
- return CommandLine.StringP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/string_array.go b/vendor/github.com/spf13/pflag/string_array.go
deleted file mode 100644
index fa7bc60..0000000
--- a/vendor/github.com/spf13/pflag/string_array.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package pflag
-
-// -- stringArray Value
-type stringArrayValue struct {
- value *[]string
- changed bool
-}
-
-func newStringArrayValue(val []string, p *[]string) *stringArrayValue {
- ssv := new(stringArrayValue)
- ssv.value = p
- *ssv.value = val
- return ssv
-}
-
-func (s *stringArrayValue) Set(val string) error {
- if !s.changed {
- *s.value = []string{val}
- s.changed = true
- } else {
- *s.value = append(*s.value, val)
- }
- return nil
-}
-
-func (s *stringArrayValue) Type() string {
- return "stringArray"
-}
-
-func (s *stringArrayValue) String() string {
- str, _ := writeAsCSV(*s.value)
- return "[" + str + "]"
-}
-
-func stringArrayConv(sval string) (interface{}, error) {
- sval = sval[1 : len(sval)-1]
- // An empty string would cause an array with one (empty) string
- if len(sval) == 0 {
- return []string{}, nil
- }
- return readAsCSV(sval)
-}
-
-// GetStringArray return the []string value of a flag with the given name
-func (f *FlagSet) GetStringArray(name string) ([]string, error) {
- val, err := f.getFlagType(name, "stringArray", stringArrayConv)
- if err != nil {
- return []string{}, err
- }
- return val.([]string), nil
-}
-
-// StringArrayVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a []string variable in which to store the values of the multiple flags.
-// The value of each argument will not try to be separated by comma. Use a StringSlice for that.
-func (f *FlagSet) StringArrayVar(p *[]string, name string, value []string, usage string) {
- f.VarP(newStringArrayValue(value, p), name, "", usage)
-}
-
-// StringArrayVarP is like StringArrayVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringArrayVarP(p *[]string, name, shorthand string, value []string, usage string) {
- f.VarP(newStringArrayValue(value, p), name, shorthand, usage)
-}
-
-// StringArrayVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a []string variable in which to store the value of the flag.
-// The value of each argument will not try to be separated by comma. Use a StringSlice for that.
-func StringArrayVar(p *[]string, name string, value []string, usage string) {
- CommandLine.VarP(newStringArrayValue(value, p), name, "", usage)
-}
-
-// StringArrayVarP is like StringArrayVar, but accepts a shorthand letter that can be used after a single dash.
-func StringArrayVarP(p *[]string, name, shorthand string, value []string, usage string) {
- CommandLine.VarP(newStringArrayValue(value, p), name, shorthand, usage)
-}
-
-// StringArray defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a []string variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma. Use a StringSlice for that.
-func (f *FlagSet) StringArray(name string, value []string, usage string) *[]string {
- p := []string{}
- f.StringArrayVarP(&p, name, "", value, usage)
- return &p
-}
-
-// StringArrayP is like StringArray, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringArrayP(name, shorthand string, value []string, usage string) *[]string {
- p := []string{}
- f.StringArrayVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// StringArray defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a []string variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma. Use a StringSlice for that.
-func StringArray(name string, value []string, usage string) *[]string {
- return CommandLine.StringArrayP(name, "", value, usage)
-}
-
-// StringArrayP is like StringArray, but accepts a shorthand letter that can be used after a single dash.
-func StringArrayP(name, shorthand string, value []string, usage string) *[]string {
- return CommandLine.StringArrayP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/string_slice.go b/vendor/github.com/spf13/pflag/string_slice.go
deleted file mode 100644
index 0cd3ccc..0000000
--- a/vendor/github.com/spf13/pflag/string_slice.go
+++ /dev/null
@@ -1,149 +0,0 @@
-package pflag
-
-import (
- "bytes"
- "encoding/csv"
- "strings"
-)
-
-// -- stringSlice Value
-type stringSliceValue struct {
- value *[]string
- changed bool
-}
-
-func newStringSliceValue(val []string, p *[]string) *stringSliceValue {
- ssv := new(stringSliceValue)
- ssv.value = p
- *ssv.value = val
- return ssv
-}
-
-func readAsCSV(val string) ([]string, error) {
- if val == "" {
- return []string{}, nil
- }
- stringReader := strings.NewReader(val)
- csvReader := csv.NewReader(stringReader)
- return csvReader.Read()
-}
-
-func writeAsCSV(vals []string) (string, error) {
- b := &bytes.Buffer{}
- w := csv.NewWriter(b)
- err := w.Write(vals)
- if err != nil {
- return "", err
- }
- w.Flush()
- return strings.TrimSuffix(b.String(), "\n"), nil
-}
-
-func (s *stringSliceValue) Set(val string) error {
- v, err := readAsCSV(val)
- if err != nil {
- return err
- }
- if !s.changed {
- *s.value = v
- } else {
- *s.value = append(*s.value, v...)
- }
- s.changed = true
- return nil
-}
-
-func (s *stringSliceValue) Type() string {
- return "stringSlice"
-}
-
-func (s *stringSliceValue) String() string {
- str, _ := writeAsCSV(*s.value)
- return "[" + str + "]"
-}
-
-func stringSliceConv(sval string) (interface{}, error) {
- sval = sval[1 : len(sval)-1]
- // An empty string would cause a slice with one (empty) string
- if len(sval) == 0 {
- return []string{}, nil
- }
- return readAsCSV(sval)
-}
-
-// GetStringSlice return the []string value of a flag with the given name
-func (f *FlagSet) GetStringSlice(name string) ([]string, error) {
- val, err := f.getFlagType(name, "stringSlice", stringSliceConv)
- if err != nil {
- return []string{}, err
- }
- return val.([]string), nil
-}
-
-// StringSliceVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a []string variable in which to store the value of the flag.
-// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
-// For example:
-// --ss="v1,v2" --ss="v3"
-// will result in
-// []string{"v1", "v2", "v3"}
-func (f *FlagSet) StringSliceVar(p *[]string, name string, value []string, usage string) {
- f.VarP(newStringSliceValue(value, p), name, "", usage)
-}
-
-// StringSliceVarP is like StringSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringSliceVarP(p *[]string, name, shorthand string, value []string, usage string) {
- f.VarP(newStringSliceValue(value, p), name, shorthand, usage)
-}
-
-// StringSliceVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a []string variable in which to store the value of the flag.
-// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
-// For example:
-// --ss="v1,v2" --ss="v3"
-// will result in
-// []string{"v1", "v2", "v3"}
-func StringSliceVar(p *[]string, name string, value []string, usage string) {
- CommandLine.VarP(newStringSliceValue(value, p), name, "", usage)
-}
-
-// StringSliceVarP is like StringSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func StringSliceVarP(p *[]string, name, shorthand string, value []string, usage string) {
- CommandLine.VarP(newStringSliceValue(value, p), name, shorthand, usage)
-}
-
-// StringSlice defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a []string variable that stores the value of the flag.
-// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
-// For example:
-// --ss="v1,v2" --ss="v3"
-// will result in
-// []string{"v1", "v2", "v3"}
-func (f *FlagSet) StringSlice(name string, value []string, usage string) *[]string {
- p := []string{}
- f.StringSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// StringSliceP is like StringSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringSliceP(name, shorthand string, value []string, usage string) *[]string {
- p := []string{}
- f.StringSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// StringSlice defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a []string variable that stores the value of the flag.
-// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
-// For example:
-// --ss="v1,v2" --ss="v3"
-// will result in
-// []string{"v1", "v2", "v3"}
-func StringSlice(name string, value []string, usage string) *[]string {
- return CommandLine.StringSliceP(name, "", value, usage)
-}
-
-// StringSliceP is like StringSlice, but accepts a shorthand letter that can be used after a single dash.
-func StringSliceP(name, shorthand string, value []string, usage string) *[]string {
- return CommandLine.StringSliceP(name, shorthand, value, usage)
-}
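To make the StringSlice/StringArray contrast in the comments above concrete, a minimal sketch (flag names and values are illustrative):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	ss := pflag.StringSlice("ss", nil, "comma-separated; values are split")
	sa := pflag.StringArray("sa", nil, "repeatable; values are kept verbatim")
	pflag.Parse()
	// --ss=a,b --ss=c  =>  [a b c]
	// --sa=a,b --sa=c  =>  [a,b c]
	fmt.Println(*ss, *sa)
}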
diff --git a/vendor/github.com/spf13/pflag/string_to_int.go b/vendor/github.com/spf13/pflag/string_to_int.go
deleted file mode 100644
index 5ceda39..0000000
--- a/vendor/github.com/spf13/pflag/string_to_int.go
+++ /dev/null
@@ -1,149 +0,0 @@
-package pflag
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "strings"
-)
-
-// -- stringToInt Value
-type stringToIntValue struct {
- value *map[string]int
- changed bool
-}
-
-func newStringToIntValue(val map[string]int, p *map[string]int) *stringToIntValue {
- ssv := new(stringToIntValue)
- ssv.value = p
- *ssv.value = val
- return ssv
-}
-
-// Format: a=1,b=2
-func (s *stringToIntValue) Set(val string) error {
- ss := strings.Split(val, ",")
- out := make(map[string]int, len(ss))
- for _, pair := range ss {
- kv := strings.SplitN(pair, "=", 2)
- if len(kv) != 2 {
- return fmt.Errorf("%s must be formatted as key=value", pair)
- }
- var err error
- out[kv[0]], err = strconv.Atoi(kv[1])
- if err != nil {
- return err
- }
- }
- if !s.changed {
- *s.value = out
- } else {
- for k, v := range out {
- (*s.value)[k] = v
- }
- }
- s.changed = true
- return nil
-}
-
-func (s *stringToIntValue) Type() string {
- return "stringToInt"
-}
-
-func (s *stringToIntValue) String() string {
- var buf bytes.Buffer
- i := 0
- for k, v := range *s.value {
- if i > 0 {
- buf.WriteRune(',')
- }
- buf.WriteString(k)
- buf.WriteRune('=')
- buf.WriteString(strconv.Itoa(v))
- i++
- }
- return "[" + buf.String() + "]"
-}
-
-func stringToIntConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // An empty string would cause an empty map
- if len(val) == 0 {
- return map[string]int{}, nil
- }
- ss := strings.Split(val, ",")
- out := make(map[string]int, len(ss))
- for _, pair := range ss {
- kv := strings.SplitN(pair, "=", 2)
- if len(kv) != 2 {
- return nil, fmt.Errorf("%s must be formatted as key=value", pair)
- }
- var err error
- out[kv[0]], err = strconv.Atoi(kv[1])
- if err != nil {
- return nil, err
- }
- }
- return out, nil
-}
-
-// GetStringToInt return the map[string]int value of a flag with the given name
-func (f *FlagSet) GetStringToInt(name string) (map[string]int, error) {
- val, err := f.getFlagType(name, "stringToInt", stringToIntConv)
- if err != nil {
- return map[string]int{}, err
- }
- return val.(map[string]int), nil
-}
-
-// StringToIntVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a map[string]int variable in which to store the values of the multiple flags.
-// The value of each argument will not try to be separated by comma
-func (f *FlagSet) StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) {
- f.VarP(newStringToIntValue(value, p), name, "", usage)
-}
-
-// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) {
- f.VarP(newStringToIntValue(value, p), name, shorthand, usage)
-}
-
-// StringToIntVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a map[string]int variable in which to store the value of the flag.
-// The value of each argument will not try to be separated by comma
-func StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) {
- CommandLine.VarP(newStringToIntValue(value, p), name, "", usage)
-}
-
-// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash.
-func StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) {
- CommandLine.VarP(newStringToIntValue(value, p), name, shorthand, usage)
-}
-
-// StringToInt defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a map[string]int variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma
-func (f *FlagSet) StringToInt(name string, value map[string]int, usage string) *map[string]int {
- p := map[string]int{}
- f.StringToIntVarP(&p, name, "", value, usage)
- return &p
-}
-
-// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int {
- p := map[string]int{}
- f.StringToIntVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// StringToInt defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a map[string]int variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma
-func StringToInt(name string, value map[string]int, usage string) *map[string]int {
- return CommandLine.StringToIntP(name, "", value, usage)
-}
-
-// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash.
-func StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int {
- return CommandLine.StringToIntP(name, shorthand, value, usage)
-}
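A minimal sketch of the key=value format documented above for StringToInt (the flag name and keys are illustrative, not defined in this repository):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	// Illustrative flag name and keys.
	limits := pflag.StringToInt("limits", map[string]int{}, "per-key limits as key=value pairs")
	pflag.Parse()
	// --limits=reads=10,writes=2  =>  map[reads:10 writes:2]
	fmt.Println(*limits)
}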
diff --git a/vendor/github.com/spf13/pflag/string_to_string.go b/vendor/github.com/spf13/pflag/string_to_string.go
deleted file mode 100644
index 890a01a..0000000
--- a/vendor/github.com/spf13/pflag/string_to_string.go
+++ /dev/null
@@ -1,160 +0,0 @@
-package pflag
-
-import (
- "bytes"
- "encoding/csv"
- "fmt"
- "strings"
-)
-
-// -- stringToString Value
-type stringToStringValue struct {
- value *map[string]string
- changed bool
-}
-
-func newStringToStringValue(val map[string]string, p *map[string]string) *stringToStringValue {
- ssv := new(stringToStringValue)
- ssv.value = p
- *ssv.value = val
- return ssv
-}
-
-// Format: a=1,b=2
-func (s *stringToStringValue) Set(val string) error {
- var ss []string
- n := strings.Count(val, "=")
- switch n {
- case 0:
- return fmt.Errorf("%s must be formatted as key=value", val)
- case 1:
- ss = append(ss, strings.Trim(val, `"`))
- default:
- r := csv.NewReader(strings.NewReader(val))
- var err error
- ss, err = r.Read()
- if err != nil {
- return err
- }
- }
-
- out := make(map[string]string, len(ss))
- for _, pair := range ss {
- kv := strings.SplitN(pair, "=", 2)
- if len(kv) != 2 {
- return fmt.Errorf("%s must be formatted as key=value", pair)
- }
- out[kv[0]] = kv[1]
- }
- if !s.changed {
- *s.value = out
- } else {
- for k, v := range out {
- (*s.value)[k] = v
- }
- }
- s.changed = true
- return nil
-}
-
-func (s *stringToStringValue) Type() string {
- return "stringToString"
-}
-
-func (s *stringToStringValue) String() string {
- records := make([]string, 0, len(*s.value)>>1)
- for k, v := range *s.value {
- records = append(records, k+"="+v)
- }
-
- var buf bytes.Buffer
- w := csv.NewWriter(&buf)
- if err := w.Write(records); err != nil {
- panic(err)
- }
- w.Flush()
- return "[" + strings.TrimSpace(buf.String()) + "]"
-}
-
-func stringToStringConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // An empty string would cause an empty map
- if len(val) == 0 {
- return map[string]string{}, nil
- }
- r := csv.NewReader(strings.NewReader(val))
- ss, err := r.Read()
- if err != nil {
- return nil, err
- }
- out := make(map[string]string, len(ss))
- for _, pair := range ss {
- kv := strings.SplitN(pair, "=", 2)
- if len(kv) != 2 {
- return nil, fmt.Errorf("%s must be formatted as key=value", pair)
- }
- out[kv[0]] = kv[1]
- }
- return out, nil
-}
-
-// GetStringToString return the map[string]string value of a flag with the given name
-func (f *FlagSet) GetStringToString(name string) (map[string]string, error) {
- val, err := f.getFlagType(name, "stringToString", stringToStringConv)
- if err != nil {
- return map[string]string{}, err
- }
- return val.(map[string]string), nil
-}
-
-// StringToStringVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a map[string]string variable in which to store the values of the multiple flags.
-// The value of each argument will not try to be separated by comma
-func (f *FlagSet) StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) {
- f.VarP(newStringToStringValue(value, p), name, "", usage)
-}
-
-// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) {
- f.VarP(newStringToStringValue(value, p), name, shorthand, usage)
-}
-
-// StringToStringVar defines a string flag with specified name, default value, and usage string.
-// The argument p points to a map[string]string variable in which to store the value of the flag.
-// The value of each argument will not try to be separated by comma
-func StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) {
- CommandLine.VarP(newStringToStringValue(value, p), name, "", usage)
-}
-
-// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash.
-func StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) {
- CommandLine.VarP(newStringToStringValue(value, p), name, shorthand, usage)
-}
-
-// StringToString defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a map[string]string variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma
-func (f *FlagSet) StringToString(name string, value map[string]string, usage string) *map[string]string {
- p := map[string]string{}
- f.StringToStringVarP(&p, name, "", value, usage)
- return &p
-}
-
-// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string {
- p := map[string]string{}
- f.StringToStringVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// StringToString defines a string flag with specified name, default value, and usage string.
-// The return value is the address of a map[string]string variable that stores the value of the flag.
-// The value of each argument will not try to be separated by comma
-func StringToString(name string, value map[string]string, usage string) *map[string]string {
- return CommandLine.StringToStringP(name, "", value, usage)
-}
-
-// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash.
-func StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string {
- return CommandLine.StringToStringP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/uint.go b/vendor/github.com/spf13/pflag/uint.go
deleted file mode 100644
index dcbc2b7..0000000
--- a/vendor/github.com/spf13/pflag/uint.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- uint Value
-type uintValue uint
-
-func newUintValue(val uint, p *uint) *uintValue {
- *p = val
- return (*uintValue)(p)
-}
-
-func (i *uintValue) Set(s string) error {
- v, err := strconv.ParseUint(s, 0, 64)
- *i = uintValue(v)
- return err
-}
-
-func (i *uintValue) Type() string {
- return "uint"
-}
-
-func (i *uintValue) String() string { return strconv.FormatUint(uint64(*i), 10) }
-
-func uintConv(sval string) (interface{}, error) {
- v, err := strconv.ParseUint(sval, 0, 0)
- if err != nil {
- return 0, err
- }
- return uint(v), nil
-}
-
-// GetUint return the uint value of a flag with the given name
-func (f *FlagSet) GetUint(name string) (uint, error) {
- val, err := f.getFlagType(name, "uint", uintConv)
- if err != nil {
- return 0, err
- }
- return val.(uint), nil
-}
-
-// UintVar defines a uint flag with specified name, default value, and usage string.
-// The argument p points to a uint variable in which to store the value of the flag.
-func (f *FlagSet) UintVar(p *uint, name string, value uint, usage string) {
- f.VarP(newUintValue(value, p), name, "", usage)
-}
-
-// UintVarP is like UintVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) UintVarP(p *uint, name, shorthand string, value uint, usage string) {
- f.VarP(newUintValue(value, p), name, shorthand, usage)
-}
-
-// UintVar defines a uint flag with specified name, default value, and usage string.
-// The argument p points to a uint variable in which to store the value of the flag.
-func UintVar(p *uint, name string, value uint, usage string) {
- CommandLine.VarP(newUintValue(value, p), name, "", usage)
-}
-
-// UintVarP is like UintVar, but accepts a shorthand letter that can be used after a single dash.
-func UintVarP(p *uint, name, shorthand string, value uint, usage string) {
- CommandLine.VarP(newUintValue(value, p), name, shorthand, usage)
-}
-
-// Uint defines a uint flag with specified name, default value, and usage string.
-// The return value is the address of a uint variable that stores the value of the flag.
-func (f *FlagSet) Uint(name string, value uint, usage string) *uint {
- p := new(uint)
- f.UintVarP(p, name, "", value, usage)
- return p
-}
-
-// UintP is like Uint, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) UintP(name, shorthand string, value uint, usage string) *uint {
- p := new(uint)
- f.UintVarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Uint defines a uint flag with specified name, default value, and usage string.
-// The return value is the address of a uint variable that stores the value of the flag.
-func Uint(name string, value uint, usage string) *uint {
- return CommandLine.UintP(name, "", value, usage)
-}
-
-// UintP is like Uint, but accepts a shorthand letter that can be used after a single dash.
-func UintP(name, shorthand string, value uint, usage string) *uint {
- return CommandLine.UintP(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/uint16.go b/vendor/github.com/spf13/pflag/uint16.go
deleted file mode 100644
index 7e9914e..0000000
--- a/vendor/github.com/spf13/pflag/uint16.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- uint16 value
-type uint16Value uint16
-
-func newUint16Value(val uint16, p *uint16) *uint16Value {
- *p = val
- return (*uint16Value)(p)
-}
-
-func (i *uint16Value) Set(s string) error {
- v, err := strconv.ParseUint(s, 0, 16)
- *i = uint16Value(v)
- return err
-}
-
-func (i *uint16Value) Type() string {
- return "uint16"
-}
-
-func (i *uint16Value) String() string { return strconv.FormatUint(uint64(*i), 10) }
-
-func uint16Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseUint(sval, 0, 16)
- if err != nil {
- return 0, err
- }
- return uint16(v), nil
-}
-
-// GetUint16 return the uint16 value of a flag with the given name
-func (f *FlagSet) GetUint16(name string) (uint16, error) {
- val, err := f.getFlagType(name, "uint16", uint16Conv)
- if err != nil {
- return 0, err
- }
- return val.(uint16), nil
-}
-
-// Uint16Var defines a uint16 flag with specified name, default value, and usage string.
-// The argument p points to a uint16 variable in which to store the value of the flag.
-func (f *FlagSet) Uint16Var(p *uint16, name string, value uint16, usage string) {
- f.VarP(newUint16Value(value, p), name, "", usage)
-}
-
-// Uint16VarP is like Uint16Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) {
- f.VarP(newUint16Value(value, p), name, shorthand, usage)
-}
-
-// Uint16Var defines a uint16 flag with specified name, default value, and usage string.
-// The argument p points to a uint16 variable in which to store the value of the flag.
-func Uint16Var(p *uint16, name string, value uint16, usage string) {
- CommandLine.VarP(newUint16Value(value, p), name, "", usage)
-}
-
-// Uint16VarP is like Uint16Var, but accepts a shorthand letter that can be used after a single dash.
-func Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) {
- CommandLine.VarP(newUint16Value(value, p), name, shorthand, usage)
-}
-
-// Uint16 defines a uint16 flag with specified name, default value, and usage string.
-// The return value is the address of a uint16 variable that stores the value of the flag.
-func (f *FlagSet) Uint16(name string, value uint16, usage string) *uint16 {
- p := new(uint16)
- f.Uint16VarP(p, name, "", value, usage)
- return p
-}
-
-// Uint16P is like Uint16, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint16P(name, shorthand string, value uint16, usage string) *uint16 {
- p := new(uint16)
- f.Uint16VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Uint16 defines a uint16 flag with specified name, default value, and usage string.
-// The return value is the address of a uint16 variable that stores the value of the flag.
-func Uint16(name string, value uint16, usage string) *uint16 {
- return CommandLine.Uint16P(name, "", value, usage)
-}
-
-// Uint16P is like Uint16, but accepts a shorthand letter that can be used after a single dash.
-func Uint16P(name, shorthand string, value uint16, usage string) *uint16 {
- return CommandLine.Uint16P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/uint32.go b/vendor/github.com/spf13/pflag/uint32.go
deleted file mode 100644
index d802453..0000000
--- a/vendor/github.com/spf13/pflag/uint32.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- uint32 value
-type uint32Value uint32
-
-func newUint32Value(val uint32, p *uint32) *uint32Value {
- *p = val
- return (*uint32Value)(p)
-}
-
-func (i *uint32Value) Set(s string) error {
- v, err := strconv.ParseUint(s, 0, 32)
- *i = uint32Value(v)
- return err
-}
-
-func (i *uint32Value) Type() string {
- return "uint32"
-}
-
-func (i *uint32Value) String() string { return strconv.FormatUint(uint64(*i), 10) }
-
-func uint32Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseUint(sval, 0, 32)
- if err != nil {
- return 0, err
- }
- return uint32(v), nil
-}
-
-// GetUint32 return the uint32 value of a flag with the given name
-func (f *FlagSet) GetUint32(name string) (uint32, error) {
- val, err := f.getFlagType(name, "uint32", uint32Conv)
- if err != nil {
- return 0, err
- }
- return val.(uint32), nil
-}
-
-// Uint32Var defines a uint32 flag with specified name, default value, and usage string.
-// The argument p points to a uint32 variable in which to store the value of the flag.
-func (f *FlagSet) Uint32Var(p *uint32, name string, value uint32, usage string) {
- f.VarP(newUint32Value(value, p), name, "", usage)
-}
-
-// Uint32VarP is like Uint32Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) {
- f.VarP(newUint32Value(value, p), name, shorthand, usage)
-}
-
-// Uint32Var defines a uint32 flag with specified name, default value, and usage string.
-// The argument p points to a uint32 variable in which to store the value of the flag.
-func Uint32Var(p *uint32, name string, value uint32, usage string) {
- CommandLine.VarP(newUint32Value(value, p), name, "", usage)
-}
-
-// Uint32VarP is like Uint32Var, but accepts a shorthand letter that can be used after a single dash.
-func Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) {
- CommandLine.VarP(newUint32Value(value, p), name, shorthand, usage)
-}
-
-// Uint32 defines a uint32 flag with specified name, default value, and usage string.
-// The return value is the address of a uint32 variable that stores the value of the flag.
-func (f *FlagSet) Uint32(name string, value uint32, usage string) *uint32 {
- p := new(uint32)
- f.Uint32VarP(p, name, "", value, usage)
- return p
-}
-
-// Uint32P is like Uint32, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint32P(name, shorthand string, value uint32, usage string) *uint32 {
- p := new(uint32)
- f.Uint32VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Uint32 defines a uint32 flag with specified name, default value, and usage string.
-// The return value is the address of a uint32 variable that stores the value of the flag.
-func Uint32(name string, value uint32, usage string) *uint32 {
- return CommandLine.Uint32P(name, "", value, usage)
-}
-
-// Uint32P is like Uint32, but accepts a shorthand letter that can be used after a single dash.
-func Uint32P(name, shorthand string, value uint32, usage string) *uint32 {
- return CommandLine.Uint32P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/uint64.go b/vendor/github.com/spf13/pflag/uint64.go
deleted file mode 100644
index f62240f..0000000
--- a/vendor/github.com/spf13/pflag/uint64.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- uint64 Value
-type uint64Value uint64
-
-func newUint64Value(val uint64, p *uint64) *uint64Value {
- *p = val
- return (*uint64Value)(p)
-}
-
-func (i *uint64Value) Set(s string) error {
- v, err := strconv.ParseUint(s, 0, 64)
- *i = uint64Value(v)
- return err
-}
-
-func (i *uint64Value) Type() string {
- return "uint64"
-}
-
-func (i *uint64Value) String() string { return strconv.FormatUint(uint64(*i), 10) }
-
-func uint64Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseUint(sval, 0, 64)
- if err != nil {
- return 0, err
- }
- return uint64(v), nil
-}
-
-// GetUint64 return the uint64 value of a flag with the given name
-func (f *FlagSet) GetUint64(name string) (uint64, error) {
- val, err := f.getFlagType(name, "uint64", uint64Conv)
- if err != nil {
- return 0, err
- }
- return val.(uint64), nil
-}
-
-// Uint64Var defines a uint64 flag with specified name, default value, and usage string.
-// The argument p points to a uint64 variable in which to store the value of the flag.
-func (f *FlagSet) Uint64Var(p *uint64, name string, value uint64, usage string) {
- f.VarP(newUint64Value(value, p), name, "", usage)
-}
-
-// Uint64VarP is like Uint64Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) {
- f.VarP(newUint64Value(value, p), name, shorthand, usage)
-}
-
-// Uint64Var defines a uint64 flag with specified name, default value, and usage string.
-// The argument p points to a uint64 variable in which to store the value of the flag.
-func Uint64Var(p *uint64, name string, value uint64, usage string) {
- CommandLine.VarP(newUint64Value(value, p), name, "", usage)
-}
-
-// Uint64VarP is like Uint64Var, but accepts a shorthand letter that can be used after a single dash.
-func Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) {
- CommandLine.VarP(newUint64Value(value, p), name, shorthand, usage)
-}
-
-// Uint64 defines a uint64 flag with specified name, default value, and usage string.
-// The return value is the address of a uint64 variable that stores the value of the flag.
-func (f *FlagSet) Uint64(name string, value uint64, usage string) *uint64 {
- p := new(uint64)
- f.Uint64VarP(p, name, "", value, usage)
- return p
-}
-
-// Uint64P is like Uint64, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint64P(name, shorthand string, value uint64, usage string) *uint64 {
- p := new(uint64)
- f.Uint64VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Uint64 defines a uint64 flag with specified name, default value, and usage string.
-// The return value is the address of a uint64 variable that stores the value of the flag.
-func Uint64(name string, value uint64, usage string) *uint64 {
- return CommandLine.Uint64P(name, "", value, usage)
-}
-
-// Uint64P is like Uint64, but accepts a shorthand letter that can be used after a single dash.
-func Uint64P(name, shorthand string, value uint64, usage string) *uint64 {
- return CommandLine.Uint64P(name, shorthand, value, usage)
-}
diff --git a/vendor/github.com/spf13/pflag/uint8.go b/vendor/github.com/spf13/pflag/uint8.go
deleted file mode 100644
index bb0e83c..0000000
--- a/vendor/github.com/spf13/pflag/uint8.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package pflag
-
-import "strconv"
-
-// -- uint8 Value
-type uint8Value uint8
-
-func newUint8Value(val uint8, p *uint8) *uint8Value {
- *p = val
- return (*uint8Value)(p)
-}
-
-func (i *uint8Value) Set(s string) error {
- v, err := strconv.ParseUint(s, 0, 8)
- *i = uint8Value(v)
- return err
-}
-
-func (i *uint8Value) Type() string {
- return "uint8"
-}
-
-func (i *uint8Value) String() string { return strconv.FormatUint(uint64(*i), 10) }
-
-func uint8Conv(sval string) (interface{}, error) {
- v, err := strconv.ParseUint(sval, 0, 8)
- if err != nil {
- return 0, err
- }
- return uint8(v), nil
-}
-
-// GetUint8 return the uint8 value of a flag with the given name
-func (f *FlagSet) GetUint8(name string) (uint8, error) {
- val, err := f.getFlagType(name, "uint8", uint8Conv)
- if err != nil {
- return 0, err
- }
- return val.(uint8), nil
-}
-
-// Uint8Var defines a uint8 flag with specified name, default value, and usage string.
-// The argument p points to a uint8 variable in which to store the value of the flag.
-func (f *FlagSet) Uint8Var(p *uint8, name string, value uint8, usage string) {
- f.VarP(newUint8Value(value, p), name, "", usage)
-}
-
-// Uint8VarP is like Uint8Var, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) {
- f.VarP(newUint8Value(value, p), name, shorthand, usage)
-}
-
-// Uint8Var defines a uint8 flag with specified name, default value, and usage string.
-// The argument p points to a uint8 variable in which to store the value of the flag.
-func Uint8Var(p *uint8, name string, value uint8, usage string) {
- CommandLine.VarP(newUint8Value(value, p), name, "", usage)
-}
-
-// Uint8VarP is like Uint8Var, but accepts a shorthand letter that can be used after a single dash.
-func Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) {
- CommandLine.VarP(newUint8Value(value, p), name, shorthand, usage)
-}
-
-// Uint8 defines a uint8 flag with specified name, default value, and usage string.
-// The return value is the address of a uint8 variable that stores the value of the flag.
-func (f *FlagSet) Uint8(name string, value uint8, usage string) *uint8 {
- p := new(uint8)
- f.Uint8VarP(p, name, "", value, usage)
- return p
-}
-
-// Uint8P is like Uint8, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) Uint8P(name, shorthand string, value uint8, usage string) *uint8 {
- p := new(uint8)
- f.Uint8VarP(p, name, shorthand, value, usage)
- return p
-}
-
-// Uint8 defines a uint8 flag with specified name, default value, and usage string.
-// The return value is the address of a uint8 variable that stores the value of the flag.
-func Uint8(name string, value uint8, usage string) *uint8 {
- return CommandLine.Uint8P(name, "", value, usage)
-}
-
-// Uint8P is like Uint8, but accepts a shorthand letter that can be used after a single dash.
-func Uint8P(name, shorthand string, value uint8, usage string) *uint8 {
- return CommandLine.Uint8P(name, shorthand, value, usage)
-}
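
The FlagSet-scoped variants removed here can also be used without touching the global CommandLine. A small hypothetical sketch (flag-set name, flag name, and arguments are illustrative; it assumes pflag's stdlib-style NewFlagSet and ContinueOnError, which are not part of this diff) built on the Uint8 and GetUint8 helpers defined above:

    package main

    import (
        "fmt"

        flag "github.com/spf13/pflag"
    )

    func main() {
        // Hypothetical stand-alone flag set; names and values are illustrative.
        fs := flag.NewFlagSet("worker", flag.ContinueOnError)
        fs.Uint8("retries", 3, "number of retry attempts")

        if err := fs.Parse([]string{"--retries=5"}); err != nil {
            fmt.Println("parse error:", err)
            return
        }

        // GetUint8 looks the flag up by name and converts its stored value.
        n, err := fs.GetUint8("retries")
        fmt.Println(n, err) // 5 <nil>
    }
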
diff --git a/vendor/github.com/spf13/pflag/uint_slice.go b/vendor/github.com/spf13/pflag/uint_slice.go
deleted file mode 100644
index edd94c6..0000000
--- a/vendor/github.com/spf13/pflag/uint_slice.go
+++ /dev/null
@@ -1,126 +0,0 @@
-package pflag
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-// -- uintSlice Value
-type uintSliceValue struct {
- value *[]uint
- changed bool
-}
-
-func newUintSliceValue(val []uint, p *[]uint) *uintSliceValue {
- uisv := new(uintSliceValue)
- uisv.value = p
- *uisv.value = val
- return uisv
-}
-
-func (s *uintSliceValue) Set(val string) error {
- ss := strings.Split(val, ",")
- out := make([]uint, len(ss))
- for i, d := range ss {
- u, err := strconv.ParseUint(d, 10, 0)
- if err != nil {
- return err
- }
- out[i] = uint(u)
- }
- if !s.changed {
- *s.value = out
- } else {
- *s.value = append(*s.value, out...)
- }
- s.changed = true
- return nil
-}
-
-func (s *uintSliceValue) Type() string {
- return "uintSlice"
-}
-
-func (s *uintSliceValue) String() string {
- out := make([]string, len(*s.value))
- for i, d := range *s.value {
- out[i] = fmt.Sprintf("%d", d)
- }
- return "[" + strings.Join(out, ",") + "]"
-}
-
-func uintSliceConv(val string) (interface{}, error) {
- val = strings.Trim(val, "[]")
- // Empty string would cause a slice with one (empty) entry
- if len(val) == 0 {
- return []uint{}, nil
- }
- ss := strings.Split(val, ",")
- out := make([]uint, len(ss))
- for i, d := range ss {
- u, err := strconv.ParseUint(d, 10, 0)
- if err != nil {
- return nil, err
- }
- out[i] = uint(u)
- }
- return out, nil
-}
-
-// GetUintSlice returns the []uint value of a flag with the given name.
-func (f *FlagSet) GetUintSlice(name string) ([]uint, error) {
- val, err := f.getFlagType(name, "uintSlice", uintSliceConv)
- if err != nil {
- return []uint{}, err
- }
- return val.([]uint), nil
-}
-
-// UintSliceVar defines a uintSlice flag with specified name, default value, and usage string.
-// The argument p points to a []uint variable in which to store the value of the flag.
-func (f *FlagSet) UintSliceVar(p *[]uint, name string, value []uint, usage string) {
- f.VarP(newUintSliceValue(value, p), name, "", usage)
-}
-
-// UintSliceVarP is like UintSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) UintSliceVarP(p *[]uint, name, shorthand string, value []uint, usage string) {
- f.VarP(newUintSliceValue(value, p), name, shorthand, usage)
-}
-
-// UintSliceVar defines a uint[] flag with specified name, default value, and usage string.
-// The argument p points to a uint[] variable in which to store the value of the flag.
-func UintSliceVar(p *[]uint, name string, value []uint, usage string) {
- CommandLine.VarP(newUintSliceValue(value, p), name, "", usage)
-}
-
-// UintSliceVarP is like the UintSliceVar, but accepts a shorthand letter that can be used after a single dash.
-func UintSliceVarP(p *[]uint, name, shorthand string, value []uint, usage string) {
- CommandLine.VarP(newUintSliceValue(value, p), name, shorthand, usage)
-}
-
-// UintSlice defines a []uint flag with specified name, default value, and usage string.
-// The return value is the address of a []uint variable that stores the value of the flag.
-func (f *FlagSet) UintSlice(name string, value []uint, usage string) *[]uint {
- p := []uint{}
- f.UintSliceVarP(&p, name, "", value, usage)
- return &p
-}
-
-// UintSliceP is like UintSlice, but accepts a shorthand letter that can be used after a single dash.
-func (f *FlagSet) UintSliceP(name, shorthand string, value []uint, usage string) *[]uint {
- p := []uint{}
- f.UintSliceVarP(&p, name, shorthand, value, usage)
- return &p
-}
-
-// UintSlice defines a []uint flag with specified name, default value, and usage string.
-// The return value is the address of a []uint variable that stores the value of the flag.
-func UintSlice(name string, value []uint, usage string) *[]uint {
- return CommandLine.UintSliceP(name, "", value, usage)
-}
-
-// UintSliceP is like UintSlice, but accepts a shorthand letter that can be used after a single dash.
-func UintSliceP(name, shorthand string, value []uint, usage string) *[]uint {
- return CommandLine.UintSliceP(name, shorthand, value, usage)
-}
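
Unlike the scalar helpers, the uintSlice value removed above parses comma-separated input, replaces the default on the first occurrence of the flag, and appends on later occurrences (see uintSliceValue.Set). A hypothetical usage sketch, with the flag name and arguments chosen purely for illustration:

    package main

    import (
        "fmt"

        flag "github.com/spf13/pflag"
    )

    func main() {
        // Hypothetical flag; the default []uint{1, 2} is replaced by the
        // first --ids value, and subsequent --ids occurrences append.
        ids := flag.UintSlice("ids", []uint{1, 2}, "numeric IDs to process")
        flag.Parse()

        // e.g. invoking the program with --ids=3,4 --ids=5 prints [3 4 5]
        fmt.Println(*ids)
    }
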
diff --git a/vendor/github.com/stretchr/testify/LICENSE b/vendor/github.com/stretchr/testify/LICENSE
new file mode 100644
index 0000000..f38ec59
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2012-2018 Mat Ryer and Tyler Bunnell
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go
new file mode 100644
index 0000000..aa1c2b9
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go
@@ -0,0 +1,484 @@
+/*
+* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
+* THIS FILE MUST NOT BE EDITED BY HAND
+ */
+
+package assert
+
+import (
+ http "net/http"
+ url "net/url"
+ time "time"
+)
+
+// Conditionf uses a Comparison to assert a complex condition.
+func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Condition(t, comp, append([]interface{}{msg}, args...)...)
+}
+
+// Containsf asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted")
+// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted")
+// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted")
+func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Contains(t, s, contains, append([]interface{}{msg}, args...)...)
+}
+
+// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists.
+func DirExistsf(t TestingT, path string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return DirExists(t, path, append([]interface{}{msg}, args...)...)
+}
+
+// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted")
+func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return ElementsMatch(t, listA, listB, append([]interface{}{msg}, args...)...)
+}
+
+// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// assert.Emptyf(t, obj, "error message %s", "formatted")
+func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Empty(t, object, append([]interface{}{msg}, args...)...)
+}
+
+// Equalf asserts that two objects are equal.
+//
+// assert.Equalf(t, 123, 123, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Equal(t, expected, actual, append([]interface{}{msg}, args...)...)
+}
+
+// EqualErrorf asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted")
+func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...)
+}
+
+// EqualValuesf asserts that two objects are equal or convertable to the same types
+// and equal.
+//
+// assert.EqualValuesf(t, uint32(123, "error message %s", "formatted"), int32(123))
+func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualValues(t, expected, actual, append([]interface{}{msg}, args...)...)
+}
+
+// Errorf asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.Errorf(t, err, "error message %s", "formatted") {
+// assert.Equal(t, expectedErrorf, err)
+// }
+func Errorf(t TestingT, err error, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Error(t, err, append([]interface{}{msg}, args...)...)
+}
+
+// Exactlyf asserts that two objects are equal in value and type.
+//
+// assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123))
+func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Exactly(t, expected, actual, append([]interface{}{msg}, args...)...)
+}
+
+// Failf reports a failure through
+func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Fail(t, failureMessage, append([]interface{}{msg}, args...)...)
+}
+
+// FailNowf fails test
+func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return FailNow(t, failureMessage, append([]interface{}{msg}, args...)...)
+}
+
+// Falsef asserts that the specified value is false.
+//
+// assert.Falsef(t, myBool, "error message %s", "formatted")
+func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return False(t, value, append([]interface{}{msg}, args...)...)
+}
+
+// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return FileExists(t, path, append([]interface{}{msg}, args...)...)
+}
+
+// HTTPBodyContainsf asserts that a specified handler returns a
+// body that contains a string.
+//
+// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...)
+}
+
+// HTTPBodyNotContainsf asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyNotContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...)
+}
+
+// HTTPErrorf asserts that a specified handler returns an error status code.
+//
+// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false).
+func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPError(t, handler, method, url, values, append([]interface{}{msg}, args...)...)
+}
+
+// HTTPRedirectf asserts that a specified handler returns a redirect status code.
+//
+// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false).
+func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPRedirect(t, handler, method, url, values, append([]interface{}{msg}, args...)...)
+}
+
+// HTTPSuccessf asserts that a specified handler returns a success status code.
+//
+// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPSuccess(t, handler, method, url, values, append([]interface{}{msg}, args...)...)
+}
+
+// Implementsf asserts that an object is implemented by the specified interface.
+//
+// assert.Implementsf(t, (*MyInterface, "error message %s", "formatted")(nil), new(MyObject))
+func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Implements(t, interfaceObject, object, append([]interface{}{msg}, args...)...)
+}
+
+// InDeltaf asserts that the two numerals are within delta of each other.
+//
+// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01)
+func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDelta(t, expected, actual, delta, append([]interface{}{msg}, args...)...)
+}
+
+// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaMapValues(t, expected, actual, delta, append([]interface{}{msg}, args...)...)
+}
+
+// InDeltaSlicef is the same as InDelta, except it compares two slices.
+func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaSlice(t, expected, actual, delta, append([]interface{}{msg}, args...)...)
+}
+
+// InEpsilonf asserts that expected and actual have a relative error less than epsilon
+func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilon(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...)
+}
+
+// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices.
+func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...)
+}
+
+// IsTypef asserts that the specified objects are of the same type.
+func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return IsType(t, expectedType, object, append([]interface{}{msg}, args...)...)
+}
+
+// JSONEqf asserts that two JSON strings are equivalent.
+//
+// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted")
+func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return JSONEq(t, expected, actual, append([]interface{}{msg}, args...)...)
+}
+
+// Lenf asserts that the specified object has specific length.
+// Lenf also fails if the object has a type that len() not accept.
+//
+// assert.Lenf(t, mySlice, 3, "error message %s", "formatted")
+func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Len(t, object, length, append([]interface{}{msg}, args...)...)
+}
+
+// Nilf asserts that the specified object is nil.
+//
+// assert.Nilf(t, err, "error message %s", "formatted")
+func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Nil(t, object, append([]interface{}{msg}, args...)...)
+}
+
+// NoErrorf asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.NoErrorf(t, err, "error message %s", "formatted") {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NoError(t, err, append([]interface{}{msg}, args...)...)
+}
+
+// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted")
+// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted")
+// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted")
+func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotContains(t, s, contains, append([]interface{}{msg}, args...)...)
+}
+
+// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// if assert.NotEmptyf(t, obj, "error message %s", "formatted") {
+// assert.Equal(t, "two", obj[1])
+// }
+func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEmpty(t, object, append([]interface{}{msg}, args...)...)
+}
+
+// NotEqualf asserts that the specified values are NOT equal.
+//
+// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEqual(t, expected, actual, append([]interface{}{msg}, args...)...)
+}
+
+// NotNilf asserts that the specified object is not nil.
+//
+// assert.NotNilf(t, err, "error message %s", "formatted")
+func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotNil(t, object, append([]interface{}{msg}, args...)...)
+}
+
+// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted")
+func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotPanics(t, f, append([]interface{}{msg}, args...)...)
+}
+
+// NotRegexpf asserts that a specified regexp does not match a string.
+//
+// assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting")
+// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted")
+func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotRegexp(t, rx, str, append([]interface{}{msg}, args...)...)
+}
+
+// NotSubsetf asserts that the specified list(array, slice...) contains not all
+// elements given in the specified subset(array, slice...).
+//
+// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
+func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotSubset(t, list, subset, append([]interface{}{msg}, args...)...)
+}
+
+// NotZerof asserts that i is not the zero value for its type.
+func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotZero(t, i, append([]interface{}{msg}, args...)...)
+}
+
+// Panicsf asserts that the code inside the specified PanicTestFunc panics.
+//
+// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted")
+func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Panics(t, f, append([]interface{}{msg}, args...)...)
+}
+
+// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted")
+func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...)
+}
+
+// Regexpf asserts that a specified regexp matches a string.
+//
+// assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting")
+// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted")
+func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Regexp(t, rx, str, append([]interface{}{msg}, args...)...)
+}
+
+// Subsetf asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
+func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Subset(t, list, subset, append([]interface{}{msg}, args...)...)
+}
+
+// Truef asserts that the specified value is true.
+//
+// assert.Truef(t, myBool, "error message %s", "formatted")
+func Truef(t TestingT, value bool, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return True(t, value, append([]interface{}{msg}, args...)...)
+}
+
+// WithinDurationf asserts that the two times are within duration delta of each other.
+//
+// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted")
+func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return WithinDuration(t, expected, actual, delta, append([]interface{}{msg}, args...)...)
+}
+
+// Zerof asserts that i is the zero value for its type.
+func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ return Zero(t, i, append([]interface{}{msg}, args...)...)
+}
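
The generated file above only wraps each base assertion with a leading format string, so the formatted variants read naturally inside ordinary tests. A hypothetical example of calling them (test and variable names are illustrative, not taken from this repository):

    package example_test

    import (
        "testing"

        "github.com/stretchr/testify/assert"
    )

    func TestFormattedAssertions(t *testing.T) {
        got := 2 + 2

        // Each "...f" variant takes a format string plus args and forwards
        // them to the base assertion as the failure message.
        assert.Equalf(t, 4, got, "unexpected sum for input %d", 2)
        assert.Truef(t, got > 0, "expected a positive result, got %d", got)
    }
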
diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl
new file mode 100644
index 0000000..d2bb0b8
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl
@@ -0,0 +1,5 @@
+{{.CommentFormat}}
+func {{.DocInfo.Name}}f(t TestingT, {{.ParamsFormat}}) bool {
+ if h, ok := t.(tHelper); ok { h.Helper() }
+ return {{.DocInfo.Name}}(t, {{.ForwardedParamsFormat}})
+}
diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go
new file mode 100644
index 0000000..de39f79
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go
@@ -0,0 +1,956 @@
+/*
+* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
+* THIS FILE MUST NOT BE EDITED BY HAND
+ */
+
+package assert
+
+import (
+ http "net/http"
+ url "net/url"
+ time "time"
+)
+
+// Condition uses a Comparison to assert a complex condition.
+func (a *Assertions) Condition(comp Comparison, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Condition(a.t, comp, msgAndArgs...)
+}
+
+// Conditionf uses a Comparison to assert a complex condition.
+func (a *Assertions) Conditionf(comp Comparison, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Conditionf(a.t, comp, msg, args...)
+}
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// a.Contains("Hello World", "World")
+// a.Contains(["Hello", "World"], "World")
+// a.Contains({"Hello": "World"}, "Hello")
+func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Contains(a.t, s, contains, msgAndArgs...)
+}
+
+// Containsf asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// a.Containsf("Hello World", "World", "error message %s", "formatted")
+// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted")
+// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted")
+func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Containsf(a.t, s, contains, msg, args...)
+}
+
+// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists.
+func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return DirExists(a.t, path, msgAndArgs...)
+}
+
+// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists.
+func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return DirExistsf(a.t, path, msg, args...)
+}
+
+// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2])
+func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return ElementsMatch(a.t, listA, listB, msgAndArgs...)
+}
+
+// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted")
+func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return ElementsMatchf(a.t, listA, listB, msg, args...)
+}
+
+// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// a.Empty(obj)
+func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Empty(a.t, object, msgAndArgs...)
+}
+
+// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// a.Emptyf(obj, "error message %s", "formatted")
+func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Emptyf(a.t, object, msg, args...)
+}
+
+// Equal asserts that two objects are equal.
+//
+// a.Equal(123, 123)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Equal(a.t, expected, actual, msgAndArgs...)
+}
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// a.EqualError(err, expectedErrorString)
+func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualError(a.t, theError, errString, msgAndArgs...)
+}
+
+// EqualErrorf asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted")
+func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualErrorf(a.t, theError, errString, msg, args...)
+}
+
+// EqualValues asserts that two objects are equal or convertable to the same types
+// and equal.
+//
+// a.EqualValues(uint32(123), int32(123))
+func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualValues(a.t, expected, actual, msgAndArgs...)
+}
+
+// EqualValuesf asserts that two objects are equal or convertable to the same types
+// and equal.
+//
+// a.EqualValuesf(uint32(123, "error message %s", "formatted"), int32(123))
+func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return EqualValuesf(a.t, expected, actual, msg, args...)
+}
+
+// Equalf asserts that two objects are equal.
+//
+// a.Equalf(123, 123, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Equalf(a.t, expected, actual, msg, args...)
+}
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.Error(err) {
+// assert.Equal(t, expectedError, err)
+// }
+func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Error(a.t, err, msgAndArgs...)
+}
+
+// Errorf asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.Errorf(err, "error message %s", "formatted") {
+// assert.Equal(t, expectedErrorf, err)
+// }
+func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Errorf(a.t, err, msg, args...)
+}
+
+// Exactly asserts that two objects are equal in value and type.
+//
+// a.Exactly(int32(123), int64(123))
+func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Exactly(a.t, expected, actual, msgAndArgs...)
+}
+
+// Exactlyf asserts that two objects are equal in value and type.
+//
+// a.Exactlyf(int32(123, "error message %s", "formatted"), int64(123))
+func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Exactlyf(a.t, expected, actual, msg, args...)
+}
+
+// Fail reports a failure through
+func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Fail(a.t, failureMessage, msgAndArgs...)
+}
+
+// FailNow fails test
+func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return FailNow(a.t, failureMessage, msgAndArgs...)
+}
+
+// FailNowf fails test
+func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return FailNowf(a.t, failureMessage, msg, args...)
+}
+
+// Failf reports a failure through
+func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Failf(a.t, failureMessage, msg, args...)
+}
+
+// False asserts that the specified value is false.
+//
+// a.False(myBool)
+func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return False(a.t, value, msgAndArgs...)
+}
+
+// Falsef asserts that the specified value is false.
+//
+// a.Falsef(myBool, "error message %s", "formatted")
+func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Falsef(a.t, value, msg, args...)
+}
+
+// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return FileExists(a.t, path, msgAndArgs...)
+}
+
+// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return FileExistsf(a.t, path, msg, args...)
+}
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+//
+// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...)
+}
+
+// HTTPBodyContainsf asserts that a specified handler returns a
+// body that contains a string.
+//
+// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...)
+}
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...)
+}
+
+// HTTPBodyNotContainsf asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...)
+}
+
+// HTTPError asserts that a specified handler returns an error status code.
+//
+// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPError(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPErrorf asserts that a specified handler returns an error status code.
+//
+// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false).
+func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPErrorf(a.t, handler, method, url, values, msg, args...)
+}
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+//
+// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPRedirectf asserts that a specified handler returns a redirect status code.
+//
+// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false).
+func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPRedirectf(a.t, handler, method, url, values, msg, args...)
+}
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+//
+// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPSuccessf asserts that a specified handler returns a success status code.
+//
+// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return HTTPSuccessf(a.t, handler, method, url, values, msg, args...)
+}
+
+// Implements asserts that an object is implemented by the specified interface.
+//
+// a.Implements((*MyInterface)(nil), new(MyObject))
+func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Implements(a.t, interfaceObject, object, msgAndArgs...)
+}
+
+// Implementsf asserts that an object is implemented by the specified interface.
+//
+// a.Implementsf((*MyInterface, "error message %s", "formatted")(nil), new(MyObject))
+func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Implementsf(a.t, interfaceObject, object, msg, args...)
+}
+
+// InDelta asserts that the two numerals are within delta of each other.
+//
+// a.InDelta(math.Pi, (22 / 7.0), 0.01)
+func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDelta(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...)
+}
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaSlicef is the same as InDelta, except it compares two slices.
+func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaSlicef(a.t, expected, actual, delta, msg, args...)
+}
+
+// InDeltaf asserts that the two numerals are within delta of each other.
+//
+// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01)
+func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InDeltaf(a.t, expected, actual, delta, msg, args...)
+}
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...)
+}
+
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
+func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...)
+}
+
+// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices.
+func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...)
+}
+
+// InEpsilonf asserts that expected and actual have a relative error less than epsilon
+func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return InEpsilonf(a.t, expected, actual, epsilon, msg, args...)
+}
+
+// IsType asserts that the specified objects are of the same type.
+func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return IsType(a.t, expectedType, object, msgAndArgs...)
+}
+
+// IsTypef asserts that the specified objects are of the same type.
+func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return IsTypef(a.t, expectedType, object, msg, args...)
+}
+
+// JSONEq asserts that two JSON strings are equivalent.
+//
+// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return JSONEq(a.t, expected, actual, msgAndArgs...)
+}
+
+// JSONEqf asserts that two JSON strings are equivalent.
+//
+// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted")
+func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return JSONEqf(a.t, expected, actual, msg, args...)
+}
+
+// Len asserts that the specified object has specific length.
+// Len also fails if the object has a type that len() not accept.
+//
+// a.Len(mySlice, 3)
+func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Len(a.t, object, length, msgAndArgs...)
+}
+
+// Lenf asserts that the specified object has specific length.
+// Lenf also fails if the object has a type that len() not accept.
+//
+// a.Lenf(mySlice, 3, "error message %s", "formatted")
+func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Lenf(a.t, object, length, msg, args...)
+}
+
+// Nil asserts that the specified object is nil.
+//
+// a.Nil(err)
+func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Nil(a.t, object, msgAndArgs...)
+}
+
+// Nilf asserts that the specified object is nil.
+//
+// a.Nilf(err, "error message %s", "formatted")
+func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Nilf(a.t, object, msg, args...)
+}
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.NoError(err) {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NoError(a.t, err, msgAndArgs...)
+}
+
+// NoErrorf asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.NoErrorf(err, "error message %s", "formatted") {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NoErrorf(a.t, err, msg, args...)
+}
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// a.NotContains("Hello World", "Earth")
+// a.NotContains(["Hello", "World"], "Earth")
+// a.NotContains({"Hello": "World"}, "Earth")
+func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotContains(a.t, s, contains, msgAndArgs...)
+}
+
+// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted")
+// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted")
+// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted")
+func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotContainsf(a.t, s, contains, msg, args...)
+}
+
+// NotEmpty asserts that the specified object is NOT empty, i.e. not nil, "", false, 0, or a
+// slice or channel with len == 0.
+//
+// if a.NotEmpty(obj) {
+// assert.Equal(t, "two", obj[1])
+// }
+func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEmpty(a.t, object, msgAndArgs...)
+}
+
+// NotEmptyf asserts that the specified object is NOT empty, i.e. not nil, "", false, 0, or a
+// slice or channel with len == 0.
+//
+// if a.NotEmptyf(obj, "error message %s", "formatted") {
+// assert.Equal(t, "two", obj[1])
+// }
+func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEmptyf(a.t, object, msg, args...)
+}
+
+// NotEqual asserts that the specified values are NOT equal.
+//
+// a.NotEqual(obj1, obj2)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEqual(a.t, expected, actual, msgAndArgs...)
+}
+
+// NotEqualf asserts that the specified values are NOT equal.
+//
+// a.NotEqualf(obj1, obj2, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotEqualf(a.t, expected, actual, msg, args...)
+}
+
+// NotNil asserts that the specified object is not nil.
+//
+// a.NotNil(err)
+func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotNil(a.t, object, msgAndArgs...)
+}
+
+// NotNilf asserts that the specified object is not nil.
+//
+// a.NotNilf(err, "error message %s", "formatted")
+func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotNilf(a.t, object, msg, args...)
+}
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// a.NotPanics(func(){ RemainCalm() })
+func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotPanics(a.t, f, msgAndArgs...)
+}
+
+// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted")
+func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotPanicsf(a.t, f, msg, args...)
+}
+
+// NotRegexp asserts that a specified regexp does not match a string.
+//
+// a.NotRegexp(regexp.MustCompile("starts"), "it's starting")
+// a.NotRegexp("^start", "it's not starting")
+func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotRegexp(a.t, rx, str, msgAndArgs...)
+}
+
+// NotRegexpf asserts that a specified regexp does not match a string.
+//
+// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted")
+// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted")
+func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotRegexpf(a.t, rx, str, msg, args...)
+}
+
+// NotSubset asserts that the specified list(array, slice...) does not contain all of the
+// elements given in the specified subset(array, slice...).
+//
+// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
+func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotSubset(a.t, list, subset, msgAndArgs...)
+}
+
+// NotSubsetf asserts that the specified list(array, slice...) does not contain all of the
+// elements given in the specified subset(array, slice...).
+//
+// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
+func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotSubsetf(a.t, list, subset, msg, args...)
+}
+
+// NotZero asserts that i is not the zero value for its type.
+func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotZero(a.t, i, msgAndArgs...)
+}
+
+// NotZerof asserts that i is not the zero value for its type.
+func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return NotZerof(a.t, i, msg, args...)
+}
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+//
+// a.Panics(func(){ GoCrazy() })
+func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Panics(a.t, f, msgAndArgs...)
+}
+
+// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// a.PanicsWithValue("crazy error", func(){ GoCrazy() })
+func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return PanicsWithValue(a.t, expected, f, msgAndArgs...)
+}
+
+// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted")
+func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return PanicsWithValuef(a.t, expected, f, msg, args...)
+}
+
+// Panicsf asserts that the code inside the specified PanicTestFunc panics.
+//
+// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted")
+func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Panicsf(a.t, f, msg, args...)
+}
+
+// Regexp asserts that a specified regexp matches a string.
+//
+// a.Regexp(regexp.MustCompile("start"), "it's starting")
+// a.Regexp("start...$", "it's not starting")
+func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Regexp(a.t, rx, str, msgAndArgs...)
+}
+
+// Regexpf asserts that a specified regexp matches a string.
+//
+// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted")
+// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted")
+func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Regexpf(a.t, rx, str, msg, args...)
+}
+
+// Subset asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
+func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Subset(a.t, list, subset, msgAndArgs...)
+}
+
+// Subsetf asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
+func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Subsetf(a.t, list, subset, msg, args...)
+}
+
+// True asserts that the specified value is true.
+//
+// a.True(myBool)
+func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return True(a.t, value, msgAndArgs...)
+}
+
+// Truef asserts that the specified value is true.
+//
+// a.Truef(myBool, "error message %s", "formatted")
+func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Truef(a.t, value, msg, args...)
+}
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+//
+// a.WithinDuration(time.Now(), time.Now(), 10*time.Second)
+func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return WithinDuration(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// WithinDurationf asserts that the two times are within duration delta of each other.
+//
+// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted")
+func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return WithinDurationf(a.t, expected, actual, delta, msg, args...)
+}
+
+// Zero asserts that i is the zero value for its type.
+func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Zero(a.t, i, msgAndArgs...)
+}
+
+// Zerof asserts that i is the zero value for its type.
+func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) bool {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ return Zerof(a.t, i, msg, args...)
+}
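
For orientation, the forwarded methods above are used through an Assertions value bound to the current test. A minimal sketch, assuming a standard *testing.T (the test and variable names below are illustrative only):

    package example_test

    import (
        "testing"

        "github.com/stretchr/testify/assert"
    )

    func TestForwardedAssertions(t *testing.T) {
        // New binds t once; every forwarded method reuses it via a.t.
        a := assert.New(t)

        a.Len([]int{1, 2, 3}, 3)              // forwards to assert.Len(t, ...)
        a.NotContains("Hello World", "Earth") // forwards to assert.NotContains(t, ...)
        a.NoErrorf(nil, "op failed: %s", "x") // forwards to assert.NoErrorf(t, ...)
    }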
diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl
new file mode 100644
index 0000000..188bb9e
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl
@@ -0,0 +1,5 @@
+{{.CommentWithoutT "a"}}
+func (a *Assertions) {{.DocInfo.Name}}({{.Params}}) bool {
+ if h, ok := a.t.(tHelper); ok { h.Helper() }
+ return {{.DocInfo.Name}}(a.t, {{.ForwardedParams}})
+}
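
The template above is what the //go:generate directive in forward_assertions.go expands once per exported assertion. For the base function True, the rendered wrapper comes out essentially like the True method earlier in assertion_forward.go; the doc comment text is produced by CommentWithoutT, so the rendering below is indicative only:

    // True asserts that the specified value is true.
    //
    //    a.True(myBool)
    func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool {
        if h, ok := a.t.(tHelper); ok { h.Helper() }
        return True(a.t, value, msgAndArgs...)
    }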
diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go
new file mode 100644
index 0000000..9bd4a80
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/assertions.go
@@ -0,0 +1,1416 @@
+package assert
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "os"
+ "reflect"
+ "regexp"
+ "runtime"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/davecgh/go-spew/spew"
+ "github.com/pmezard/go-difflib/difflib"
+)
+
+//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_format.go.tmpl
+
+// TestingT is an interface wrapper around *testing.T
+type TestingT interface {
+ Errorf(format string, args ...interface{})
+}
+
+// ComparisonAssertionFunc is a common function prototype when comparing two values. Can be useful
+// for table driven tests.
+type ComparisonAssertionFunc func(TestingT, interface{}, interface{}, ...interface{}) bool
+
+// ValueAssertionFunc is a common function prototype when validating a single value. Can be useful
+// for table driven tests.
+type ValueAssertionFunc func(TestingT, interface{}, ...interface{}) bool
+
+// BoolAssertionFunc is a common function prototype when validating a bool value. Can be useful
+// for table driven tests.
+type BoolAssertionFunc func(TestingT, bool, ...interface{}) bool
+
+// ErrorAssertionFunc is a common function prototype when validating an error value. Can be useful
+// for table driven tests.
+type ErrorAssertionFunc func(TestingT, error, ...interface{}) bool
+
+// Comparison a custom function that returns true on success and false on failure
+type Comparison func() (success bool)
+
+/*
+ Helper functions
+*/
+
+// ObjectsAreEqual determines if two objects are considered equal.
+//
+// This function does no assertion of any kind.
+func ObjectsAreEqual(expected, actual interface{}) bool {
+ if expected == nil || actual == nil {
+ return expected == actual
+ }
+
+ exp, ok := expected.([]byte)
+ if !ok {
+ return reflect.DeepEqual(expected, actual)
+ }
+
+ act, ok := actual.([]byte)
+ if !ok {
+ return false
+ }
+ if exp == nil || act == nil {
+ return exp == nil && act == nil
+ }
+ return bytes.Equal(exp, act)
+}
+
+// ObjectsAreEqualValues gets whether two objects are equal, or if their
+// values are equal.
+func ObjectsAreEqualValues(expected, actual interface{}) bool {
+ if ObjectsAreEqual(expected, actual) {
+ return true
+ }
+
+ actualType := reflect.TypeOf(actual)
+ if actualType == nil {
+ return false
+ }
+ expectedValue := reflect.ValueOf(expected)
+ if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
+ // Attempt comparison after type conversion
+ return reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual)
+ }
+
+ return false
+}
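
A few illustrative evaluations of the two helpers above (expression snippets, not a complete test):

    // ObjectsAreEqual special-cases []byte and otherwise relies on reflect.DeepEqual.
    ObjectsAreEqual([]byte("abc"), []byte("abc")) // true, compared with bytes.Equal
    ObjectsAreEqual(int32(10), int64(10))         // false: DeepEqual sees different types

    // ObjectsAreEqualValues additionally attempts a type conversion before comparing.
    ObjectsAreEqualValues(uint32(10), int32(10))  // true: uint32(10) converts to int32(10)
    ObjectsAreEqualValues("10", 10)               // false: string is not convertible to int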
+
+/* CallerInfo is necessary because the assert functions use the testing object
+internally, causing it to print the file:line of the assert method, rather than where
+the problem actually occurred in calling code.*/
+
+// CallerInfo returns an array of strings containing the file and line number
+// of each stack frame leading from the current test to the assert call that
+// failed.
+func CallerInfo() []string {
+
+ pc := uintptr(0)
+ file := ""
+ line := 0
+ ok := false
+ name := ""
+
+ callers := []string{}
+ for i := 0; ; i++ {
+ pc, file, line, ok = runtime.Caller(i)
+ if !ok {
+ // The breaks below failed to terminate the loop, and we ran off the
+ // end of the call stack.
+ break
+ }
+
+ // This is a huge edge case, but it will panic if this is the case, see #180
+ if file == "" {
+ break
+ }
+
+ f := runtime.FuncForPC(pc)
+ if f == nil {
+ break
+ }
+ name = f.Name()
+
+ // testing.tRunner is the standard library function that calls
+ // tests. Subtests are called directly by tRunner, without going through
+ // the Test/Benchmark/Example function that contains the t.Run calls, so
+ // with subtests we should break when we hit tRunner, without adding it
+ // to the list of callers.
+ if name == "testing.tRunner" {
+ break
+ }
+
+ parts := strings.Split(file, "/")
+ file = parts[len(parts)-1]
+ if len(parts) > 1 {
+ dir := parts[len(parts)-2]
+ if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" {
+ callers = append(callers, fmt.Sprintf("%s:%d", file, line))
+ }
+ }
+
+ // Drop the package
+ segments := strings.Split(name, ".")
+ name = segments[len(segments)-1]
+ if isTest(name, "Test") ||
+ isTest(name, "Benchmark") ||
+ isTest(name, "Example") {
+ break
+ }
+ }
+
+ return callers
+}
+
+// Stolen from the `go test` tool.
+// isTest tells whether name looks like a test (or benchmark, according to prefix).
+// It is a Test (say) if there is a character after Test that is not a lower-case letter.
+// We don't want TesticularCancer.
+func isTest(name, prefix string) bool {
+ if !strings.HasPrefix(name, prefix) {
+ return false
+ }
+ if len(name) == len(prefix) { // "Test" is ok
+ return true
+ }
+ rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
+ return !unicode.IsLower(rune)
+}
+
+func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
+ if len(msgAndArgs) == 0 || msgAndArgs == nil {
+ return ""
+ }
+ if len(msgAndArgs) == 1 {
+ msg := msgAndArgs[0]
+ if msgAsStr, ok := msg.(string); ok {
+ return msgAsStr
+ }
+ return fmt.Sprintf("%+v", msg)
+ }
+ if len(msgAndArgs) > 1 {
+ return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...)
+ }
+ return ""
+}
+
+// Aligns the provided message so that all lines after the first line start at the same location as the first line.
+// Assumes that the first line starts at the correct location (after carriage return, tab, label, spacer and tab).
+// The longestLabelLen parameter specifies the length of the longest label in the output (required because this is the
+// basis on which the alignment occurs).
+func indentMessageLines(message string, longestLabelLen int) string {
+ outBuf := new(bytes.Buffer)
+
+ for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ {
+ // no need to align first line because it starts at the correct location (after the label)
+ if i != 0 {
+ // append alignLen+1 spaces to align with "{{longestLabel}}:" before adding tab
+ outBuf.WriteString("\n\t" + strings.Repeat(" ", longestLabelLen+1) + "\t")
+ }
+ outBuf.WriteString(scanner.Text())
+ }
+
+ return outBuf.String()
+}
+
+type failNower interface {
+ FailNow()
+}
+
+// FailNow fails the test.
+func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ Fail(t, failureMessage, msgAndArgs...)
+
+ // We cannot extend TestingT with FailNow() and
+ // maintain backwards compatibility, so we fallback
+ // to panicking when FailNow is not available in
+ // TestingT.
+ // See issue #263
+
+ if t, ok := t.(failNower); ok {
+ t.FailNow()
+ } else {
+ panic("test failed and t is missing `FailNow()`")
+ }
+ return false
+}
+
+// Fail reports a failure through the TestingT interface.
+func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ content := []labeledContent{
+ {"Error Trace", strings.Join(CallerInfo(), "\n\t\t\t")},
+ {"Error", failureMessage},
+ }
+
+ // Add test name if the Go version supports it
+ if n, ok := t.(interface {
+ Name() string
+ }); ok {
+ content = append(content, labeledContent{"Test", n.Name()})
+ }
+
+ message := messageFromMsgAndArgs(msgAndArgs...)
+ if len(message) > 0 {
+ content = append(content, labeledContent{"Messages", message})
+ }
+
+ t.Errorf("\n%s", ""+labeledOutput(content...))
+
+ return false
+}
+
+type labeledContent struct {
+ label string
+ content string
+}
+
+// labeledOutput returns a string consisting of the provided labeledContent. Each labeled output is appended in the following manner:
+//
+// \t{{label}}:{{align_spaces}}\t{{content}}\n
+//
+// The leading tab keeps each labeled line aligned beneath the prefix that testing.T.Errorf prints. The "\t{{label}}:" is for the label.
+// If a label is shorter than the longest label provided, padding spaces are added to make all the labels match in length. Once this
+// alignment is achieved, "\t{{content}}\n" is added for the output.
+//
+// If the content of the labeledOutput contains line breaks, the subsequent lines are aligned so that they start at the same location as the first line.
+func labeledOutput(content ...labeledContent) string {
+ longestLabel := 0
+ for _, v := range content {
+ if len(v.label) > longestLabel {
+ longestLabel = len(v.label)
+ }
+ }
+ var output string
+ for _, v := range content {
+ output += "\t" + v.label + ":" + strings.Repeat(" ", longestLabel-len(v.label)) + "\t" + indentMessageLines(v.content, longestLabel) + "\n"
+ }
+ return output
+}
+
+// Implements asserts that the specified object implements the given interface.
+//
+// assert.Implements(t, (*MyInterface)(nil), new(MyObject))
+func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ interfaceType := reflect.TypeOf(interfaceObject).Elem()
+
+ if object == nil {
+ return Fail(t, fmt.Sprintf("Cannot check if nil implements %v", interfaceType), msgAndArgs...)
+ }
+ if !reflect.TypeOf(object).Implements(interfaceType) {
+ return Fail(t, fmt.Sprintf("%T must implement %v", object, interfaceType), msgAndArgs...)
+ }
+
+ return true
+}
+
+// IsType asserts that the specified objects are of the same type.
+func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if !ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) {
+ return Fail(t, fmt.Sprintf("Object expected to be of type %v, but was %v", reflect.TypeOf(expectedType), reflect.TypeOf(object)), msgAndArgs...)
+ }
+
+ return true
+}
+
+// Equal asserts that two objects are equal.
+//
+// assert.Equal(t, 123, 123)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if err := validateEqualArgs(expected, actual); err != nil {
+ return Fail(t, fmt.Sprintf("Invalid operation: %#v == %#v (%s)",
+ expected, actual, err), msgAndArgs...)
+ }
+
+ if !ObjectsAreEqual(expected, actual) {
+ diff := diff(expected, actual)
+ expected, actual = formatUnequalValues(expected, actual)
+ return Fail(t, fmt.Sprintf("Not equal: \n"+
+ "expected: %s\n"+
+ "actual : %s%s", expected, actual, diff), msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// formatUnequalValues takes two values of arbitrary types and returns string
+// representations appropriate to be presented to the user.
+//
+// If the values are not of like type, the returned strings will be prefixed
+// with the type name, and the value will be enclosed in parentheses similar
+// to a type conversion in the Go grammar.
+func formatUnequalValues(expected, actual interface{}) (e string, a string) {
+ if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
+ return fmt.Sprintf("%T(%#v)", expected, expected),
+ fmt.Sprintf("%T(%#v)", actual, actual)
+ }
+
+ return fmt.Sprintf("%#v", expected),
+ fmt.Sprintf("%#v", actual)
+}
+
+// EqualValues asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+// assert.EqualValues(t, uint32(123), int32(123))
+func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if !ObjectsAreEqualValues(expected, actual) {
+ diff := diff(expected, actual)
+ expected, actual = formatUnequalValues(expected, actual)
+ return Fail(t, fmt.Sprintf("Not equal: \n"+
+ "expected: %s\n"+
+ "actual : %s%s", expected, actual, diff), msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// Exactly asserts that two objects are equal in value and type.
+//
+// assert.Exactly(t, int32(123), int64(123))
+func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ aType := reflect.TypeOf(expected)
+ bType := reflect.TypeOf(actual)
+
+ if aType != bType {
+ return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...)
+ }
+
+ return Equal(t, expected, actual, msgAndArgs...)
+
+}
+
+// NotNil asserts that the specified object is not nil.
+//
+// assert.NotNil(t, err)
+func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if !isNil(object) {
+ return true
+ }
+ return Fail(t, "Expected value not to be nil.", msgAndArgs...)
+}
+
+// containsKind checks whether a specified kind is in the slice of kinds.
+func containsKind(kinds []reflect.Kind, kind reflect.Kind) bool {
+ for i := 0; i < len(kinds); i++ {
+ if kind == kinds[i] {
+ return true
+ }
+ }
+
+ return false
+}
+
+// isNil checks if a specified object is nil or not, without Failing.
+func isNil(object interface{}) bool {
+ if object == nil {
+ return true
+ }
+
+ value := reflect.ValueOf(object)
+ kind := value.Kind()
+ isNilableKind := containsKind(
+ []reflect.Kind{
+ reflect.Chan, reflect.Func,
+ reflect.Interface, reflect.Map,
+ reflect.Ptr, reflect.Slice},
+ kind)
+
+ if isNilableKind && value.IsNil() {
+ return true
+ }
+
+ return false
+}
+
+// Nil asserts that the specified object is nil.
+//
+// assert.Nil(t, err)
+func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if isNil(object) {
+ return true
+ }
+ return Fail(t, fmt.Sprintf("Expected nil, but got: %#v", object), msgAndArgs...)
+}
+
+// isEmpty gets whether the specified object is considered empty or not.
+func isEmpty(object interface{}) bool {
+
+ // get nil case out of the way
+ if object == nil {
+ return true
+ }
+
+ objValue := reflect.ValueOf(object)
+
+ switch objValue.Kind() {
+ // collection types are empty when they have no element
+ case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
+ return objValue.Len() == 0
+ // pointers are empty if nil or if the value they point to is empty
+ case reflect.Ptr:
+ if objValue.IsNil() {
+ return true
+ }
+ deref := objValue.Elem().Interface()
+ return isEmpty(deref)
+ // for all other types, compare against the zero value
+ default:
+ zero := reflect.Zero(objValue.Type())
+ return reflect.DeepEqual(object, zero.Interface())
+ }
+}
+
+// Empty asserts that the specified object is empty, i.e. nil, "", false, 0, or a slice or
+// channel with len == 0.
+//
+// assert.Empty(t, obj)
+func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ pass := isEmpty(object)
+ if !pass {
+ Fail(t, fmt.Sprintf("Should be empty, but was %v", object), msgAndArgs...)
+ }
+
+ return pass
+
+}
+
+// NotEmpty asserts that the specified object is NOT empty, i.e. not nil, "", false, 0, or a
+// slice or channel with len == 0.
+//
+// if assert.NotEmpty(t, obj) {
+// assert.Equal(t, "two", obj[1])
+// }
+func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ pass := !isEmpty(object)
+ if !pass {
+ Fail(t, fmt.Sprintf("Should NOT be empty, but was %v", object), msgAndArgs...)
+ }
+
+ return pass
+
+}
+
+// getLen tries to get the length of an object.
+// It returns (false, 0) if that is impossible.
+func getLen(x interface{}) (ok bool, length int) {
+ v := reflect.ValueOf(x)
+ defer func() {
+ if e := recover(); e != nil {
+ ok = false
+ }
+ }()
+ return true, v.Len()
+}
+
+// Len asserts that the specified object has the specified length.
+// Len also fails if the object has a type that the builtin len() does not accept.
+//
+// assert.Len(t, mySlice, 3)
+func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ ok, l := getLen(object)
+ if !ok {
+ return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", object), msgAndArgs...)
+ }
+
+ if l != length {
+ return Fail(t, fmt.Sprintf("\"%s\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
+ }
+ return true
+}
+
+// True asserts that the specified value is true.
+//
+// assert.True(t, myBool)
+func True(t TestingT, value bool, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if value != true {
+ return Fail(t, "Should be true", msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// False asserts that the specified value is false.
+//
+// assert.False(t, myBool)
+func False(t TestingT, value bool, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if value != false {
+ return Fail(t, "Should be false", msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// NotEqual asserts that the specified values are NOT equal.
+//
+// assert.NotEqual(t, obj1, obj2)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if err := validateEqualArgs(expected, actual); err != nil {
+ return Fail(t, fmt.Sprintf("Invalid operation: %#v != %#v (%s)",
+ expected, actual, err), msgAndArgs...)
+ }
+
+ if ObjectsAreEqual(expected, actual) {
+ return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// includeElement tries to loop over the list and check whether the list includes the element.
+// It returns (false, false) if that is impossible,
+// (true, false) if the element was not found, and
+// (true, true) if the element was found.
+func includeElement(list interface{}, element interface{}) (ok, found bool) {
+
+ listValue := reflect.ValueOf(list)
+ elementValue := reflect.ValueOf(element)
+ defer func() {
+ if e := recover(); e != nil {
+ ok = false
+ found = false
+ }
+ }()
+
+ if reflect.TypeOf(list).Kind() == reflect.String {
+ return true, strings.Contains(listValue.String(), elementValue.String())
+ }
+
+ if reflect.TypeOf(list).Kind() == reflect.Map {
+ mapKeys := listValue.MapKeys()
+ for i := 0; i < len(mapKeys); i++ {
+ if ObjectsAreEqual(mapKeys[i].Interface(), element) {
+ return true, true
+ }
+ }
+ return true, false
+ }
+
+ for i := 0; i < listValue.Len(); i++ {
+ if ObjectsAreEqual(listValue.Index(i).Interface(), element) {
+ return true, true
+ }
+ }
+ return true, false
+
+}
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// assert.Contains(t, "Hello World", "World")
+// assert.Contains(t, ["Hello", "World"], "World")
+// assert.Contains(t, {"Hello": "World"}, "Hello")
+func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ ok, found := includeElement(s, contains)
+ if !ok {
+ return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...)
+ }
+ if !found {
+ return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", s, contains), msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// assert.NotContains(t, "Hello World", "Earth")
+// assert.NotContains(t, ["Hello", "World"], "Earth")
+// assert.NotContains(t, {"Hello": "World"}, "Earth")
+func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ ok, found := includeElement(s, contains)
+ if !ok {
+ return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...)
+ }
+ if found {
+ return Fail(t, fmt.Sprintf("\"%s\" should not contain \"%s\"", s, contains), msgAndArgs...)
+ }
+
+ return true
+
+}
+
+// Subset asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
+func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if subset == nil {
+ return true // we consider nil to be equal to the nil set
+ }
+
+ subsetValue := reflect.ValueOf(subset)
+ defer func() {
+ if e := recover(); e != nil {
+ ok = false
+ }
+ }()
+
+ listKind := reflect.TypeOf(list).Kind()
+ subsetKind := reflect.TypeOf(subset).Kind()
+
+ if listKind != reflect.Array && listKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...)
+ }
+
+ if subsetKind != reflect.Array && subsetKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...)
+ }
+
+ for i := 0; i < subsetValue.Len(); i++ {
+ element := subsetValue.Index(i).Interface()
+ ok, found := includeElement(list, element)
+ if !ok {
+ return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...)
+ }
+ if !found {
+ return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", list, element), msgAndArgs...)
+ }
+ }
+
+ return true
+}
+
+// NotSubset asserts that the specified list(array, slice...) does not contain all of the
+// elements given in the specified subset(array, slice...).
+//
+// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
+func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if subset == nil {
+ return Fail(t, fmt.Sprintf("nil is the empty set which is a subset of every set"), msgAndArgs...)
+ }
+
+ subsetValue := reflect.ValueOf(subset)
+ defer func() {
+ if e := recover(); e != nil {
+ ok = false
+ }
+ }()
+
+ listKind := reflect.TypeOf(list).Kind()
+ subsetKind := reflect.TypeOf(subset).Kind()
+
+ if listKind != reflect.Array && listKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...)
+ }
+
+ if subsetKind != reflect.Array && subsetKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...)
+ }
+
+ for i := 0; i < subsetValue.Len(); i++ {
+ element := subsetValue.Index(i).Interface()
+ ok, found := includeElement(list, element)
+ if !ok {
+ return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...)
+ }
+ if !found {
+ return true
+ }
+ }
+
+ return Fail(t, fmt.Sprintf("%q is a subset of %q", subset, list), msgAndArgs...)
+}
+
+// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2])
+func ElementsMatch(t TestingT, listA, listB interface{}, msgAndArgs ...interface{}) (ok bool) {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if isEmpty(listA) && isEmpty(listB) {
+ return true
+ }
+
+ aKind := reflect.TypeOf(listA).Kind()
+ bKind := reflect.TypeOf(listB).Kind()
+
+ if aKind != reflect.Array && aKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", listA, aKind), msgAndArgs...)
+ }
+
+ if bKind != reflect.Array && bKind != reflect.Slice {
+ return Fail(t, fmt.Sprintf("%q has an unsupported type %s", listB, bKind), msgAndArgs...)
+ }
+
+ aValue := reflect.ValueOf(listA)
+ bValue := reflect.ValueOf(listB)
+
+ aLen := aValue.Len()
+ bLen := bValue.Len()
+
+ if aLen != bLen {
+ return Fail(t, fmt.Sprintf("lengths don't match: %d != %d", aLen, bLen), msgAndArgs...)
+ }
+
+ // Mark indexes in bValue that we already used
+ visited := make([]bool, bLen)
+ for i := 0; i < aLen; i++ {
+ element := aValue.Index(i).Interface()
+ found := false
+ for j := 0; j < bLen; j++ {
+ if visited[j] {
+ continue
+ }
+ if ObjectsAreEqual(bValue.Index(j).Interface(), element) {
+ visited[j] = true
+ found = true
+ break
+ }
+ }
+ if !found {
+ return Fail(t, fmt.Sprintf("element %s appears more times in %s than in %s", element, aValue, bValue), msgAndArgs...)
+ }
+ }
+
+ return true
+}
+
+// Condition uses a Comparison to assert a complex condition.
+func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ result := comp()
+ if !result {
+ Fail(t, "Condition failed!", msgAndArgs...)
+ }
+ return result
+}
+
+// PanicTestFunc defines a func that should be passed to the assert.Panics and assert.NotPanics
+// methods, and represents a simple func that takes no arguments, and returns nothing.
+type PanicTestFunc func()
+
+// didPanic returns true if the function passed to it panics. Otherwise, it returns false.
+func didPanic(f PanicTestFunc) (bool, interface{}) {
+
+ didPanic := false
+ var message interface{}
+ func() {
+
+ defer func() {
+ if message = recover(); message != nil {
+ didPanic = true
+ }
+ }()
+
+ // call the target function
+ f()
+
+ }()
+
+ return didPanic, message
+
+}
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+//
+// assert.Panics(t, func(){ GoCrazy() })
+func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if funcDidPanic, panicValue := didPanic(f); !funcDidPanic {
+ return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...)
+ }
+
+ return true
+}
+
+// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() })
+func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ funcDidPanic, panicValue := didPanic(f)
+ if !funcDidPanic {
+ return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...)
+ }
+ if panicValue != expected {
+ return Fail(t, fmt.Sprintf("func %#v should panic with value:\t%#v\n\tPanic value:\t%#v", f, expected, panicValue), msgAndArgs...)
+ }
+
+ return true
+}
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// assert.NotPanics(t, func(){ RemainCalm() })
+func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if funcDidPanic, panicValue := didPanic(f); funcDidPanic {
+ return Fail(t, fmt.Sprintf("func %#v should not panic\n\tPanic value:\t%v", f, panicValue), msgAndArgs...)
+ }
+
+ return true
+}
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+//
+// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second)
+func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ dt := expected.Sub(actual)
+ if dt < -delta || dt > delta {
+ return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...)
+ }
+
+ return true
+}
+
+func toFloat(x interface{}) (float64, bool) {
+ var xf float64
+ xok := true
+
+ switch xn := x.(type) {
+ case uint8:
+ xf = float64(xn)
+ case uint16:
+ xf = float64(xn)
+ case uint32:
+ xf = float64(xn)
+ case uint64:
+ xf = float64(xn)
+ case int:
+ xf = float64(xn)
+ case int8:
+ xf = float64(xn)
+ case int16:
+ xf = float64(xn)
+ case int32:
+ xf = float64(xn)
+ case int64:
+ xf = float64(xn)
+ case float32:
+ xf = float64(xn)
+ case float64:
+ xf = float64(xn)
+ case time.Duration:
+ xf = float64(xn)
+ default:
+ xok = false
+ }
+
+ return xf, xok
+}
+
+// InDelta asserts that the two numerals are within delta of each other.
+//
+// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01)
+func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ af, aok := toFloat(expected)
+ bf, bok := toFloat(actual)
+
+ if !aok || !bok {
+ return Fail(t, fmt.Sprintf("Parameters must be numerical"), msgAndArgs...)
+ }
+
+ if math.IsNaN(af) {
+ return Fail(t, fmt.Sprintf("Expected must not be NaN"), msgAndArgs...)
+ }
+
+ if math.IsNaN(bf) {
+ return Fail(t, fmt.Sprintf("Expected %v with delta %v, but was NaN", expected, delta), msgAndArgs...)
+ }
+
+ dt := af - bf
+ if dt < -delta || dt > delta {
+ return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...)
+ }
+
+ return true
+}
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if expected == nil || actual == nil ||
+ reflect.TypeOf(actual).Kind() != reflect.Slice ||
+ reflect.TypeOf(expected).Kind() != reflect.Slice {
+ return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...)
+ }
+
+ actualSlice := reflect.ValueOf(actual)
+ expectedSlice := reflect.ValueOf(expected)
+
+ for i := 0; i < actualSlice.Len(); i++ {
+ result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta, msgAndArgs...)
+ if !result {
+ return result
+ }
+ }
+
+ return true
+}
+
+// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func InDeltaMapValues(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if expected == nil || actual == nil ||
+ reflect.TypeOf(actual).Kind() != reflect.Map ||
+ reflect.TypeOf(expected).Kind() != reflect.Map {
+ return Fail(t, "Arguments must be maps", msgAndArgs...)
+ }
+
+ expectedMap := reflect.ValueOf(expected)
+ actualMap := reflect.ValueOf(actual)
+
+ if expectedMap.Len() != actualMap.Len() {
+ return Fail(t, "Arguments must have the same number of keys", msgAndArgs...)
+ }
+
+ for _, k := range expectedMap.MapKeys() {
+ ev := expectedMap.MapIndex(k)
+ av := actualMap.MapIndex(k)
+
+ if !ev.IsValid() {
+ return Fail(t, fmt.Sprintf("missing key %q in expected map", k), msgAndArgs...)
+ }
+
+ if !av.IsValid() {
+ return Fail(t, fmt.Sprintf("missing key %q in actual map", k), msgAndArgs...)
+ }
+
+ if !InDelta(
+ t,
+ ev.Interface(),
+ av.Interface(),
+ delta,
+ msgAndArgs...,
+ ) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func calcRelativeError(expected, actual interface{}) (float64, error) {
+ af, aok := toFloat(expected)
+ if !aok {
+ return 0, fmt.Errorf("expected value %q cannot be converted to float", expected)
+ }
+ if af == 0 {
+ return 0, fmt.Errorf("expected value must have a value other than zero to calculate the relative error")
+ }
+ bf, bok := toFloat(actual)
+ if !bok {
+ return 0, fmt.Errorf("actual value %q cannot be converted to float", actual)
+ }
+
+ return math.Abs(af-bf) / math.Abs(af), nil
+}
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ actualEpsilon, err := calcRelativeError(expected, actual)
+ if err != nil {
+ return Fail(t, err.Error(), msgAndArgs...)
+ }
+ if actualEpsilon > epsilon {
+ return Fail(t, fmt.Sprintf("Relative error is too high: %#v (expected)\n"+
+ " < %#v (actual)", epsilon, actualEpsilon), msgAndArgs...)
+ }
+
+ return true
+}
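
A worked example of the relative-error check (the snippet assumes a *testing.T named t):

    // calcRelativeError: |expected - actual| / |expected| = |100 - 98| / 100 = 0.02
    assert.InEpsilon(t, 100, 98, 0.03) // passes: 0.02 <= 0.03
    assert.InEpsilon(t, 100, 98, 0.01) // fails:  0.02 >  0.01

Unlike InDelta, which bounds the absolute difference, InEpsilon scales with the magnitude of the expected value, so it is the better fit when the inputs vary over orders of magnitude.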
+
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
+func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if expected == nil || actual == nil ||
+ reflect.TypeOf(actual).Kind() != reflect.Slice ||
+ reflect.TypeOf(expected).Kind() != reflect.Slice {
+ return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...)
+ }
+
+ actualSlice := reflect.ValueOf(actual)
+ expectedSlice := reflect.ValueOf(expected)
+
+ for i := 0; i < actualSlice.Len(); i++ {
+ result := InEpsilon(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), epsilon)
+ if !result {
+ return result
+ }
+ }
+
+ return true
+}
+
+/*
+ Errors
+*/
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.NoError(t, err) {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if err != nil {
+ return Fail(t, fmt.Sprintf("Received unexpected error:\n%+v", err), msgAndArgs...)
+ }
+
+ return true
+}
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.Error(t, err) {
+// assert.Equal(t, expectedError, err)
+// }
+func Error(t TestingT, err error, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ if err == nil {
+ return Fail(t, "An error is expected but got nil.", msgAndArgs...)
+ }
+
+ return true
+}
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// assert.EqualError(t, err, expectedErrorString)
+func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if !Error(t, theError, msgAndArgs...) {
+ return false
+ }
+ expected := errString
+ actual := theError.Error()
+ // don't need to use deep equals here, we know they are both strings
+ if expected != actual {
+ return Fail(t, fmt.Sprintf("Error message not equal:\n"+
+ "expected: %q\n"+
+ "actual : %q", expected, actual), msgAndArgs...)
+ }
+ return true
+}
+
+// matchRegexp returns true if a specified regexp matches a string.
+func matchRegexp(rx interface{}, str interface{}) bool {
+
+ var r *regexp.Regexp
+ if rr, ok := rx.(*regexp.Regexp); ok {
+ r = rr
+ } else {
+ r = regexp.MustCompile(fmt.Sprint(rx))
+ }
+
+ return (r.FindStringIndex(fmt.Sprint(str)) != nil)
+
+}
+
+// Regexp asserts that a specified regexp matches a string.
+//
+// assert.Regexp(t, regexp.MustCompile("start"), "it's starting")
+// assert.Regexp(t, "start...$", "it's not starting")
+func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+
+ match := matchRegexp(rx, str)
+
+ if !match {
+ Fail(t, fmt.Sprintf("Expect \"%v\" to match \"%v\"", str, rx), msgAndArgs...)
+ }
+
+ return match
+}
+
+// NotRegexp asserts that a specified regexp does not match a string.
+//
+// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting")
+// assert.NotRegexp(t, "^start", "it's not starting")
+func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ match := matchRegexp(rx, str)
+
+ if match {
+ Fail(t, fmt.Sprintf("Expect \"%v\" to NOT match \"%v\"", str, rx), msgAndArgs...)
+ }
+
+ return !match
+
+}
+
+// Zero asserts that i is the zero value for its type.
+func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if i != nil && !reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) {
+ return Fail(t, fmt.Sprintf("Should be zero, but was %v", i), msgAndArgs...)
+ }
+ return true
+}
+
+// NotZero asserts that i is not the zero value for its type.
+func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ if i == nil || reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) {
+ return Fail(t, fmt.Sprintf("Should not be zero, but was %v", i), msgAndArgs...)
+ }
+ return true
+}
+
+// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func FileExists(t TestingT, path string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ info, err := os.Lstat(path)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...)
+ }
+ return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...)
+ }
+ if info.IsDir() {
+ return Fail(t, fmt.Sprintf("%q is a directory", path), msgAndArgs...)
+ }
+ return true
+}
+
+// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather than a directory, or if there is an error checking whether it exists.
+func DirExists(t TestingT, path string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ info, err := os.Lstat(path)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...)
+ }
+ return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...)
+ }
+ if !info.IsDir() {
+ return Fail(t, fmt.Sprintf("%q is a file", path), msgAndArgs...)
+ }
+ return true
+}
+
+// JSONEq asserts that two JSON strings are equivalent.
+//
+// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ var expectedJSONAsInterface, actualJSONAsInterface interface{}
+
+ if err := json.Unmarshal([]byte(expected), &expectedJSONAsInterface); err != nil {
+ return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...)
+ }
+
+ if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil {
+ return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...)
+ }
+
+ return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...)
+}
+
+func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) {
+ t := reflect.TypeOf(v)
+ k := t.Kind()
+
+ if k == reflect.Ptr {
+ t = t.Elem()
+ k = t.Kind()
+ }
+ return t, k
+}
+
+// diff returns a diff of both values as long as both are of the same type and
+// are a struct, map, slice, array or string. Otherwise it returns an empty string.
+func diff(expected interface{}, actual interface{}) string {
+ if expected == nil || actual == nil {
+ return ""
+ }
+
+ et, ek := typeAndKind(expected)
+ at, _ := typeAndKind(actual)
+
+ if et != at {
+ return ""
+ }
+
+ if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array && ek != reflect.String {
+ return ""
+ }
+
+ var e, a string
+ if et != reflect.TypeOf("") {
+ e = spewConfig.Sdump(expected)
+ a = spewConfig.Sdump(actual)
+ } else {
+ e = expected.(string)
+ a = actual.(string)
+ }
+
+ diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{
+ A: difflib.SplitLines(e),
+ B: difflib.SplitLines(a),
+ FromFile: "Expected",
+ FromDate: "",
+ ToFile: "Actual",
+ ToDate: "",
+ Context: 1,
+ })
+
+ return "\n\nDiff:\n" + diff
+}
+
+// validateEqualArgs checks whether provided arguments can be safely used in the
+// Equal/NotEqual functions.
+func validateEqualArgs(expected, actual interface{}) error {
+ if isFunction(expected) || isFunction(actual) {
+ return errors.New("cannot take func type as argument")
+ }
+ return nil
+}
+
+func isFunction(arg interface{}) bool {
+ if arg == nil {
+ return false
+ }
+ return reflect.TypeOf(arg).Kind() == reflect.Func
+}
+
+var spewConfig = spew.ConfigState{
+ Indent: " ",
+ DisablePointerAddresses: true,
+ DisableCapacities: true,
+ SortKeys: true,
+}
+
+type tHelper interface {
+ Helper()
+}
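
The ComparisonAssertionFunc, ValueAssertionFunc, BoolAssertionFunc and ErrorAssertionFunc types declared near the top of this file are meant for table-driven tests, where the assertion itself is part of the test case. A minimal sketch (the table contents are illustrative; t is the usual *testing.T):

    tests := []struct {
        name      string
        input     string
        assertion assert.ValueAssertionFunc // func(TestingT, interface{}, ...interface{}) bool
    }{
        {"zero value is empty", "", assert.Empty},
        {"non-zero value is not empty", "x", assert.NotEmpty},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.assertion(t, tt.input) // the per-case assertion runs against this case's input
        })
    }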
diff --git a/vendor/github.com/stretchr/testify/assert/doc.go b/vendor/github.com/stretchr/testify/assert/doc.go
new file mode 100644
index 0000000..c9dccc4
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/doc.go
@@ -0,0 +1,45 @@
+// Package assert provides a set of comprehensive testing tools for use with the normal Go testing system.
+//
+// Example Usage
+//
+// The following is a complete example using assert in a standard test function:
+// import (
+// "testing"
+// "github.com/stretchr/testify/assert"
+// )
+//
+// func TestSomething(t *testing.T) {
+//
+// var a string = "Hello"
+// var b string = "Hello"
+//
+// assert.Equal(t, a, b, "The two words should be the same.")
+//
+// }
+//
+// If you assert many times, use the format below:
+//
+// import (
+// "testing"
+// "github.com/stretchr/testify/assert"
+// )
+//
+// func TestSomething(t *testing.T) {
+// assert := assert.New(t)
+//
+// var a string = "Hello"
+// var b string = "Hello"
+//
+// assert.Equal(a, b, "The two words should be the same.")
+// }
+//
+// Assertions
+//
+// Assertions allow you to easily write test code, and are global funcs in the `assert` package.
+// All assertion functions take, as the first argument, the `*testing.T` object provided by the
+// testing framework. This allows the assertion funcs to write the failures and other details to
+// the correct place.
+//
+// Every assertion function also takes an optional string message as the final argument,
+// allowing custom error messages to be appended to the message the assertion method outputs.
+package assert
diff --git a/vendor/github.com/stretchr/testify/assert/errors.go b/vendor/github.com/stretchr/testify/assert/errors.go
new file mode 100644
index 0000000..ac9dc9d
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/errors.go
@@ -0,0 +1,10 @@
+package assert
+
+import (
+ "errors"
+)
+
+// AnError is an error instance useful for testing. If the code under test does not
+// care about error specifics and only needs some error to return, this error
+// should be used to make the test code more readable.
+var AnError = errors.New("assert.AnError general error for testing")
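
A minimal sketch of how AnError is typically used; failingOp is a hypothetical function under test, and t is the usual *testing.T:

    failingOp := func() error { return assert.AnError }

    err := failingOp()
    assert.Error(t, err)
    assert.Equal(t, assert.AnError, err) // the test cares that an error is propagated, not which one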
diff --git a/vendor/github.com/stretchr/testify/assert/forward_assertions.go b/vendor/github.com/stretchr/testify/assert/forward_assertions.go
new file mode 100644
index 0000000..9ad5685
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/forward_assertions.go
@@ -0,0 +1,16 @@
+package assert
+
+// Assertions provides assertion methods around the
+// TestingT interface.
+type Assertions struct {
+ t TestingT
+}
+
+// New makes a new Assertions object for the specified TestingT.
+func New(t TestingT) *Assertions {
+ return &Assertions{
+ t: t,
+ }
+}
+
+//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl -include-format-funcs
diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go
new file mode 100644
index 0000000..df46fa7
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/assert/http_assertions.go
@@ -0,0 +1,143 @@
+package assert
+
+import (
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "strings"
+)
+
+// httpCode is a helper that returns the HTTP status code of the response. It returns -1 and
+// an error if building a new request fails.
+func httpCode(handler http.HandlerFunc, method, url string, values url.Values) (int, error) {
+ w := httptest.NewRecorder()
+ req, err := http.NewRequest(method, url, nil)
+ if err != nil {
+ return -1, err
+ }
+ req.URL.RawQuery = values.Encode()
+ handler(w, req)
+ return w.Code, nil
+}
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+//
+// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ code, err := httpCode(handler, method, url, values)
+ if err != nil {
+ Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
+ return false
+ }
+
+ isSuccessCode := code >= http.StatusOK && code <= http.StatusPartialContent
+ if !isSuccessCode {
+ Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code))
+ }
+
+ return isSuccessCode
+}
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+//
+// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ code, err := httpCode(handler, method, url, values)
+ if err != nil {
+ Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
+ return false
+ }
+
+ isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect
+ if !isRedirectCode {
+ Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code))
+ }
+
+ return isRedirectCode
+}
+
+// HTTPError asserts that a specified handler returns an error status code.
+//
+// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ code, err := httpCode(handler, method, url, values)
+ if err != nil {
+ Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
+ return false
+ }
+
+ isErrorCode := code >= http.StatusBadRequest
+ if !isErrorCode {
+ Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code))
+ }
+
+ return isErrorCode
+}
+
+// HTTPBody is a helper that returns the HTTP body of the response. It returns an
+// empty string if building a new request fails.
+func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string {
+ w := httptest.NewRecorder()
+ req, err := http.NewRequest(method, url+"?"+values.Encode(), nil)
+ if err != nil {
+ return ""
+ }
+ handler(w, req)
+ return w.Body.String()
+}
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+//
+// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ body := HTTPBody(handler, method, url, values)
+
+ contains := strings.Contains(body, fmt.Sprint(str))
+ if !contains {
+ Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
+ }
+
+ return contains
+}
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool {
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ body := HTTPBody(handler, method, url, values)
+
+ contains := strings.Contains(body, fmt.Sprint(str))
+ if contains {
+ Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
+ }
+
+ return !contains
+}
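For context, a small sketch of exercising these helpers against a trivial handler (the handler and test are made up for illustration); the helpers build the request and response recorder internally, so no server needs to be started.

package example

import (
	"fmt"
	"net/http"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
)

// helloHandler is a stand-in handler used only for this sketch.
func helloHandler(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "hello %s", r.URL.Query().Get("name"))
}

func TestHelloHandler(t *testing.T) {
	params := url.Values{"name": []string{"world"}}
	assert.HTTPSuccess(t, helloHandler, "GET", "/hello", params)
	assert.HTTPBodyContains(t, helloHandler, "GET", "/hello", params, "hello world")
}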
diff --git a/vendor/github.com/stretchr/testify/require/doc.go b/vendor/github.com/stretchr/testify/require/doc.go
new file mode 100644
index 0000000..169de39
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/doc.go
@@ -0,0 +1,28 @@
+// Package require implements the same assertions as the `assert` package but
+// stops test execution when a test fails.
+//
+// Example Usage
+//
+// The following is a complete example using require in a standard test function:
+// import (
+// "testing"
+// "github.com/stretchr/testify/require"
+// )
+//
+// func TestSomething(t *testing.T) {
+//
+// var a string = "Hello"
+// var b string = "Hello"
+//
+// require.Equal(t, a, b, "The two words should be the same.")
+//
+// }
+//
+// Assertions
+//
+// The `require` package has the same global functions as the `assert` package,
+// but instead of returning a boolean result they call `t.FailNow()`.
+//
+// Every assertion function also takes an optional string message as the final argument,
+// allowing custom error messages to be appended to the message the assertion method outputs.
+package require
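A brief sketch of the behaviour described above (the parsing in this test is illustrative): when a require check fails, t.FailNow() stops the test immediately, so later statements can safely assume the earlier checks held.

package example

import (
	"strconv"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestParsePort(t *testing.T) {
	port, err := strconv.Atoi("8080")

	// If err were non-nil, the test would stop here and the
	// comparison below would never run on a bogus value.
	require.NoError(t, err)
	require.Equal(t, 8080, port)
}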
diff --git a/vendor/github.com/stretchr/testify/require/forward_requirements.go b/vendor/github.com/stretchr/testify/require/forward_requirements.go
new file mode 100644
index 0000000..ac71d40
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/forward_requirements.go
@@ -0,0 +1,16 @@
+package require
+
+// Assertions provides assertion methods around the
+// TestingT interface.
+type Assertions struct {
+ t TestingT
+}
+
+// New makes a new Assertions object for the specified TestingT.
+func New(t TestingT) *Assertions {
+ return &Assertions{
+ t: t,
+ }
+}
+
+//go:generate go run ../_codegen/main.go -output-package=require -template=require_forward.go.tmpl -include-format-funcs
diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go
new file mode 100644
index 0000000..535f293
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/require.go
@@ -0,0 +1,1227 @@
+/*
+* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
+* THIS FILE MUST NOT BE EDITED BY HAND
+ */
+
+package require
+
+import (
+ assert "github.com/stretchr/testify/assert"
+ http "net/http"
+ url "net/url"
+ time "time"
+)
+
+// Condition uses a Comparison to assert a complex condition.
+func Condition(t TestingT, comp assert.Comparison, msgAndArgs ...interface{}) {
+ if assert.Condition(t, comp, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Conditionf uses a Comparison to assert a complex condition.
+func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interface{}) {
+ if assert.Conditionf(t, comp, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// assert.Contains(t, "Hello World", "World")
+// assert.Contains(t, ["Hello", "World"], "World")
+// assert.Contains(t, {"Hello": "World"}, "Hello")
+func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) {
+ if assert.Contains(t, s, contains, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Containsf asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted")
+// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted")
+// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted")
+func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) {
+ if assert.Containsf(t, s, contains, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather than a directory or there is an error checking whether it exists.
+func DirExists(t TestingT, path string, msgAndArgs ...interface{}) {
+ if assert.DirExists(t, path, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather than a directory or there is an error checking whether it exists.
+func DirExistsf(t TestingT, path string, msg string, args ...interface{}) {
+ if assert.DirExistsf(t, path, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2])
+func ElementsMatch(t TestingT, listA interface{}, listB interface{}, msgAndArgs ...interface{}) {
+ if assert.ElementsMatch(t, listA, listB, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted")
+func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) {
+ if assert.ElementsMatchf(t, listA, listB, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Empty asserts that the specified object is empty. I.e. nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// assert.Empty(t, obj)
+func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) {
+ if assert.Empty(t, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// assert.Emptyf(t, obj, "error message %s", "formatted")
+func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) {
+ if assert.Emptyf(t, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Equal asserts that two objects are equal.
+//
+// assert.Equal(t, 123, 123)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if assert.Equal(t, expected, actual, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// assert.EqualError(t, err, expectedErrorString)
+func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) {
+ if assert.EqualError(t, theError, errString, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// EqualErrorf asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted")
+func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) {
+ if assert.EqualErrorf(t, theError, errString, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// EqualValues asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+// assert.EqualValues(t, uint32(123), int32(123))
+func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if assert.EqualValues(t, expected, actual, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// EqualValuesf asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted")
+func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if assert.EqualValuesf(t, expected, actual, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Equalf asserts that two objects are equal.
+//
+// assert.Equalf(t, 123, 123, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if assert.Equalf(t, expected, actual, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.Error(t, err) {
+// assert.Equal(t, expectedError, err)
+// }
+func Error(t TestingT, err error, msgAndArgs ...interface{}) {
+ if assert.Error(t, err, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Errorf asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.Errorf(t, err, "error message %s", "formatted") {
+// assert.Equal(t, expectedErrorf, err)
+// }
+func Errorf(t TestingT, err error, msg string, args ...interface{}) {
+ if assert.Errorf(t, err, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Exactly asserts that two objects are equal in value and type.
+//
+// assert.Exactly(t, int32(123), int64(123))
+func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if assert.Exactly(t, expected, actual, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Exactlyf asserts that two objects are equal in value and type.
+//
+// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted")
+func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if assert.Exactlyf(t, expected, actual, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Fail reports a failure.
+func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) {
+ if assert.Fail(t, failureMessage, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// FailNow fails the test.
+func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) {
+ if assert.FailNow(t, failureMessage, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// FailNowf fails the test.
+func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) {
+ if assert.FailNowf(t, failureMessage, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Failf reports a failure.
+func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) {
+ if assert.Failf(t, failureMessage, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// False asserts that the specified value is false.
+//
+// assert.False(t, myBool)
+func False(t TestingT, value bool, msgAndArgs ...interface{}) {
+ if assert.False(t, value, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Falsef asserts that the specified value is false.
+//
+// assert.Falsef(t, myBool, "error message %s", "formatted")
+func Falsef(t TestingT, value bool, msg string, args ...interface{}) {
+ if assert.Falsef(t, value, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func FileExists(t TestingT, path string, msgAndArgs ...interface{}) {
+ if assert.FileExists(t, path, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func FileExistsf(t TestingT, path string, msg string, args ...interface{}) {
+ if assert.FileExistsf(t, path, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+//
+// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
+ if assert.HTTPBodyContains(t, handler, method, url, values, str, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPBodyContainsf asserts that a specified handler returns a
+// body that contains a string.
+//
+// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) {
+ if assert.HTTPBodyContainsf(t, handler, method, url, values, str, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
+ if assert.HTTPBodyNotContains(t, handler, method, url, values, str, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPBodyNotContainsf asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) {
+ if assert.HTTPBodyNotContainsf(t, handler, method, url, values, str, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPError asserts that a specified handler returns an error status code.
+//
+// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if assert.HTTPError(t, handler, method, url, values, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPErrorf asserts that a specified handler returns an error status code.
+//
+// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if assert.HTTPErrorf(t, handler, method, url, values, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+//
+// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if assert.HTTPRedirect(t, handler, method, url, values, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPRedirectf asserts that a specified handler returns a redirect status code.
+//
+// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if assert.HTTPRedirectf(t, handler, method, url, values, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+//
+// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if assert.HTTPSuccess(t, handler, method, url, values, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// HTTPSuccessf asserts that a specified handler returns a success status code.
+//
+// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if assert.HTTPSuccessf(t, handler, method, url, values, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Implements asserts that an object implements the specified interface.
+//
+// assert.Implements(t, (*MyInterface)(nil), new(MyObject))
+func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
+ if assert.Implements(t, interfaceObject, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Implementsf asserts that an object implements the specified interface.
+//
+// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
+func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
+ if assert.Implementsf(t, interfaceObject, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDelta asserts that the two numerals are within delta of each other.
+//
+// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01)
+func InDelta(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if assert.InDelta(t, expected, actual, delta, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func InDeltaMapValues(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if assert.InDeltaMapValues(t, expected, actual, delta, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if assert.InDeltaMapValuesf(t, expected, actual, delta, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func InDeltaSlice(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDeltaSlicef is the same as InDelta, except it compares two slices.
+func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if assert.InDeltaSlicef(t, expected, actual, delta, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InDeltaf asserts that the two numerals are within delta of each other.
+//
+// assert.InDeltaf(t, math.Pi, (22 / 7.0), 0.01, "error message %s", "formatted")
+func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if assert.InDeltaf(t, expected, actual, delta, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+func InEpsilon(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
+ if assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
+func InEpsilonSlice(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
+ if assert.InEpsilonSlice(t, expected, actual, epsilon, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices.
+func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) {
+ if assert.InEpsilonSlicef(t, expected, actual, epsilon, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// InEpsilonf asserts that expected and actual have a relative error less than epsilon
+func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) {
+ if assert.InEpsilonf(t, expected, actual, epsilon, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// IsType asserts that the specified objects are of the same type.
+func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) {
+ if assert.IsType(t, expectedType, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// IsTypef asserts that the specified objects are of the same type.
+func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) {
+ if assert.IsTypef(t, expectedType, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// JSONEq asserts that two JSON strings are equivalent.
+//
+// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) {
+ if assert.JSONEq(t, expected, actual, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// JSONEqf asserts that two JSON strings are equivalent.
+//
+// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted")
+func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) {
+ if assert.JSONEqf(t, expected, actual, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Len asserts that the specified object has specific length.
+// Len also fails if the object has a type that len() does not accept.
+//
+// assert.Len(t, mySlice, 3)
+func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) {
+ if assert.Len(t, object, length, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Lenf asserts that the specified object has specific length.
+// Lenf also fails if the object has a type that len() does not accept.
+//
+// assert.Lenf(t, mySlice, 3, "error message %s", "formatted")
+func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) {
+ if assert.Lenf(t, object, length, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Nil asserts that the specified object is nil.
+//
+// assert.Nil(t, err)
+func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) {
+ if assert.Nil(t, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Nilf asserts that the specified object is nil.
+//
+// assert.Nilf(t, err, "error message %s", "formatted")
+func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) {
+ if assert.Nilf(t, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.NoError(t, err) {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func NoError(t TestingT, err error, msgAndArgs ...interface{}) {
+ if assert.NoError(t, err, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NoErrorf asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if assert.NoErrorf(t, err, "error message %s", "formatted") {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func NoErrorf(t TestingT, err error, msg string, args ...interface{}) {
+ if assert.NoErrorf(t, err, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// assert.NotContains(t, "Hello World", "Earth")
+// assert.NotContains(t, ["Hello", "World"], "Earth")
+// assert.NotContains(t, {"Hello": "World"}, "Earth")
+func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) {
+ if assert.NotContains(t, s, contains, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted")
+// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted")
+// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted")
+func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) {
+ if assert.NotContainsf(t, s, contains, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// if assert.NotEmpty(t, obj) {
+// assert.Equal(t, "two", obj[1])
+// }
+func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) {
+ if assert.NotEmpty(t, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// if assert.NotEmptyf(t, obj, "error message %s", "formatted") {
+// assert.Equal(t, "two", obj[1])
+// }
+func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) {
+ if assert.NotEmptyf(t, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotEqual asserts that the specified values are NOT equal.
+//
+// assert.NotEqual(t, obj1, obj2)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if assert.NotEqual(t, expected, actual, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotEqualf asserts that the specified values are NOT equal.
+//
+// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if assert.NotEqualf(t, expected, actual, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotNil asserts that the specified object is not nil.
+//
+// assert.NotNil(t, err)
+func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) {
+ if assert.NotNil(t, object, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotNilf asserts that the specified object is not nil.
+//
+// assert.NotNilf(t, err, "error message %s", "formatted")
+func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) {
+ if assert.NotNilf(t, object, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// assert.NotPanics(t, func(){ RemainCalm() })
+func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if assert.NotPanics(t, f, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted")
+func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if assert.NotPanicsf(t, f, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotRegexp asserts that a specified regexp does not match a string.
+//
+// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting")
+// assert.NotRegexp(t, "^start", "it's not starting")
+func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) {
+ if assert.NotRegexp(t, rx, str, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotRegexpf asserts that a specified regexp does not match a string.
+//
+// assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting")
+// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted")
+func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) {
+ if assert.NotRegexpf(t, rx, str, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotSubset asserts that the specified list(array, slice...) does not contain all
+// elements given in the specified subset(array, slice...).
+//
+// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
+func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
+ if assert.NotSubset(t, list, subset, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotSubsetf asserts that the specified list(array, slice...) does not contain all
+// elements given in the specified subset(array, slice...).
+//
+// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
+func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
+ if assert.NotSubsetf(t, list, subset, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotZero asserts that i is not the zero value for its type.
+func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) {
+ if assert.NotZero(t, i, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// NotZerof asserts that i is not the zero value for its type.
+func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) {
+ if assert.NotZerof(t, i, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+//
+// assert.Panics(t, func(){ GoCrazy() })
+func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if assert.Panics(t, f, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() })
+func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if assert.PanicsWithValue(t, expected, f, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted")
+func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if assert.PanicsWithValuef(t, expected, f, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Panicsf asserts that the code inside the specified PanicTestFunc panics.
+//
+// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted")
+func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if assert.Panicsf(t, f, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Regexp asserts that a specified regexp matches a string.
+//
+// assert.Regexp(t, regexp.MustCompile("start"), "it's starting")
+// assert.Regexp(t, "start...$", "it's not starting")
+func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) {
+ if assert.Regexp(t, rx, str, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Regexpf asserts that a specified regexp matches a string.
+//
+// assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting")
+// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted")
+func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) {
+ if assert.Regexpf(t, rx, str, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Subset asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
+func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
+ if assert.Subset(t, list, subset, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Subsetf asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
+func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
+ if assert.Subsetf(t, list, subset, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// True asserts that the specified value is true.
+//
+// assert.True(t, myBool)
+func True(t TestingT, value bool, msgAndArgs ...interface{}) {
+ if assert.True(t, value, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Truef asserts that the specified value is true.
+//
+// assert.Truef(t, myBool, "error message %s", "formatted")
+func Truef(t TestingT, value bool, msg string, args ...interface{}) {
+ if assert.Truef(t, value, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+//
+// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second)
+func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) {
+ if assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// WithinDurationf asserts that the two times are within duration delta of each other.
+//
+// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted")
+func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) {
+ if assert.WithinDurationf(t, expected, actual, delta, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Zero asserts that i is the zero value for its type.
+func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) {
+ if assert.Zero(t, i, msgAndArgs...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
+
+// Zerof asserts that i is the zero value for its type.
+func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) {
+ if assert.Zerof(t, i, msg, args...) {
+ return
+ }
+ if h, ok := t.(tHelper); ok {
+ h.Helper()
+ }
+ t.FailNow()
+}
diff --git a/vendor/github.com/stretchr/testify/require/require.go.tmpl b/vendor/github.com/stretchr/testify/require/require.go.tmpl
new file mode 100644
index 0000000..6ffc751
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/require.go.tmpl
@@ -0,0 +1,6 @@
+{{.Comment}}
+func {{.DocInfo.Name}}(t TestingT, {{.Params}}) {
+ if assert.{{.DocInfo.Name}}(t, {{.ForwardedParams}}) { return }
+ if h, ok := t.(tHelper); ok { h.Helper() }
+ t.FailNow()
+}
diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go
new file mode 100644
index 0000000..9fe41db
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/require_forward.go
@@ -0,0 +1,957 @@
+/*
+* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
+* THIS FILE MUST NOT BE EDITED BY HAND
+ */
+
+package require
+
+import (
+ assert "github.com/stretchr/testify/assert"
+ http "net/http"
+ url "net/url"
+ time "time"
+)
+
+// Condition uses a Comparison to assert a complex condition.
+func (a *Assertions) Condition(comp assert.Comparison, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Condition(a.t, comp, msgAndArgs...)
+}
+
+// Conditionf uses a Comparison to assert a complex condition.
+func (a *Assertions) Conditionf(comp assert.Comparison, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Conditionf(a.t, comp, msg, args...)
+}
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// a.Contains("Hello World", "World")
+// a.Contains(["Hello", "World"], "World")
+// a.Contains({"Hello": "World"}, "Hello")
+func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Contains(a.t, s, contains, msgAndArgs...)
+}
+
+// Containsf asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+// a.Containsf("Hello World", "World", "error message %s", "formatted")
+// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted")
+// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted")
+func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Containsf(a.t, s, contains, msg, args...)
+}
+
+// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather than a directory or there is an error checking whether it exists.
+func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ DirExists(a.t, path, msgAndArgs...)
+}
+
+// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather than a directory or there is an error checking whether it exists.
+func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ DirExistsf(a.t, path, msg, args...)
+}
+
+// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2])
+func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ ElementsMatch(a.t, listA, listB, msgAndArgs...)
+}
+
+// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified
+// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
+// the number of appearances of each of them in both lists should match.
+//
+// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted")
+func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ ElementsMatchf(a.t, listA, listB, msg, args...)
+}
+
+// Empty asserts that the specified object is empty. I.e. nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// a.Empty(obj)
+func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Empty(a.t, object, msgAndArgs...)
+}
+
+// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0, or
+// a slice or a channel with len == 0.
+//
+// a.Emptyf(obj, "error message %s", "formatted")
+func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Emptyf(a.t, object, msg, args...)
+}
+
+// Equal asserts that two objects are equal.
+//
+// a.Equal(123, 123)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Equal(a.t, expected, actual, msgAndArgs...)
+}
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// a.EqualError(err, expectedErrorString)
+func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ EqualError(a.t, theError, errString, msgAndArgs...)
+}
+
+// EqualErrorf asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+// actualObj, err := SomeFunction()
+// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted")
+func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ EqualErrorf(a.t, theError, errString, msg, args...)
+}
+
+// EqualValues asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+// a.EqualValues(uint32(123), int32(123))
+func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ EqualValues(a.t, expected, actual, msgAndArgs...)
+}
+
+// EqualValuesf asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted")
+func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ EqualValuesf(a.t, expected, actual, msg, args...)
+}
+
+// Equalf asserts that two objects are equal.
+//
+// a.Equalf(123, 123, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses). Function equality
+// cannot be determined and will always fail.
+func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Equalf(a.t, expected, actual, msg, args...)
+}
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.Error(err) {
+// assert.Equal(t, expectedError, err)
+// }
+func (a *Assertions) Error(err error, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Error(a.t, err, msgAndArgs...)
+}
+
+// Errorf asserts that a function returned an error (i.e. not `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.Errorf(err, "error message %s", "formatted") {
+// assert.Equal(t, expectedErrorf, err)
+// }
+func (a *Assertions) Errorf(err error, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Errorf(a.t, err, msg, args...)
+}
+
+// Exactly asserts that two objects are equal in value and type.
+//
+// a.Exactly(int32(123), int64(123))
+func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Exactly(a.t, expected, actual, msgAndArgs...)
+}
+
+// Exactlyf asserts that two objects are equal in value and type.
+//
+// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted")
+func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Exactlyf(a.t, expected, actual, msg, args...)
+}
+
+// Fail reports a failure.
+func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Fail(a.t, failureMessage, msgAndArgs...)
+}
+
+// FailNow fails the test.
+func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ FailNow(a.t, failureMessage, msgAndArgs...)
+}
+
+// FailNowf fails the test.
+func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ FailNowf(a.t, failureMessage, msg, args...)
+}
+
+// Failf reports a failure.
+func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Failf(a.t, failureMessage, msg, args...)
+}
+
+// False asserts that the specified value is false.
+//
+// a.False(myBool)
+func (a *Assertions) False(value bool, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ False(a.t, value, msgAndArgs...)
+}
+
+// Falsef asserts that the specified value is false.
+//
+// a.Falsef(myBool, "error message %s", "formatted")
+func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Falsef(a.t, value, msg, args...)
+}
+
+// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ FileExists(a.t, path, msgAndArgs...)
+}
+
+// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file.
+func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ FileExistsf(a.t, path, msg, args...)
+}
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+//
+// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...)
+}
+
+// HTTPBodyContainsf asserts that a specified handler returns a
+// body that contains a string.
+//
+// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...)
+}
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...)
+}
+
+// HTTPBodyNotContainsf asserts that a specified handler returns a
+// body that does not contain a string.
+//
+// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...)
+}
+
+// HTTPError asserts that a specified handler returns an error status code.
+//
+// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPError(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPErrorf asserts that a specified handler returns an error status code.
+//
+// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPErrorf(a.t, handler, method, url, values, msg, args...)
+}
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+//
+// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}})
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPRedirectf asserts that a specified handler returns a redirect status code.
+//
+// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPRedirectf(a.t, handler, method, url, values, msg, args...)
+}
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+//
+// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...)
+}
+
+// HTTPSuccessf asserts that a specified handler returns a success status code.
+//
+// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ HTTPSuccessf(a.t, handler, method, url, values, msg, args...)
+}
+
+// Implements asserts that the specified object implements the given interface.
+//
+// a.Implements((*MyInterface)(nil), new(MyObject))
+func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Implements(a.t, interfaceObject, object, msgAndArgs...)
+}
+
+// Implementsf asserts that the specified object implements the given interface.
+//
+// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
+func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Implementsf(a.t, interfaceObject, object, msg, args...)
+}
+
+// InDelta asserts that the two numerals are within delta of each other.
+//
+// a.InDelta(math.Pi, (22 / 7.0), 0.01)
+func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDelta(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
+func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...)
+}
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// InDeltaSlicef is the same as InDelta, except it compares two slices.
+func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDeltaSlicef(a.t, expected, actual, delta, msg, args...)
+}
+
+// InDeltaf asserts that the two numerals are within delta of each other.
+//
+// a.InDeltaf(math.Pi, (22 / 7.0), 0.01, "error message %s", "formatted")
+func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InDeltaf(a.t, expected, actual, delta, msg, args...)
+}
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...)
+}
+
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
+func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...)
+}
+
+// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices.
+func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...)
+}
+
+// InEpsilonf asserts that expected and actual have a relative error less than epsilon
+func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ InEpsilonf(a.t, expected, actual, epsilon, msg, args...)
+}
+
+// IsType asserts that the specified objects are of the same type.
+func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ IsType(a.t, expectedType, object, msgAndArgs...)
+}
+
+// IsTypef asserts that the specified objects are of the same type.
+func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ IsTypef(a.t, expectedType, object, msg, args...)
+}
+
+// JSONEq asserts that two JSON strings are equivalent.
+//
+// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ JSONEq(a.t, expected, actual, msgAndArgs...)
+}
+
+// JSONEqf asserts that two JSON strings are equivalent.
+//
+// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted")
+func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ JSONEqf(a.t, expected, actual, msg, args...)
+}
+
+// Len asserts that the specified object has specific length.
+// Len also fails if the object has a type that len() does not accept.
+//
+// a.Len(mySlice, 3)
+func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Len(a.t, object, length, msgAndArgs...)
+}
+
+// Lenf asserts that the specified object has specific length.
+// Lenf also fails if the object has a type that len() does not accept.
+//
+// a.Lenf(mySlice, 3, "error message %s", "formatted")
+func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Lenf(a.t, object, length, msg, args...)
+}
+
+// Nil asserts that the specified object is nil.
+//
+// a.Nil(err)
+func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Nil(a.t, object, msgAndArgs...)
+}
+
+// Nilf asserts that the specified object is nil.
+//
+// a.Nilf(err, "error message %s", "formatted")
+func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Nilf(a.t, object, msg, args...)
+}
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.NoError(err) {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NoError(a.t, err, msgAndArgs...)
+}
+
+// NoErrorf asserts that a function returned no error (i.e. `nil`).
+//
+// actualObj, err := SomeFunction()
+// if a.NoErrorf(err, "error message %s", "formatted") {
+// assert.Equal(t, expectedObj, actualObj)
+// }
+func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NoErrorf(a.t, err, msg, args...)
+}
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// a.NotContains("Hello World", "Earth")
+// a.NotContains(["Hello", "World"], "Earth")
+// a.NotContains({"Hello": "World"}, "Earth")
+func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotContains(a.t, s, contains, msgAndArgs...)
+}
+
+// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted")
+// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted")
+// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted")
+func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotContainsf(a.t, s, contains, msg, args...)
+}
+
+// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// if a.NotEmpty(obj) {
+// assert.Equal(t, "two", obj[1])
+// }
+func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotEmpty(a.t, object, msgAndArgs...)
+}
+
+// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+//
+// if a.NotEmptyf(obj, "error message %s", "formatted") {
+// assert.Equal(t, "two", obj[1])
+// }
+func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotEmptyf(a.t, object, msg, args...)
+}
+
+// NotEqual asserts that the specified values are NOT equal.
+//
+// a.NotEqual(obj1, obj2)
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotEqual(a.t, expected, actual, msgAndArgs...)
+}
+
+// NotEqualf asserts that the specified values are NOT equal.
+//
+// a.NotEqualf(obj1, obj2, "error message %s", "formatted")
+//
+// Pointer variable equality is determined based on the equality of the
+// referenced values (as opposed to the memory addresses).
+func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotEqualf(a.t, expected, actual, msg, args...)
+}
+
+// NotNil asserts that the specified object is not nil.
+//
+// a.NotNil(err)
+func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotNil(a.t, object, msgAndArgs...)
+}
+
+// NotNilf asserts that the specified object is not nil.
+//
+// a.NotNilf(err, "error message %s", "formatted")
+func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotNilf(a.t, object, msg, args...)
+}
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// a.NotPanics(func(){ RemainCalm() })
+func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotPanics(a.t, f, msgAndArgs...)
+}
+
+// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted")
+func (a *Assertions) NotPanicsf(f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotPanicsf(a.t, f, msg, args...)
+}
+
+// NotRegexp asserts that a specified regexp does not match a string.
+//
+// a.NotRegexp(regexp.MustCompile("starts"), "it's starting")
+// a.NotRegexp("^start", "it's not starting")
+func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotRegexp(a.t, rx, str, msgAndArgs...)
+}
+
+// NotRegexpf asserts that a specified regexp does not match a string.
+//
+// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted")
+// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted")
+func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotRegexpf(a.t, rx, str, msg, args...)
+}
+
+// NotSubset asserts that the specified list(array, slice...) does NOT contain all
+// elements given in the specified subset(array, slice...).
+//
+// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
+func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotSubset(a.t, list, subset, msgAndArgs...)
+}
+
+// NotSubsetf asserts that the specified list(array, slice...) does NOT contain all
+// elements given in the specified subset(array, slice...).
+//
+// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
+func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotSubsetf(a.t, list, subset, msg, args...)
+}
+
+// NotZero asserts that i is not the zero value for its type.
+func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotZero(a.t, i, msgAndArgs...)
+}
+
+// NotZerof asserts that i is not the zero value for its type.
+func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ NotZerof(a.t, i, msg, args...)
+}
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+//
+// a.Panics(func(){ GoCrazy() })
+func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Panics(a.t, f, msgAndArgs...)
+}
+
+// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// a.PanicsWithValue("crazy error", func(){ GoCrazy() })
+func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ PanicsWithValue(a.t, expected, f, msgAndArgs...)
+}
+
+// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that
+// the recovered panic value equals the expected panic value.
+//
+// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted")
+func (a *Assertions) PanicsWithValuef(expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ PanicsWithValuef(a.t, expected, f, msg, args...)
+}
+
+// Panicsf asserts that the code inside the specified PanicTestFunc panics.
+//
+// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted")
+func (a *Assertions) Panicsf(f assert.PanicTestFunc, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Panicsf(a.t, f, msg, args...)
+}
+
+// Regexp asserts that a specified regexp matches a string.
+//
+// a.Regexp(regexp.MustCompile("start"), "it's starting")
+// a.Regexp("start...$", "it's not starting")
+func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Regexp(a.t, rx, str, msgAndArgs...)
+}
+
+// Regexpf asserts that a specified regexp matches a string.
+//
+// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted")
+// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted")
+func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Regexpf(a.t, rx, str, msg, args...)
+}
+
+// Subset asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
+func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Subset(a.t, list, subset, msgAndArgs...)
+}
+
+// Subsetf asserts that the specified list(array, slice...) contains all
+// elements given in the specified subset(array, slice...).
+//
+// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
+func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Subsetf(a.t, list, subset, msg, args...)
+}
+
+// True asserts that the specified value is true.
+//
+// a.True(myBool)
+func (a *Assertions) True(value bool, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ True(a.t, value, msgAndArgs...)
+}
+
+// Truef asserts that the specified value is true.
+//
+// a.Truef(myBool, "error message %s", "formatted")
+func (a *Assertions) Truef(value bool, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Truef(a.t, value, msg, args...)
+}
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+//
+// a.WithinDuration(time.Now(), time.Now(), 10*time.Second)
+func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ WithinDuration(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+// WithinDurationf asserts that the two times are within duration delta of each other.
+//
+// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted")
+func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ WithinDurationf(a.t, expected, actual, delta, msg, args...)
+}
+
+// Zero asserts that i is the zero value for its type.
+func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Zero(a.t, i, msgAndArgs...)
+}
+
+// Zerof asserts that i is the zero value for its type.
+func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) {
+ if h, ok := a.t.(tHelper); ok {
+ h.Helper()
+ }
+ Zerof(a.t, i, msg, args...)
+}
diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl b/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl
new file mode 100644
index 0000000..54124df
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl
@@ -0,0 +1,5 @@
+{{.CommentWithoutT "a"}}
+func (a *Assertions) {{.DocInfo.Name}}({{.Params}}) {
+ if h, ok := a.t.(tHelper); ok { h.Helper() }
+ {{.DocInfo.Name}}(a.t, {{.ForwardedParams}})
+}
diff --git a/vendor/github.com/stretchr/testify/require/requirements.go b/vendor/github.com/stretchr/testify/require/requirements.go
new file mode 100644
index 0000000..6b85c5e
--- /dev/null
+++ b/vendor/github.com/stretchr/testify/require/requirements.go
@@ -0,0 +1,29 @@
+package require
+
+// TestingT is an interface wrapper around *testing.T
+type TestingT interface {
+ Errorf(format string, args ...interface{})
+ FailNow()
+}
+
+type tHelper interface {
+ Helper()
+}
+
+// ComparisonAssertionFunc is a common function prototype when comparing two values. Can be useful
+// for table driven tests.
+type ComparisonAssertionFunc func(TestingT, interface{}, interface{}, ...interface{})
+
+// ValueAssertionFunc is a common function prototype when validating a single value. Can be useful
+// for table driven tests.
+type ValueAssertionFunc func(TestingT, interface{}, ...interface{})
+
+// BoolAssertionFunc is a common function prototype when validating a bool value. Can be useful
+// for table driven tests.
+type BoolAssertionFunc func(TestingT, bool, ...interface{})
+
+// ErrorAssertionFunc is a common function prototype when validating an error value. Can be useful
+// for table driven tests.
+type ErrorAssertionFunc func(TestingT, error, ...interface{})
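+
+// For example, an ErrorAssertionFunc can hold either NoError or Error in a
+// table driven test. A minimal sketch (the strconv parsing under test is only
+// an illustration, not part of this package):
+//
+//	tests := []struct {
+//		name      string
+//		input     string
+//		assertion ErrorAssertionFunc
+//	}{
+//		{"valid", "42", NoError},
+//		{"invalid", "abc", Error},
+//	}
+//	for _, tt := range tests {
+//		t.Run(tt.name, func(t *testing.T) {
+//			_, err := strconv.Atoi(tt.input)
+//			tt.assertion(t, err)
+//		})
+//	}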
+
+//go:generate go run ../_codegen/main.go -output-package=require -template=require.go.tmpl -include-format-funcs
diff --git a/vendor/github.com/vrischmann/envconfig/.travis.yml b/vendor/github.com/vrischmann/envconfig/.travis.yml
new file mode 100644
index 0000000..20b6c7e
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/.travis.yml
@@ -0,0 +1,10 @@
+language: go
+
+go:
+ - 1.5
+ - 1.6
+ - 1.7
+ - 1.8
+ - 1.9
+ - "1.10"
+ - tip
diff --git a/vendor/github.com/vrischmann/envconfig/LICENSE b/vendor/github.com/vrischmann/envconfig/LICENSE
new file mode 100644
index 0000000..e5bf20c
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2015 Vincent Rischmann
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/vrischmann/envconfig/README.md b/vendor/github.com/vrischmann/envconfig/README.md
new file mode 100644
index 0000000..1de5710
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/README.md
@@ -0,0 +1,151 @@
+envconfig
+=========
+
+[![Build Status](https://travis-ci.org/vrischmann/envconfig.svg?branch=master)](https://travis-ci.org/vrischmann/envconfig)
+[![GoDoc](https://godoc.org/github.com/vrischmann/envconfig?status.svg)](https://godoc.org/github.com/vrischmann/envconfig)
+
+envconfig is a library which allows you to parse your configuration from environment variables and fill an arbitrary struct.
+
+See [the example](https://godoc.org/github.com/vrischmann/envconfig#example-Init) to understand how to use it; it's pretty simple.
+
+Supported types
+---------------
+
+ * Almost all standard types plus `time.Duration` are supported by default.
+ * Slices and arrays
+ * Arbitrary structs
+ * Custom types via the [Unmarshaler](https://godoc.org/github.com/vrischmann/envconfig/#Unmarshaler) interface.
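+
+For example, here is a minimal sketch of a custom type implementing the `Unmarshaler` interface (the `logLevel` type and its values are made up for illustration):
+
+```go
+type logLevel uint8
+
+const (
+    debugLevel logLevel = iota
+    infoLevel
+)
+
+// Unmarshal parses the raw environment variable string into a logLevel.
+func (l *logLevel) Unmarshal(s string) error {
+    switch s {
+    case "debug":
+        *l = debugLevel
+    case "info":
+        *l = infoLevel
+    default:
+        return fmt.Errorf("unknown log level %q", s)
+    }
+    return nil
+}
+```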
+
+How does it work
+----------------
+
+*envconfig* takes the hierarchy of your configuration struct and the names of the fields to create the environment variable keys.
+
+For example:
+
+```go
+var conf struct {
+ Name string
+ Shard struct {
+ Host string
+ Port int
+ }
+}
+```
+
+This will check for those 3 keys:
+
+ * NAME or name
+ * SHARD\_HOST, or shard\_host
+ * SHARD\_PORT, or shard\_port
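+
+For example, a minimal sketch of loading the struct above (using the standard `log` package for error handling) could look like this:
+
+```go
+if err := envconfig.Init(&conf); err != nil {
+    log.Fatalln(err)
+}
+```
+
+Then run the binary with the variables set, e.g. `NAME=myapp SHARD_HOST=localhost SHARD_PORT=6379 ./mybinary` (placeholder values).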
+
+Flexible key naming
+-------------------
+
+*envconfig* supports having underscores in the key names where there is a _word boundary_. Now, that term is not super explicit, so let me show you an example:
+
+```go
+var conf struct {
+ Cassandra struct {
+ SSLCert string
+ SslKey string
+ }
+}
+```
+
+This will check all of the following keys:
+
+ * CASSANDRA\_SSL\_CERT, CASSANDRA\_SSLCERT, cassandra\_ssl\_cert, cassandra\_sslcert
+ * CASSANDRA\_SSL\_KEY, CASSANDRA\_SSLKEY, cassandra\_ssl\_key, cassandra\_sslkey
+
+If that is not good enough, look just below.
+
+Custom environment variable names
+---------------------------------
+
+*envconfig* supports custom environment variable names:
+
+```go
+var conf struct {
+ Name string `envconfig:"myName"`
+}
+```
+
+Default values
+--------------
+
+*envconfig* supports default values:
+
+```go
+var conf struct {
+ Name string `envconfig:"default=Vincent"`
+}
+```
+
+Optional values
+---------------
+
+*envconfig* supports optional values:
+
+```go
+var conf struct {
+ Name string `envconfig:"optional"`
+ Age int `envconfig:"-"`
+}
+```
+
+The two syntaxes are equivalent.
+
+Combining multiple options in one tag
+-------------------------------------
+
+You can of course combine multiple options:
+
+```go
+var conf struct {
+ Name string `envconfig:"default=Vincent,myName"`
+}
+```
+
+Slices or arrays
+----------------
+
+With slices or arrays, the same naming is applied for the slice. To put multiple elements into the slice or array, you need to separate
+them with a *,* (this will probably be configurable in the future, or at least gain a way to escape commas).
+
+For example:
+
+```go
+var conf struct {
+ Ports []int
+}
+```
+
+This will check for the key __PORTS__:
+
+ * if your variable is *9000* the slice will contain only 9000
+ * if your variable is *9000,100* the slice will contain 9000 and 100
+
+For slices of structs, it's a little more complicated. The same splitting of slice elements is done with a *comma*; however, each token must follow
+a specific format like this: `{value1,value2,...}`, with one value per struct field, in the order the fields are defined.
+
+For example:
+
+```go
+var conf struct {
+ Shards []struct {
+ Name string
+ Port int
+ }
+}
+```
+
+This will check for the key __SHARDS__. Example variable content: `{foobar,9000},{barbaz,20000}`
+
+This will result in two structs defined in the *Shards* slice.
+
+Future work
+-----------
+
+ * support for time.Time values with a layout defined via a field tag
+ * support for complex types
diff --git a/vendor/github.com/vrischmann/envconfig/doc.go b/vendor/github.com/vrischmann/envconfig/doc.go
new file mode 100644
index 0000000..0e8d7c8
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/doc.go
@@ -0,0 +1,199 @@
+/*
+Package envconfig implements a configuration reader which reads each value from an environment variable.
+
+The basic idea is that you define a configuration struct, like this:
+
+ var conf struct {
+ Addr string
+ Port int
+ Auth struct {
+ Key string
+ Endpoint string
+ }
+ Partitions []int
+ Shards []struct {
+ Name string
+ Id int
+ }
+ }
+
+Once you have that, you need to initialize the configuration:
+
+ if err := envconfig.Init(&conf); err != nil {
+ log.Fatalln(err)
+ }
+
+Then it's just a matter of setting the environment variables when calling your binary:
+
+ ADDR=localhost PORT=6379 AUTH_KEY=foobar ./mybinary
+
+Layout of the conf struct
+
+Your conf struct must follow the following rules:
+ - no unexported fields by default (can turn off with Options.AllowUnexported)
+ - only supported types (no map fields for example)
+
+Naming of the keys
+
+By default, envconfig generates all possible keys based on the field chain according to a flexible naming scheme.
+
+The field chain is how you access your field in the configuration struct. For example:
+
+ var conf struct {
+ Shard struct {
+ Name string
+ }
+ }
+
+With that struct, you access the name field via the chain *Shard.Name*
+
+The default naming scheme takes that and transforms it into the following:
+ - SHARD_NAME
+ - shard_name
+
+It can handle more complicated cases with multiple words in one field name. The field name needs to use the correct casing though, for example:
+
+ var conf struct {
+ Cassandra struct {
+ SSLCert string
+ SslKey string
+ }
+ }
+
+With that struct, you access the fields via the chains *Cassandra.SSLCert* or *Cassandra.SslKey*
+
+The default naming scheme takes that and transforms it into the following:
+ - CASSANDRA_SSL_CERT, cassandra_ssl_cert, CASSANDRA_SSLCERT, cassandra_sslcert
+ - CASSANDRA_SSL_KEY, cassandra_ssl_key, CASSANDRA_SSLKEY, cassandra_sslkey
+
+And, if that is not good enough for you, you always have the option to use a custom key:
+
+ var conf struct {
+ Cassandra struct {
+ Name string `envconfig:"cassandraMyName"`
+ }
+ }
+
+Now envconfig will only ever check the environment variable _cassandraMyName_.
+
+
+Content of the variables
+
+There are three types of content for a single variable:
+ - for simple types, a single string representing the value, parseable into the type.
+ - for slices or arrays, a comma-separated list of strings. Each string must be parseable into the element type of the slice or array.
+ - for structs, a comma-separated list of specially formatted strings representing structs.
+
+Example of a valid slice value:
+ foo,bar,baz
+
+The format for a struct is as follows:
+ - prefixed with {
+ - suffixed with }
+ - contains a comma-separated list of field values, in the order in which they are defined in the struct
+
+Example of a valid struct value:
+ type MyStruct struct {
+ Name string
+ Id int
+ Timeout time.Duration
+ }
+
+ {foobar,10,120s}
+
+Example of a valid slice of struct values:
+ {foobar,10,120s},{barbaz,20,50s}
+
+Special case for byte slices
+
+For byte slices, you generally don't want to type out a comma-separated list of byte values.
+
+For this use case, we support base64 encoded values.
+
+Here's an example:
+
+ var conf struct {
+ Data []byte
+ }
+
+ os.Setenv("DATA", "Rk9PQkFS")
+
+This will decode DATA to FOOBAR and put that into conf.Data.
+
+Optional values
+
+Sometimes you don't absolutely need a value. Here's how we tell envconfig a value is optional:
+
+ var conf struct {
+ Name string `envconfig:"optional"`
+ Age int `envconfig:"-"`
+ }
+
+The two syntaxes are equivalent.
+
+Default values
+
+Often you have configuration keys which almost never change, but you still want to be able to change them.
+
+In such cases, you might want to provide a default value.
+
+Here's how to do this with envconfig:
+
+ var conf struct {
+ Timeout time.Duration `envconfig:"default=1m"`
+ }
+
+Combining options
+
+You can of course combine multiple options. The syntax is simple enough: separate each option with a comma.
+
+For example:
+
+ var conf struct {
+ Timeout time.Duration `envconfig:"default=1m,myTimeout"`
+ }
+
+This would give you a default timeout of 1 minute, and look up the myTimeout environment variable.
+
+Supported types
+
+envconfig supports the following list of types:
+
+ - bool
+ - string
+ - intX
+ - uintX
+ - floatX
+ - time.Duration
+ - pointers to all of the above types
+
+Notably, we don't support complex types yet, simply because I had no use for them.
+
+Custom unmarshaler
+
+When the standard types are not enough, you will want to use a custom unmarshaler for your types.
+
+You do this by implementing Unmarshaler on your type. Here's an example:
+
+ type connectionType uint
+
+ const (
+ tlsConnection connectionType = iota
+ insecureConnection
+ )
+
+ func (t *connectionType) Unmarshal(s string) error {
+ switch s {
+ case "tls":
+ *t = tlsConnection
+ case "insecure":
+ *t = insecureConnection
+ default:
+ return fmt.Errorf("unable to unmarshal %s to a connection type", s)
+ }
+
+ return nil
+ }
+
+*/
+package envconfig
diff --git a/vendor/github.com/vrischmann/envconfig/envconfig.go b/vendor/github.com/vrischmann/envconfig/envconfig.go
new file mode 100644
index 0000000..ba0a914
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/envconfig.go
@@ -0,0 +1,491 @@
+package envconfig
+
+import (
+ "bytes"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "os"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+)
+
+var (
+ // ErrUnexportedField is the error returned by the Init* functions when a field of the config struct is not exported and the option AllowUnexported is not used.
+ ErrUnexportedField = errors.New("envconfig: unexported field")
+ // ErrNotAPointer is the error returned by the Init* functions when the configuration object is not a pointer.
+ ErrNotAPointer = errors.New("envconfig: value is not a pointer")
+ // ErrInvalidValueKind is the error returned by the Init* functions when the configuration object is not a struct.
+ ErrInvalidValueKind = errors.New("envconfig: invalid value kind, only works on structs")
+ // ErrDefaultUnsupportedOnSlice is the error returned by the Init* functions when there is a default tag on a slice.
+ // The `default` tag is unsupported on slices because slice parsing uses "," as the separator, which is also the envconfig tag separator.
+ ErrDefaultUnsupportedOnSlice = errors.New("envconfig: default tag unsupported on slice")
+)
+
+type context struct {
+ name string
+ customName string
+ defaultVal string
+ parents []reflect.Value
+ optional, leaveNil bool
+ allowUnexported bool
+}
+
+// Unmarshaler is the interface implemented by objects that can unmarshal
+// an environment variable string representation of themselves.
+type Unmarshaler interface {
+ Unmarshal(s string) error
+}
+
+// Options is used to customize the behavior of envconfig. Use it with InitWithOptions.
+type Options struct {
+ // Prefix allows specifying a prefix for each key.
+ Prefix string
+
+ // AllOptional determines whether missing keys are treated as errors. With
+ // AllOptional=true, no error is returned for keys that are not found.
+ AllOptional bool
+
+ // LeaveNil specifies whether nil pointer fields in the passed config should be
+ // left as nil. When set, a new pointer is only created if at least one value
+ // reachable through it is actually filled in from the environment. By default
+ // (LeaveNil=false), every nil pointer encountered in the structs is allocated.
+ //
+ // var X struct {
+ // A *struct{
+ // B string
+ // }
+ // }
+ // envconfig.InitWithOptions(&X, Options{LeaveNil: true})
+ //
+ // $ ./program
+ //
+ // X.A == nil
+ //
+ // $ A_B="string" ./program
+ //
+ // X.A.B="string" // A will not be nil
+ LeaveNil bool
+
+ // AllowUnexported allows unexported fields to be present in the passed config.
+ AllowUnexported bool
+}
+
+// Init reads the configuration from environment variables and populates the conf object. conf must be a pointer
+func Init(conf interface{}) error {
+ return InitWithOptions(conf, Options{})
+}
+
+// InitWithPrefix reads the configuration from environment variables and populates the conf object. conf must be a pointer.
+// Each key read will be prefixed with the prefix string.
+func InitWithPrefix(conf interface{}, prefix string) error {
+ return InitWithOptions(conf, Options{Prefix: prefix})
+}
+
+// InitWithOptions reads the configuration from environment variables and populates the conf object.
+// conf must be a pointer.
+func InitWithOptions(conf interface{}, opts Options) error {
+ value := reflect.ValueOf(conf)
+ if value.Kind() != reflect.Ptr {
+ return ErrNotAPointer
+ }
+
+ elem := value.Elem()
+
+ ctx := context{
+ name: opts.Prefix,
+ optional: opts.AllOptional,
+ leaveNil: opts.LeaveNil,
+ allowUnexported: opts.AllowUnexported,
+ }
+ switch elem.Kind() {
+ case reflect.Ptr:
+ if elem.IsNil() {
+ elem.Set(reflect.New(elem.Type().Elem()))
+ }
+ _, err := readStruct(elem.Elem(), &ctx)
+ return err
+ case reflect.Struct:
+ _, err := readStruct(elem, &ctx)
+ return err
+ default:
+ return ErrInvalidValueKind
+ }
+}
+
+type tag struct {
+ customName string
+ optional bool
+ skip bool
+ defaultVal string
+}
+
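+// parseTag parses the contents of an envconfig struct tag, recognizing the
+// "-" (skip), "optional" and "default=" options; any other token is used as a custom key name.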
+func parseTag(s string) *tag {
+ var t tag
+
+ tokens := strings.Split(s, ",")
+ for _, v := range tokens {
+ switch {
+ case v == "-":
+ t.skip = true
+ case v == "optional":
+ t.optional = true
+ case strings.HasPrefix(v, "default="):
+ t.defaultVal = strings.TrimPrefix(v, "default=")
+ default:
+ t.customName = v
+ }
+ }
+
+ return &t
+}
+
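+// readStruct walks the fields of a struct value, recursing into nested structs
+// and pointers, and reports whether at least one field was filled in from the environment.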
+func readStruct(value reflect.Value, ctx *context) (nonNil bool, err error) {
+ var parents []reflect.Value
+
+ for i := 0; i < value.NumField(); i++ {
+ field := value.Field(i)
+ name := value.Type().Field(i).Name
+
+ tag := parseTag(value.Type().Field(i).Tag.Get("envconfig"))
+ if tag.skip || !field.CanSet() {
+ if !field.CanSet() && !ctx.allowUnexported {
+ return false, ErrUnexportedField
+ }
+ continue
+ }
+
+ parents = ctx.parents
+
+ doRead:
+ switch field.Kind() {
+ case reflect.Ptr:
+ // it's a pointer, create a new value and restart the switch
+ if field.IsNil() {
+ field.Set(reflect.New(field.Type().Elem()))
+ parents = append(parents, field) // track parent pointers to deallocate if no children are filled in
+ }
+ field = field.Elem()
+ goto doRead
+ case reflect.Struct:
+ var nonNilIn bool
+ nonNilIn, err = readStruct(field, &context{
+ name: combineName(ctx.name, name),
+ optional: ctx.optional || tag.optional,
+ defaultVal: tag.defaultVal,
+ parents: parents,
+ leaveNil: ctx.leaveNil,
+ allowUnexported: ctx.allowUnexported,
+ })
+ nonNil = nonNil || nonNilIn
+ default:
+ var ok bool
+ ok, err = setField(field, &context{
+ name: combineName(ctx.name, name),
+ customName: tag.customName,
+ optional: ctx.optional || tag.optional,
+ defaultVal: tag.defaultVal,
+ parents: parents,
+ leaveNil: ctx.leaveNil,
+ allowUnexported: ctx.allowUnexported,
+ })
+ nonNil = nonNil || ok
+ }
+
+ if err != nil {
+ return false, err
+ }
+ }
+
+ if !nonNil && ctx.leaveNil { // re-zero
+ for _, p := range parents {
+ p.Set(reflect.Zero(p.Type()))
+ }
+ }
+
+ return nonNil, err
+}
+
+var byteSliceType = reflect.TypeOf([]byte(nil))
+
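+// setField reads the raw value for a field and dispatches to the byte-slice,
+// slice or scalar parsing path. It reports whether a value was actually set.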
+func setField(value reflect.Value, ctx *context) (ok bool, err error) {
+ str, err := readValue(ctx)
+ if err != nil {
+ return false, err
+ }
+
+ if len(str) == 0 && ctx.optional {
+ return false, nil
+ }
+
+ isSliceNotUnmarshaler := value.Kind() == reflect.Slice && !isUnmarshaler(value.Type())
+ switch {
+ case isSliceNotUnmarshaler && value.Type() == byteSliceType:
+ err := parseBytesValue(value, str)
+ if err != nil {
+ err = fmt.Errorf("envconfig: unable to parse value %q as bytes for possible keys %v. err=%v", str, makeAllPossibleKeys(ctx), err)
+ }
+ return true, err
+
+ case isSliceNotUnmarshaler:
+ return true, setSliceField(value, str, ctx)
+
+ default:
+ return true, parseValue(value, str, ctx)
+ }
+}
+
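+// setSliceField splits str with the slice tokenizer and parses each token into
+// a new element appended to the slice.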
+func setSliceField(value reflect.Value, str string, ctx *context) error {
+ if ctx.defaultVal != "" {
+ return ErrDefaultUnsupportedOnSlice
+ }
+
+ elType := value.Type().Elem()
+ tnz := newSliceTokenizer(str)
+
+ slice := reflect.MakeSlice(value.Type(), value.Len(), value.Cap())
+
+ for tnz.scan() {
+ token := tnz.text()
+
+ el := reflect.New(elType).Elem()
+
+ if err := parseValue(el, token, ctx); err != nil {
+ return err
+ }
+
+ slice = reflect.Append(slice, el)
+ }
+
+ value.Set(slice)
+
+ return tnz.Err()
+}
+
+var (
+ durationType = reflect.TypeOf(new(time.Duration)).Elem()
+ unmarshalerType = reflect.TypeOf(new(Unmarshaler)).Elem()
+)
+
+func isDurationField(t reflect.Type) bool {
+ return t.AssignableTo(durationType)
+}
+
+func isUnmarshaler(t reflect.Type) bool {
+ return t.Implements(unmarshalerType) || reflect.PtrTo(t).Implements(unmarshalerType)
+}
+
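+// parseValue parses str into v based on its type: Unmarshaler and time.Duration
+// get special handling, pointers are allocated, and the remaining kinds are
+// parsed as scalars or as structs inside slices.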
+func parseValue(v reflect.Value, str string, ctx *context) (err error) {
+ vtype := v.Type()
+
+ // Special case when the type is a map: we need to make the map
+ switch vtype.Kind() {
+ case reflect.Map:
+ v.Set(reflect.MakeMap(vtype))
+ }
+
+ kind := vtype.Kind()
+ switch {
+ case isUnmarshaler(vtype):
+ // Special case for Unmarshaler
+ err = parseWithUnmarshaler(v, str)
+ case isDurationField(vtype):
+ // Special case for time.Duration
+ err = parseDuration(v, str)
+ case kind == reflect.Bool:
+ err = parseBoolValue(v, str)
+ case kind == reflect.Int, kind == reflect.Int8, kind == reflect.Int16, kind == reflect.Int32, kind == reflect.Int64:
+ err = parseIntValue(v, str)
+ case kind == reflect.Uint, kind == reflect.Uint8, kind == reflect.Uint16, kind == reflect.Uint32, kind == reflect.Uint64:
+ err = parseUintValue(v, str)
+ case kind == reflect.Float32, kind == reflect.Float64:
+ err = parseFloatValue(v, str)
+ case kind == reflect.Ptr:
+ v.Set(reflect.New(vtype.Elem()))
+ return parseValue(v.Elem(), str, ctx)
+ case kind == reflect.String:
+ v.SetString(str)
+ case kind == reflect.Struct:
+ err = parseStruct(v, str, ctx)
+ default:
+ return fmt.Errorf("envconfig: kind %v not supported", kind)
+ }
+
+ if err != nil {
+ return fmt.Errorf("envconfig: unable to parse value %q for possible keys %v. err=%v", str, makeAllPossibleKeys(ctx), err)
+ }
+
+ return
+}
+
+func parseWithUnmarshaler(v reflect.Value, str string) error {
+ var u = v.Addr().Interface().(Unmarshaler)
+ return u.Unmarshal(str)
+}
+
+func parseDuration(v reflect.Value, str string) error {
+ d, err := time.ParseDuration(str)
+ if err != nil {
+ return err
+ }
+
+ v.SetInt(int64(d))
+
+ return nil
+}
+
+// NOTE(vincent): this is only called when parsing structs inside a slice.
+func parseStruct(value reflect.Value, token string, ctx *context) error {
+ tokens := strings.Split(token[1:len(token)-1], ",")
+ if len(tokens) != value.NumField() {
+ return fmt.Errorf("struct token has %d fields but struct has %d", len(tokens), value.NumField())
+ }
+
+ for i := 0; i < value.NumField(); i++ {
+ field := value.Field(i)
+ t := tokens[i]
+
+ if err := parseValue(field, t, ctx); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func parseBoolValue(v reflect.Value, str string) error {
+ val, err := strconv.ParseBool(str)
+ if err != nil {
+ return err
+ }
+ v.SetBool(val)
+
+ return nil
+}
+
+func parseIntValue(v reflect.Value, str string) error {
+ val, err := strconv.ParseInt(str, 10, 64)
+ if err != nil {
+ return err
+ }
+ v.SetInt(val)
+
+ return nil
+}
+
+func parseUintValue(v reflect.Value, str string) error {
+ val, err := strconv.ParseUint(str, 10, 64)
+ if err != nil {
+ return err
+ }
+ v.SetUint(val)
+
+ return nil
+}
+
+func parseFloatValue(v reflect.Value, str string) error {
+ val, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ return err
+ }
+ v.SetFloat(val)
+
+ return nil
+}
+
+func parseBytesValue(v reflect.Value, str string) error {
+ val, err := base64.StdEncoding.DecodeString(str)
+ if err != nil {
+ return err
+ }
+ v.SetBytes(val)
+
+ return nil
+}
+
+func combineName(parentName, name string) string {
+ if parentName == "" {
+ return name
+ }
+
+ return parentName + "." + name
+}
+
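+// readValue returns the first non-empty environment variable among the possible
+// keys, falling back to the default value; a missing required key is an error.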
+func readValue(ctx *context) (string, error) {
+ keys := makeAllPossibleKeys(ctx)
+
+ var str string
+
+ for _, key := range keys {
+ str = os.Getenv(key)
+ if str != "" {
+ break
+ }
+ }
+
+ if str != "" {
+ return str, nil
+ }
+
+ if ctx.defaultVal != "" {
+ return ctx.defaultVal, nil
+ }
+
+ if ctx.optional {
+ return "", nil
+ }
+
+ return "", fmt.Errorf("envconfig: keys %s not found", strings.Join(keys, ", "))
+}
+
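+// makeAllPossibleKeys builds every candidate environment variable name for the
+// current field chain: the custom name if one is set, otherwise upper- and
+// lower-case variants both with and without underscores at word boundaries.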
+func makeAllPossibleKeys(ctx *context) (res []string) {
+ if ctx.customName != "" {
+ return []string{ctx.customName}
+ }
+
+ tmp := make(map[string]struct{})
+ {
+ n := []rune(ctx.name)
+
+ var buf bytes.Buffer // this is the buffer where we put extra underscores on "word" boundaries
+ var buf2 bytes.Buffer // this is the buffer with the standard naming scheme
+
+ wroteUnderscore := false
+ for i, r := range ctx.name {
+ if r == '.' {
+ buf.WriteRune('_')
+ buf2.WriteRune('_')
+ wroteUnderscore = true
+ continue
+ }
+
+ prevOrNextLower := i+1 < len(n) && i-1 > 0 && (unicode.IsLower(n[i+1]) || unicode.IsLower(n[i-1]))
+ if i > 0 && unicode.IsUpper(r) && prevOrNextLower && !wroteUnderscore {
+ buf.WriteRune('_')
+ }
+
+ buf.WriteRune(r)
+ buf2.WriteRune(r)
+
+ wroteUnderscore = false
+ }
+
+ tmp[strings.ToLower(buf.String())] = struct{}{}
+ tmp[strings.ToUpper(buf.String())] = struct{}{}
+ tmp[strings.ToLower(buf2.String())] = struct{}{}
+ tmp[strings.ToUpper(buf2.String())] = struct{}{}
+ }
+
+ for k := range tmp {
+ res = append(res, k)
+ }
+
+ sort.Strings(res)
+
+ return
+}
diff --git a/vendor/github.com/vrischmann/envconfig/go.mod b/vendor/github.com/vrischmann/envconfig/go.mod
new file mode 100644
index 0000000..526374d
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/go.mod
@@ -0,0 +1 @@
+module github.com/vrischmann/envconfig
diff --git a/vendor/github.com/vrischmann/envconfig/slice.go b/vendor/github.com/vrischmann/envconfig/slice.go
new file mode 100644
index 0000000..daa7095
--- /dev/null
+++ b/vendor/github.com/vrischmann/envconfig/slice.go
@@ -0,0 +1,76 @@
+package envconfig
+
+import (
+ "bufio"
+ "bytes"
+ "io"
+ "strings"
+)
+
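+// sliceTokenizer splits a comma separated string into tokens, treating anything
+// inside braces as a single token so that struct values are not split apart.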
+type sliceTokenizer struct {
+ err error
+ r *bufio.Reader
+ buf bytes.Buffer
+ inBraces bool
+}
+
+var eof = rune(0)
+
+func newSliceTokenizer(str string) *sliceTokenizer {
+ return &sliceTokenizer{
+ r: bufio.NewReader(strings.NewReader(str)),
+ }
+}
+
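+// scan advances the tokenizer to the next token, reporting whether one is available.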
+func (t *sliceTokenizer) scan() bool {
+ for {
+ if t.err == io.EOF && t.buf.Len() == 0 {
+ return false
+ }
+
+ ch := t.readRune()
+ if ch == eof {
+ return true
+ }
+
+ if ch == '{' {
+ t.inBraces = true
+ }
+ if ch == '}' {
+ t.inBraces = false
+ }
+
+ if ch == ',' && !t.inBraces {
+ return true
+ }
+
+ // NOTE(vincent): we ignore the WriteRune error here because there is NO WAY
+ // for WriteRune to return an error.
+ // Yep. Seriously. Look here http://golang.org/src/bytes/buffer.go?s=7661:7714#L227
+ _, _ = t.buf.WriteRune(ch)
+ }
+}
+
+func (t *sliceTokenizer) readRune() rune {
+ ch, _, err := t.r.ReadRune()
+ if err != nil {
+ t.err = err
+ return eof
+ }
+
+ return ch
+}
+
+func (t *sliceTokenizer) text() string {
+ str := t.buf.String()
+ t.buf.Reset()
+
+ return str
+}
+
+func (t *sliceTokenizer) Err() error {
+ if t.err == io.EOF {
+ return nil
+ }
+ return t.err
+}
diff --git a/vendor/golang.org/x/net/AUTHORS b/vendor/golang.org/x/net/AUTHORS
new file mode 100644
index 0000000..15167cd
--- /dev/null
+++ b/vendor/golang.org/x/net/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/net/CONTRIBUTORS b/vendor/golang.org/x/net/CONTRIBUTORS
new file mode 100644
index 0000000..1c4577e
--- /dev/null
+++ b/vendor/golang.org/x/net/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/net/LICENSE b/vendor/golang.org/x/net/LICENSE
new file mode 100644
index 0000000..6a66aea
--- /dev/null
+++ b/vendor/golang.org/x/net/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/net/PATENTS b/vendor/golang.org/x/net/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/net/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/net/context/context.go b/vendor/golang.org/x/net/context/context.go
new file mode 100644
index 0000000..a3c021d
--- /dev/null
+++ b/vendor/golang.org/x/net/context/context.go
@@ -0,0 +1,56 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package context defines the Context type, which carries deadlines,
+// cancelation signals, and other request-scoped values across API boundaries
+// and between processes.
+// As of Go 1.7 this package is available in the standard library under the
+// name context. https://golang.org/pkg/context.
+//
+// Incoming requests to a server should create a Context, and outgoing calls to
+// servers should accept a Context. The chain of function calls between must
+// propagate the Context, optionally replacing it with a modified copy created
+// using WithDeadline, WithTimeout, WithCancel, or WithValue.
+//
+// Programs that use Contexts should follow these rules to keep interfaces
+// consistent across packages and enable static analysis tools to check context
+// propagation:
+//
+// Do not store Contexts inside a struct type; instead, pass a Context
+// explicitly to each function that needs it. The Context should be the first
+// parameter, typically named ctx:
+//
+// func DoSomething(ctx context.Context, arg Arg) error {
+// // ... use ctx ...
+// }
+//
+// Do not pass a nil Context, even if a function permits it. Pass context.TODO
+// if you are unsure about which Context to use.
+//
+// Use context Values only for request-scoped data that transits processes and
+// APIs, not for passing optional parameters to functions.
+//
+// The same Context may be passed to functions running in different goroutines;
+// Contexts are safe for simultaneous use by multiple goroutines.
+//
+// See http://blog.golang.org/context for example code for a server that uses
+// Contexts.
+package context // import "golang.org/x/net/context"
+
+// Background returns a non-nil, empty Context. It is never canceled, has no
+// values, and has no deadline. It is typically used by the main function,
+// initialization, and tests, and as the top-level Context for incoming
+// requests.
+func Background() Context {
+ return background
+}
+
+// TODO returns a non-nil, empty Context. Code should use context.TODO when
+// it's unclear which Context to use or it is not yet available (because the
+// surrounding function has not yet been extended to accept a Context
+// parameter). TODO is recognized by static analysis tools that determine
+// whether Contexts are propagated correctly in a program.
+func TODO() Context {
+ return todo
+}
diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
new file mode 100644
index 0000000..37dc0cf
--- /dev/null
+++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
@@ -0,0 +1,71 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ctxhttp provides helper functions for performing context-aware HTTP requests.
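+//
+// For example, a GET request bound to a timeout might look like this (the URL
+// is a placeholder):
+//
+//	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+//	defer cancel()
+//	resp, err := ctxhttp.Get(ctx, nil, "https://example.com")
+//	if err != nil {
+//		// handle the error
+//	}
+//	defer resp.Body.Close()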
+package ctxhttp // import "golang.org/x/net/context/ctxhttp"
+
+import (
+ "context"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+)
+
+// Do sends an HTTP request with the provided http.Client and returns
+// an HTTP response.
+//
+// If the client is nil, http.DefaultClient is used.
+//
+// The provided ctx must be non-nil. If it is canceled or times out,
+// ctx.Err() will be returned.
+func Do(ctx context.Context, client *http.Client, req *http.Request) (*http.Response, error) {
+ if client == nil {
+ client = http.DefaultClient
+ }
+ resp, err := client.Do(req.WithContext(ctx))
+ // If we got an error, and the context has been canceled,
+ // the context's error is probably more useful.
+ if err != nil {
+ select {
+ case <-ctx.Done():
+ err = ctx.Err()
+ default:
+ }
+ }
+ return resp, err
+}
+
+// Get issues a GET request via the Do function.
+func Get(ctx context.Context, client *http.Client, url string) (*http.Response, error) {
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ return Do(ctx, client, req)
+}
+
+// Head issues a HEAD request via the Do function.
+func Head(ctx context.Context, client *http.Client, url string) (*http.Response, error) {
+ req, err := http.NewRequest("HEAD", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ return Do(ctx, client, req)
+}
+
+// Post issues a POST request via the Do function.
+func Post(ctx context.Context, client *http.Client, url string, bodyType string, body io.Reader) (*http.Response, error) {
+ req, err := http.NewRequest("POST", url, body)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", bodyType)
+ return Do(ctx, client, req)
+}
+
+// PostForm issues a POST request via the Do function.
+func PostForm(ctx context.Context, client *http.Client, url string, data url.Values) (*http.Response, error) {
+ return Post(ctx, client, url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode()))
+}
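+
+// A minimal usage sketch: issue a GET that is abandoned once the context's
+// deadline expires (the URL is only a placeholder). A nil client means
+// http.DefaultClient is used.
+//
+//	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
+//	defer cancel()
+//	resp, err := ctxhttp.Get(ctx, nil, "https://example.com/")
+//	if err != nil {
+//		log.Fatal(err) // ctx.Err() if the deadline was exceeded
+//	}
+//	defer resp.Body.Close()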
diff --git a/vendor/golang.org/x/net/context/go17.go b/vendor/golang.org/x/net/context/go17.go
new file mode 100644
index 0000000..d20f52b
--- /dev/null
+++ b/vendor/golang.org/x/net/context/go17.go
@@ -0,0 +1,72 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.7
+
+package context
+
+import (
+ "context" // standard library's context, as of Go 1.7
+ "time"
+)
+
+var (
+ todo = context.TODO()
+ background = context.Background()
+)
+
+// Canceled is the error returned by Context.Err when the context is canceled.
+var Canceled = context.Canceled
+
+// DeadlineExceeded is the error returned by Context.Err when the context's
+// deadline passes.
+var DeadlineExceeded = context.DeadlineExceeded
+
+// WithCancel returns a copy of parent with a new Done channel. The returned
+// context's Done channel is closed when the returned cancel function is called
+// or when the parent context's Done channel is closed, whichever happens first.
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete.
+func WithCancel(parent Context) (ctx Context, cancel CancelFunc) {
+ ctx, f := context.WithCancel(parent)
+ return ctx, CancelFunc(f)
+}
+
+// WithDeadline returns a copy of the parent context with the deadline adjusted
+// to be no later than d. If the parent's deadline is already earlier than d,
+// WithDeadline(parent, d) is semantically equivalent to parent. The returned
+// context's Done channel is closed when the deadline expires, when the returned
+// cancel function is called, or when the parent context's Done channel is
+// closed, whichever happens first.
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete.
+func WithDeadline(parent Context, deadline time.Time) (Context, CancelFunc) {
+ ctx, f := context.WithDeadline(parent, deadline)
+ return ctx, CancelFunc(f)
+}
+
+// WithTimeout returns WithDeadline(parent, time.Now().Add(timeout)).
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete:
+//
+// func slowOperationWithTimeout(ctx context.Context) (Result, error) {
+// ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond)
+// defer cancel() // releases resources if slowOperation completes before timeout elapses
+// return slowOperation(ctx)
+// }
+func WithTimeout(parent Context, timeout time.Duration) (Context, CancelFunc) {
+ return WithDeadline(parent, time.Now().Add(timeout))
+}
+
+// WithValue returns a copy of parent in which the value associated with key is
+// val.
+//
+// Use context Values only for request-scoped data that transits processes and
+// APIs, not for passing optional parameters to functions.
+func WithValue(parent Context, key interface{}, val interface{}) Context {
+ return context.WithValue(parent, key, val)
+}
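+
+// A minimal sketch of WithCancel (worker is a hypothetical function that
+// returns when ctx.Done() is closed): calling cancel stops work started on
+// behalf of the derived context.
+//
+//	ctx, cancel := context.WithCancel(context.Background())
+//	go worker(ctx)
+//	// ... later, when the results are no longer needed:
+//	cancel()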
diff --git a/vendor/golang.org/x/net/context/go19.go b/vendor/golang.org/x/net/context/go19.go
new file mode 100644
index 0000000..d88bd1d
--- /dev/null
+++ b/vendor/golang.org/x/net/context/go19.go
@@ -0,0 +1,20 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.9
+
+package context
+
+import "context" // standard library's context, as of Go 1.7
+
+// A Context carries a deadline, a cancelation signal, and other values across
+// API boundaries.
+//
+// Context's methods may be called by multiple goroutines simultaneously.
+type Context = context.Context
+
+// A CancelFunc tells an operation to abandon its work.
+// A CancelFunc does not wait for the work to stop.
+// After the first call, subsequent calls to a CancelFunc do nothing.
+type CancelFunc = context.CancelFunc
diff --git a/vendor/golang.org/x/net/context/pre_go17.go b/vendor/golang.org/x/net/context/pre_go17.go
new file mode 100644
index 0000000..0f35592
--- /dev/null
+++ b/vendor/golang.org/x/net/context/pre_go17.go
@@ -0,0 +1,300 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.7
+
+package context
+
+import (
+ "errors"
+ "fmt"
+ "sync"
+ "time"
+)
+
+// An emptyCtx is never canceled, has no values, and has no deadline. It is not
+// struct{}, since vars of this type must have distinct addresses.
+type emptyCtx int
+
+func (*emptyCtx) Deadline() (deadline time.Time, ok bool) {
+ return
+}
+
+func (*emptyCtx) Done() <-chan struct{} {
+ return nil
+}
+
+func (*emptyCtx) Err() error {
+ return nil
+}
+
+func (*emptyCtx) Value(key interface{}) interface{} {
+ return nil
+}
+
+func (e *emptyCtx) String() string {
+ switch e {
+ case background:
+ return "context.Background"
+ case todo:
+ return "context.TODO"
+ }
+ return "unknown empty Context"
+}
+
+var (
+ background = new(emptyCtx)
+ todo = new(emptyCtx)
+)
+
+// Canceled is the error returned by Context.Err when the context is canceled.
+var Canceled = errors.New("context canceled")
+
+// DeadlineExceeded is the error returned by Context.Err when the context's
+// deadline passes.
+var DeadlineExceeded = errors.New("context deadline exceeded")
+
+// WithCancel returns a copy of parent with a new Done channel. The returned
+// context's Done channel is closed when the returned cancel function is called
+// or when the parent context's Done channel is closed, whichever happens first.
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete.
+func WithCancel(parent Context) (ctx Context, cancel CancelFunc) {
+ c := newCancelCtx(parent)
+ propagateCancel(parent, c)
+ return c, func() { c.cancel(true, Canceled) }
+}
+
+// newCancelCtx returns an initialized cancelCtx.
+func newCancelCtx(parent Context) *cancelCtx {
+ return &cancelCtx{
+ Context: parent,
+ done: make(chan struct{}),
+ }
+}
+
+// propagateCancel arranges for child to be canceled when parent is.
+func propagateCancel(parent Context, child canceler) {
+ if parent.Done() == nil {
+ return // parent is never canceled
+ }
+ if p, ok := parentCancelCtx(parent); ok {
+ p.mu.Lock()
+ if p.err != nil {
+ // parent has already been canceled
+ child.cancel(false, p.err)
+ } else {
+ if p.children == nil {
+ p.children = make(map[canceler]bool)
+ }
+ p.children[child] = true
+ }
+ p.mu.Unlock()
+ } else {
+ go func() {
+ select {
+ case <-parent.Done():
+ child.cancel(false, parent.Err())
+ case <-child.Done():
+ }
+ }()
+ }
+}
+
+// parentCancelCtx follows a chain of parent references until it finds a
+// *cancelCtx. This function understands how each of the concrete types in this
+// package represents its parent.
+func parentCancelCtx(parent Context) (*cancelCtx, bool) {
+ for {
+ switch c := parent.(type) {
+ case *cancelCtx:
+ return c, true
+ case *timerCtx:
+ return c.cancelCtx, true
+ case *valueCtx:
+ parent = c.Context
+ default:
+ return nil, false
+ }
+ }
+}
+
+// removeChild removes a context from its parent.
+func removeChild(parent Context, child canceler) {
+ p, ok := parentCancelCtx(parent)
+ if !ok {
+ return
+ }
+ p.mu.Lock()
+ if p.children != nil {
+ delete(p.children, child)
+ }
+ p.mu.Unlock()
+}
+
+// A canceler is a context type that can be canceled directly. The
+// implementations are *cancelCtx and *timerCtx.
+type canceler interface {
+ cancel(removeFromParent bool, err error)
+ Done() <-chan struct{}
+}
+
+// A cancelCtx can be canceled. When canceled, it also cancels any children
+// that implement canceler.
+type cancelCtx struct {
+ Context
+
+ done chan struct{} // closed by the first cancel call.
+
+ mu sync.Mutex
+ children map[canceler]bool // set to nil by the first cancel call
+ err error // set to non-nil by the first cancel call
+}
+
+func (c *cancelCtx) Done() <-chan struct{} {
+ return c.done
+}
+
+func (c *cancelCtx) Err() error {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.err
+}
+
+func (c *cancelCtx) String() string {
+ return fmt.Sprintf("%v.WithCancel", c.Context)
+}
+
+// cancel closes c.done, cancels each of c's children, and, if
+// removeFromParent is true, removes c from its parent's children.
+func (c *cancelCtx) cancel(removeFromParent bool, err error) {
+ if err == nil {
+ panic("context: internal error: missing cancel error")
+ }
+ c.mu.Lock()
+ if c.err != nil {
+ c.mu.Unlock()
+ return // already canceled
+ }
+ c.err = err
+ close(c.done)
+ for child := range c.children {
+ // NOTE: acquiring the child's lock while holding parent's lock.
+ child.cancel(false, err)
+ }
+ c.children = nil
+ c.mu.Unlock()
+
+ if removeFromParent {
+ removeChild(c.Context, c)
+ }
+}
+
+// WithDeadline returns a copy of the parent context with the deadline adjusted
+// to be no later than d. If the parent's deadline is already earlier than d,
+// WithDeadline(parent, d) is semantically equivalent to parent. The returned
+// context's Done channel is closed when the deadline expires, when the returned
+// cancel function is called, or when the parent context's Done channel is
+// closed, whichever happens first.
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete.
+func WithDeadline(parent Context, deadline time.Time) (Context, CancelFunc) {
+ if cur, ok := parent.Deadline(); ok && cur.Before(deadline) {
+ // The current deadline is already sooner than the new one.
+ return WithCancel(parent)
+ }
+ c := &timerCtx{
+ cancelCtx: newCancelCtx(parent),
+ deadline: deadline,
+ }
+ propagateCancel(parent, c)
+ d := deadline.Sub(time.Now())
+ if d <= 0 {
+ c.cancel(true, DeadlineExceeded) // deadline has already passed
+ return c, func() { c.cancel(true, Canceled) }
+ }
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ if c.err == nil {
+ c.timer = time.AfterFunc(d, func() {
+ c.cancel(true, DeadlineExceeded)
+ })
+ }
+ return c, func() { c.cancel(true, Canceled) }
+}
+
+// A timerCtx carries a timer and a deadline. It embeds a cancelCtx to
+// implement Done and Err. It implements cancel by stopping its timer then
+// delegating to cancelCtx.cancel.
+type timerCtx struct {
+ *cancelCtx
+ timer *time.Timer // Under cancelCtx.mu.
+
+ deadline time.Time
+}
+
+func (c *timerCtx) Deadline() (deadline time.Time, ok bool) {
+ return c.deadline, true
+}
+
+func (c *timerCtx) String() string {
+ return fmt.Sprintf("%v.WithDeadline(%s [%s])", c.cancelCtx.Context, c.deadline, c.deadline.Sub(time.Now()))
+}
+
+func (c *timerCtx) cancel(removeFromParent bool, err error) {
+ c.cancelCtx.cancel(false, err)
+ if removeFromParent {
+ // Remove this timerCtx from its parent cancelCtx's children.
+ removeChild(c.cancelCtx.Context, c)
+ }
+ c.mu.Lock()
+ if c.timer != nil {
+ c.timer.Stop()
+ c.timer = nil
+ }
+ c.mu.Unlock()
+}
+
+// WithTimeout returns WithDeadline(parent, time.Now().Add(timeout)).
+//
+// Canceling this context releases resources associated with it, so code should
+// call cancel as soon as the operations running in this Context complete:
+//
+// func slowOperationWithTimeout(ctx context.Context) (Result, error) {
+// ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond)
+// defer cancel() // releases resources if slowOperation completes before timeout elapses
+// return slowOperation(ctx)
+// }
+func WithTimeout(parent Context, timeout time.Duration) (Context, CancelFunc) {
+ return WithDeadline(parent, time.Now().Add(timeout))
+}
+
+// WithValue returns a copy of parent in which the value associated with key is
+// val.
+//
+// Use context Values only for request-scoped data that transits processes and
+// APIs, not for passing optional parameters to functions.
+func WithValue(parent Context, key interface{}, val interface{}) Context {
+ return &valueCtx{parent, key, val}
+}
+
+// A valueCtx carries a key-value pair. It implements Value for that key and
+// delegates all other calls to the embedded Context.
+type valueCtx struct {
+ Context
+ key, val interface{}
+}
+
+func (c *valueCtx) String() string {
+ return fmt.Sprintf("%v.WithValue(%#v, %#v)", c.Context, c.key, c.val)
+}
+
+func (c *valueCtx) Value(key interface{}) interface{} {
+ if c.key == key {
+ return c.val
+ }
+ return c.Context.Value(key)
+}
diff --git a/vendor/golang.org/x/net/context/pre_go19.go b/vendor/golang.org/x/net/context/pre_go19.go
new file mode 100644
index 0000000..b105f80
--- /dev/null
+++ b/vendor/golang.org/x/net/context/pre_go19.go
@@ -0,0 +1,109 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.9
+
+package context
+
+import "time"
+
+// A Context carries a deadline, a cancelation signal, and other values across
+// API boundaries.
+//
+// Context's methods may be called by multiple goroutines simultaneously.
+type Context interface {
+ // Deadline returns the time when work done on behalf of this context
+ // should be canceled. Deadline returns ok==false when no deadline is
+ // set. Successive calls to Deadline return the same results.
+ Deadline() (deadline time.Time, ok bool)
+
+ // Done returns a channel that's closed when work done on behalf of this
+ // context should be canceled. Done may return nil if this context can
+ // never be canceled. Successive calls to Done return the same value.
+ //
+ // WithCancel arranges for Done to be closed when cancel is called;
+ // WithDeadline arranges for Done to be closed when the deadline
+ // expires; WithTimeout arranges for Done to be closed when the timeout
+ // elapses.
+ //
+ // Done is provided for use in select statements:
+ //
+ // // Stream generates values with DoSomething and sends them to out
+ // // until DoSomething returns an error or ctx.Done is closed.
+ // func Stream(ctx context.Context, out chan<- Value) error {
+ // for {
+ // v, err := DoSomething(ctx)
+ // if err != nil {
+ // return err
+ // }
+ // select {
+ // case <-ctx.Done():
+ // return ctx.Err()
+ // case out <- v:
+ // }
+ // }
+ // }
+ //
+ // See http://blog.golang.org/pipelines for more examples of how to use
+ // a Done channel for cancelation.
+ Done() <-chan struct{}
+
+ // Err returns a non-nil error value after Done is closed. Err returns
+ // Canceled if the context was canceled or DeadlineExceeded if the
+ // context's deadline passed. No other values for Err are defined.
+ // After Done is closed, successive calls to Err return the same value.
+ Err() error
+
+ // Value returns the value associated with this context for key, or nil
+ // if no value is associated with key. Successive calls to Value with
+ // the same key return the same result.
+ //
+ // Use context values only for request-scoped data that transits
+ // processes and API boundaries, not for passing optional parameters to
+ // functions.
+ //
+ // A key identifies a specific value in a Context. Functions that wish
+ // to store values in Context typically allocate a key in a global
+ // variable then use that key as the argument to context.WithValue and
+ // Context.Value. A key can be any type that supports equality;
+ // packages should define keys as an unexported type to avoid
+ // collisions.
+ //
+ // Packages that define a Context key should provide type-safe accessors
+ // for the values stored using that key:
+ //
+ // // Package user defines a User type that's stored in Contexts.
+ // package user
+ //
+ // import "golang.org/x/net/context"
+ //
+ // // User is the type of value stored in the Contexts.
+ // type User struct {...}
+ //
+ // // key is an unexported type for keys defined in this package.
+ // // This prevents collisions with keys defined in other packages.
+ // type key int
+ //
+ // // userKey is the key for user.User values in Contexts. It is
+ // // unexported; clients use user.NewContext and user.FromContext
+ // // instead of using this key directly.
+ // var userKey key = 0
+ //
+ // // NewContext returns a new Context that carries value u.
+ // func NewContext(ctx context.Context, u *User) context.Context {
+ // return context.WithValue(ctx, userKey, u)
+ // }
+ //
+ // // FromContext returns the User value stored in ctx, if any.
+ // func FromContext(ctx context.Context) (*User, bool) {
+ // u, ok := ctx.Value(userKey).(*User)
+ // return u, ok
+ // }
+ Value(key interface{}) interface{}
+}
+
+// A CancelFunc tells an operation to abandon its work.
+// A CancelFunc does not wait for the work to stop.
+// After the first call, subsequent calls to a CancelFunc do nothing.
+type CancelFunc func()
diff --git a/vendor/golang.org/x/oauth2/.travis.yml b/vendor/golang.org/x/oauth2/.travis.yml
new file mode 100644
index 0000000..fa139db
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/.travis.yml
@@ -0,0 +1,13 @@
+language: go
+
+go:
+ - tip
+
+install:
+ - export GOPATH="$HOME/gopath"
+ - mkdir -p "$GOPATH/src/golang.org/x"
+ - mv "$TRAVIS_BUILD_DIR" "$GOPATH/src/golang.org/x/oauth2"
+ - go get -v -t -d golang.org/x/oauth2/...
+
+script:
+ - go test -v golang.org/x/oauth2/...
diff --git a/vendor/golang.org/x/oauth2/AUTHORS b/vendor/golang.org/x/oauth2/AUTHORS
new file mode 100644
index 0000000..15167cd
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/oauth2/CONTRIBUTING.md b/vendor/golang.org/x/oauth2/CONTRIBUTING.md
new file mode 100644
index 0000000..dfbed62
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/CONTRIBUTING.md
@@ -0,0 +1,26 @@
+# Contributing to Go
+
+Go is an open source project.
+
+It is the work of hundreds of contributors. We appreciate your help!
+
+## Filing issues
+
+When [filing an issue](https://github.com/golang/oauth2/issues), make sure to answer these five questions:
+
+1. What version of Go are you using (`go version`)?
+2. What operating system and processor architecture are you using?
+3. What did you do?
+4. What did you expect to see?
+5. What did you see instead?
+
+General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker.
+The gophers there will answer or ask you to file an issue if you've tripped over a bug.
+
+## Contributing code
+
+Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html)
+before sending patches.
+
+Unless otherwise noted, the Go source files are distributed under
+the BSD-style license found in the LICENSE file.
diff --git a/vendor/golang.org/x/oauth2/CONTRIBUTORS b/vendor/golang.org/x/oauth2/CONTRIBUTORS
new file mode 100644
index 0000000..1c4577e
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/oauth2/LICENSE b/vendor/golang.org/x/oauth2/LICENSE
new file mode 100644
index 0000000..6a66aea
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/oauth2/README.md b/vendor/golang.org/x/oauth2/README.md
new file mode 100644
index 0000000..0f443e6
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/README.md
@@ -0,0 +1,35 @@
+# OAuth2 for Go
+
+[![Build Status](https://travis-ci.org/golang/oauth2.svg?branch=master)](https://travis-ci.org/golang/oauth2)
+[![GoDoc](https://godoc.org/golang.org/x/oauth2?status.svg)](https://godoc.org/golang.org/x/oauth2)
+
+Package oauth2 contains a client implementation for the OAuth 2.0 spec.
+
+## Installation
+
+~~~~
+go get golang.org/x/oauth2
+~~~~
+
+Or you can manually git clone the repository to
+`$(go env GOPATH)/src/golang.org/x/oauth2`.
+
+See godoc for further documentation and examples.
+
+* [godoc.org/golang.org/x/oauth2](http://godoc.org/golang.org/x/oauth2)
+* [godoc.org/golang.org/x/oauth2/google](http://godoc.org/golang.org/x/oauth2/google)
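+
+A minimal sketch of the usual three-legged flow (the endpoint and redirect
+URLs below are placeholders, not real provider endpoints):
+
+~~~~
+conf := &oauth2.Config{
+	ClientID:     "CLIENT_ID",
+	ClientSecret: "CLIENT_SECRET",
+	RedirectURL:  "https://example.com/callback",
+	Scopes:       []string{"scope1"},
+	Endpoint: oauth2.Endpoint{
+		AuthURL:  "https://provider.example.com/oauth2/auth",
+		TokenURL: "https://provider.example.com/oauth2/token",
+	},
+}
+
+// Redirect the user to conf.AuthCodeURL(state); then, in the callback,
+// exchange the returned authorization code for a token:
+tok, err := conf.Exchange(ctx, code)
+if err != nil {
+	log.Fatal(err)
+}
+client := conf.Client(ctx, tok) // *http.Client that refreshes tok as needed
+~~~~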
+
+## Policy for new packages
+
+We no longer accept new provider-specific packages in this repo. For
+defining provider endpoints and provider-specific OAuth2 behavior, we
+encourage you to create packages elsewhere. We'll keep the existing
+packages for compatibility.
+
+## Report Issues / Send Patches
+
+This repository uses Gerrit for code changes. To learn how to submit changes to
+this repository, see https://golang.org/doc/contribute.html.
+
+The main issue tracker for the oauth2 repository is located at
+https://github.com/golang/oauth2/issues.
diff --git a/vendor/golang.org/x/oauth2/go.mod b/vendor/golang.org/x/oauth2/go.mod
new file mode 100644
index 0000000..b345781
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/go.mod
@@ -0,0 +1,10 @@
+module golang.org/x/oauth2
+
+go 1.11
+
+require (
+ cloud.google.com/go v0.34.0
+ golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e
+ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 // indirect
+ google.golang.org/appengine v1.4.0
+)
diff --git a/vendor/golang.org/x/oauth2/go.sum b/vendor/golang.org/x/oauth2/go.sum
new file mode 100644
index 0000000..6f0079b
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/go.sum
@@ -0,0 +1,12 @@
+cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg=
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e h1:bRhVy7zSSasaqNksaRZiA5EEI+Ei4I1nO5Jh72wfHlg=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
diff --git a/vendor/golang.org/x/oauth2/internal/client_appengine.go b/vendor/golang.org/x/oauth2/internal/client_appengine.go
new file mode 100644
index 0000000..7434871
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/internal/client_appengine.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package internal
+
+import "google.golang.org/appengine/urlfetch"
+
+func init() {
+ appengineClientHook = urlfetch.Client
+}
diff --git a/vendor/golang.org/x/oauth2/internal/doc.go b/vendor/golang.org/x/oauth2/internal/doc.go
new file mode 100644
index 0000000..03265e8
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/internal/doc.go
@@ -0,0 +1,6 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package internal contains support packages for oauth2 package.
+package internal
diff --git a/vendor/golang.org/x/oauth2/internal/oauth2.go b/vendor/golang.org/x/oauth2/internal/oauth2.go
new file mode 100644
index 0000000..c0ab196
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/internal/oauth2.go
@@ -0,0 +1,37 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/pem"
+ "errors"
+ "fmt"
+)
+
+// ParseKey converts the binary contents of a private key file
+// to an *rsa.PrivateKey. It detects whether the private key is in a
+// PEM container or not. If so, it extracts the private key
+// from the PEM container before conversion. It only supports PEM
+// containers with no passphrase.
+func ParseKey(key []byte) (*rsa.PrivateKey, error) {
+ block, _ := pem.Decode(key)
+ if block != nil {
+ key = block.Bytes
+ }
+ parsedKey, err := x509.ParsePKCS8PrivateKey(key)
+ if err != nil {
+ parsedKey, err = x509.ParsePKCS1PrivateKey(key)
+ if err != nil {
+ return nil, fmt.Errorf("private key should be a PEM or plain PKCS1 or PKCS8; parse error: %v", err)
+ }
+ }
+ parsed, ok := parsedKey.(*rsa.PrivateKey)
+ if !ok {
+ return nil, errors.New("private key is invalid")
+ }
+ return parsed, nil
+}
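+
+// A minimal sketch of how ParseKey is typically fed (the file name is a
+// placeholder; the key must be an unencrypted PKCS#1 or PKCS#8 key,
+// optionally wrapped in PEM):
+//
+//	data, err := ioutil.ReadFile("service-account-key.pem")
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	priv, err := ParseKey(data)
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	_ = priv // *rsa.PrivateKey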
diff --git a/vendor/golang.org/x/oauth2/internal/token.go b/vendor/golang.org/x/oauth2/internal/token.go
new file mode 100644
index 0000000..355c386
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/internal/token.go
@@ -0,0 +1,294 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "math"
+ "mime"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/net/context/ctxhttp"
+)
+
+// Token represents the credentials used to authorize
+// the requests to access protected resources on the OAuth 2.0
+// provider's backend.
+//
+// This type is a mirror of oauth2.Token and exists to break
+// an otherwise-circular dependency. Other internal packages
+// should convert this Token into an oauth2.Token before use.
+type Token struct {
+ // AccessToken is the token that authorizes and authenticates
+ // the requests.
+ AccessToken string
+
+ // TokenType is the type of token.
+ // The Type method returns either this or "Bearer", the default.
+ TokenType string
+
+ // RefreshToken is a token that's used by the application
+ // (as opposed to the user) to refresh the access token
+ // if it expires.
+ RefreshToken string
+
+ // Expiry is the optional expiration time of the access token.
+ //
+ // If zero, TokenSource implementations will reuse the same
+ // token forever and RefreshToken or equivalent
+ // mechanisms for that TokenSource will not be used.
+ Expiry time.Time
+
+ // Raw optionally contains extra metadata from the server
+ // when updating a token.
+ Raw interface{}
+}
+
+// tokenJSON is the struct representing the HTTP response from OAuth2
+// providers returning a token in JSON form.
+type tokenJSON struct {
+ AccessToken string `json:"access_token"`
+ TokenType string `json:"token_type"`
+ RefreshToken string `json:"refresh_token"`
+ ExpiresIn expirationTime `json:"expires_in"` // at least PayPal returns string, while most return number
+}
+
+func (e *tokenJSON) expiry() (t time.Time) {
+ if v := e.ExpiresIn; v != 0 {
+ return time.Now().Add(time.Duration(v) * time.Second)
+ }
+ return
+}
+
+type expirationTime int32
+
+func (e *expirationTime) UnmarshalJSON(b []byte) error {
+ if len(b) == 0 || string(b) == "null" {
+ return nil
+ }
+ var n json.Number
+ err := json.Unmarshal(b, &n)
+ if err != nil {
+ return err
+ }
+ i, err := n.Int64()
+ if err != nil {
+ return err
+ }
+ if i > math.MaxInt32 {
+ i = math.MaxInt32
+ }
+ *e = expirationTime(i)
+ return nil
+}
+
+// RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op.
+//
+// Deprecated: this function no longer does anything. Caller code that
+// wants to avoid potential extra HTTP requests made during
+// auto-probing of the provider's auth style should set
+// Endpoint.AuthStyle.
+func RegisterBrokenAuthHeaderProvider(tokenURL string) {}
+
+// AuthStyle is a copy of the golang.org/x/oauth2 package's AuthStyle type.
+type AuthStyle int
+
+const (
+ AuthStyleUnknown AuthStyle = 0
+ AuthStyleInParams AuthStyle = 1
+ AuthStyleInHeader AuthStyle = 2
+)
+
+// authStyleCache is the set of tokenURLs we've successfully used via
+// RetrieveToken and which style auth we ended up using.
+// It's called a cache, but it doesn't (yet?) shrink. It's expected that
+// the set of OAuth2 servers a program contacts over time is fixed and
+// small.
+var authStyleCache struct {
+ sync.Mutex
+ m map[string]AuthStyle // keyed by tokenURL
+}
+
+// ResetAuthCache resets the global authentication style cache used
+// for AuthStyleUnknown token requests.
+func ResetAuthCache() {
+ authStyleCache.Lock()
+ defer authStyleCache.Unlock()
+ authStyleCache.m = nil
+}
+
+// lookupAuthStyle reports which auth style we last used with tokenURL
+// when calling RetrieveToken and whether we have ever done so.
+func lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) {
+ authStyleCache.Lock()
+ defer authStyleCache.Unlock()
+ style, ok = authStyleCache.m[tokenURL]
+ return
+}
+
+// setAuthStyle adds an entry to authStyleCache, documented above.
+func setAuthStyle(tokenURL string, v AuthStyle) {
+ authStyleCache.Lock()
+ defer authStyleCache.Unlock()
+ if authStyleCache.m == nil {
+ authStyleCache.m = make(map[string]AuthStyle)
+ }
+ authStyleCache.m[tokenURL] = v
+}
+
+// newTokenRequest returns a new *http.Request to retrieve a new token
+// from tokenURL using the provided clientID, clientSecret, and POST
+// body parameters.
+//
+// authStyle specifies how the clientID and clientSecret are sent:
+// AuthStyleInParams encodes them in the POST body (along with any values
+// in v), while AuthStyleInHeader sends them in the Authorization header.
+func newTokenRequest(tokenURL, clientID, clientSecret string, v url.Values, authStyle AuthStyle) (*http.Request, error) {
+ if authStyle == AuthStyleInParams {
+ v = cloneURLValues(v)
+ if clientID != "" {
+ v.Set("client_id", clientID)
+ }
+ if clientSecret != "" {
+ v.Set("client_secret", clientSecret)
+ }
+ }
+ req, err := http.NewRequest("POST", tokenURL, strings.NewReader(v.Encode()))
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
+ if authStyle == AuthStyleInHeader {
+ req.SetBasicAuth(url.QueryEscape(clientID), url.QueryEscape(clientSecret))
+ }
+ return req, nil
+}
+
+func cloneURLValues(v url.Values) url.Values {
+ v2 := make(url.Values, len(v))
+ for k, vv := range v {
+ v2[k] = append([]string(nil), vv...)
+ }
+ return v2
+}
+
+func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle) (*Token, error) {
+ needsAuthStyleProbe := authStyle == 0
+ if needsAuthStyleProbe {
+ if style, ok := lookupAuthStyle(tokenURL); ok {
+ authStyle = style
+ needsAuthStyleProbe = false
+ } else {
+ authStyle = AuthStyleInHeader // the first way we'll try
+ }
+ }
+ req, err := newTokenRequest(tokenURL, clientID, clientSecret, v, authStyle)
+ if err != nil {
+ return nil, err
+ }
+ token, err := doTokenRoundTrip(ctx, req)
+ if err != nil && needsAuthStyleProbe {
+ // If we get an error, assume the server wants the
+ // clientID & clientSecret in a different form.
+ // See https://code.google.com/p/goauth2/issues/detail?id=31 for background.
+ // In summary:
+ // - Reddit only accepts client secret in the Authorization header
+ // - Dropbox accepts it either in the URL param or the Auth header, but not both.
+ // - Google only accepts URL param (not spec compliant?), not Auth header
+ // - Stripe only accepts client secret in Auth header with Bearer method, not Basic
+ //
+ // We used to maintain a big table in this code of all the sites and which way
+ // they went, but maintaining it didn't scale & got annoying.
+ // So just try both ways.
+ authStyle = AuthStyleInParams // the second way we'll try
+ req, _ = newTokenRequest(tokenURL, clientID, clientSecret, v, authStyle)
+ token, err = doTokenRoundTrip(ctx, req)
+ }
+ if needsAuthStyleProbe && err == nil {
+ setAuthStyle(tokenURL, authStyle)
+ }
+ // Don't overwrite `RefreshToken` with an empty value
+ // if this was a token refreshing request.
+ if token != nil && token.RefreshToken == "" {
+ token.RefreshToken = v.Get("refresh_token")
+ }
+ return token, err
+}
+
+func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) {
+ r, err := ctxhttp.Do(ctx, ContextClient(ctx), req)
+ if err != nil {
+ return nil, err
+ }
+ body, err := ioutil.ReadAll(io.LimitReader(r.Body, 1<<20))
+ r.Body.Close()
+ if err != nil {
+ return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err)
+ }
+ if code := r.StatusCode; code < 200 || code > 299 {
+ return nil, &RetrieveError{
+ Response: r,
+ Body: body,
+ }
+ }
+
+ var token *Token
+ content, _, _ := mime.ParseMediaType(r.Header.Get("Content-Type"))
+ switch content {
+ case "application/x-www-form-urlencoded", "text/plain":
+ vals, err := url.ParseQuery(string(body))
+ if err != nil {
+ return nil, err
+ }
+ token = &Token{
+ AccessToken: vals.Get("access_token"),
+ TokenType: vals.Get("token_type"),
+ RefreshToken: vals.Get("refresh_token"),
+ Raw: vals,
+ }
+ e := vals.Get("expires_in")
+ expires, _ := strconv.Atoi(e)
+ if expires != 0 {
+ token.Expiry = time.Now().Add(time.Duration(expires) * time.Second)
+ }
+ default:
+ var tj tokenJSON
+ if err = json.Unmarshal(body, &tj); err != nil {
+ return nil, err
+ }
+ token = &Token{
+ AccessToken: tj.AccessToken,
+ TokenType: tj.TokenType,
+ RefreshToken: tj.RefreshToken,
+ Expiry: tj.expiry(),
+ Raw: make(map[string]interface{}),
+ }
+ json.Unmarshal(body, &token.Raw) // no error checks for optional fields
+ }
+ if token.AccessToken == "" {
+ return nil, errors.New("oauth2: server response missing access_token")
+ }
+ return token, nil
+}
+
+type RetrieveError struct {
+ Response *http.Response
+ Body []byte
+}
+
+func (r *RetrieveError) Error() string {
+ return fmt.Sprintf("oauth2: cannot fetch token: %v\nResponse: %s", r.Response.Status, r.Body)
+}
diff --git a/vendor/golang.org/x/oauth2/internal/transport.go b/vendor/golang.org/x/oauth2/internal/transport.go
new file mode 100644
index 0000000..572074a
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/internal/transport.go
@@ -0,0 +1,33 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "context"
+ "net/http"
+)
+
+// HTTPClient is the context key to use with golang.org/x/net/context's
+// WithValue function to associate an *http.Client value with a context.
+var HTTPClient ContextKey
+
+// ContextKey is just an empty struct. It exists so HTTPClient can be
+// an immutable public variable with a unique type. It's immutable
+// because nobody else can create a ContextKey, being unexported.
+type ContextKey struct{}
+
+var appengineClientHook func(context.Context) *http.Client
+
+func ContextClient(ctx context.Context) *http.Client {
+ if ctx != nil {
+ if hc, ok := ctx.Value(HTTPClient).(*http.Client); ok {
+ return hc
+ }
+ }
+ if appengineClientHook != nil {
+ return appengineClientHook(ctx)
+ }
+ return http.DefaultClient
+}
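+
+// A minimal sketch: callers outside this package select the client returned
+// by ContextClient by storing an *http.Client under the exported
+// oauth2.HTTPClient key (which has this ContextKey type) before passing the
+// context in.
+//
+//	hc := &http.Client{Timeout: 10 * time.Second}
+//	ctx := context.WithValue(context.Background(), oauth2.HTTPClient, hc)
+//	// Token requests made with ctx now go through hc.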
diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go
new file mode 100644
index 0000000..428283f
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/oauth2.go
@@ -0,0 +1,381 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package oauth2 provides support for making
+// OAuth2 authorized and authenticated HTTP requests,
+// as specified in RFC 6749.
+// It can additionally grant authorization with Bearer JWT.
+package oauth2 // import "golang.org/x/oauth2"
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+
+ "golang.org/x/oauth2/internal"
+)
+
+// NoContext is the default context you should supply if not using
+// your own context.Context (see https://golang.org/x/net/context).
+//
+// Deprecated: Use context.Background() or context.TODO() instead.
+var NoContext = context.TODO()
+
+// RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op.
+//
+// Deprecated: this function no longer does anything. Caller code that
+// wants to avoid potential extra HTTP requests made during
+// auto-probing of the provider's auth style should set
+// Endpoint.AuthStyle.
+func RegisterBrokenAuthHeaderProvider(tokenURL string) {}
+
+// Config describes a typical 3-legged OAuth2 flow, with both the
+// client application information and the server's endpoint URLs.
+// For the client credentials 2-legged OAuth2 flow, see the clientcredentials
+// package (https://golang.org/x/oauth2/clientcredentials).
+type Config struct {
+ // ClientID is the application's ID.
+ ClientID string
+
+ // ClientSecret is the application's secret.
+ ClientSecret string
+
+ // Endpoint contains the resource server's token endpoint
+ // URLs. These are constants specific to each server and are
+ // often available via site-specific packages, such as
+ // google.Endpoint or github.Endpoint.
+ Endpoint Endpoint
+
+ // RedirectURL is the URL to redirect users to after they
+ // complete the OAuth flow on the provider's site.
+ RedirectURL string
+
+ // Scopes specifies optional requested permissions.
+ Scopes []string
+}
+
+// A TokenSource is anything that can return a token.
+type TokenSource interface {
+ // Token returns a token or an error.
+ // Token must be safe for concurrent use by multiple goroutines.
+ // The returned Token must not be modified.
+ Token() (*Token, error)
+}
+
+// Endpoint represents an OAuth 2.0 provider's authorization and token
+// endpoint URLs.
+type Endpoint struct {
+ AuthURL string
+ TokenURL string
+
+ // AuthStyle optionally specifies how the endpoint wants the
+ // client ID & client secret sent. The zero value means to
+ // auto-detect.
+ AuthStyle AuthStyle
+}
+
+// AuthStyle represents how requests for tokens are authenticated
+// to the server.
+type AuthStyle int
+
+const (
+ // AuthStyleAutoDetect means to auto-detect which authentication
+ // style the provider wants by trying both ways and caching
+ // the successful way for the future.
+ AuthStyleAutoDetect AuthStyle = 0
+
+ // AuthStyleInParams sends the "client_id" and "client_secret"
+ // in the POST body as application/x-www-form-urlencoded parameters.
+ AuthStyleInParams AuthStyle = 1
+
+ // AuthStyleInHeader sends the client_id and client_password
+ // using HTTP Basic Authorization. This is an optional style
+ // described in the OAuth2 RFC 6749 section 2.3.1.
+ AuthStyleInHeader AuthStyle = 2
+)
+
+var (
+ // AccessTypeOnline and AccessTypeOffline are options passed
+ // to the Options.AuthCodeURL method. They modify the
+ // "access_type" field that gets sent in the URL returned by
+ // AuthCodeURL.
+ //
+ // Online is the default if neither is specified. If your
+ // application needs to refresh access tokens when the user
+ // is not present at the browser, then use offline. This will
+ // result in your application obtaining a refresh token the
+ // first time your application exchanges an authorization
+ // code for a user.
+ AccessTypeOnline AuthCodeOption = SetAuthURLParam("access_type", "online")
+ AccessTypeOffline AuthCodeOption = SetAuthURLParam("access_type", "offline")
+
+ // ApprovalForce forces the users to view the consent dialog
+ // and confirm the permissions request at the URL returned
+ // from AuthCodeURL, even if they've already done so.
+ ApprovalForce AuthCodeOption = SetAuthURLParam("approval_prompt", "force")
+)
+
+// An AuthCodeOption is passed to Config.AuthCodeURL.
+type AuthCodeOption interface {
+ setValue(url.Values)
+}
+
+type setParam struct{ k, v string }
+
+func (p setParam) setValue(m url.Values) { m.Set(p.k, p.v) }
+
+// SetAuthURLParam builds an AuthCodeOption which passes key/value parameters
+// to a provider's authorization endpoint.
+func SetAuthURLParam(key, value string) AuthCodeOption {
+ return setParam{key, value}
+}
+
+// AuthCodeURL returns a URL to OAuth 2.0 provider's consent page
+// that asks for permissions for the required scopes explicitly.
+//
+// State is a token to protect the user from CSRF attacks. You must
+// always provide a non-empty string and validate that it matches the
+// state query parameter on your redirect callback.
+// See http://tools.ietf.org/html/rfc6749#section-10.12 for more info.
+//
+// Opts may include AccessTypeOnline or AccessTypeOffline, as well
+// as ApprovalForce.
+// It can also be used to pass the PKCE challenge.
+// See https://www.oauth.com/oauth2-servers/pkce/ for more info.
+func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string {
+ var buf bytes.Buffer
+ buf.WriteString(c.Endpoint.AuthURL)
+ v := url.Values{
+ "response_type": {"code"},
+ "client_id": {c.ClientID},
+ }
+ if c.RedirectURL != "" {
+ v.Set("redirect_uri", c.RedirectURL)
+ }
+ if len(c.Scopes) > 0 {
+ v.Set("scope", strings.Join(c.Scopes, " "))
+ }
+ if state != "" {
+ // TODO(light): Docs say never to omit state; don't allow empty.
+ v.Set("state", state)
+ }
+ for _, opt := range opts {
+ opt.setValue(v)
+ }
+ if strings.Contains(c.Endpoint.AuthURL, "?") {
+ buf.WriteByte('&')
+ } else {
+ buf.WriteByte('?')
+ }
+ buf.WriteString(v.Encode())
+ return buf.String()
+}
+
+// PasswordCredentialsToken converts a resource owner username and password
+// pair into a token.
+//
+// Per the RFC, this grant type should only be used "when there is a high
+// degree of trust between the resource owner and the client (e.g., the client
+// is part of the device operating system or a highly privileged application),
+// and when other authorization grant types are not available."
+// See https://tools.ietf.org/html/rfc6749#section-4.3 for more info.
+//
+// The provided context optionally controls which HTTP client is used. See the HTTPClient variable.
+func (c *Config) PasswordCredentialsToken(ctx context.Context, username, password string) (*Token, error) {
+ v := url.Values{
+ "grant_type": {"password"},
+ "username": {username},
+ "password": {password},
+ }
+ if len(c.Scopes) > 0 {
+ v.Set("scope", strings.Join(c.Scopes, " "))
+ }
+ return retrieveToken(ctx, c, v)
+}
+
+// Exchange converts an authorization code into a token.
+//
+// It is used after a resource provider redirects the user back
+// to the Redirect URI (the URL obtained from AuthCodeURL).
+//
+// The provided context optionally controls which HTTP client is used. See the HTTPClient variable.
+//
+// The code will be in the *http.Request.FormValue("code"). Before
+// calling Exchange, be sure to validate FormValue("state").
+//
+// Opts may include the PKCE verifier code if previously used in AuthCodeURL.
+// See https://www.oauth.com/oauth2-servers/pkce/ for more info.
+func (c *Config) Exchange(ctx context.Context, code string, opts ...AuthCodeOption) (*Token, error) {
+ v := url.Values{
+ "grant_type": {"authorization_code"},
+ "code": {code},
+ }
+ if c.RedirectURL != "" {
+ v.Set("redirect_uri", c.RedirectURL)
+ }
+ for _, opt := range opts {
+ opt.setValue(v)
+ }
+ return retrieveToken(ctx, c, v)
+}
+
+// Client returns an HTTP client using the provided token.
+// The token will auto-refresh as necessary. The underlying
+// HTTP transport will be obtained using the provided context.
+// The returned client and its Transport should not be modified.
+func (c *Config) Client(ctx context.Context, t *Token) *http.Client {
+ return NewClient(ctx, c.TokenSource(ctx, t))
+}
+
+// TokenSource returns a TokenSource that returns t until t expires,
+// automatically refreshing it as necessary using the provided context.
+//
+// Most users will use Config.Client instead.
+func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource {
+ tkr := &tokenRefresher{
+ ctx: ctx,
+ conf: c,
+ }
+ if t != nil {
+ tkr.refreshToken = t.RefreshToken
+ }
+ return &reuseTokenSource{
+ t: t,
+ new: tkr,
+ }
+}
+
+// tokenRefresher is a TokenSource that makes "grant_type"=="refresh_token"
+// HTTP requests to renew a token using a RefreshToken.
+type tokenRefresher struct {
+ ctx context.Context // used to get HTTP requests
+ conf *Config
+ refreshToken string
+}
+
+// WARNING: Token is not safe for concurrent access, as it
+// updates the tokenRefresher's refreshToken field.
+// Within this package, it is used by reuseTokenSource which
+// synchronizes calls to this method with its own mutex.
+func (tf *tokenRefresher) Token() (*Token, error) {
+ if tf.refreshToken == "" {
+ return nil, errors.New("oauth2: token expired and refresh token is not set")
+ }
+
+ tk, err := retrieveToken(tf.ctx, tf.conf, url.Values{
+ "grant_type": {"refresh_token"},
+ "refresh_token": {tf.refreshToken},
+ })
+
+ if err != nil {
+ return nil, err
+ }
+ if tf.refreshToken != tk.RefreshToken {
+ tf.refreshToken = tk.RefreshToken
+ }
+ return tk, err
+}
+
+// reuseTokenSource is a TokenSource that holds a single token in memory
+// and validates its expiry before each call to retrieve it with
+// Token. If it's expired, it will be auto-refreshed using the
+// new TokenSource.
+type reuseTokenSource struct {
+ new TokenSource // called when t is expired.
+
+ mu sync.Mutex // guards t
+ t *Token
+}
+
+// Token returns the current token if it's still valid, else will
+// refresh the current token using the new TokenSource and
+// return the refreshed one.
+func (s *reuseTokenSource) Token() (*Token, error) {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ if s.t.Valid() {
+ return s.t, nil
+ }
+ t, err := s.new.Token()
+ if err != nil {
+ return nil, err
+ }
+ s.t = t
+ return t, nil
+}
+
+// StaticTokenSource returns a TokenSource that always returns the same token.
+// Because the provided token t is never refreshed, StaticTokenSource is only
+// useful for tokens that never expire.
+func StaticTokenSource(t *Token) TokenSource {
+ return staticTokenSource{t}
+}
+
+// staticTokenSource is a TokenSource that always returns the same Token.
+type staticTokenSource struct {
+ t *Token
+}
+
+func (s staticTokenSource) Token() (*Token, error) {
+ return s.t, nil
+}
+
+// HTTPClient is the context key to use with golang.org/x/net/context's
+// WithValue function to associate an *http.Client value with a context.
+var HTTPClient internal.ContextKey
+
+// NewClient creates an *http.Client from a Context and TokenSource.
+// The returned client is not valid beyond the lifetime of the context.
+//
+// Note that if a custom *http.Client is provided via the Context it
+// is used only for token acquisition and is not used to configure the
+// *http.Client returned from NewClient.
+//
+// As a special case, if src is nil, a non-OAuth2 client is returned
+// using the provided context. This exists to support related OAuth2
+// packages.
+func NewClient(ctx context.Context, src TokenSource) *http.Client {
+ if src == nil {
+ return internal.ContextClient(ctx)
+ }
+ return &http.Client{
+ Transport: &Transport{
+ Base: internal.ContextClient(ctx).Transport,
+ Source: ReuseTokenSource(nil, src),
+ },
+ }
+}
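+
+// A minimal sketch: when a token was obtained out of band (for example a
+// long-lived personal access token), StaticTokenSource plus NewClient is
+// enough to authenticate requests. The URL is only a placeholder.
+//
+//	ts := StaticTokenSource(&Token{AccessToken: "SECRET_TOKEN"})
+//	client := NewClient(context.Background(), ts)
+//	resp, err := client.Get("https://api.example.com/resource")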
+
+// ReuseTokenSource returns a TokenSource which repeatedly returns the
+// same token as long as it's valid, starting with t.
+// When its cached token is invalid, a new token is obtained from src.
+//
+// ReuseTokenSource is typically used to reuse tokens from a cache
+// (such as a file on disk) between runs of a program, rather than
+// obtaining new tokens unnecessarily.
+//
+// The initial token t may be nil, in which case the TokenSource is
+// wrapped in a caching version if it isn't one already. This also
+// means it's always safe to wrap ReuseTokenSource around any other
+// TokenSource without adverse effects.
+func ReuseTokenSource(t *Token, src TokenSource) TokenSource {
+ // Don't wrap a reuseTokenSource in itself. That would work,
+ // but cause an unnecessary number of mutex operations.
+ // Just build the equivalent one.
+ if rt, ok := src.(*reuseTokenSource); ok {
+ if t == nil {
+ // Just use it directly.
+ return rt
+ }
+ src = rt.new
+ }
+ return &reuseTokenSource{
+ t: t,
+ new: src,
+ }
+}
diff --git a/vendor/golang.org/x/oauth2/token.go b/vendor/golang.org/x/oauth2/token.go
new file mode 100644
index 0000000..8227203
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/token.go
@@ -0,0 +1,178 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package oauth2
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "golang.org/x/oauth2/internal"
+)
+
+// expiryDelta determines how much earlier a token should be considered
+// expired than its actual expiration time. It is used to avoid late
+// expirations due to client-server time mismatches.
+const expiryDelta = 10 * time.Second
+
+// Token represents the credentials used to authorize
+// the requests to access protected resources on the OAuth 2.0
+// provider's backend.
+//
+// Most users of this package should not access fields of Token
+// directly. They're exported mostly for use by related packages
+// implementing derivative OAuth2 flows.
+type Token struct {
+ // AccessToken is the token that authorizes and authenticates
+ // the requests.
+ AccessToken string `json:"access_token"`
+
+ // TokenType is the type of token.
+ // The Type method returns either this or "Bearer", the default.
+ TokenType string `json:"token_type,omitempty"`
+
+ // RefreshToken is a token that's used by the application
+ // (as opposed to the user) to refresh the access token
+ // if it expires.
+ RefreshToken string `json:"refresh_token,omitempty"`
+
+ // Expiry is the optional expiration time of the access token.
+ //
+ // If zero, TokenSource implementations will reuse the same
+ // token forever and RefreshToken or equivalent
+ // mechanisms for that TokenSource will not be used.
+ Expiry time.Time `json:"expiry,omitempty"`
+
+ // raw optionally contains extra metadata from the server
+ // when updating a token.
+ raw interface{}
+}
+
+// Type returns t.TokenType if non-empty, else "Bearer".
+func (t *Token) Type() string {
+ if strings.EqualFold(t.TokenType, "bearer") {
+ return "Bearer"
+ }
+ if strings.EqualFold(t.TokenType, "mac") {
+ return "MAC"
+ }
+ if strings.EqualFold(t.TokenType, "basic") {
+ return "Basic"
+ }
+ if t.TokenType != "" {
+ return t.TokenType
+ }
+ return "Bearer"
+}
+
+// SetAuthHeader sets the Authorization header to r using the access
+// token in t.
+//
+// This method is unnecessary when using Transport or an HTTP Client
+// returned by this package.
+func (t *Token) SetAuthHeader(r *http.Request) {
+ r.Header.Set("Authorization", t.Type()+" "+t.AccessToken)
+}
+
+// WithExtra returns a new Token that's a clone of t, but using the
+// provided raw extra map. This is only intended for use by packages
+// implementing derivative OAuth2 flows.
+func (t *Token) WithExtra(extra interface{}) *Token {
+ t2 := new(Token)
+ *t2 = *t
+ t2.raw = extra
+ return t2
+}
+
+// Extra returns an extra field.
+// Extra fields are key-value pairs returned by the server as a
+// part of the token retrieval response.
+func (t *Token) Extra(key string) interface{} {
+ if raw, ok := t.raw.(map[string]interface{}); ok {
+ return raw[key]
+ }
+
+ vals, ok := t.raw.(url.Values)
+ if !ok {
+ return nil
+ }
+
+ v := vals.Get(key)
+ switch s := strings.TrimSpace(v); strings.Count(s, ".") {
+ case 0: // Contains no "."; try to parse as int
+ if i, err := strconv.ParseInt(s, 10, 64); err == nil {
+ return i
+ }
+ case 1: // Contains a single "."; try to parse as float
+ if f, err := strconv.ParseFloat(s, 64); err == nil {
+ return f
+ }
+ }
+
+ return v
+}
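+
+// A minimal sketch of Extra: providers may include additional fields in the
+// token response; "id_token" is just an example key.
+//
+//	if idToken, ok := tok.Extra("id_token").(string); ok {
+//		_ = idToken // use the raw ID token
+//	}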
+
+// timeNow is time.Now but pulled out as a variable for tests.
+var timeNow = time.Now
+
+// expired reports whether the token is expired.
+// t must be non-nil.
+func (t *Token) expired() bool {
+ if t.Expiry.IsZero() {
+ return false
+ }
+ return t.Expiry.Round(0).Add(-expiryDelta).Before(timeNow())
+}
+
+// Valid reports whether t is non-nil, has an AccessToken, and is not expired.
+func (t *Token) Valid() bool {
+ return t != nil && t.AccessToken != "" && !t.expired()
+}
+
+// tokenFromInternal maps an *internal.Token struct into
+// a *Token struct.
+func tokenFromInternal(t *internal.Token) *Token {
+ if t == nil {
+ return nil
+ }
+ return &Token{
+ AccessToken: t.AccessToken,
+ TokenType: t.TokenType,
+ RefreshToken: t.RefreshToken,
+ Expiry: t.Expiry,
+ raw: t.Raw,
+ }
+}
+
+// retrieveToken takes a *Config and uses that to retrieve an *internal.Token.
+// This token is then mapped from *internal.Token into an *oauth2.Token, which
+// is returned along with an error.
+func retrieveToken(ctx context.Context, c *Config, v url.Values) (*Token, error) {
+ tk, err := internal.RetrieveToken(ctx, c.ClientID, c.ClientSecret, c.Endpoint.TokenURL, v, internal.AuthStyle(c.Endpoint.AuthStyle))
+ if err != nil {
+ if rErr, ok := err.(*internal.RetrieveError); ok {
+ return nil, (*RetrieveError)(rErr)
+ }
+ return nil, err
+ }
+ return tokenFromInternal(tk), nil
+}
+
+// RetrieveError is the error returned when the token endpoint returns a
+// non-2XX HTTP status code.
+type RetrieveError struct {
+ Response *http.Response
+ // Body is the body that was consumed by reading Response.Body.
+ // It may be truncated.
+ Body []byte
+}
+
+func (r *RetrieveError) Error() string {
+ return fmt.Sprintf("oauth2: cannot fetch token: %v\nResponse: %s", r.Response.Status, r.Body)
+}
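Editor's note: for orientation, a minimal sketch of how the Token helpers above are used from client code. The token value and URL are made up for illustration, and the token is hand-built rather than obtained from a real authorization flow (so Extra returns nil here).

package main

import (
	"fmt"
	"net/http"

	"golang.org/x/oauth2"
)

func main() {
	// Hypothetical token; in practice it comes from Config.Exchange or a TokenSource.
	tok := &oauth2.Token{AccessToken: "example-access-token", TokenType: "bearer"}

	// Type normalizes the casing, so this prints "Bearer".
	fmt.Println(tok.Type())

	// SetAuthHeader adds "Authorization: Bearer example-access-token".
	req, _ := http.NewRequest("GET", "https://api.example.com/resource", nil)
	tok.SetAuthHeader(req)
	fmt.Println(req.Header.Get("Authorization"))

	// Extra exposes additional fields from the token response;
	// for a hand-built token like this it returns nil.
	if v := tok.Extra("scope"); v != nil {
		fmt.Println("scope:", v)
	}
}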
diff --git a/vendor/golang.org/x/oauth2/transport.go b/vendor/golang.org/x/oauth2/transport.go
new file mode 100644
index 0000000..aa0d34f
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/transport.go
@@ -0,0 +1,144 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package oauth2
+
+import (
+ "errors"
+ "io"
+ "net/http"
+ "sync"
+)
+
+// Transport is an http.RoundTripper that makes OAuth 2.0 HTTP requests,
+// wrapping a base RoundTripper and adding an Authorization header
+// with a token from the supplied Sources.
+//
+// Transport is a low-level mechanism. Most code will use the
+// higher-level Config.Client method instead.
+type Transport struct {
+ // Source supplies the token to add to outgoing requests'
+ // Authorization headers.
+ Source TokenSource
+
+ // Base is the base RoundTripper used to make HTTP requests.
+ // If nil, http.DefaultTransport is used.
+ Base http.RoundTripper
+
+ mu sync.Mutex // guards modReq
+ modReq map[*http.Request]*http.Request // original -> modified
+}
+
+// RoundTrip authorizes and authenticates the request with an
+// access token from Transport's Source.
+func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) {
+ reqBodyClosed := false
+ if req.Body != nil {
+ defer func() {
+ if !reqBodyClosed {
+ req.Body.Close()
+ }
+ }()
+ }
+
+ if t.Source == nil {
+ return nil, errors.New("oauth2: Transport's Source is nil")
+ }
+ token, err := t.Source.Token()
+ if err != nil {
+ return nil, err
+ }
+
+ req2 := cloneRequest(req) // per RoundTripper contract
+ token.SetAuthHeader(req2)
+ t.setModReq(req, req2)
+ res, err := t.base().RoundTrip(req2)
+
+ // req.Body is assumed to have been closed by the base RoundTripper.
+ reqBodyClosed = true
+
+ if err != nil {
+ t.setModReq(req, nil)
+ return nil, err
+ }
+ res.Body = &onEOFReader{
+ rc: res.Body,
+ fn: func() { t.setModReq(req, nil) },
+ }
+ return res, nil
+}
+
+// CancelRequest cancels an in-flight request by closing its connection.
+func (t *Transport) CancelRequest(req *http.Request) {
+ type canceler interface {
+ CancelRequest(*http.Request)
+ }
+ if cr, ok := t.base().(canceler); ok {
+ t.mu.Lock()
+ modReq := t.modReq[req]
+ delete(t.modReq, req)
+ t.mu.Unlock()
+ cr.CancelRequest(modReq)
+ }
+}
+
+func (t *Transport) base() http.RoundTripper {
+ if t.Base != nil {
+ return t.Base
+ }
+ return http.DefaultTransport
+}
+
+func (t *Transport) setModReq(orig, mod *http.Request) {
+ t.mu.Lock()
+ defer t.mu.Unlock()
+ if t.modReq == nil {
+ t.modReq = make(map[*http.Request]*http.Request)
+ }
+ if mod == nil {
+ delete(t.modReq, orig)
+ } else {
+ t.modReq[orig] = mod
+ }
+}
+
+// cloneRequest returns a clone of the provided *http.Request.
+// The clone is a shallow copy of the struct and its Header map.
+func cloneRequest(r *http.Request) *http.Request {
+ // shallow copy of the struct
+ r2 := new(http.Request)
+ *r2 = *r
+ // deep copy of the Header
+ r2.Header = make(http.Header, len(r.Header))
+ for k, s := range r.Header {
+ r2.Header[k] = append([]string(nil), s...)
+ }
+ return r2
+}
+
+type onEOFReader struct {
+ rc io.ReadCloser
+ fn func()
+}
+
+func (r *onEOFReader) Read(p []byte) (n int, err error) {
+ n, err = r.rc.Read(p)
+ if err == io.EOF {
+ r.runFunc()
+ }
+ return
+}
+
+func (r *onEOFReader) Close() error {
+ err := r.rc.Close()
+ r.runFunc()
+ return err
+}
+
+func (r *onEOFReader) runFunc() {
+ if fn := r.fn; fn != nil {
+ fn()
+ r.fn = nil
+ }
+}
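Editor's note: the Transport above is normally wired into an *http.Client. A minimal sketch, assuming the package's StaticTokenSource helper (not shown in this diff) and a made-up access token:

package main

import (
	"net/http"

	"golang.org/x/oauth2"
)

// newAuthedClient returns an *http.Client whose requests carry the given token.
func newAuthedClient(accessToken string) *http.Client {
	// StaticTokenSource always returns the same token; any TokenSource works here.
	src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: accessToken})
	return &http.Client{
		Transport: &oauth2.Transport{
			Source: src,
			Base:   http.DefaultTransport, // nil would also default to http.DefaultTransport
		},
	}
}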
diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS
new file mode 100644
index 0000000..15167cd
--- /dev/null
+++ b/vendor/golang.org/x/tools/AUTHORS
@@ -0,0 +1,3 @@
+# This source code refers to The Go Authors for copyright purposes.
+# The master list of authors is in the main Go distribution,
+# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS
new file mode 100644
index 0000000..1c4577e
--- /dev/null
+++ b/vendor/golang.org/x/tools/CONTRIBUTORS
@@ -0,0 +1,3 @@
+# This source code was written by the Go contributors.
+# The master list of contributors is in the main Go distribution,
+# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE
new file mode 100644
index 0000000..6a66aea
--- /dev/null
+++ b/vendor/golang.org/x/tools/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS
new file mode 100644
index 0000000..7330990
--- /dev/null
+++ b/vendor/golang.org/x/tools/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/tools/cmd/getgo/LICENSE b/vendor/golang.org/x/tools/cmd/getgo/LICENSE
new file mode 100644
index 0000000..32017f8
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/getgo/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2017 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/tools/cmd/goimports/doc.go b/vendor/golang.org/x/tools/cmd/goimports/doc.go
new file mode 100644
index 0000000..7033e4d
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/doc.go
@@ -0,0 +1,43 @@
+/*
+
+Command goimports updates your Go import lines,
+adding missing ones and removing unreferenced ones.
+
+ $ go get golang.org/x/tools/cmd/goimports
+
+In addition to fixing imports, goimports also formats
+your code in the same style as gofmt so it can be used
+as a replacement for your editor's gofmt-on-save hook.
+
+For emacs, make sure you have the latest go-mode.el:
+ https://github.com/dominikh/go-mode.el
+Then in your .emacs file:
+ (setq gofmt-command "goimports")
+ (add-hook 'before-save-hook 'gofmt-before-save)
+
+For vim, set "gofmt_command" to "goimports":
+ https://golang.org/change/39c724dd7f252
+ https://golang.org/wiki/IDEsAndTextEditorPlugins
+ etc
+
+For GoSublime, follow the steps described here:
+ http://michaelwhatcott.com/gosublime-goimports/
+
+For other editors, you probably know what to do.
+
+To exclude directories in your $GOPATH from being scanned for Go
+files, goimports respects a configuration file at
+$GOPATH/src/.goimportsignore which may contain blank lines, comment
+lines (beginning with '#'), or lines naming a directory relative to
+the configuration file to ignore when scanning. No globbing or regex
+patterns are allowed. Use the "-v" verbose flag to verify it's
+working and see what goimports is doing.
+
+File bugs or feature requests at:
+
+ https://golang.org/issues/new?title=x/tools/cmd/goimports:+
+
+Happy hacking!
+
+*/
+package main // import "golang.org/x/tools/cmd/goimports"
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports.go b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
new file mode 100644
index 0000000..0ce85c9
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
@@ -0,0 +1,369 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "go/scanner"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "runtime/pprof"
+ "strings"
+
+ "golang.org/x/tools/imports"
+)
+
+var (
+ // main operation modes
+ list = flag.Bool("l", false, "list files whose formatting differs from goimports'")
+ write = flag.Bool("w", false, "write result to (source) file instead of stdout")
+ doDiff = flag.Bool("d", false, "display diffs instead of rewriting files")
+ srcdir = flag.String("srcdir", "", "choose imports as if source code is from `dir`. When operating on a single file, dir may instead be the complete file name.")
+ verbose bool // verbose logging
+
+ cpuProfile = flag.String("cpuprofile", "", "CPU profile output")
+ memProfile = flag.String("memprofile", "", "memory profile output")
+ memProfileRate = flag.Int("memrate", 0, "if > 0, sets runtime.MemProfileRate")
+
+ options = &imports.Options{
+ TabWidth: 8,
+ TabIndent: true,
+ Comments: true,
+ Fragment: true,
+ }
+ exitCode = 0
+)
+
+func init() {
+ flag.BoolVar(&options.AllErrors, "e", false, "report all errors (not just the first 10 on different lines)")
+ flag.StringVar(&imports.LocalPrefix, "local", "", "put imports beginning with this string after 3rd-party packages; comma-separated list")
+}
+
+func report(err error) {
+ scanner.PrintError(os.Stderr, err)
+ exitCode = 2
+}
+
+func usage() {
+ fmt.Fprintf(os.Stderr, "usage: goimports [flags] [path ...]\n")
+ flag.PrintDefaults()
+ os.Exit(2)
+}
+
+func isGoFile(f os.FileInfo) bool {
+ // ignore non-Go files
+ name := f.Name()
+ return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
+}
+
+// argumentType is which mode goimports was invoked as.
+type argumentType int
+
+const (
+ // fromStdin means the user is piping their source into goimports.
+ fromStdin argumentType = iota
+
+ // singleArg is the common case from editors, when goimports is run on
+ // a single file.
+ singleArg
+
+ // multipleArg is when the user ran "goimports file1.go file2.go"
+ // or ran goimports on a directory tree.
+ multipleArg
+)
+
+func processFile(filename string, in io.Reader, out io.Writer, argType argumentType) error {
+ opt := options
+ if argType == fromStdin {
+ nopt := *options
+ nopt.Fragment = true
+ opt = &nopt
+ }
+
+ if in == nil {
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ in = f
+ }
+
+ src, err := ioutil.ReadAll(in)
+ if err != nil {
+ return err
+ }
+
+ target := filename
+ if *srcdir != "" {
+ // Determine whether the provided -srcdir is a directory or file
+ // and then use it to override the target.
+ //
+ // See https://github.com/dominikh/go-mode.el/issues/146
+ if isFile(*srcdir) {
+ if argType == multipleArg {
+ return errors.New("-srcdir value can't be a file when passing multiple arguments or when walking directories")
+ }
+ target = *srcdir
+ } else if argType == singleArg && strings.HasSuffix(*srcdir, ".go") && !isDir(*srcdir) {
+ // For a file which doesn't exist on disk yet, but might shortly.
+ // e.g. user in editor opens $DIR/newfile.go and newfile.go doesn't yet exist on disk.
+ // The goimports on-save hook writes the buffer to a temp file
+ // first and runs goimports before the actual save to newfile.go.
+ // The editor's buffer is named "newfile.go" so that is passed to goimports as:
+ // goimports -srcdir=/gopath/src/pkg/newfile.go /tmp/gofmtXXXXXXXX.go
+ // and then the editor reloads the result from the tmp file and writes
+ // it to newfile.go.
+ target = *srcdir
+ } else {
+ // Pretend that file is from *srcdir in order to decide
+ // visible imports correctly.
+ target = filepath.Join(*srcdir, filepath.Base(filename))
+ }
+ }
+
+ res, err := imports.Process(target, src, opt)
+ if err != nil {
+ return err
+ }
+
+ if !bytes.Equal(src, res) {
+ // formatting has changed
+ if *list {
+ fmt.Fprintln(out, filename)
+ }
+ if *write {
+ if argType == fromStdin {
+ // filename is ""
+ return errors.New("can't use -w on stdin")
+ }
+ err = ioutil.WriteFile(filename, res, 0)
+ if err != nil {
+ return err
+ }
+ }
+ if *doDiff {
+ if argType == fromStdin {
+ filename = "stdin.go" // because .orig looks silly
+ }
+ data, err := diff(src, res, filename)
+ if err != nil {
+ return fmt.Errorf("computing diff: %s", err)
+ }
+ fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename))
+ out.Write(data)
+ }
+ }
+
+ if !*list && !*write && !*doDiff {
+ _, err = out.Write(res)
+ }
+
+ return err
+}
+
+func visitFile(path string, f os.FileInfo, err error) error {
+ if err == nil && isGoFile(f) {
+ err = processFile(path, nil, os.Stdout, multipleArg)
+ }
+ if err != nil {
+ report(err)
+ }
+ return nil
+}
+
+func walkDir(path string) {
+ filepath.Walk(path, visitFile)
+}
+
+func main() {
+ runtime.GOMAXPROCS(runtime.NumCPU())
+
+ // call gofmtMain in a separate function
+ // so that it can use defers and have them
+ // run before the exit.
+ gofmtMain()
+ os.Exit(exitCode)
+}
+
+// parseFlags parses command line flags and returns the paths to process.
+// It's a var so that custom implementations can replace it in other files.
+var parseFlags = func() []string {
+ flag.BoolVar(&verbose, "v", false, "verbose logging")
+
+ flag.Parse()
+ return flag.Args()
+}
+
+func bufferedFileWriter(dest string) (w io.Writer, close func()) {
+ f, err := os.Create(dest)
+ if err != nil {
+ log.Fatal(err)
+ }
+ bw := bufio.NewWriter(f)
+ return bw, func() {
+ if err := bw.Flush(); err != nil {
+ log.Fatalf("error flushing %v: %v", dest, err)
+ }
+ if err := f.Close(); err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+
+func gofmtMain() {
+ flag.Usage = usage
+ paths := parseFlags()
+
+ if *cpuProfile != "" {
+ bw, flush := bufferedFileWriter(*cpuProfile)
+ pprof.StartCPUProfile(bw)
+ defer flush()
+ defer pprof.StopCPUProfile()
+ }
+ // doTrace is a conditionally compiled wrapper around runtime/trace. It is
+ // used to allow goimports to compile under gccgo, which does not support
+ // runtime/trace. See https://golang.org/issue/15544.
+ defer doTrace()()
+ if *memProfileRate > 0 {
+ runtime.MemProfileRate = *memProfileRate
+ bw, flush := bufferedFileWriter(*memProfile)
+ defer func() {
+ runtime.GC() // materialize all statistics
+ if err := pprof.WriteHeapProfile(bw); err != nil {
+ log.Fatal(err)
+ }
+ flush()
+ }()
+ }
+
+ if verbose {
+ log.SetFlags(log.LstdFlags | log.Lmicroseconds)
+ imports.Debug = true
+ }
+ if options.TabWidth < 0 {
+ fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth)
+ exitCode = 2
+ return
+ }
+
+ if len(paths) == 0 {
+ if err := processFile("", os.Stdin, os.Stdout, fromStdin); err != nil {
+ report(err)
+ }
+ return
+ }
+
+ argType := singleArg
+ if len(paths) > 1 {
+ argType = multipleArg
+ }
+
+ for _, path := range paths {
+ switch dir, err := os.Stat(path); {
+ case err != nil:
+ report(err)
+ case dir.IsDir():
+ walkDir(path)
+ default:
+ if err := processFile(path, nil, os.Stdout, argType); err != nil {
+ report(err)
+ }
+ }
+ }
+}
+
+func writeTempFile(dir, prefix string, data []byte) (string, error) {
+ file, err := ioutil.TempFile(dir, prefix)
+ if err != nil {
+ return "", err
+ }
+ _, err = file.Write(data)
+ if err1 := file.Close(); err == nil {
+ err = err1
+ }
+ if err != nil {
+ os.Remove(file.Name())
+ return "", err
+ }
+ return file.Name(), nil
+}
+
+func diff(b1, b2 []byte, filename string) (data []byte, err error) {
+ f1, err := writeTempFile("", "gofmt", b1)
+ if err != nil {
+ return
+ }
+ defer os.Remove(f1)
+
+ f2, err := writeTempFile("", "gofmt", b2)
+ if err != nil {
+ return
+ }
+ defer os.Remove(f2)
+
+ cmd := "diff"
+ if runtime.GOOS == "plan9" {
+ cmd = "/bin/ape/diff"
+ }
+
+ data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput()
+ if len(data) > 0 {
+ // diff exits with a non-zero status when the files don't match.
+ // Ignore that failure as long as we get output.
+ return replaceTempFilename(data, filename)
+ }
+ return
+}
+
+// replaceTempFilename replaces the temporary filenames in diff output with the actual filename.
+//
+// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500
+// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500
+// ...
+// ->
+// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500
+// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500
+// ...
+func replaceTempFilename(diff []byte, filename string) ([]byte, error) {
+ bs := bytes.SplitN(diff, []byte{'\n'}, 3)
+ if len(bs) < 3 {
+ return nil, fmt.Errorf("got unexpected diff for %s", filename)
+ }
+ // Preserve timestamps.
+ var t0, t1 []byte
+ if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 {
+ t0 = bs[0][i:]
+ }
+ if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 {
+ t1 = bs[1][i:]
+ }
+ // Always print filepath with slash separator.
+ f := filepath.ToSlash(filename)
+ bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0))
+ bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1))
+ return bytes.Join(bs, []byte{'\n'}), nil
+}
+
+// isFile reports whether name is a file.
+func isFile(name string) bool {
+ fi, err := os.Stat(name)
+ return err == nil && fi.Mode().IsRegular()
+}
+
+// isDir reports whether name is a directory.
+func isDir(name string) bool {
+ fi, err := os.Stat(name)
+ return err == nil && fi.IsDir()
+}
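Editor's note: the core of processFile above is the call into golang.org/x/tools/imports. A minimal standalone sketch of that call, using a made-up source buffer and the same option values as this file:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/imports"
)

func main() {
	src := []byte("package demo\n\nfunc Hello() { fmt.Println(\"hi\") }\n")

	// Process resolves the missing "fmt" import and gofmt-formats the result.
	// The filename is only used to choose the surrounding package context.
	out, err := imports.Process("demo.go", src, &imports.Options{
		TabWidth:  8,
		TabIndent: true,
		Comments:  true,
		Fragment:  true,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", out)
}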
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go
new file mode 100644
index 0000000..21d867e
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go
@@ -0,0 +1,26 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build gc
+
+package main
+
+import (
+ "flag"
+ "runtime/trace"
+)
+
+var traceProfile = flag.String("trace", "", "trace profile output")
+
+func doTrace() func() {
+ if *traceProfile != "" {
+ bw, flush := bufferedFileWriter(*traceProfile)
+ trace.Start(bw)
+ return func() {
+ flush()
+ trace.Stop()
+ }
+ }
+ return func() {}
+}
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go
new file mode 100644
index 0000000..f5531ce
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go
@@ -0,0 +1,11 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !gc
+
+package main
+
+func doTrace() func() {
+ return func() {}
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 0000000..6b7052b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,627 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// Precondition: [start, end) both lie within the same file as root.
+// TODO(adonovan): return (nil, false) in this case and remove precond.
+// Requires FileSet; see loader.tokenFileContainsPos.
+//
+// Postcondition: path is never nil; it always contains at least 'root'.
+//
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ _, isToken := child.(tokenNode)
+ return isToken || visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// Such nodes are used transiently by PathEnclosingInterval but never escape
+// this package.
+//
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// It is not safe to call (e.g.) ast.Walk on such nodes.
+//
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("{")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")),
+ tok(n.Closing, len(")")))
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("{")),
+ tok(n.Rbrack, len("}")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+//
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
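Editor's note: a minimal sketch of calling PathEnclosingInterval and NodeDescription from client code, on a made-up source snippet. It locates the "+" token and prints the enclosing node path from the binary expression up to the source file.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = "package demo\n\nfunc add(x, y int) int { return x + y }\n"

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}

	// Convert the byte offset of "+" into token.Pos values for the interval.
	tf := fset.File(f.Pos())
	off := strings.Index(src, "+")
	start, end := tf.Pos(off), tf.Pos(off+1)

	path, exact := astutil.PathEnclosingInterval(f, start, end)
	fmt.Println("exact:", exact)
	for _, n := range path {
		fmt.Println(astutil.NodeDescription(n))
	}
}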
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 0000000..3e4b195
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,481 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+ return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+// AddNamedImport(fset, f, "pathpkg", "path")
+// adds
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+ if imports(f, name, path) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(path),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with path.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(path)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+ // We cannot use last import spec as best match for third party package
+ // because grouped imports are usually placed last by goimports -local
+ // flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, path)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import, preceded by a blank line, goes after the package declaration
+ // and after the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ // +2 for a blank line
+ impDecl.TokPos = c.End() + 2
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
+
+func isThirdParty(importPath string) bool {
+ // Third party package import path usually contains "." (".com", ".org", ...)
+ // This logic is taken from golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if importName(impspec) != name || importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
+ line := fset.Position(impspec.Path.ValuePos).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+ for _, s := range f.Imports {
+ if importName(s) == name && importPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+ if s.Name == nil {
+ return ""
+ }
+ return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return t
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the number of complete path segments in the longest prefix shared by x and y.
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
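Editor's note: a minimal sketch of using AddNamedImport from client code, on a made-up file. It parses a buffer, adds `import pathpkg "path"` next to the existing import, and prints the rewritten file; DeleteNamedImport and RewriteImport are used the same way.

package main

import (
	"go/format"
	"go/parser"
	"go/token"
	"log"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = `package demo

import "fmt"

func main() { fmt.Println("hi") }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	// Add a named import and print the resulting source.
	astutil.AddNamedImport(fset, f, "pathpkg", "path")
	if err := format.Node(os.Stdout, fset, f); err != nil {
		log.Fatal(err)
	}
}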
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 0000000..cf72ea9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,477 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+//
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
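// Editor's note (illustrative sketch, not part of the vendored file): client
// code typically calls Apply with a post function that inspects and mutates
// nodes through the Cursor. For a parsed *ast.File f, renaming every
// identifier "foo" to "bar" (names made up) looks roughly like:
//
//	astutil.Apply(f, nil, func(c *astutil.Cursor) bool {
//		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
//			c.Replace(&ast.Ident{Name: "bar"})
//		}
//		return true
//	})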
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable whose type and value are those of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
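
The Apply/Cursor API above is easiest to see in a small end-to-end use. The following is a minimal, illustrative sketch (not part of this change): it parses a throwaway snippet and uses a pre-order ApplyFunc with Cursor.Replace to rename every identifier `foo` to `bar`. The file name and source text are invented for the example.

package main

import (
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	src := "package p\n\nfunc foo() { foo() }\n"
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Pre-order callback: rename identifiers; returning true keeps walking children.
	astutil.Apply(f, func(c *astutil.Cursor) bool {
		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
			c.Replace(ast.NewIdent("bar")) // the replacement node is not walked by Apply
		}
		return true
	}, nil)
	printer.Fprint(os.Stdout, fset, f)
}
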
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 0000000..7630629
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,14 @@
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
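
As a quick illustration (not part of this change), Unparen simply peels nested ParenExpr wrappers; the expression below is constructed by hand for the example.

package main

import (
	"fmt"
	"go/ast"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	// ((x)) -> x
	e := &ast.ParenExpr{X: &ast.ParenExpr{X: ast.NewIdent("x")}}
	fmt.Println(astutil.Unparen(e)) // prints "x" (the inner *ast.Ident)
}
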
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
new file mode 100644
index 0000000..98b3987
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -0,0 +1,109 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gcexportdata provides functions for locating, reading, and
+// writing export data files containing type information produced by the
+// gc compiler. This package supports go1.7 export data format and all
+// later versions.
+//
+// Although it might seem convenient for this package to live alongside
+// go/types in the standard library, this would cause version skew
+// problems for developer tools that use it, since they must be able to
+// consume the outputs of the gc compiler both before and after a Go
+// update such as from Go 1.7 to Go 1.8. Because this package lives in
+// golang.org/x/tools, sites can update their version of this repo some
+// time before the Go 1.8 release and rebuild and redeploy their
+// developer tools, which will then be able to consume both Go 1.7 and
+// Go 1.8 export data files, so they will work before and after the
+// Go update. (See discussion at https://golang.org/issue/15651.)
+//
+package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+
+ "golang.org/x/tools/go/internal/gcimporter"
+)
+
+// Find returns the name of an object (.o) or archive (.a) file
+// containing type information for the specified import path,
+// using the workspace layout conventions of go/build.
+// If no file was found, an empty filename is returned.
+//
+// A relative srcDir is interpreted relative to the current working directory.
+//
+// Find also returns the package's resolved (canonical) import path,
+// reflecting the effects of srcDir and vendoring on importPath.
+func Find(importPath, srcDir string) (filename, path string) {
+ return gcimporter.FindPkg(importPath, srcDir)
+}
+
+// NewReader returns a reader for the export data section of an object
+// (.o) or archive (.a) file read from r. The new reader may provide
+// additional trailing data beyond the end of the export data.
+func NewReader(r io.Reader) (io.Reader, error) {
+ buf := bufio.NewReader(r)
+ _, err := gcimporter.FindExportData(buf)
+ // If we ever switch to a zip-like archive format with the ToC
+ // at the end, we can return the correct portion of export data,
+ // but for now we must return the entire rest of the file.
+ return buf, err
+}
+
+// Read reads export data from in, decodes it, and returns type
+// information for the package.
+// The package name is specified by path.
+// File position information is added to fset.
+//
+// Read may inspect and add to the imports map to ensure that references
+// within the export data to other packages are consistent. The caller
+// must ensure that imports[path] does not exist, or exists but is
+// incomplete (see types.Package.Complete), and Read inserts the
+// resulting package into this map entry.
+//
+// On return, the state of the reader is undefined.
+func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
+ data, err := ioutil.ReadAll(in)
+ if err != nil {
+ return nil, fmt.Errorf("reading export data for %q: %v", path, err)
+ }
+
+ if bytes.HasPrefix(data, []byte("!")) {
+ return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
+ }
+
+ // The App Engine Go runtime v1.6 uses the old export data format.
+ // TODO(adonovan): delete once v1.7 has been around for a while.
+ if bytes.HasPrefix(data, []byte("package ")) {
+ return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
+ }
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+ }
+
+ _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
+ return pkg, err
+}
+
+// Write writes encoded type information for the specified package to out.
+// The FileSet provides file position information for named objects.
+func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
+ b, err := gcimporter.BExportData(fset, pkg)
+ if err != nil {
+ return err
+ }
+ _, err = out.Write(b)
+ return err
+}
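
To make the relationship between Find, NewReader, and Read concrete, here is a rough, illustrative sketch (not part of this change) that loads type information for a single import path. The path "fmt", the GOPATH-style workspace layout Find relies on, and the minimal error handling are all assumptions made for the example.

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Locate the export data file for the import path.
	filename, path := gcexportdata.Find("fmt", ".")
	if filename == "" {
		log.Fatalf("no export data found for %q", path)
	}
	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Skip to the export data section, then decode it.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, token.NewFileSet(), imports, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "exports", len(pkg.Scope().Names()), "top-level names")
}
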
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
new file mode 100644
index 0000000..efe221e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
@@ -0,0 +1,73 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcexportdata
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "os"
+)
+
+// NewImporter returns a new instance of the types.Importer interface
+// that reads type information from export data files written by gc.
+// The Importer also satisfies types.ImporterFrom.
+//
+// Export data files are located using "go build" workspace conventions
+// and the build.Default context.
+//
+// Use this importer instead of go/importer.For("gc", ...) to avoid the
+// version-skew problems described in the documentation of this package,
+// or to control the FileSet or access the imports map populated during
+// package loading.
+//
+func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
+ return importer{fset, imports}
+}
+
+type importer struct {
+ fset *token.FileSet
+ imports map[string]*types.Package
+}
+
+func (imp importer) Import(importPath string) (*types.Package, error) {
+ return imp.ImportFrom(importPath, "", 0)
+}
+
+func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
+ filename, path := Find(importPath, srcDir)
+ if filename == "" {
+ if importPath == "unsafe" {
+ // Even for unsafe, call Find first in case
+ // the package was vendored.
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %s", importPath)
+ }
+
+ if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
+ return pkg, nil // cache hit
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", filename, err)
+ }
+ }()
+
+ r, err := NewReader(f)
+ if err != nil {
+ return nil, err
+ }
+
+ return Read(r, imp.fset, imp.imports, path)
+}
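
A hedged sketch of the intended usage (not part of this change): the value returned by NewImporter can be handed to go/types as the Importer in a types.Config, so that type-checking resolves imports from gc export data. The file name and source snippet below are invented for the example.

package main

import (
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	fset := token.NewFileSet()
	src := `package main; import "fmt"; func main() { fmt.Println("hi") }`
	f, err := parser.ParseFile(fset, "main.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	// imports is populated as packages are loaded from export data.
	imports := make(map[string]*types.Package)
	conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
	pkg, err := conf.Check("main", fset, []*ast.File{f}, nil)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("type-checked %s; %d packages loaded via export data", pkg.Path(), len(imports))
}
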
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go
new file mode 100644
index 0000000..2713dce
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/main.go
@@ -0,0 +1,99 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// The gcexportdata command is a diagnostic tool that displays the
+// contents of gc export data files.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+
+ "golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+var packageFlag = flag.String("package", "", "alternative package to print")
+
+func main() {
+ log.SetPrefix("gcexportdata: ")
+ log.SetFlags(0)
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
+ }
+ flag.Parse()
+ if flag.NArg() != 1 {
+ flag.Usage()
+ os.Exit(2)
+ }
+ filename := flag.Args()[0]
+
+ f, err := os.Open(filename)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Decode the package.
+ const primary = ""
+ imports := make(map[string]*types.Package)
+ fset := token.NewFileSet()
+ pkg, err := gcexportdata.Read(r, fset, imports, primary)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Optionally select an indirectly mentioned package.
+ if *packageFlag != "" {
+ pkg = imports[*packageFlag]
+ if pkg == nil {
+ fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
+ filename, *packageFlag)
+ for p := range imports {
+ if p != primary {
+ fmt.Fprintf(os.Stderr, "\t%s\n", p)
+ }
+ }
+ os.Exit(1)
+ }
+ }
+
+ // Print all package-level declarations, including non-exported ones.
+ fmt.Printf("package %s\n", pkg.Name())
+ for _, imp := range pkg.Imports() {
+ fmt.Printf("import %q\n", imp.Path())
+ }
+ qual := func(p *types.Package) string {
+ if pkg == p {
+ return ""
+ }
+ return p.Name()
+ }
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ fmt.Printf("%s: %s\n",
+ fset.Position(obj.Pos()),
+ types.ObjectString(obj, qual))
+
+ // For types, print each method.
+ if _, ok := obj.(*types.TypeName); ok {
+ for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
+ fmt.Printf("%s: %s\n",
+ fset.Position(method.Obj().Pos()),
+ types.SelectionString(method, qual))
+ }
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
new file mode 100644
index 0000000..0f652ea
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
@@ -0,0 +1,220 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+// This file handles cgo preprocessing of files containing `import "C"`.
+//
+// DESIGN
+//
+// The approach taken is to run the cgo processor on the package's
+// CgoFiles and parse the output, faking the filenames of the
+// resulting ASTs so that the synthetic file containing the C types is
+// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
+// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
+// not the names of the actual temporary files.
+//
+// The advantage of this approach is its fidelity to 'go build'. The
+// downside is that the token.Position.Offset for each AST node is
+// incorrect, being an offset within the temporary file. Line numbers
+// should still be correct because of the //line comments.
+//
+// The logic of this file is mostly plundered from the 'go build'
+// tool, which also invokes the cgo preprocessor.
+//
+//
+// REJECTED ALTERNATIVE
+//
+// An alternative approach that we explored is to extend go/types'
+// Importer mechanism to provide the identity of the importing package
+// so that each time `import "C"` appears it resolves to a different
+// synthetic package containing just the objects needed in that case.
+// The loader would invoke cgo but parse only the cgo_types.go file
+// defining the package-level objects, discarding the other files
+// resulting from preprocessing.
+//
+// The benefit of this approach would have been that source-level
+// syntax information would correspond exactly to the original cgo
+// file, with no preprocessing involved, making source tools like
+// godoc, guru, and eg happy. However, the approach was rejected
+// due to the additional complexity it would impose on go/types. (It
+// made for a beautiful demo, though.)
+//
+// cgo files, despite their *.go extension, are not legal Go source
+// files per the specification since they may refer to unexported
+// members of package "C" such as C.int. Also, a function such as
+// C.getpwent has in effect two types, one matching its C type and one
+// which additionally returns (errno C.int). The cgo preprocessor
+// uses name mangling to distinguish these two functions in the
+// processed code, but go/types would need to duplicate this logic in
+// its handling of function calls, analogous to the treatment of map
+// lookups in which y=m[k] and y,ok=m[k] are both legal.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
+// the output and returns the resulting ASTs.
+//
+func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
+ tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpdir)
+
+ pkgdir := bp.Dir
+ if DisplayPath != nil {
+ pkgdir = DisplayPath(pkgdir)
+ }
+
+ cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
+ if err != nil {
+ return nil, err
+ }
+ var files []*ast.File
+ for i := range cgoFiles {
+ rd, err := os.Open(cgoFiles[i])
+ if err != nil {
+ return nil, err
+ }
+ display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
+ f, err := parser.ParseFile(fset, display, rd, mode)
+ rd.Close()
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+ return files, nil
+}
+
+var cgoRe = regexp.MustCompile(`[/\\:]`)
+
+// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
+// lists of files: the resulting processed files (in temporary
+// directory tmpdir) and the corresponding names of the unprocessed files.
+//
+// Run is adapted from (*builder).cgo in
+// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
+// Objective C, CGOPKGPATH, CGO_FLAGS.
+//
+// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
+// to the cgo preprocessor. This in turn will set the // line comments
+// referring to those files to use absolute paths. This is needed for
+// go/packages using the legacy go list support so it is able to find
+// the original files.
+func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
+ cgoCPPFLAGS, _, _, _ := cflags(bp, true)
+ _, cgoexeCFLAGS, _, _ := cflags(bp, false)
+
+ if len(bp.CgoPkgConfig) > 0 {
+ pcCFLAGS, err := pkgConfigFlags(bp)
+ if err != nil {
+ return nil, nil, err
+ }
+ cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
+ }
+
+ // Allows including _cgo_export.h from .[ch] files in the package.
+ cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
+
+ // _cgo_gotypes.go (displayed "C") contains the type definitions.
+ files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
+ displayFiles = append(displayFiles, "C")
+ for _, fn := range bp.CgoFiles {
+ // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
+ f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
+ files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
+ displayFiles = append(displayFiles, fn)
+ }
+
+ var cgoflags []string
+ if bp.Goroot && bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_runtime_cgo=false")
+ }
+ if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_syscall=false")
+ }
+
+ var cgoFiles []string = bp.CgoFiles
+ if useabs {
+ cgoFiles = make([]string, len(bp.CgoFiles))
+ for i := range cgoFiles {
+ cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
+ }
+ }
+
+ args := stringList(
+ "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
+ cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
+ )
+ if false {
+ log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Dir = pkgdir
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
+ }
+
+ return files, displayFiles, nil
+}
+
+// -- unmodified from 'go build' ---------------------------------------
+
+// Return the flags to use when invoking the C or C++ compilers, or cgo.
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
+ var defaults string
+ if def {
+ defaults = "-g -O2"
+ }
+
+ cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
+ cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
+ cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
+ ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
+ return
+}
+
+// envList returns the value of the given environment variable broken
+// into fields, using the default value when the variable is empty.
+func envList(key, def string) []string {
+ v := os.Getenv(key)
+ if v == "" {
+ v = def
+ }
+ return strings.Fields(v)
+}
+
+// stringList's arguments should be a sequence of string or []string values.
+// stringList flattens them into a single []string.
+func stringList(args ...interface{}) []string {
+ var x []string
+ for _, arg := range args {
+ switch arg := arg.(type) {
+ case []string:
+ x = append(x, arg...)
+ case string:
+ x = append(x, arg)
+ default:
+ panic("stringList: invalid argument")
+ }
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
new file mode 100644
index 0000000..b5bb95a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
@@ -0,0 +1,39 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "os/exec"
+ "strings"
+)
+
+// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
+func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
+ cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
+ if len(out) > 0 {
+ s = fmt.Sprintf("%s: %s", s, out)
+ }
+ return nil, errors.New(s)
+ }
+ if len(out) > 0 {
+ flags = strings.Fields(string(out))
+ }
+ return
+}
+
+// pkgConfigFlags calls pkg-config if needed and returns the cflags
+// needed to build the package.
+func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
+ if len(p.CgoPkgConfig) == 0 {
+ return nil, nil
+ }
+ return pkgConfig("--cflags", p.CgoPkgConfig)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
new file mode 100644
index 0000000..a807d0a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
@@ -0,0 +1,852 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
+// see that file for specification of the format.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math"
+ "math/big"
+ "sort"
+ "strings"
+)
+
+// If debugFormat is set, each integer and string value is preceded by a marker
+// and position information in the encoding. This mechanism permits an importer
+// to recognize immediately when it is out of sync. The importer recognizes this
+// mode automatically (i.e., it can import export data produced with debugging
+// support even if debugFormat is not set at the time of import). This mode will
+// lead to massively larger export data (by a factor of 2 to 3) and should only
+// be enabled during development and debugging.
+//
+// NOTE: This flag is the first flag to enable if importing dies because of
+// (suspected) format errors, and whenever a change is made to the format.
+const debugFormat = false // default: false
+
+// If trace is set, debugging output is printed to std out.
+const trace = false // default: false
+
+// Current export format version. Increase with each format change.
+// Note: The latest binary (non-indexed) export format is at version 6.
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
+const exportVersion = 4
+
+// trackAllTypes enables cycle tracking for all types, not just named
+// types. The existing compiler invariants assume that unnamed types
+// that are not completely set up are not used, or else there are spurious
+// errors.
+// If disabled, only named types are tracked, possibly leading to slightly
+// less efficient encoding in rare cases. It also prevents the export of
+// some corner-case type declarations (but those are not handled correctly
+// with the textual export format either).
+// TODO(gri) enable and remove once issues caused by it are fixed
+const trackAllTypes = false
+
+type exporter struct {
+ fset *token.FileSet
+ out bytes.Buffer
+
+ // object -> index maps, indexed in order of serialization
+ strIndex map[string]int
+ pkgIndex map[*types.Package]int
+ typIndex map[types.Type]int
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+
+ // debugging support
+ written int // bytes written
+ indent int // for trace
+}
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+func internalErrorf(format string, args ...interface{}) error {
+ return internalError(fmt.Sprintf(format, args...))
+}
+
+// BExportData returns binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+
+ p := exporter{
+ fset: fset,
+ strIndex: map[string]int{"": 0}, // empty string is mapped to 0
+ pkgIndex: make(map[*types.Package]int),
+ typIndex: make(map[types.Type]int),
+ posInfoFormat: true, // TODO(gri) might become a flag, eventually
+ }
+
+ // write version info
+ // The version string must start with "version %d" where %d is the version
+ // number. Additional debugging information may follow after a blank; that
+ // text is ignored by the importer.
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion))
+ var debug string
+ if debugFormat {
+ debug = "debug"
+ }
+ p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
+ p.bool(trackAllTypes)
+ p.bool(p.posInfoFormat)
+
+ // --- generic export data ---
+
+ // populate type map with predeclared "known" types
+ for index, typ := range predeclared() {
+ p.typIndex[typ] = index
+ }
+ if len(p.typIndex) != len(predeclared()) {
+ return nil, internalError("duplicate entries in type map?")
+ }
+
+ // write package data
+ p.pkg(pkg, true)
+ if trace {
+ p.tracef("\n")
+ }
+
+ // write objects
+ objcount := 0
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if !ast.IsExported(name) {
+ continue
+ }
+ if trace {
+ p.tracef("\n")
+ }
+ p.obj(scope.Lookup(name))
+ objcount++
+ }
+
+ // indicate end of list
+ if trace {
+ p.tracef("\n")
+ }
+ p.tag(endTag)
+
+ // for self-verification only (redundant)
+ p.int(objcount)
+
+ if trace {
+ p.tracef("\n")
+ }
+
+ // --- end of export data ---
+
+ return p.out.Bytes(), nil
+}
+
+func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
+ if pkg == nil {
+ panic(internalError("unexpected nil pkg"))
+ }
+
+ // if we saw the package before, write its index (>= 0)
+ if i, ok := p.pkgIndex[pkg]; ok {
+ p.index('P', i)
+ return
+ }
+
+ // otherwise, remember the package, write the package tag (< 0) and package data
+ if trace {
+ p.tracef("P%d = { ", len(p.pkgIndex))
+ defer p.tracef("} ")
+ }
+ p.pkgIndex[pkg] = len(p.pkgIndex)
+
+ p.tag(packageTag)
+ p.string(pkg.Name())
+ if emptypath {
+ p.string("")
+ } else {
+ p.string(pkg.Path())
+ }
+}
+
+func (p *exporter) obj(obj types.Object) {
+ switch obj := obj.(type) {
+ case *types.Const:
+ p.tag(constTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+ p.value(obj.Val())
+
+ case *types.TypeName:
+ if obj.IsAlias() {
+ p.tag(aliasTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ } else {
+ p.tag(typeTag)
+ }
+ p.typ(obj.Type())
+
+ case *types.Var:
+ p.tag(varTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+
+ case *types.Func:
+ p.tag(funcTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ sig := obj.Type().(*types.Signature)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+
+ default:
+ panic(internalErrorf("unexpected object %v (%T)", obj, obj))
+ }
+}
+
+func (p *exporter) pos(obj types.Object) {
+ if !p.posInfoFormat {
+ return
+ }
+
+ file, line := p.fileLine(obj)
+ if file == p.prevFile {
+ // common case: write line delta
+ // delta == 0 means different file or no line change
+ delta := line - p.prevLine
+ p.int(delta)
+ if delta == 0 {
+ p.int(-1) // -1 means no file change
+ }
+ } else {
+ // different file
+ p.int(0)
+ // Encode filename as length of common prefix with previous
+ // filename, followed by (possibly empty) suffix. Filenames
+ // frequently share path prefixes, so this can save a lot
+ // of space and make export data size less dependent on file
+ // path length. The suffix is unlikely to be empty because
+ // file names tend to end in ".go".
+ n := commonPrefixLen(p.prevFile, file)
+ p.int(n) // n >= 0
+ p.string(file[n:]) // write suffix only
+ p.prevFile = file
+ p.int(line)
+ }
+ p.prevLine = line
+}
+
+func (p *exporter) fileLine(obj types.Object) (file string, line int) {
+ if p.fset != nil {
+ pos := p.fset.Position(obj.Pos())
+ file = pos.Filename
+ line = pos.Line
+ }
+ return
+}
+
+func commonPrefixLen(a, b string) int {
+ if len(a) > len(b) {
+ a, b = b, a
+ }
+ // len(a) <= len(b)
+ i := 0
+ for i < len(a) && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+
+func (p *exporter) qualifiedName(obj types.Object) {
+ p.string(obj.Name())
+ p.pkg(obj.Pkg(), false)
+}
+
+func (p *exporter) typ(t types.Type) {
+ if t == nil {
+ panic(internalError("nil type"))
+ }
+
+ // Possible optimization: Anonymous pointer types *T where
+ // T is a named type are common. We could canonicalize all
+ // such types *T to a single type PT = *T. This would lead
+ // to at most one *T entry in typIndex, and all future *T's
+ // would be encoded as the respective index directly. Would
+ // save 1 byte (pointerTag) per *T and reduce the typIndex
+ // size (at the cost of a canonicalization map). We can do
+ // this later, without encoding format change.
+
+ // if we saw the type before, write its index (>= 0)
+ if i, ok := p.typIndex[t]; ok {
+ p.index('T', i)
+ return
+ }
+
+ // otherwise, remember the type, write the type tag (< 0) and type data
+ if trackAllTypes {
+ if trace {
+ p.tracef("T%d = {>\n", len(p.typIndex))
+ defer p.tracef("<\n} ")
+ }
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ switch t := t.(type) {
+ case *types.Named:
+ if !trackAllTypes {
+ // if we don't track all types, track named types now
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ p.tag(namedTag)
+ p.pos(t.Obj())
+ p.qualifiedName(t.Obj())
+ p.typ(t.Underlying())
+ if !types.IsInterface(t) {
+ p.assocMethods(t)
+ }
+
+ case *types.Array:
+ p.tag(arrayTag)
+ p.int64(t.Len())
+ p.typ(t.Elem())
+
+ case *types.Slice:
+ p.tag(sliceTag)
+ p.typ(t.Elem())
+
+ case *dddSlice:
+ p.tag(dddTag)
+ p.typ(t.elem)
+
+ case *types.Struct:
+ p.tag(structTag)
+ p.fieldList(t)
+
+ case *types.Pointer:
+ p.tag(pointerTag)
+ p.typ(t.Elem())
+
+ case *types.Signature:
+ p.tag(signatureTag)
+ p.paramList(t.Params(), t.Variadic())
+ p.paramList(t.Results(), false)
+
+ case *types.Interface:
+ p.tag(interfaceTag)
+ p.iface(t)
+
+ case *types.Map:
+ p.tag(mapTag)
+ p.typ(t.Key())
+ p.typ(t.Elem())
+
+ case *types.Chan:
+ p.tag(chanTag)
+ p.int(int(3 - t.Dir())) // hack
+ p.typ(t.Elem())
+
+ default:
+ panic(internalErrorf("unexpected type %T: %s", t, t))
+ }
+}
+
+func (p *exporter) assocMethods(named *types.Named) {
+ // Sort methods (for determinism).
+ var methods []*types.Func
+ for i := 0; i < named.NumMethods(); i++ {
+ methods = append(methods, named.Method(i))
+ }
+ sort.Sort(methodsByName(methods))
+
+ p.int(len(methods))
+
+ if trace && methods != nil {
+ p.tracef("associated methods {>\n")
+ }
+
+ for i, m := range methods {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+
+ p.pos(m)
+ name := m.Name()
+ p.string(name)
+ if !exported(name) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ sig := m.Type().(*types.Signature)
+ p.paramList(types.NewTuple(sig.Recv()), false)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+ p.int(0) // dummy value for go:nointerface pragma - ignored by importer
+ }
+
+ if trace && methods != nil {
+ p.tracef("<\n} ")
+ }
+}
+
+type methodsByName []*types.Func
+
+func (x methodsByName) Len() int { return len(x) }
+func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
+
+func (p *exporter) fieldList(t *types.Struct) {
+ if trace && t.NumFields() > 0 {
+ p.tracef("fields {>\n")
+ defer p.tracef("<\n} ")
+ }
+
+ p.int(t.NumFields())
+ for i := 0; i < t.NumFields(); i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.field(t.Field(i))
+ p.string(t.Tag(i))
+ }
+}
+
+func (p *exporter) field(f *types.Var) {
+ if !f.IsField() {
+ panic(internalError("field expected"))
+ }
+
+ p.pos(f)
+ p.fieldName(f)
+ p.typ(f.Type())
+}
+
+func (p *exporter) iface(t *types.Interface) {
+ // TODO(gri): enable importer to load embedded interfaces,
+ // then emit Embeddeds and ExplicitMethods separately here.
+ p.int(0)
+
+ n := t.NumMethods()
+ if trace && n > 0 {
+ p.tracef("methods {>\n")
+ defer p.tracef("<\n} ")
+ }
+ p.int(n)
+ for i := 0; i < n; i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.method(t.Method(i))
+ }
+}
+
+func (p *exporter) method(m *types.Func) {
+ sig := m.Type().(*types.Signature)
+ if sig.Recv() == nil {
+ panic(internalError("method expected"))
+ }
+
+ p.pos(m)
+ p.string(m.Name())
+ if m.Name() != "_" && !ast.IsExported(m.Name()) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ // interface method; no need to encode receiver.
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+}
+
+func (p *exporter) fieldName(f *types.Var) {
+ name := f.Name()
+
+ if f.Anonymous() {
+ // anonymous field - we distinguish between 3 cases:
+ // 1) field name matches base type name and is exported
+ // 2) field name matches base type name and is not exported
+ // 3) field name doesn't match base type name (alias name)
+ bname := basetypeName(f.Type())
+ if name == bname {
+ if ast.IsExported(name) {
+ name = "" // 1) we don't need to know the field name or package
+ } else {
+ name = "?" // 2) use unexported name "?" to force package export
+ }
+ } else {
+ // 3) indicate alias and export name as is
+ // (this requires an extra "@" but this is a rare case)
+ p.string("@")
+ }
+ }
+
+ p.string(name)
+ if name != "" && !ast.IsExported(name) {
+ p.pkg(f.Pkg(), false)
+ }
+}
+
+func basetypeName(typ types.Type) string {
+ switch typ := deref(typ).(type) {
+ case *types.Basic:
+ return typ.Name()
+ case *types.Named:
+ return typ.Obj().Name()
+ default:
+ return "" // unnamed type
+ }
+}
+
+func (p *exporter) paramList(params *types.Tuple, variadic bool) {
+ // use negative length to indicate unnamed parameters
+ // (look at the first parameter only since either all
+ // names are present or all are absent)
+ n := params.Len()
+ if n > 0 && params.At(0).Name() == "" {
+ n = -n
+ }
+ p.int(n)
+ for i := 0; i < params.Len(); i++ {
+ q := params.At(i)
+ t := q.Type()
+ if variadic && i == params.Len()-1 {
+ t = &dddSlice{t.(*types.Slice).Elem()}
+ }
+ p.typ(t)
+ if n > 0 {
+ name := q.Name()
+ p.string(name)
+ if name != "_" {
+ p.pkg(q.Pkg(), false)
+ }
+ }
+ p.string("") // no compiler-specific info
+ }
+}
+
+func (p *exporter) value(x constant.Value) {
+ if trace {
+ p.tracef("= ")
+ }
+
+ switch x.Kind() {
+ case constant.Bool:
+ tag := falseTag
+ if constant.BoolVal(x) {
+ tag = trueTag
+ }
+ p.tag(tag)
+
+ case constant.Int:
+ if v, exact := constant.Int64Val(x); exact {
+ // common case: x fits into an int64 - use compact encoding
+ p.tag(int64Tag)
+ p.int64(v)
+ return
+ }
+ // uncommon case: large x - use float encoding
+ // (powers of 2 will be encoded efficiently with exponent)
+ p.tag(floatTag)
+ p.float(constant.ToFloat(x))
+
+ case constant.Float:
+ p.tag(floatTag)
+ p.float(x)
+
+ case constant.Complex:
+ p.tag(complexTag)
+ p.float(constant.Real(x))
+ p.float(constant.Imag(x))
+
+ case constant.String:
+ p.tag(stringTag)
+ p.string(constant.StringVal(x))
+
+ case constant.Unknown:
+ // package contains type errors
+ p.tag(unknownTag)
+
+ default:
+ panic(internalErrorf("unexpected value %v (%T)", x, x))
+ }
+}
+
+func (p *exporter) float(x constant.Value) {
+ if x.Kind() != constant.Float {
+ panic(internalErrorf("unexpected constant %v, want float", x))
+ }
+ // extract sign (there is no -0)
+ sign := constant.Sign(x)
+ if sign == 0 {
+ // x == 0
+ p.int(0)
+ return
+ }
+ // x != 0
+
+ var f big.Float
+ if v, exact := constant.Float64Val(x); exact {
+ // float64
+ f.SetFloat64(v)
+ } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+ // TODO(gri): add big.Rat accessor to constant.Value.
+ r := valueToRat(num)
+ f.SetRat(r.Quo(r, valueToRat(denom)))
+ } else {
+ // Value too large to represent as a fraction => inaccessible.
+ // TODO(gri): add big.Float accessor to constant.Value.
+ f.SetFloat64(math.MaxFloat64) // FIXME
+ }
+
+ // extract exponent such that 0.5 <= m < 1.0
+ var m big.Float
+ exp := f.MantExp(&m)
+
+ // extract mantissa as *big.Int
+ // - set exponent large enough so mant satisfies mant.IsInt()
+ // - get *big.Int from mant
+ m.SetMantExp(&m, int(m.MinPrec()))
+ mant, acc := m.Int(nil)
+ if acc != big.Exact {
+ panic(internalError("internal error"))
+ }
+
+ p.int(sign)
+ p.int(exp)
+ p.string(string(mant.Bytes()))
+}
+
+func valueToRat(x constant.Value) *big.Rat {
+ // Convert little-endian to big-endian.
+ // I can't believe this is necessary.
+ bytes := constant.Bytes(x)
+ for i := 0; i < len(bytes)/2; i++ {
+ bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+ }
+ return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
+func (p *exporter) bool(b bool) bool {
+ if trace {
+ p.tracef("[")
+ defer p.tracef("= %v] ", b)
+ }
+
+ x := 0
+ if b {
+ x = 1
+ }
+ p.int(x)
+ return b
+}
+
+// ----------------------------------------------------------------------------
+// Low-level encoders
+
+func (p *exporter) index(marker byte, index int) {
+ if index < 0 {
+ panic(internalError("invalid index < 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%c%d ", marker, index)
+ }
+ p.rawInt64(int64(index))
+}
+
+func (p *exporter) tag(tag int) {
+ if tag >= 0 {
+ panic(internalError("invalid tag >= 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%s ", tagString[-tag])
+ }
+ p.rawInt64(int64(tag))
+}
+
+func (p *exporter) int(x int) {
+ p.int64(int64(x))
+}
+
+func (p *exporter) int64(x int64) {
+ if debugFormat {
+ p.marker('i')
+ }
+ if trace {
+ p.tracef("%d ", x)
+ }
+ p.rawInt64(x)
+}
+
+func (p *exporter) string(s string) {
+ if debugFormat {
+ p.marker('s')
+ }
+ if trace {
+ p.tracef("%q ", s)
+ }
+ // if we saw the string before, write its index (>= 0)
+ // (the empty string is mapped to 0)
+ if i, ok := p.strIndex[s]; ok {
+ p.rawInt64(int64(i))
+ return
+ }
+ // otherwise, remember string and write its negative length and bytes
+ p.strIndex[s] = len(p.strIndex)
+ p.rawInt64(-int64(len(s)))
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+}
+
+// marker emits a marker byte and position information which makes
+// it easy for a reader to detect if it is "out of sync". Used for
+// debugFormat format only.
+func (p *exporter) marker(m byte) {
+ p.rawByte(m)
+ // Enable this for help tracking down the location
+ // of an incorrect marker when running in debugFormat.
+ if false && trace {
+ p.tracef("#%d ", p.written)
+ }
+ p.rawInt64(int64(p.written))
+}
+
+// rawInt64 should only be used by low-level encoders.
+func (p *exporter) rawInt64(x int64) {
+ var tmp [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(tmp[:], x)
+ for i := 0; i < n; i++ {
+ p.rawByte(tmp[i])
+ }
+}
+
+// rawStringln should only be used to emit the initial version string.
+func (p *exporter) rawStringln(s string) {
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+ p.rawByte('\n')
+}
+
+// rawByte is the bottleneck interface to write to p.out.
+// rawByte escapes b as follows (any encoding that
+// hides '$' will do):
+//
+// '$' => '|' 'S'
+// '|' => '|' '|'
+//
+// Necessary so other tools can find the end of the
+// export data by searching for "$$".
+// rawByte should only be used by low-level encoders.
+func (p *exporter) rawByte(b byte) {
+ switch b {
+ case '$':
+ // write '$' as '|' 'S'
+ b = 'S'
+ fallthrough
+ case '|':
+ // write '|' as '|' '|'
+ p.out.WriteByte('|')
+ p.written++
+ }
+ p.out.WriteByte(b)
+ p.written++
+}
+
+// tracef is like fmt.Printf but it rewrites the format string
+// to take care of indentation.
+func (p *exporter) tracef(format string, args ...interface{}) {
+ if strings.ContainsAny(format, "<>\n") {
+ var buf bytes.Buffer
+ for i := 0; i < len(format); i++ {
+ // no need to deal with runes
+ ch := format[i]
+ switch ch {
+ case '>':
+ p.indent++
+ continue
+ case '<':
+ p.indent--
+ continue
+ }
+ buf.WriteByte(ch)
+ if ch == '\n' {
+ for j := p.indent; j > 0; j-- {
+ buf.WriteString(". ")
+ }
+ }
+ }
+ format = buf.String()
+ }
+ fmt.Printf(format, args...)
+}
+
+// Debugging support.
+// (tagString is only used when tracing is enabled)
+var tagString = [...]string{
+ // Packages
+ -packageTag: "package",
+
+ // Types
+ -namedTag: "named type",
+ -arrayTag: "array",
+ -sliceTag: "slice",
+ -dddTag: "ddd",
+ -structTag: "struct",
+ -pointerTag: "pointer",
+ -signatureTag: "signature",
+ -interfaceTag: "interface",
+ -mapTag: "map",
+ -chanTag: "chan",
+
+ // Values
+ -falseTag: "false",
+ -trueTag: "true",
+ -int64Tag: "int64",
+ -floatTag: "float",
+ -fractionTag: "fraction",
+ -complexTag: "complex",
+ -stringTag: "string",
+ -unknownTag: "unknown",
+
+ // Type aliases
+ -aliasTag: "alias",
+}
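
The exporter's low-level encoding — varints written by rawInt64, plus a string table in which a previously seen string is emitted as its non-negative index and a new string as a negative length followed by its bytes — can be illustrated with the small self-contained sketch below. It is not part of this change and deliberately omits the '$' escaping that rawByte performs.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

type enc struct {
	out      bytes.Buffer
	strIndex map[string]int
}

func (e *enc) rawInt64(x int64) {
	var tmp [binary.MaxVarintLen64]byte
	n := binary.PutVarint(tmp[:], x)
	e.out.Write(tmp[:n])
}

func (e *enc) string(s string) {
	if i, ok := e.strIndex[s]; ok {
		e.rawInt64(int64(i)) // seen before: emit its index (>= 0)
		return
	}
	e.strIndex[s] = len(e.strIndex)
	e.rawInt64(-int64(len(s))) // new string: emit negative length ...
	e.out.WriteString(s)       // ... followed by the raw bytes
}

func main() {
	e := &enc{strIndex: map[string]int{"": 0}} // empty string is mapped to 0
	e.string("fmt")
	e.string("fmt") // second occurrence costs only a single varint index
	fmt.Printf("%d bytes for two occurrences of %q\n", e.out.Len(), "fmt")
}
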
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
new file mode 100644
index 0000000..e3c3107
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
@@ -0,0 +1,1036 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+
+package gcimporter
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+type importer struct {
+ imports map[string]*types.Package
+ data []byte
+ importpath string
+ buf []byte // for reading strings
+ version int // export format version
+
+ // object lists
+ strList []string // in order of appearance
+ pathList []string // in order of appearance
+ pkgList []*types.Package // in order of appearance
+ typList []types.Type // in order of appearance
+ interfaceList []*types.Interface // for delayed completion only
+ trackAllTypes bool
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+ fake fakeFileSet
+
+ // debugging support
+ debugFormat bool
+ read int // bytes read
+}
+
+// BImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ // catch panics and return them as errors
+ const currentVersion = 6
+ version := -1 // unknown version
+ defer func() {
+ if e := recover(); e != nil {
+ // Return a (possibly nil or incomplete) package unchanged (see #16088).
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ p := importer{
+ imports: imports,
+ data: data,
+ importpath: path,
+ version: version,
+ strList: []string{""}, // empty string is mapped to 0
+ pathList: []string{""}, // empty string is mapped to 0
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ // read version info
+ var versionstr string
+ if b := p.rawByte(); b == 'c' || b == 'd' {
+ // Go1.7 encoding; first byte encodes low-level
+ // encoding format (compact vs debug).
+ // For backward-compatibility only (avoid problems with
+ // old installed packages). Newly compiled packages use
+ // the extensible format string.
+ // TODO(gri) Remove this support eventually; after Go1.8.
+ if b == 'd' {
+ p.debugFormat = true
+ }
+ p.trackAllTypes = p.rawByte() == 'a'
+ p.posInfoFormat = p.int() != 0
+ versionstr = p.string()
+ if versionstr == "v1" {
+ version = 0
+ }
+ } else {
+ // Go1.8 extensible encoding
+ // read version string and extract version number (ignore anything after the version number)
+ versionstr = p.rawStringln(b)
+ if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
+ if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
+ version = v
+ }
+ }
+ }
+ p.version = version
+
+ // read version specific flags - extend as necessary
+ switch p.version {
+ // case currentVersion:
+ // ...
+ // fallthrough
+ case currentVersion, 5, 4, 3, 2, 1:
+ p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
+ p.trackAllTypes = p.int() != 0
+ p.posInfoFormat = p.int() != 0
+ case 0:
+ // Go1.7 encoding format - nothing to do here
+ default:
+ errorf("unknown bexport format version %d (%q)", p.version, versionstr)
+ }
+
+ // --- generic export data ---
+
+ // populate typList with predeclared "known" types
+ p.typList = append(p.typList, predeclared()...)
+
+ // read package data
+ pkg = p.pkg()
+
+ // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
+ objcount := 0
+ for {
+ tag := p.tagOrIndex()
+ if tag == endTag {
+ break
+ }
+ p.obj(tag)
+ objcount++
+ }
+
+ // self-verification
+ if count := p.int(); count != objcount {
+ errorf("got %d objects; want %d", objcount, count)
+ }
+
+ // ignore compiler-specific import data
+
+ // complete interfaces
+ // TODO(gri) re-investigate if we still need to do this in a delayed fashion
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), p.pkgList[1:]...)
+ sort.Sort(byPath(list))
+ pkg.SetImports(list)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return p.read, pkg, nil
+}
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Sprintf(format, args...))
+}
+
+func (p *importer) pkg() *types.Package {
+ // if the package was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.pkgList[i]
+ }
+
+ // otherwise, i is the package tag (< 0)
+ if i != packageTag {
+ errorf("unexpected package tag %d version %d", i, p.version)
+ }
+
+ // read package data
+ name := p.string()
+ var path string
+ if p.version >= 5 {
+ path = p.path()
+ } else {
+ path = p.string()
+ }
+ if p.version >= 6 {
+ p.int() // package height; unused by go/types
+ }
+
+ // we should never see an empty package name
+ if name == "" {
+ errorf("empty package name in import")
+ }
+
+ // an empty path denotes the package we are currently importing;
+ // it must be the first package we see
+ if (path == "") != (len(p.pkgList) == 0) {
+ errorf("package path %q for pkg index %d", path, len(p.pkgList))
+ }
+
+ // if the package was imported before, use that one; otherwise create a new one
+ if path == "" {
+ path = p.importpath
+ }
+ pkg := p.imports[path]
+ if pkg == nil {
+ pkg = types.NewPackage(path, name)
+ p.imports[path] = pkg
+ } else if pkg.Name() != name {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
+ }
+ p.pkgList = append(p.pkgList, pkg)
+
+ return pkg
+}
+
+// objTag returns the tag value for each object kind.
+func objTag(obj types.Object) int {
+ switch obj.(type) {
+ case *types.Const:
+ return constTag
+ case *types.TypeName:
+ return typeTag
+ case *types.Var:
+ return varTag
+ case *types.Func:
+ return funcTag
+ default:
+ errorf("unexpected object: %v (%T)", obj, obj) // panics
+ panic("unreachable")
+ }
+}
+
+func sameObj(a, b types.Object) bool {
+ // Because unnamed types are not canonicalized, we cannot simply compare types for
+ // (pointer) identity.
+ // Ideally we'd check equality of constant values as well, but this is good enough.
+ return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
+}
+
+func (p *importer) declare(obj types.Object) {
+ pkg := obj.Pkg()
+ if alt := pkg.Scope().Insert(obj); alt != nil {
+ // This can only trigger if we import a (non-type) object a second time.
+ // Excluding type aliases, this cannot happen because a) we only import a package
+ // once; and b) we ignore compiler-specific export data which may contain
+ // functions whose inlined function bodies refer to other functions that
+ // were already imported.
+ // However, type aliases require reexporting the original type, so we need
+ // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
+ // method importer.obj, switch case importing functions).
+ // TODO(gri) review/update this comment once the gc compiler handles type aliases.
+ if !sameObj(obj, alt) {
+ errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
+ }
+ }
+}
+
+func (p *importer) obj(tag int) {
+ switch tag {
+ case constTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ val := p.value()
+ p.declare(types.NewConst(pos, pkg, name, typ, val))
+
+ case aliasTag:
+ // TODO(gri) verify type alias hookup is correct
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewTypeName(pos, pkg, name, typ))
+
+ case typeTag:
+ p.typ(nil, nil)
+
+ case varTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewVar(pos, pkg, name, typ))
+
+ case funcTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(nil, params, result, isddd)
+ p.declare(types.NewFunc(pos, pkg, name, sig))
+
+ default:
+ errorf("unexpected object tag %d", tag)
+ }
+}
+
+const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+
+func (p *importer) pos() token.Pos {
+ if !p.posInfoFormat {
+ return token.NoPos
+ }
+
+ file := p.prevFile
+ line := p.prevLine
+ delta := p.int()
+ line += delta
+ if p.version >= 5 {
+ if delta == deltaNewFile {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.path()
+ line = n
+ }
+ }
+ } else {
+ if delta == 0 {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.prevFile[:n] + p.string()
+ line = p.int()
+ }
+ }
+ }
+ p.prevFile = file
+ p.prevLine = line
+
+ return p.fake.pos(file, line)
+}
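+
+// For illustration, the decoding above works as follows for version >= 5:
+// a delta of 3 with prevLine == 10 yields line 13 in the same file, while
+// a delta equal to deltaNewFile (-64) followed by an int n >= 0 switches
+// to a new file (read via p.path()) at line n; n < 0 means the delta
+// really was just -64.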
+
+// Synthesize a token.Pos
+type fakeFileSet struct {
+ fset *token.FileSet
+ files map[string]*token.File
+}
+
+func (s *fakeFileSet) pos(file string, line int) token.Pos {
+ // Since we don't know the set of needed file positions, we
+ // reserve maxlines positions per file.
+ const maxlines = 64 * 1024
+ f := s.files[file]
+ if f == nil {
+ f = s.fset.AddFile(file, -1, maxlines)
+ s.files[file] = f
+ // Allocate the fake linebreak indices on first use.
+ // TODO(adonovan): opt: save ~512KB using a more complex scheme?
+ fakeLinesOnce.Do(func() {
+ fakeLines = make([]int, maxlines)
+ for i := range fakeLines {
+ fakeLines[i] = i
+ }
+ })
+ f.SetLines(fakeLines)
+ }
+
+ if line > maxlines {
+ line = 1
+ }
+
+ // Treat the file as if it contained only newlines
+ // and column=1: use the line number as the offset.
+ return f.Pos(line - 1)
+}
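+
+// For example, with the scheme above a request for line 1 of a file maps
+// to offset 0 of the fake file reserved for it, line 2 to offset 1, and
+// so on; every resulting position reports column 1.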
+
+var (
+ fakeLines []int
+ fakeLinesOnce sync.Once
+)
+
+func (p *importer) qualifiedName() (pkg *types.Package, name string) {
+ name = p.string()
+ pkg = p.pkg()
+ return
+}
+
+func (p *importer) record(t types.Type) {
+ p.typList = append(p.typList, t)
+}
+
+// A dddSlice is a types.Type representing ...T parameters.
+// It only appears for parameter types and does not escape
+// the importer.
+type dddSlice struct {
+ elem types.Type
+}
+
+func (t *dddSlice) Underlying() types.Type { return t }
+func (t *dddSlice) String() string { return "..." + t.elem.String() }
+
+// parent is the package which declared the type; parent == nil means
+// the package currently imported. The parent package is needed for
+// exported struct fields and interface methods which don't contain
+// explicit package information in the export data.
+//
+// A non-nil tname is used as the "owner" of the result type; i.e.,
+// the result type is the underlying type of tname. tname is used
+// to give interface methods a named receiver type where possible.
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type {
+ // if the type was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.typList[i]
+ }
+
+ // otherwise, i is the type tag (< 0)
+ switch i {
+ case namedTag:
+ // read type object
+ pos := p.pos()
+ parent, name := p.qualifiedName()
+ scope := parent.Scope()
+ obj := scope.Lookup(name)
+
+ // if the object doesn't exist yet, create and insert it
+ if obj == nil {
+ obj = types.NewTypeName(pos, parent, name, nil)
+ scope.Insert(obj)
+ }
+
+ if _, ok := obj.(*types.TypeName); !ok {
+ errorf("pkg = %s, name = %s => %s", parent, name, obj)
+ }
+
+ // associate new named type with obj if it doesn't exist yet
+ t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
+
+ // but record the existing type, if any
+ tname := obj.Type().(*types.Named) // tname is either t0 or the existing type
+ p.record(tname)
+
+ // read underlying type
+ t0.SetUnderlying(p.typ(parent, t0))
+
+ // interfaces don't have associated methods
+ if types.IsInterface(t0) {
+ return tname
+ }
+
+ // read associated methods
+ for i := p.int(); i > 0; i-- {
+ // TODO(gri) replace this with something closer to fieldName
+ pos := p.pos()
+ name := p.string()
+ if !exported(name) {
+ p.pkg()
+ }
+
+ recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ p.int() // go:nointerface pragma - discarded
+
+ sig := types.NewSignature(recv.At(0), params, result, isddd)
+ t0.AddMethod(types.NewFunc(pos, parent, name, sig))
+ }
+
+ return tname
+
+ case arrayTag:
+ t := new(types.Array)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ n := p.int64()
+ *t = *types.NewArray(p.typ(parent, nil), n)
+ return t
+
+ case sliceTag:
+ t := new(types.Slice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewSlice(p.typ(parent, nil))
+ return t
+
+ case dddTag:
+ t := new(dddSlice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ t.elem = p.typ(parent, nil)
+ return t
+
+ case structTag:
+ t := new(types.Struct)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewStruct(p.fieldList(parent))
+ return t
+
+ case pointerTag:
+ t := new(types.Pointer)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewPointer(p.typ(parent, nil))
+ return t
+
+ case signatureTag:
+ t := new(types.Signature)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ *t = *types.NewSignature(nil, params, result, isddd)
+ return t
+
+ case interfaceTag:
+ // Create a dummy entry in the type list. This is safe because we
+ // cannot expect the interface type to appear in a cycle, as any
+ // such cycle must contain a named type which would have been
+ // first defined earlier.
+ // TODO(gri) Is this still true now that we have type aliases?
+ // See issue #23225.
+ n := len(p.typList)
+ if p.trackAllTypes {
+ p.record(nil)
+ }
+
+ var embeddeds []types.Type
+ for n := p.int(); n > 0; n-- {
+ p.pos()
+ embeddeds = append(embeddeds, p.typ(parent, nil))
+ }
+
+ t := newInterface(p.methodList(parent, tname), embeddeds)
+ p.interfaceList = append(p.interfaceList, t)
+ if p.trackAllTypes {
+ p.typList[n] = t
+ }
+ return t
+
+ case mapTag:
+ t := new(types.Map)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ key := p.typ(parent, nil)
+ val := p.typ(parent, nil)
+ *t = *types.NewMap(key, val)
+ return t
+
+ case chanTag:
+ t := new(types.Chan)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ dir := chanDir(p.int())
+ val := p.typ(parent, nil)
+ *t = *types.NewChan(dir, val)
+ return t
+
+ default:
+ errorf("unexpected type tag %d", i) // panics
+ panic("unreachable")
+ }
+}
+
+func chanDir(d int) types.ChanDir {
+ // tag values must match the constants in cmd/compile/internal/gc/go.go
+ switch d {
+ case 1 /* Crecv */ :
+ return types.RecvOnly
+ case 2 /* Csend */ :
+ return types.SendOnly
+ case 3 /* Cboth */ :
+ return types.SendRecv
+ default:
+ errorf("unexpected channel dir %d", d)
+ return 0
+ }
+}
+
+func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
+ if n := p.int(); n > 0 {
+ fields = make([]*types.Var, n)
+ tags = make([]string, n)
+ for i := range fields {
+ fields[i], tags[i] = p.field(parent)
+ }
+ }
+ return
+}
+
+func (p *importer) field(parent *types.Package) (*types.Var, string) {
+ pos := p.pos()
+ pkg, name, alias := p.fieldName(parent)
+ typ := p.typ(parent, nil)
+ tag := p.string()
+
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+ pkg = nil // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ errorf("named base type expected")
+ }
+ anonymous = true
+ } else if alias {
+ // anonymous field: we have an explicit name because it's an alias
+ anonymous = true
+ }
+
+ return types.NewField(pos, pkg, name, typ, anonymous), tag
+}
+
+func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) {
+ if n := p.int(); n > 0 {
+ methods = make([]*types.Func, n)
+ for i := range methods {
+ methods[i] = p.method(parent, baseType)
+ }
+ }
+ return
+}
+
+func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func {
+ pos := p.pos()
+ pkg, name, _ := p.fieldName(parent)
+ // If we don't have a baseType, use a nil receiver.
+ // A receiver using the actual interface type (which
+ // we don't know yet) will be filled in when we call
+ // types.Interface.Complete.
+ var recv *types.Var
+ if baseType != nil {
+ recv = types.NewVar(token.NoPos, parent, "", baseType)
+ }
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(recv, params, result, isddd)
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
+ name = p.string()
+ pkg = parent
+ if pkg == nil {
+ // use the imported package instead
+ pkg = p.pkgList[0]
+ }
+ if p.version == 0 && name == "_" {
+ // version 0 didn't export a package for _ fields
+ return
+ }
+ switch name {
+ case "":
+ // 1) field name matches base type name and is exported: nothing to do
+ case "?":
+ // 2) field name matches base type name and is not exported: need package
+ name = ""
+ pkg = p.pkg()
+ case "@":
+ // 3) field name doesn't match type name (alias)
+ name = p.string()
+ alias = true
+ fallthrough
+ default:
+ if !exported(name) {
+ pkg = p.pkg()
+ }
+ }
+ return
+}
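+
+// Sketch of the field-name encoding consumed above: "" stands for an
+// exported field whose name equals its base type name, "?" for the same
+// case with an unexported name (the package follows), "@" introduces an
+// explicit alias name, and any other unexported name is followed by its
+// package.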
+
+func (p *importer) paramList() (*types.Tuple, bool) {
+ n := p.int()
+ if n == 0 {
+ return nil, false
+ }
+ // negative length indicates unnamed parameters
+ named := true
+ if n < 0 {
+ n = -n
+ named = false
+ }
+ // n > 0
+ params := make([]*types.Var, n)
+ isddd := false
+ for i := range params {
+ params[i], isddd = p.param(named)
+ }
+ return types.NewTuple(params...), isddd
+}
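+
+// For example, a count of 2 read above means two named parameters (each
+// followed by its name and, unless blank, its package), while -2 means
+// two unnamed parameters; a count of 0 yields a nil tuple.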
+
+func (p *importer) param(named bool) (*types.Var, bool) {
+ t := p.typ(nil, nil)
+ td, isddd := t.(*dddSlice)
+ if isddd {
+ t = types.NewSlice(td.elem)
+ }
+
+ var pkg *types.Package
+ var name string
+ if named {
+ name = p.string()
+ if name == "" {
+ errorf("expected named parameter")
+ }
+ if name != "_" {
+ pkg = p.pkg()
+ }
+ if i := strings.Index(name, "·"); i > 0 {
+ name = name[:i] // cut off gc-specific parameter numbering
+ }
+ }
+
+ // read and discard compiler-specific info
+ p.string()
+
+ return types.NewVar(token.NoPos, pkg, name, t), isddd
+}
+
+func exported(name string) bool {
+ ch, _ := utf8.DecodeRuneInString(name)
+ return unicode.IsUpper(ch)
+}
+
+func (p *importer) value() constant.Value {
+ switch tag := p.tagOrIndex(); tag {
+ case falseTag:
+ return constant.MakeBool(false)
+ case trueTag:
+ return constant.MakeBool(true)
+ case int64Tag:
+ return constant.MakeInt64(p.int64())
+ case floatTag:
+ return p.float()
+ case complexTag:
+ re := p.float()
+ im := p.float()
+ return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+ case stringTag:
+ return constant.MakeString(p.string())
+ case unknownTag:
+ return constant.MakeUnknown()
+ default:
+ errorf("unexpected value tag %d", tag) // panics
+ panic("unreachable")
+ }
+}
+
+func (p *importer) float() constant.Value {
+ sign := p.int()
+ if sign == 0 {
+ return constant.MakeInt64(0)
+ }
+
+ exp := p.int()
+ mant := []byte(p.string()) // big endian
+
+ // remove leading 0's if any
+ for len(mant) > 0 && mant[0] == 0 {
+ mant = mant[1:]
+ }
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
+ mant[i], mant[j] = mant[j], mant[i]
+ }
+
+ // adjust exponent (constant.MakeFromBytes creates an integer value,
+ // but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
+ exp -= len(mant) << 3
+ if len(mant) > 0 {
+ for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
+ exp++
+ }
+ }
+
+ x := constant.MakeFromBytes(mant)
+ switch {
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ }
+
+ if sign < 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
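+
+// Worked example for the decoder above: sign = +1, exp = 1 and the single
+// mantissa byte 0x80 decode as the integer 128 with the exponent adjusted
+// to 1-8 = -7, i.e. 128 / 2**7 = 1.0 (the mantissa bytes always represent
+// a value m with 0.5 <= m < 1.0).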
+
+// ----------------------------------------------------------------------------
+// Low-level decoders
+
+func (p *importer) tagOrIndex() int {
+ if p.debugFormat {
+ p.marker('t')
+ }
+
+ return int(p.rawInt64())
+}
+
+func (p *importer) int() int {
+ x := p.int64()
+ if int64(int(x)) != x {
+ errorf("exported integer too large")
+ }
+ return int(x)
+}
+
+func (p *importer) int64() int64 {
+ if p.debugFormat {
+ p.marker('i')
+ }
+
+ return p.rawInt64()
+}
+
+func (p *importer) path() string {
+ if p.debugFormat {
+ p.marker('p')
+ }
+ // if the path was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.pathList[i]
+ }
+ // otherwise, i is the negative path length (< 0)
+ a := make([]string, -i)
+ for n := range a {
+ a[n] = p.string()
+ }
+ s := strings.Join(a, "/")
+ p.pathList = append(p.pathList, s)
+ return s
+}
+
+func (p *importer) string() string {
+ if p.debugFormat {
+ p.marker('s')
+ }
+ // if the string was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.strList[i]
+ }
+ // otherwise, i is the negative string length (< 0)
+ if n := int(-i); n <= cap(p.buf) {
+ p.buf = p.buf[:n]
+ } else {
+ p.buf = make([]byte, n)
+ }
+ for i := range p.buf {
+ p.buf[i] = p.rawByte()
+ }
+ s := string(p.buf)
+ p.strList = append(p.strList, s)
+ return s
+}
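+
+// For example, the first occurrence of "foo" arrives as length -3 followed
+// by the bytes 'f', 'o', 'o' and is appended to p.strList; any later
+// occurrence is encoded as its (non-negative) index into that list.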
+
+func (p *importer) marker(want byte) {
+ if got := p.rawByte(); got != want {
+ errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
+ }
+
+ pos := p.read
+ if n := int(p.rawInt64()); n != pos {
+ errorf("incorrect position: got %d; want %d", n, pos)
+ }
+}
+
+// rawInt64 should only be used by low-level decoders.
+func (p *importer) rawInt64() int64 {
+ i, err := binary.ReadVarint(p)
+ if err != nil {
+ errorf("read error: %v", err)
+ }
+ return i
+}
+
+// rawStringln should only be used to read the initial version string.
+func (p *importer) rawStringln(b byte) string {
+ p.buf = p.buf[:0]
+ for b != '\n' {
+ p.buf = append(p.buf, b)
+ b = p.rawByte()
+ }
+ return string(p.buf)
+}
+
+// needed for binary.ReadVarint in rawInt64
+func (p *importer) ReadByte() (byte, error) {
+ return p.rawByte(), nil
+}
+
+// rawByte is the bottleneck for reading p.data.
+// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
+// rawByte should only be used by low-level decoders.
+func (p *importer) rawByte() byte {
+ b := p.data[0]
+ r := 1
+ if b == '|' {
+ b = p.data[1]
+ r = 2
+ switch b {
+ case 'S':
+ b = '$'
+ case '|':
+ // nothing to do
+ default:
+ errorf("unexpected escape sequence in export data")
+ }
+ }
+ p.data = p.data[r:]
+ p.read += r
+ return b
+}
+
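+// For example, the escaped byte pair '|' 'S' in p.data yields the single
+// byte '$', and '|' '|' yields '|'; any other byte after '|' is an error.
+// All other bytes are returned unchanged.
+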
+// ----------------------------------------------------------------------------
+// Export format
+
+// Tags. Must be < 0.
+const (
+ // Objects
+ packageTag = -(iota + 1)
+ constTag
+ typeTag
+ varTag
+ funcTag
+ endTag
+
+ // Types
+ namedTag
+ arrayTag
+ sliceTag
+ dddTag
+ structTag
+ pointerTag
+ signatureTag
+ interfaceTag
+ mapTag
+ chanTag
+
+ // Values
+ falseTag
+ trueTag
+ int64Tag
+ floatTag
+ fractionTag // not used by gc
+ complexTag
+ stringTag
+ nilTag // only used by gc (appears in exported inlined function bodies)
+ unknownTag // not used by gc (only appears in packages with errors)
+
+ // Type aliases
+ aliasTag
+)
+
+var predecl []types.Type // initialized lazily
+
+func predeclared() []types.Type {
+ if predecl == nil {
+ // initialize lazily to be sure that all
+ // elements have been initialized before
+ predecl = []types.Type{ // basic types
+ types.Typ[types.Bool],
+ types.Typ[types.Int],
+ types.Typ[types.Int8],
+ types.Typ[types.Int16],
+ types.Typ[types.Int32],
+ types.Typ[types.Int64],
+ types.Typ[types.Uint],
+ types.Typ[types.Uint8],
+ types.Typ[types.Uint16],
+ types.Typ[types.Uint32],
+ types.Typ[types.Uint64],
+ types.Typ[types.Uintptr],
+ types.Typ[types.Float32],
+ types.Typ[types.Float64],
+ types.Typ[types.Complex64],
+ types.Typ[types.Complex128],
+ types.Typ[types.String],
+
+ // basic type aliases
+ types.Universe.Lookup("byte").Type(),
+ types.Universe.Lookup("rune").Type(),
+
+ // error
+ types.Universe.Lookup("error").Type(),
+
+ // untyped types
+ types.Typ[types.UntypedBool],
+ types.Typ[types.UntypedInt],
+ types.Typ[types.UntypedRune],
+ types.Typ[types.UntypedFloat],
+ types.Typ[types.UntypedComplex],
+ types.Typ[types.UntypedString],
+ types.Typ[types.UntypedNil],
+
+ // package unsafe
+ types.Typ[types.UnsafePointer],
+
+ // invalid type
+ types.Typ[types.Invalid], // only appears in packages with errors
+
+ // used internally by gc; never used by this package or in .a files
+ anyType{},
+ }
+ }
+ return predecl
+}
+
+type anyType struct{}
+
+func (t anyType) Underlying() types.Type { return t }
+func (t anyType) String() string { return "any" }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
new file mode 100644
index 0000000..f33dc56
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
@@ -0,0 +1,93 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
+
+// This file implements FindExportData.
+
+package gcimporter
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+)
+
+func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
+ // See $GOROOT/include/ar.h.
+ hdr := make([]byte, 16+12+6+6+8+10+2)
+ _, err = io.ReadFull(r, hdr)
+ if err != nil {
+ return
+ }
+ // leave for debugging
+ if false {
+ fmt.Printf("header: %s", hdr)
+ }
+ s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
+ size, err = strconv.Atoi(s)
+ if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
+ err = fmt.Errorf("invalid archive header")
+ return
+ }
+ name = strings.TrimSpace(string(hdr[:16]))
+ return
+}
+
+// FindExportData positions the reader r at the beginning of the
+// export data section of an underlying GC-created object/archive
+// file by reading from it. The reader must be positioned at the
+// start of the file before calling this function. The hdr result
+// is the line before the export data, either "$$\n" or "$$B\n".
+//
+func FindExportData(r *bufio.Reader) (hdr string, err error) {
+ // Read first line to make sure this is an object file.
+ line, err := r.ReadSlice('\n')
+ if err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+
+ if string(line) == "!\n" {
+ // Archive file. Scan to __.PKGDEF.
+ var name string
+ if name, _, err = readGopackHeader(r); err != nil {
+ return
+ }
+
+ // First entry should be __.PKGDEF.
+ if name != "__.PKGDEF" {
+ err = fmt.Errorf("go archive is missing __.PKGDEF")
+ return
+ }
+
+ // Read first line of __.PKGDEF data, so that line
+ // is once again the first line of the input.
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+
+ // Now at __.PKGDEF in archive or still at beginning of file.
+ // Either way, line should begin with "go object ".
+ if !strings.HasPrefix(string(line), "go object ") {
+ err = fmt.Errorf("not a Go object file")
+ return
+ }
+
+ // Skip over object header to export data.
+ // Begins after first line starting with $$.
+ for line[0] != '$' {
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+ hdr = string(line)
+
+ return
+}
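+
+// A caller would typically use FindExportData roughly like this (sketch,
+// error handling elided):
+//
+//	f, _ := os.Open(filename) // an object (.o) or archive (.a) file
+//	buf := bufio.NewReader(f)
+//	hdr, _ := FindExportData(buf)
+//	// hdr is the "$$..." line; buf now points at the export data.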
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
new file mode 100644
index 0000000..9cf1866
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -0,0 +1,1078 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
+// but it also contains the original source-based importer code for Go1.6.
+// Once we stop supporting 1.6, we can remove that code.
+
+// Package gcimporter provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "go/build"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "text/scanner"
+)
+
+// debugging/development support
+const debug = false
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+//
+func FindPkg(path, srcDir string) (filename, id string) {
+ if path == "" {
+ return
+ }
+
+ var noext string
+ switch {
+ default:
+ // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+ // Don't require the source files to be present.
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+ srcDir = abs
+ }
+ bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+ if bp.PkgObj == "" {
+ id = path // make sure we have an id to print in error message
+ return
+ }
+ noext = strings.TrimSuffix(bp.PkgObj, ".a")
+ id = bp.ImportPath
+
+ case build.IsLocalImport(path):
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ noext = filepath.Join(srcDir, path)
+ id = noext
+
+ case filepath.IsAbs(path):
+ // for completeness only - go/build.Import
+ // does not support absolute imports
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ id = path
+ }
+
+ if false { // for debugging
+ if path != id {
+ fmt.Printf("%s -> %s\n", path, id)
+ }
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+// ImportData imports a package by reading the gc-generated export data,
+// adds the corresponding package object to the packages map indexed by id,
+// and returns the object.
+//
+// The packages map must contain all packages already imported. The data
+// reader position must be the beginning of the export data section. The
+// filename is only used in error messages.
+//
+// If packages[id] contains the completely imported package, that package
+// can be used directly, and there is no need to call this function (though
+// doing so is harmless apart from the extra time spent).
+//
+func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
+ // support for parser error handling
+ defer func() {
+ switch r := recover().(type) {
+ case nil:
+ // nothing to do
+ case importError:
+ err = r
+ default:
+ panic(r) // internal error
+ }
+ }()
+
+ var p parser
+ p.init(filename, id, data, packages)
+ pkg = p.parseExport()
+
+ return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+//
+func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
+ var rc io.ReadCloser
+ var filename, id string
+ if lookup != nil {
+ // With custom lookup specified, assume that caller has
+ // converted path to a canonical import path for use in the map.
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ id = path
+
+ // No need to re-import if the package was imported completely before.
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+ f, err := lookup(path)
+ if err != nil {
+ return nil, err
+ }
+ rc = f
+ } else {
+ filename, id = FindPkg(path, srcDir)
+ if filename == "" {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %q", id)
+ }
+
+ // no need to re-import if the package was imported completely before
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("%s: %v", filename, err)
+ }
+ }()
+ rc = f
+ }
+ defer rc.Close()
+
+ var hdr string
+ buf := bufio.NewReader(rc)
+ if hdr, err = FindExportData(buf); err != nil {
+ return
+ }
+
+ switch hdr {
+ case "$$\n":
+ // Work-around if we don't have a filename; happens only if lookup != nil.
+ // Either way, the filename is only needed for importer error messages, so
+ // this is fine.
+ if filename == "" {
+ filename = path
+ }
+ return ImportData(packages, filename, id, buf)
+
+ case "$$B\n":
+ var data []byte
+ data, err = ioutil.ReadAll(buf)
+ if err != nil {
+ break
+ }
+
+ // TODO(gri): allow clients of go/importer to provide a FileSet.
+ // Or, define a new standard go/types/gcexportdata package.
+ fset := token.NewFileSet()
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err = IImportData(fset, packages, data[1:], id)
+ } else {
+ _, pkg, err = BImportData(fset, packages, data, id)
+ }
+
+ default:
+ err = fmt.Errorf("unknown export data header: %q", hdr)
+ }
+
+ return
+}
+
+// ----------------------------------------------------------------------------
+// Parser
+
+// TODO(gri) Imported objects don't have position information.
+// Ideally use the debug table line info; alternatively
+// create some fake position (or the position of the
+// import). That way error messages referring to imported
+// objects can print meaningful information.
+
+// parser parses the exports inside a gc compiler-produced
+// object/archive file and populates its scope with the results.
+type parser struct {
+ scanner scanner.Scanner
+ tok rune // current token
+ lit string // literal string; only valid for Ident, Int, String tokens
+ id string // package id of imported package
+ sharedPkgs map[string]*types.Package // package id -> package object (across importer)
+ localPkgs map[string]*types.Package // package id -> package object (just this package)
+}
+
+func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
+ p.scanner.Init(src)
+ p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+ p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
+ p.scanner.Whitespace = 1<<'\t' | 1<<' '
+ p.scanner.Filename = filename // for good error messages
+ p.next()
+ p.id = id
+ p.sharedPkgs = packages
+ if debug {
+ // check consistency of packages map
+ for _, pkg := range packages {
+ if pkg.Name() == "" {
+ fmt.Printf("no package name for %s\n", pkg.Path())
+ }
+ }
+ }
+}
+
+func (p *parser) next() {
+ p.tok = p.scanner.Scan()
+ switch p.tok {
+ case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
+ p.lit = p.scanner.TokenText()
+ default:
+ p.lit = ""
+ }
+ if debug {
+ fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
+ }
+}
+
+func declTypeName(pkg *types.Package, name string) *types.TypeName {
+ scope := pkg.Scope()
+ if obj := scope.Lookup(name); obj != nil {
+ return obj.(*types.TypeName)
+ }
+ obj := types.NewTypeName(token.NoPos, pkg, name, nil)
+ // a named type may be referred to before the underlying type
+ // is known - set it up
+ types.NewNamed(obj, nil, nil)
+ scope.Insert(obj)
+ return obj
+}
+
+// ----------------------------------------------------------------------------
+// Error handling
+
+// Internal errors are boxed as importErrors.
+type importError struct {
+ pos scanner.Position
+ err error
+}
+
+func (e importError) Error() string {
+ return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
+}
+
+func (p *parser) error(err interface{}) {
+ if s, ok := err.(string); ok {
+ err = errors.New(s)
+ }
+ // panic with a runtime.Error if err is not an error
+ panic(importError{p.scanner.Pos(), err.(error)})
+}
+
+func (p *parser) errorf(format string, args ...interface{}) {
+ p.error(fmt.Sprintf(format, args...))
+}
+
+func (p *parser) expect(tok rune) string {
+ lit := p.lit
+ if p.tok != tok {
+ p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
+ }
+ p.next()
+ return lit
+}
+
+func (p *parser) expectSpecial(tok string) {
+ sep := 'x' // not white space
+ i := 0
+ for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ i++
+ }
+ if i < len(tok) {
+ p.errorf("expected %q, got %q", tok, tok[0:i])
+ }
+}
+
+func (p *parser) expectKeyword(keyword string) {
+ lit := p.expect(scanner.Ident)
+ if lit != keyword {
+ p.errorf("expected keyword %s, got %q", keyword, lit)
+ }
+}
+
+// ----------------------------------------------------------------------------
+// Qualified and unqualified names
+
+// PackageId = string_lit .
+//
+func (p *parser) parsePackageId() string {
+ id, err := strconv.Unquote(p.expect(scanner.String))
+ if err != nil {
+ p.error(err)
+ }
+ // id == "" stands for the imported package id
+ // (only known at time of package installation)
+ if id == "" {
+ id = p.id
+ }
+ return id
+}
+
+// PackageName = ident .
+//
+func (p *parser) parsePackageName() string {
+ return p.expect(scanner.Ident)
+}
+
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+func (p *parser) parseDotIdent() string {
+ ident := ""
+ if p.tok != scanner.Int {
+ sep := 'x' // not white space
+ for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
+ ident += p.lit
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ }
+ }
+ if ident == "" {
+ p.expect(scanner.Ident) // use expect() for error handling
+ }
+ return ident
+}
+
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+//
+func (p *parser) parseQualifiedName() (id, name string) {
+ p.expect('@')
+ id = p.parsePackageId()
+ p.expect('.')
+ // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
+ if p.tok == '?' {
+ p.next()
+ } else {
+ name = p.parseDotIdent()
+ }
+ return
+}
+
+// getPkg returns the package for a given id. If the package is
+// not found, create the package and add it to the p.localPkgs
+// and p.sharedPkgs maps. name is the (expected) name of the
+// package. If name == "", the package name is expected to be
+// set later via an import clause in the export data.
+//
+// id identifies a package, usually by a canonical package path like
+// "encoding/json" but possibly by a non-canonical import path like
+// "./json".
+//
+func (p *parser) getPkg(id, name string) *types.Package {
+ // package unsafe is not in the packages maps - handle explicitly
+ if id == "unsafe" {
+ return types.Unsafe
+ }
+
+ pkg := p.localPkgs[id]
+ if pkg == nil {
+ // first import of id from this package
+ pkg = p.sharedPkgs[id]
+ if pkg == nil {
+ // first import of id by this importer;
+ // add (possibly unnamed) pkg to shared packages
+ pkg = types.NewPackage(id, name)
+ p.sharedPkgs[id] = pkg
+ }
+ // add (possibly unnamed) pkg to local packages
+ if p.localPkgs == nil {
+ p.localPkgs = make(map[string]*types.Package)
+ }
+ p.localPkgs[id] = pkg
+ } else if name != "" {
+ // package exists already and we have an expected package name;
+ // make sure names match or set package name if necessary
+ if pname := pkg.Name(); pname == "" {
+ pkg.SetName(name)
+ } else if pname != name {
+ p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
+ }
+ }
+ return pkg
+}
+
+// parseExportedName is like parseQualifiedName, but
+// the package id is resolved to an imported *types.Package.
+//
+func (p *parser) parseExportedName() (pkg *types.Package, name string) {
+ id, name := p.parseQualifiedName()
+ pkg = p.getPkg(id, "")
+ return
+}
+
+// ----------------------------------------------------------------------------
+// Types
+
+// BasicType = identifier .
+//
+func (p *parser) parseBasicType() types.Type {
+ id := p.expect(scanner.Ident)
+ obj := types.Universe.Lookup(id)
+ if obj, ok := obj.(*types.TypeName); ok {
+ return obj.Type()
+ }
+ p.errorf("not a basic type: %s", id)
+ return nil
+}
+
+// ArrayType = "[" int_lit "]" Type .
+//
+func (p *parser) parseArrayType(parent *types.Package) types.Type {
+ // "[" already consumed and lookahead known not to be "]"
+ lit := p.expect(scanner.Int)
+ p.expect(']')
+ elem := p.parseType(parent)
+ n, err := strconv.ParseInt(lit, 10, 64)
+ if err != nil {
+ p.error(err)
+ }
+ return types.NewArray(elem, n)
+}
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *parser) parseMapType(parent *types.Package) types.Type {
+ p.expectKeyword("map")
+ p.expect('[')
+ key := p.parseType(parent)
+ p.expect(']')
+ elem := p.parseType(parent)
+ return types.NewMap(key, elem)
+}
+
+// Name = identifier | "?" | QualifiedName .
+//
+// For unqualified and anonymous names, the returned package is the parent
+// package unless parent == nil, in which case the returned package is the
+// package being imported. (The parent package is not nil if the name
+// is an unqualified struct field or interface method name belonging to a
+// type declared in another package.)
+//
+// For qualified names, the returned package is nil (and not created if
+// it doesn't exist yet) unless materializePkg is set (which creates an
+// unnamed package with valid package path). In the latter case, a
+// subsequent import clause is expected to provide a name for the package.
+//
+func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
+ pkg = parent
+ if pkg == nil {
+ pkg = p.sharedPkgs[p.id]
+ }
+ switch p.tok {
+ case scanner.Ident:
+ name = p.lit
+ p.next()
+ case '?':
+ // anonymous
+ p.next()
+ case '@':
+ // exported name prefixed with package path
+ pkg = nil
+ var id string
+ id, name = p.parseQualifiedName()
+ if materializePkg {
+ pkg = p.getPkg(id, "")
+ }
+ default:
+ p.error("name expected")
+ }
+ return
+}
+
+func deref(typ types.Type) types.Type {
+ if p, _ := typ.(*types.Pointer); p != nil {
+ return p.Elem()
+ }
+ return typ
+}
+
+// Field = Name Type [ string_lit ] .
+//
+func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
+ pkg, name := p.parseName(parent, true)
+
+ if name == "_" {
+ // Blank fields should be package-qualified because they
+ // are unexported identifiers, but gc does not qualify them.
+ // Assuming that the ident belongs to the current package
+ // causes types to change during re-exporting, leading
+ // to spurious "can't assign A to B" errors from go/types.
+ // As a workaround, pretend all blank fields belong
+ // to the same unique dummy package.
+ const blankpkg = "<_>"
+ pkg = p.getPkg(blankpkg, blankpkg)
+ }
+
+ typ := p.parseType(parent)
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+ pkg = nil // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ p.errorf("anonymous field expected")
+ }
+ anonymous = true
+ }
+ tag := ""
+ if p.tok == scanner.String {
+ s := p.expect(scanner.String)
+ var err error
+ tag, err = strconv.Unquote(s)
+ if err != nil {
+ p.errorf("invalid struct tag %s: %s", s, err)
+ }
+ }
+ return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
+}
+
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
+//
+func (p *parser) parseStructType(parent *types.Package) types.Type {
+ var fields []*types.Var
+ var tags []string
+
+ p.expectKeyword("struct")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ fld, tag := p.parseField(parent)
+ if tag != "" && tags == nil {
+ tags = make([]string, i)
+ }
+ if tags != nil {
+ tags = append(tags, tag)
+ }
+ fields = append(fields, fld)
+ }
+ p.expect('}')
+
+ return types.NewStruct(fields, tags)
+}
+
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+//
+func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
+ _, name := p.parseName(nil, false)
+ // remove gc-specific parameter numbering
+ if i := strings.Index(name, "·"); i >= 0 {
+ name = name[:i]
+ }
+ if p.tok == '.' {
+ p.expectSpecial("...")
+ isVariadic = true
+ }
+ typ := p.parseType(nil)
+ if isVariadic {
+ typ = types.NewSlice(typ)
+ }
+ // ignore argument tag (e.g. "noescape")
+ if p.tok == scanner.String {
+ p.next()
+ }
+ // TODO(gri) should we provide a package?
+ par = types.NewVar(token.NoPos, nil, name, typ)
+ return
+}
+
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
+//
+func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
+ p.expect('(')
+ for p.tok != ')' && p.tok != scanner.EOF {
+ if len(list) > 0 {
+ p.expect(',')
+ }
+ par, variadic := p.parseParameter()
+ list = append(list, par)
+ if variadic {
+ if isVariadic {
+ p.error("... not on final argument")
+ }
+ isVariadic = true
+ }
+ }
+ p.expect(')')
+
+ return
+}
+
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
+//
+func (p *parser) parseSignature(recv *types.Var) *types.Signature {
+ params, isVariadic := p.parseParameters()
+
+ // optional result type
+ var results []*types.Var
+ if p.tok == '(' {
+ var variadic bool
+ results, variadic = p.parseParameters()
+ if variadic {
+ p.error("... not permitted on result type")
+ }
+ }
+
+ return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
+}
+
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = Method { ";" Method } .
+// Method = Name Signature .
+//
+// The methods of embedded interfaces are always "inlined"
+// by the compiler and thus embedded interfaces are never
+// visible in the export data.
+//
+func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
+ var methods []*types.Func
+
+ p.expectKeyword("interface")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ pkg, name := p.parseName(parent, true)
+ sig := p.parseSignature(nil)
+ methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
+ }
+ p.expect('}')
+
+ // Complete requires the type's embedded interfaces to be fully defined,
+ // but we do not define any
+ return types.NewInterface(methods, nil).Complete()
+}
+
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+//
+func (p *parser) parseChanType(parent *types.Package) types.Type {
+ dir := types.SendRecv
+ if p.tok == scanner.Ident {
+ p.expectKeyword("chan")
+ if p.tok == '<' {
+ p.expectSpecial("<-")
+ dir = types.SendOnly
+ }
+ } else {
+ p.expectSpecial("<-")
+ p.expectKeyword("chan")
+ dir = types.RecvOnly
+ }
+ elem := p.parseType(parent)
+ return types.NewChan(dir, elem)
+}
+
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
+//
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType = "func" Signature .
+//
+func (p *parser) parseType(parent *types.Package) types.Type {
+ switch p.tok {
+ case scanner.Ident:
+ switch p.lit {
+ default:
+ return p.parseBasicType()
+ case "struct":
+ return p.parseStructType(parent)
+ case "func":
+ // FuncType
+ p.next()
+ return p.parseSignature(nil)
+ case "interface":
+ return p.parseInterfaceType(parent)
+ case "map":
+ return p.parseMapType(parent)
+ case "chan":
+ return p.parseChanType(parent)
+ }
+ case '@':
+ // TypeName
+ pkg, name := p.parseExportedName()
+ return declTypeName(pkg, name).Type()
+ case '[':
+ p.next() // look ahead
+ if p.tok == ']' {
+ // SliceType
+ p.next()
+ return types.NewSlice(p.parseType(parent))
+ }
+ return p.parseArrayType(parent)
+ case '*':
+ // PointerType
+ p.next()
+ return types.NewPointer(p.parseType(parent))
+ case '<':
+ return p.parseChanType(parent)
+ case '(':
+ // "(" Type ")"
+ p.next()
+ typ := p.parseType(parent)
+ p.expect(')')
+ return typ
+ }
+ p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+ return nil
+}
+
+// ----------------------------------------------------------------------------
+// Declarations
+
+// ImportDecl = "import" PackageName PackageId .
+//
+func (p *parser) parseImportDecl() {
+ p.expectKeyword("import")
+ name := p.parsePackageName()
+ p.getPkg(p.parsePackageId(), name)
+}
+
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+//
+func (p *parser) parseInt() string {
+ s := ""
+ switch p.tok {
+ case '-':
+ s = "-"
+ p.next()
+ case '+':
+ p.next()
+ }
+ return s + p.expect(scanner.Int)
+}
+
+// number = int_lit [ "p" int_lit ] .
+//
+func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
+ // mantissa
+ mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
+ if mant == nil {
+ panic("invalid mantissa")
+ }
+
+ if p.lit == "p" {
+ // exponent (base 2)
+ p.next()
+ exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
+ if err != nil {
+ p.error(err)
+ }
+ if exp < 0 {
+ denom := constant.MakeInt64(1)
+ denom = constant.Shift(denom, token.SHL, uint(-exp))
+ typ = types.Typ[types.UntypedFloat]
+ val = constant.BinaryOp(mant, token.QUO, denom)
+ return
+ }
+ if exp > 0 {
+ mant = constant.Shift(mant, token.SHL, uint(exp))
+ }
+ typ = types.Typ[types.UntypedFloat]
+ val = mant
+ return
+ }
+
+ typ = types.Typ[types.UntypedInt]
+ val = mant
+ return
+}
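+
+// For example, the literal "2p-1" parses as mantissa 2 with base-2
+// exponent -1 and therefore yields the untyped float constant 1, while a
+// bare "3" yields the untyped integer constant 3.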
+
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit = "(" int_lit "+" int_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
+//
+func (p *parser) parseConstDecl() {
+ p.expectKeyword("const")
+ pkg, name := p.parseExportedName()
+
+ var typ0 types.Type
+ if p.tok != '=' {
+ // constant types are never structured - no need for parent type
+ typ0 = p.parseType(nil)
+ }
+
+ p.expect('=')
+ var typ types.Type
+ var val constant.Value
+ switch p.tok {
+ case scanner.Ident:
+ // bool_lit
+ if p.lit != "true" && p.lit != "false" {
+ p.error("expected true or false")
+ }
+ typ = types.Typ[types.UntypedBool]
+ val = constant.MakeBool(p.lit == "true")
+ p.next()
+
+ case '-', scanner.Int:
+ // int_lit
+ typ, val = p.parseNumber()
+
+ case '(':
+ // complex_lit or rune_lit
+ p.next()
+ if p.tok == scanner.Char {
+ p.next()
+ p.expect('+')
+ typ = types.Typ[types.UntypedRune]
+ _, val = p.parseNumber()
+ p.expect(')')
+ break
+ }
+ _, re := p.parseNumber()
+ p.expect('+')
+ _, im := p.parseNumber()
+ p.expectKeyword("i")
+ p.expect(')')
+ typ = types.Typ[types.UntypedComplex]
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ case scanner.Char:
+ // rune_lit
+ typ = types.Typ[types.UntypedRune]
+ val = constant.MakeFromLiteral(p.lit, token.CHAR, 0)
+ p.next()
+
+ case scanner.String:
+ // string_lit
+ typ = types.Typ[types.UntypedString]
+ val = constant.MakeFromLiteral(p.lit, token.STRING, 0)
+ p.next()
+
+ default:
+ p.errorf("expected literal got %s", scanner.TokenString(p.tok))
+ }
+
+ if typ0 == nil {
+ typ0 = typ
+ }
+
+ pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
+}
+
+// TypeDecl = "type" ExportedName Type .
+//
+func (p *parser) parseTypeDecl() {
+ p.expectKeyword("type")
+ pkg, name := p.parseExportedName()
+ obj := declTypeName(pkg, name)
+
+ // The type object may have been imported before and thus already
+ // have a type associated with it. We still need to parse the type
+ // structure, but throw it away if the object already has a type.
+ // This ensures that all imports refer to the same type object for
+ // a given type declaration.
+ typ := p.parseType(pkg)
+
+ if name := obj.Type().(*types.Named); name.Underlying() == nil {
+ name.SetUnderlying(typ)
+ }
+}
+
+// VarDecl = "var" ExportedName Type .
+//
+func (p *parser) parseVarDecl() {
+ p.expectKeyword("var")
+ pkg, name := p.parseExportedName()
+ typ := p.parseType(pkg)
+ pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
+}
+
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
+//
+func (p *parser) parseFunc(recv *types.Var) *types.Signature {
+ sig := p.parseSignature(recv)
+ if p.tok == '{' {
+ p.next()
+ for i := 1; i > 0; p.next() {
+ switch p.tok {
+ case '{':
+ i++
+ case '}':
+ i--
+ }
+ }
+ }
+ return sig
+}
+
+// MethodDecl = "func" Receiver Name Func .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+//
+func (p *parser) parseMethodDecl() {
+ // "func" already consumed
+ p.expect('(')
+ recv, _ := p.parseParameter() // receiver
+ p.expect(')')
+
+ // determine receiver base type object
+ base := deref(recv.Type()).(*types.Named)
+
+ // parse method name, signature, and possibly inlined body
+ _, name := p.parseName(nil, false)
+ sig := p.parseFunc(recv)
+
+ // methods always belong to the same package as the base type object
+ pkg := base.Obj().Pkg()
+
+ // add method to type unless type was imported before
+ // and method exists already
+ // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
+ base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+}
+
+// FuncDecl = "func" ExportedName Func .
+//
+func (p *parser) parseFuncDecl() {
+ // "func" already consumed
+ pkg, name := p.parseExportedName()
+ typ := p.parseFunc(nil)
+ pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
+}
+
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+//
+func (p *parser) parseDecl() {
+ if p.tok == scanner.Ident {
+ switch p.lit {
+ case "import":
+ p.parseImportDecl()
+ case "const":
+ p.parseConstDecl()
+ case "type":
+ p.parseTypeDecl()
+ case "var":
+ p.parseVarDecl()
+ case "func":
+ p.next() // look ahead
+ if p.tok == '(' {
+ p.parseMethodDecl()
+ } else {
+ p.parseFuncDecl()
+ }
+ }
+ }
+ p.expect('\n')
+}
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
+//
+func (p *parser) parseExport() *types.Package {
+ p.expectKeyword("package")
+ name := p.parsePackageName()
+ if p.tok == scanner.Ident && p.lit == "safe" {
+ // package was compiled with -u option - ignore
+ p.next()
+ }
+ p.expect('\n')
+
+ pkg := p.getPkg(p.id, name)
+
+ for p.tok != '$' && p.tok != scanner.EOF {
+ p.parseDecl()
+ }
+
+ if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+ // don't call next()/expect() since reading past the
+ // export data may cause scanner errors (e.g. NUL chars)
+ p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+ }
+
+ if n := p.scanner.ErrorCount; n != 0 {
+ p.errorf("expected no scanner errors, got %d", n)
+ }
+
+ // Record all locally referenced packages as imports.
+ var imports []*types.Package
+ for id, pkg2 := range p.localPkgs {
+ if pkg2.Name() == "" {
+ p.errorf("%s package has no name", id)
+ }
+ if id == p.id {
+ continue // avoid self-edge
+ }
+ imports = append(imports, pkg2)
+ }
+ sort.Sort(byPath(imports))
+ pkg.SetImports(imports)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return pkg
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int { return len(a) }
+func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
new file mode 100644
index 0000000..be671c7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
@@ -0,0 +1,723 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
+// see that file for specification of the format.
+
+// +build go1.11
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "math/big"
+ "reflect"
+ "sort"
+)
+
+// Current indexed export format version. Increase with each format change.
+// 0: Go1.11 encoding
+const iexportVersion = 0
+
+// IExportData returns the binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+
+ p := iexporter{
+ out: bytes.NewBuffer(nil),
+ fset: fset,
+ allPkgs: map[*types.Package]bool{},
+ stringIndex: map[string]uint64{},
+ declIndex: map[types.Object]uint64{},
+ typIndex: map[types.Type]uint64{},
+ }
+
+ for i, pt := range predeclared() {
+ p.typIndex[pt] = uint64(i)
+ }
+ if len(p.typIndex) > predeclReserved {
+ panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved))
+ }
+
+ // Initialize work queue with exported declarations.
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if ast.IsExported(name) {
+ p.pushDecl(scope.Lookup(name))
+ }
+ }
+
+ // Loop until no more work.
+ for !p.declTodo.empty() {
+ p.doDecl(p.declTodo.popHead())
+ }
+
+ // Append indices to data0 section.
+ dataLen := uint64(p.data0.Len())
+ w := p.newWriter()
+ w.writeIndex(p.declIndex, pkg)
+ w.flush()
+
+ // Assemble header.
+ var hdr intWriter
+ hdr.WriteByte('i')
+ hdr.uint64(iexportVersion)
+ hdr.uint64(uint64(p.strings.Len()))
+ hdr.uint64(dataLen)
+
+ // Flush output.
+ io.Copy(p.out, &hdr)
+ io.Copy(p.out, &p.strings)
+ io.Copy(p.out, &p.data0)
+
+ return p.out.Bytes(), nil
+}
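+
+// The stream assembled above is laid out as
+//
+//	'i' | version | strings length | declaration data length |
+//	string section | declaration data | main index
+//
+// Note that the declaration-data length recorded in the header is taken
+// before writeIndex runs, so a reader can use it to find where the index
+// starts within the final section.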
+
+// writeIndex writes out the main object index for localpkg. The main
+// index is also read by non-compiler tools and therefore includes a
+// complete package description (i.e., name and height) for every
+// referenced package.
+func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types.Package) {
+ // Build a map from packages to objects from that package.
+ pkgObjs := map[*types.Package][]types.Object{}
+
+ // For the main index, make sure to include every package that
+ // we reference, even if we're not exporting (or reexporting)
+ // any symbols from it.
+ pkgObjs[localpkg] = nil
+ for pkg := range w.p.allPkgs {
+ pkgObjs[pkg] = nil
+ }
+
+ for obj := range index {
+ pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
+ }
+
+ var pkgs []*types.Package
+ for pkg, objs := range pkgObjs {
+ pkgs = append(pkgs, pkg)
+
+ sort.Slice(objs, func(i, j int) bool {
+ return objs[i].Name() < objs[j].Name()
+ })
+ }
+
+ sort.Slice(pkgs, func(i, j int) bool {
+ return pkgs[i].Path() < pkgs[j].Path()
+ })
+
+ w.uint64(uint64(len(pkgs)))
+ for _, pkg := range pkgs {
+ w.string(pkg.Path())
+ w.string(pkg.Name())
+ w.uint64(uint64(0)) // package height is not needed for go/types
+
+ objs := pkgObjs[pkg]
+ w.uint64(uint64(len(objs)))
+ for _, obj := range objs {
+ w.string(obj.Name())
+ w.uint64(index[obj])
+ }
+ }
+}
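+
+// The index written above has the shape
+//
+//	number of packages
+//	for each package (sorted by path): path, name, height (always 0 here),
+//	number of objects, then for each object: name, declaration offset
+//
+// with objects sorted by name within their package.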
+
+type iexporter struct {
+ fset *token.FileSet
+ out *bytes.Buffer
+
+ // allPkgs tracks all packages that have been referenced by
+ // the export data, so we can ensure to include them in the
+ // main index.
+ allPkgs map[*types.Package]bool
+
+ declTodo objQueue
+
+ strings intWriter
+ stringIndex map[string]uint64
+
+ data0 intWriter
+ declIndex map[types.Object]uint64
+ typIndex map[types.Type]uint64
+}
+
+// stringOff returns the offset of s within the string section.
+// If not already present, it's added to the end.
+func (p *iexporter) stringOff(s string) uint64 {
+ off, ok := p.stringIndex[s]
+ if !ok {
+ off = uint64(p.strings.Len())
+ p.stringIndex[s] = off
+
+ p.strings.uint64(uint64(len(s)))
+ p.strings.WriteString(s)
+ }
+ return off
+}
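+
+// For example, the first call for a given string appends its length and
+// bytes to the string section and returns the offset where they start;
+// later calls for the same string return that cached offset without
+// writing anything.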
+
+// pushDecl adds n to the declaration work queue, if not already present.
+func (p *iexporter) pushDecl(obj types.Object) {
+ // Package unsafe is known to the compiler and predeclared.
+ assert(obj.Pkg() != types.Unsafe)
+
+ if _, ok := p.declIndex[obj]; ok {
+ return
+ }
+
+ p.declIndex[obj] = ^uint64(0) // mark n present in work queue
+ p.declTodo.pushTail(obj)
+}
+
+// exportWriter handles writing out individual data section chunks.
+type exportWriter struct {
+ p *iexporter
+
+ data intWriter
+ currPkg *types.Package
+ prevFile string
+ prevLine int64
+}
+
+func (p *iexporter) doDecl(obj types.Object) {
+ w := p.newWriter()
+ w.setPkg(obj.Pkg(), false)
+
+ switch obj := obj.(type) {
+ case *types.Var:
+ w.tag('V')
+ w.pos(obj.Pos())
+ w.typ(obj.Type(), obj.Pkg())
+
+ case *types.Func:
+ sig, _ := obj.Type().(*types.Signature)
+ if sig.Recv() != nil {
+ panic(internalErrorf("unexpected method: %v", sig))
+ }
+ w.tag('F')
+ w.pos(obj.Pos())
+ w.signature(sig)
+
+ case *types.Const:
+ w.tag('C')
+ w.pos(obj.Pos())
+ w.value(obj.Type(), obj.Val())
+
+ case *types.TypeName:
+ if obj.IsAlias() {
+ w.tag('A')
+ w.pos(obj.Pos())
+ w.typ(obj.Type(), obj.Pkg())
+ break
+ }
+
+ // Defined type.
+ w.tag('T')
+ w.pos(obj.Pos())
+
+ underlying := obj.Type().Underlying()
+ w.typ(underlying, obj.Pkg())
+
+ t := obj.Type()
+ if types.IsInterface(t) {
+ break
+ }
+
+ named, ok := t.(*types.Named)
+ if !ok {
+ panic(internalErrorf("%s is not a defined type", t))
+ }
+
+ n := named.NumMethods()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ m := named.Method(i)
+ w.pos(m.Pos())
+ w.string(m.Name())
+ sig, _ := m.Type().(*types.Signature)
+ w.param(sig.Recv())
+ w.signature(sig)
+ }
+
+ default:
+ panic(internalErrorf("unexpected object: %v", obj))
+ }
+
+ p.declIndex[obj] = w.flush()
+}
+
+func (w *exportWriter) tag(tag byte) {
+ w.data.WriteByte(tag)
+}
+
+func (w *exportWriter) pos(pos token.Pos) {
+ p := w.p.fset.Position(pos)
+ file := p.Filename
+ line := int64(p.Line)
+
+ // When file is the same as the last position (common case),
+ // we can save a few bytes by delta encoding just the line
+ // number.
+ //
+ // Note: Because data objects may be read out of order (or not
+ // at all), we can only apply delta encoding within a single
+ // object. This is handled implicitly by tracking prevFile and
+ // prevLine as fields of exportWriter.
+
+ if file == w.prevFile {
+ delta := line - w.prevLine
+ w.int64(delta)
+ if delta == deltaNewFile {
+ w.int64(-1)
+ }
+ } else {
+ w.int64(deltaNewFile)
+ w.int64(line) // line >= 0
+ w.string(file)
+ w.prevFile = file
+ }
+ w.prevLine = line
+}
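+
+// For example, staying in the same file and moving from line 10 to line 13
+// is encoded as the single delta 3; a delta that happens to equal
+// deltaNewFile (-64) is disambiguated with an extra -1, while a real file
+// change is encoded as deltaNewFile followed by the new line and file name.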
+
+func (w *exportWriter) pkg(pkg *types.Package) {
+ // Ensure any referenced packages are declared in the main index.
+ w.p.allPkgs[pkg] = true
+
+ w.string(pkg.Path())
+}
+
+func (w *exportWriter) qualifiedIdent(obj types.Object) {
+ // Ensure any referenced declarations are written out too.
+ w.p.pushDecl(obj)
+
+ w.string(obj.Name())
+ w.pkg(obj.Pkg())
+}
+
+func (w *exportWriter) typ(t types.Type, pkg *types.Package) {
+ w.data.uint64(w.p.typOff(t, pkg))
+}
+
+func (p *iexporter) newWriter() *exportWriter {
+ return &exportWriter{p: p}
+}
+
+func (w *exportWriter) flush() uint64 {
+ off := uint64(w.p.data0.Len())
+ io.Copy(&w.p.data0, &w.data)
+ return off
+}
+
+func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 {
+ off, ok := p.typIndex[t]
+ if !ok {
+ w := p.newWriter()
+ w.doTyp(t, pkg)
+ off = predeclReserved + w.flush()
+ p.typIndex[t] = off
+ }
+ return off
+}
+
+func (w *exportWriter) startType(k itag) {
+ w.data.uint64(uint64(k))
+}
+
+func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
+ switch t := t.(type) {
+ case *types.Named:
+ w.startType(definedType)
+ w.qualifiedIdent(t.Obj())
+
+ case *types.Pointer:
+ w.startType(pointerType)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Slice:
+ w.startType(sliceType)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Array:
+ w.startType(arrayType)
+ w.uint64(uint64(t.Len()))
+ w.typ(t.Elem(), pkg)
+
+ case *types.Chan:
+ w.startType(chanType)
+ // 1 RecvOnly; 2 SendOnly; 3 SendRecv
+ var dir uint64
+ switch t.Dir() {
+ case types.RecvOnly:
+ dir = 1
+ case types.SendOnly:
+ dir = 2
+ case types.SendRecv:
+ dir = 3
+ }
+ w.uint64(dir)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Map:
+ w.startType(mapType)
+ w.typ(t.Key(), pkg)
+ w.typ(t.Elem(), pkg)
+
+ case *types.Signature:
+ w.startType(signatureType)
+ w.setPkg(pkg, true)
+ w.signature(t)
+
+ case *types.Struct:
+ w.startType(structType)
+ w.setPkg(pkg, true)
+
+ n := t.NumFields()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ f := t.Field(i)
+ w.pos(f.Pos())
+ w.string(f.Name())
+ w.typ(f.Type(), pkg)
+ w.bool(f.Embedded())
+ w.string(t.Tag(i)) // note (or tag)
+ }
+
+ case *types.Interface:
+ w.startType(interfaceType)
+ w.setPkg(pkg, true)
+
+ n := t.NumEmbeddeds()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ f := t.Embedded(i)
+ w.pos(f.Obj().Pos())
+ w.typ(f.Obj().Type(), f.Obj().Pkg())
+ }
+
+ n = t.NumExplicitMethods()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ m := t.ExplicitMethod(i)
+ w.pos(m.Pos())
+ w.string(m.Name())
+ sig, _ := m.Type().(*types.Signature)
+ w.signature(sig)
+ }
+
+ default:
+ panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
+ }
+}
+
+func (w *exportWriter) setPkg(pkg *types.Package, write bool) {
+ if write {
+ w.pkg(pkg)
+ }
+
+ w.currPkg = pkg
+}
+
+func (w *exportWriter) signature(sig *types.Signature) {
+ w.paramList(sig.Params())
+ w.paramList(sig.Results())
+ if sig.Params().Len() > 0 {
+ w.bool(sig.Variadic())
+ }
+}
+
+func (w *exportWriter) paramList(tup *types.Tuple) {
+ n := tup.Len()
+ w.uint64(uint64(n))
+ for i := 0; i < n; i++ {
+ w.param(tup.At(i))
+ }
+}
+
+func (w *exportWriter) param(obj types.Object) {
+ w.pos(obj.Pos())
+ w.localIdent(obj)
+ w.typ(obj.Type(), obj.Pkg())
+}
+
+func (w *exportWriter) value(typ types.Type, v constant.Value) {
+ w.typ(typ, nil)
+
+ switch v.Kind() {
+ case constant.Bool:
+ w.bool(constant.BoolVal(v))
+ case constant.Int:
+ var i big.Int
+ if i64, exact := constant.Int64Val(v); exact {
+ i.SetInt64(i64)
+ } else if ui64, exact := constant.Uint64Val(v); exact {
+ i.SetUint64(ui64)
+ } else {
+ i.SetString(v.ExactString(), 10)
+ }
+ w.mpint(&i, typ)
+ case constant.Float:
+ f := constantToFloat(v)
+ w.mpfloat(f, typ)
+ case constant.Complex:
+ w.mpfloat(constantToFloat(constant.Real(v)), typ)
+ w.mpfloat(constantToFloat(constant.Imag(v)), typ)
+ case constant.String:
+ w.string(constant.StringVal(v))
+ case constant.Unknown:
+ // package contains type errors
+ default:
+ panic(internalErrorf("unexpected value %v (%T)", v, v))
+ }
+}
+
+// constantToFloat converts a constant.Value with kind constant.Float to a
+// big.Float.
+func constantToFloat(x constant.Value) *big.Float {
+ assert(x.Kind() == constant.Float)
+ // Use the same floating-point precision (512) as cmd/compile
+ // (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
+ const mpprec = 512
+ var f big.Float
+ f.SetPrec(mpprec)
+ if v, exact := constant.Float64Val(x); exact {
+ // float64
+ f.SetFloat64(v)
+ } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+ // TODO(gri): add big.Rat accessor to constant.Value.
+ n := valueToRat(num)
+ d := valueToRat(denom)
+ f.SetRat(n.Quo(n, d))
+ } else {
+ // Value too large to represent as a fraction => inaccessible.
+ // TODO(gri): add big.Float accessor to constant.Value.
+ _, ok := f.SetString(x.ExactString())
+ assert(ok)
+ }
+ return &f
+}
+
+// mpint exports a multi-precision integer.
+//
+// For unsigned types, small values are written out as a single
+// byte. Larger values are written out as a length-prefixed big-endian
+// byte string, where the length prefix is encoded as its complement.
+// For example, bytes 0, 1, and 2 directly represent the integer
+// values 0, 1, and 2, while bytes 255, 254, and 253 indicate that a
+// 1-, 2-, or 3-byte big-endian byte string follows.
+//
+// Encoding for signed types uses the same general approach as for
+// unsigned types, except small values use zig-zag encoding and the
+// bottom bit of the length prefix byte for large values is reserved as a
+// sign bit.
+//
+// The exact boundary between small and large encodings varies
+// according to the maximum number of bytes needed to encode a value
+// of type typ. As a special case, 8-bit types are always encoded as a
+// single byte.
+//
+// TODO(mdempsky): Is this level of complexity really worthwhile?
+func (w *exportWriter) mpint(x *big.Int, typ types.Type) {
+ basic, ok := typ.Underlying().(*types.Basic)
+ if !ok {
+ panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying()))
+ }
+
+ signed, maxBytes := intSize(basic)
+
+ negative := x.Sign() < 0
+ if !signed && negative {
+ panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x))
+ }
+
+ b := x.Bytes()
+ if len(b) > 0 && b[0] == 0 {
+ panic(internalErrorf("leading zeros"))
+ }
+ if uint(len(b)) > maxBytes {
+ panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x))
+ }
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ // Check if x can use small value encoding.
+ if len(b) <= 1 {
+ var ux uint
+ if len(b) == 1 {
+ ux = uint(b[0])
+ }
+ if signed {
+ ux <<= 1
+ if negative {
+ ux--
+ }
+ }
+ if ux < maxSmall {
+ w.data.WriteByte(byte(ux))
+ return
+ }
+ }
+
+ n := 256 - uint(len(b))
+ if signed {
+ n = 256 - 2*uint(len(b))
+ if negative {
+ n |= 1
+ }
+ }
+ if n < maxSmall || n >= 256 {
+ panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n))
+ }
+
+ w.data.WriteByte(byte(n))
+ w.data.Write(b)
+}
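+
+// Illustrative example (not part of the upstream file): for a signed 4-byte
+// type such as int32, maxBytes = 4 and maxSmall = 256 - 2*4 = 248, so:
+//
+//	17     -> single byte 34 (zig-zag: 2*17)
+//	-5     -> single byte 9  (zig-zag: 2*5 - 1)
+//	100000 -> prefix byte 250 (256 - 2*3, sign bit clear), then the
+//	          big-endian magnitude bytes 0x01 0x86 0xA0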
+
+// mpfloat exports a multi-precision floating point number.
+//
+// The number's value is decomposed into mantissa × 2**exponent, where
+// mantissa is an integer. The value is written out as mantissa (as a
+// multi-precision integer) and then the exponent, except exponent is
+// omitted if mantissa is zero.
+func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) {
+ if f.IsInf() {
+ panic("infinite constant")
+ }
+
+ // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
+ var mant big.Float
+ exp := int64(f.MantExp(&mant))
+
+ // Scale so that mant is an integer.
+ prec := mant.MinPrec()
+ mant.SetMantExp(&mant, int(prec))
+ exp -= int64(prec)
+
+ manti, acc := mant.Int(nil)
+ if acc != big.Exact {
+ panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc))
+ }
+ w.mpint(manti, typ)
+ if manti.Sign() != 0 {
+ w.int64(exp)
+ }
+}
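+
+// Illustrative example (not part of the upstream file): 6.25 has mantissa 25
+// and exponent -2 (6.25 = 25 * 2**-2), so mpfloat writes the integer 25 via
+// mpint followed by the varint exponent -2. A zero value writes only the zero
+// mantissa; importReader.mpfloat relies on that to skip reading an exponent.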
+
+func (w *exportWriter) bool(b bool) bool {
+ var x uint64
+ if b {
+ x = 1
+ }
+ w.uint64(x)
+ return b
+}
+
+func (w *exportWriter) int64(x int64) { w.data.int64(x) }
+func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
+func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
+
+func (w *exportWriter) localIdent(obj types.Object) {
+ // Anonymous parameters.
+ if obj == nil {
+ w.string("")
+ return
+ }
+
+ name := obj.Name()
+ if name == "_" {
+ w.string("_")
+ return
+ }
+
+ w.string(name)
+}
+
+type intWriter struct {
+ bytes.Buffer
+}
+
+func (w *intWriter) int64(x int64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func (w *intWriter) uint64(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func assert(cond bool) {
+ if !cond {
+ panic("internal error: assertion failed")
+ }
+}
+
+// The below is copied from go/src/cmd/compile/internal/gc/syntax.go.
+
+// objQueue is a FIFO queue of types.Object. The zero value of objQueue is
+// a ready-to-use empty queue.
+type objQueue struct {
+ ring []types.Object
+ head, tail int
+}
+
+// empty returns true if q contains no Nodes.
+func (q *objQueue) empty() bool {
+ return q.head == q.tail
+}
+
+// pushTail appends n to the tail of the queue.
+func (q *objQueue) pushTail(obj types.Object) {
+ if len(q.ring) == 0 {
+ q.ring = make([]types.Object, 16)
+ } else if q.head+len(q.ring) == q.tail {
+ // Grow the ring.
+ nring := make([]types.Object, len(q.ring)*2)
+ // Copy the old elements.
+ part := q.ring[q.head%len(q.ring):]
+ if q.tail-q.head <= len(part) {
+ part = part[:q.tail-q.head]
+ copy(nring, part)
+ } else {
+ pos := copy(nring, part)
+ copy(nring[pos:], q.ring[:q.tail%len(q.ring)])
+ }
+ q.ring, q.head, q.tail = nring, 0, q.tail-q.head
+ }
+
+ q.ring[q.tail%len(q.ring)] = obj
+ q.tail++
+}
+
+// popHead pops a node from the head of the queue. It panics if q is empty.
+func (q *objQueue) popHead() types.Object {
+ if q.empty() {
+ panic("dequeue empty")
+ }
+ obj := q.ring[q.head%len(q.ring)]
+ q.head++
+ return obj
+}
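+
+// Minimal usage sketch (illustrative, not part of the upstream file), where
+// objA and objB stand for arbitrary types.Objects:
+//
+//	var q objQueue
+//	q.pushTail(objA)
+//	q.pushTail(objB)
+//	for !q.empty() {
+//		obj := q.popHead() // objects come back in FIFO order: objA, then objB
+//		_ = obj
+//	}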
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
new file mode 100644
index 0000000..3cb7ae5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
@@ -0,0 +1,606 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package import.
+// See cmd/compile/internal/gc/iexport.go for the export data format.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "sort"
+)
+
+type intReader struct {
+ *bytes.Reader
+ path string
+}
+
+func (r *intReader) int64() int64 {
+ i, err := binary.ReadVarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+func (r *intReader) uint64() uint64 {
+ i, err := binary.ReadUvarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+const predeclReserved = 32
+
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+)
+
+// IImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ const currentVersion = 0
+ version := -1
+ defer func() {
+ if e := recover(); e != nil {
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ r := &intReader{bytes.NewReader(data), path}
+
+ version = int(r.uint64())
+ switch version {
+ case currentVersion:
+ default:
+ errorf("unknown iexport format version %d", version)
+ }
+
+ sLen := int64(r.uint64())
+ dLen := int64(r.uint64())
+
+ whence, _ := r.Seek(0, io.SeekCurrent)
+ stringData := data[whence : whence+sLen]
+ declData := data[whence+sLen : whence+sLen+dLen]
+ r.Seek(sLen+dLen, io.SeekCurrent)
+
+ p := iimporter{
+ ipath: path,
+
+ stringData: stringData,
+ stringCache: make(map[uint64]string),
+ pkgCache: make(map[uint64]*types.Package),
+
+ declData: declData,
+ pkgIndex: make(map[*types.Package]map[string]uint64),
+ typCache: make(map[uint64]types.Type),
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ for i, pt := range predeclared() {
+ p.typCache[uint64(i)] = pt
+ }
+
+ pkgList := make([]*types.Package, r.uint64())
+ for i := range pkgList {
+ pkgPathOff := r.uint64()
+ pkgPath := p.stringAt(pkgPathOff)
+ pkgName := p.stringAt(r.uint64())
+ _ = r.uint64() // package height; unused by go/types
+
+ if pkgPath == "" {
+ pkgPath = path
+ }
+ pkg := imports[pkgPath]
+ if pkg == nil {
+ pkg = types.NewPackage(pkgPath, pkgName)
+ imports[pkgPath] = pkg
+ } else if pkg.Name() != pkgName {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
+ }
+
+ p.pkgCache[pkgPathOff] = pkg
+
+ nameIndex := make(map[string]uint64)
+ for nSyms := r.uint64(); nSyms > 0; nSyms-- {
+ name := p.stringAt(r.uint64())
+ nameIndex[name] = r.uint64()
+ }
+
+ p.pkgIndex[pkg] = nameIndex
+ pkgList[i] = pkg
+ }
+ var localpkg *types.Package
+ for _, pkg := range pkgList {
+ if pkg.Path() == path {
+ localpkg = pkg
+ }
+ }
+
+ names := make([]string, 0, len(p.pkgIndex[localpkg]))
+ for name := range p.pkgIndex[localpkg] {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ p.doDecl(localpkg, name)
+ }
+
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), pkgList[1:]...)
+ sort.Sort(byPath(list))
+ localpkg.SetImports(list)
+
+ // package was imported completely and without errors
+ localpkg.MarkComplete()
+
+ consumed, _ := r.Seek(0, io.SeekCurrent)
+ return int(consumed), localpkg, nil
+}
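+
+// Minimal usage sketch (illustrative, not part of the upstream file), assuming
+// data holds indexed export data previously produced for "example.com/p":
+//
+//	fset := token.NewFileSet()
+//	imports := make(map[string]*types.Package)
+//	_, pkg, err := IImportData(fset, imports, data, "example.com/p")
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	fmt.Println(pkg.Name(), pkg.Complete()) // package name; true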
+
+type iimporter struct {
+ ipath string
+
+ stringData []byte
+ stringCache map[uint64]string
+ pkgCache map[uint64]*types.Package
+
+ declData []byte
+ pkgIndex map[*types.Package]map[string]uint64
+ typCache map[uint64]types.Type
+
+ fake fakeFileSet
+ interfaceList []*types.Interface
+}
+
+func (p *iimporter) doDecl(pkg *types.Package, name string) {
+ // See if we've already imported this declaration.
+ if obj := pkg.Scope().Lookup(name); obj != nil {
+ return
+ }
+
+ off, ok := p.pkgIndex[pkg][name]
+ if !ok {
+ errorf("%v.%v not in index", pkg, name)
+ }
+
+ r := &importReader{p: p, currPkg: pkg}
+ r.declReader.Reset(p.declData[off:])
+
+ r.obj(name)
+}
+
+func (p *iimporter) stringAt(off uint64) string {
+ if s, ok := p.stringCache[off]; ok {
+ return s
+ }
+
+ slen, n := binary.Uvarint(p.stringData[off:])
+ if n <= 0 {
+ errorf("varint failed")
+ }
+ spos := off + uint64(n)
+ s := string(p.stringData[spos : spos+slen])
+ p.stringCache[off] = s
+ return s
+}
+
+func (p *iimporter) pkgAt(off uint64) *types.Package {
+ if pkg, ok := p.pkgCache[off]; ok {
+ return pkg
+ }
+ path := p.stringAt(off)
+ errorf("missing package %q in %q", path, p.ipath)
+ return nil
+}
+
+func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
+ if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
+ return t
+ }
+
+ if off < predeclReserved {
+ errorf("predeclared type missing from cache: %v", off)
+ }
+
+ r := &importReader{p: p}
+ r.declReader.Reset(p.declData[off-predeclReserved:])
+ t := r.doType(base)
+
+ if base == nil || !isInterface(t) {
+ p.typCache[off] = t
+ }
+ return t
+}
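+
+// Offset scheme (illustrative summary, not part of the upstream file): offsets
+// below predeclReserved (32) index the predeclared types that IImportData
+// installs into typCache, so offset 0 is the first entry of predeclared().
+// Larger offsets are positions within declData shifted by predeclReserved,
+// mirroring iexporter.typOff on the export side.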
+
+type importReader struct {
+ p *iimporter
+ declReader bytes.Reader
+ currPkg *types.Package
+ prevFile string
+ prevLine int64
+}
+
+func (r *importReader) obj(name string) {
+ tag := r.byte()
+ pos := r.pos()
+
+ switch tag {
+ case 'A':
+ typ := r.typ()
+
+ r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
+
+ case 'C':
+ typ, val := r.value()
+
+ r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
+
+ case 'F':
+ sig := r.signature(nil)
+
+ r.declare(types.NewFunc(pos, r.currPkg, name, sig))
+
+ case 'T':
+ // Types can be recursive. We need to setup a stub
+ // declaration before recursing.
+ obj := types.NewTypeName(pos, r.currPkg, name, nil)
+ named := types.NewNamed(obj, nil, nil)
+ r.declare(obj)
+
+ underlying := r.p.typAt(r.uint64(), named).Underlying()
+ named.SetUnderlying(underlying)
+
+ if !isInterface(underlying) {
+ for n := r.uint64(); n > 0; n-- {
+ mpos := r.pos()
+ mname := r.ident()
+ recv := r.param()
+ msig := r.signature(recv)
+
+ named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
+ }
+ }
+
+ case 'V':
+ typ := r.typ()
+
+ r.declare(types.NewVar(pos, r.currPkg, name, typ))
+
+ default:
+ errorf("unexpected tag: %v", tag)
+ }
+}
+
+func (r *importReader) declare(obj types.Object) {
+ obj.Pkg().Scope().Insert(obj)
+}
+
+func (r *importReader) value() (typ types.Type, val constant.Value) {
+ typ = r.typ()
+
+ switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
+ case types.IsBoolean:
+ val = constant.MakeBool(r.bool())
+
+ case types.IsString:
+ val = constant.MakeString(r.string())
+
+ case types.IsInteger:
+ val = r.mpint(b)
+
+ case types.IsFloat:
+ val = r.mpfloat(b)
+
+ case types.IsComplex:
+ re := r.mpfloat(b)
+ im := r.mpfloat(b)
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ default:
+ if b.Kind() == types.Invalid {
+ val = constant.MakeUnknown()
+ return
+ }
+ errorf("unexpected type %v", typ) // panics
+ panic("unreachable")
+ }
+
+ return
+}
+
+func intSize(b *types.Basic) (signed bool, maxBytes uint) {
+ if (b.Info() & types.IsUntyped) != 0 {
+ return true, 64
+ }
+
+ switch b.Kind() {
+ case types.Float32, types.Complex64:
+ return true, 3
+ case types.Float64, types.Complex128:
+ return true, 7
+ }
+
+ signed = (b.Info() & types.IsUnsigned) == 0
+ switch b.Kind() {
+ case types.Int8, types.Uint8:
+ maxBytes = 1
+ case types.Int16, types.Uint16:
+ maxBytes = 2
+ case types.Int32, types.Uint32:
+ maxBytes = 4
+ default:
+ maxBytes = 8
+ }
+
+ return
+}
+
+func (r *importReader) mpint(b *types.Basic) constant.Value {
+ signed, maxBytes := intSize(b)
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ n, _ := r.declReader.ReadByte()
+ if uint(n) < maxSmall {
+ v := int64(n)
+ if signed {
+ v >>= 1
+ if n&1 != 0 {
+ v = ^v
+ }
+ }
+ return constant.MakeInt64(v)
+ }
+
+ v := -n
+ if signed {
+ v = -(n &^ 1) >> 1
+ }
+ if v < 1 || uint(v) > maxBytes {
+ errorf("weird decoding: %v, %v => %v", n, signed, v)
+ }
+
+ buf := make([]byte, v)
+ io.ReadFull(&r.declReader, buf)
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
+ buf[i], buf[j] = buf[j], buf[i]
+ }
+
+ x := constant.MakeFromBytes(buf)
+ if signed && n&1 != 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
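+
+// Illustrative decode table for the signed small-value (zig-zag) branch above
+// (not part of the upstream file): prefix bytes 0, 1, 2, 3, 9 decode to the
+// constants 0, -1, 1, -2, -5 respectively, the inverse of exportWriter.mpint.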
+
+func (r *importReader) mpfloat(b *types.Basic) constant.Value {
+ x := r.mpint(b)
+ if constant.Sign(x) == 0 {
+ return x
+ }
+
+ exp := r.int64()
+ switch {
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ }
+ return x
+}
+
+func (r *importReader) ident() string {
+ return r.string()
+}
+
+func (r *importReader) qualifiedIdent() (*types.Package, string) {
+ name := r.string()
+ pkg := r.pkg()
+ return pkg, name
+}
+
+func (r *importReader) pos() token.Pos {
+ delta := r.int64()
+ if delta != deltaNewFile {
+ r.prevLine += delta
+ } else if l := r.int64(); l == -1 {
+ r.prevLine += deltaNewFile
+ } else {
+ r.prevFile = r.string()
+ r.prevLine = l
+ }
+
+ if r.prevFile == "" && r.prevLine == 0 {
+ return token.NoPos
+ }
+
+ return r.p.fake.pos(r.prevFile, int(r.prevLine))
+}
+
+func (r *importReader) typ() types.Type {
+ return r.p.typAt(r.uint64(), nil)
+}
+
+func isInterface(t types.Type) bool {
+ _, ok := t.(*types.Interface)
+ return ok
+}
+
+func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+
+func (r *importReader) doType(base *types.Named) types.Type {
+ switch k := r.kind(); k {
+ default:
+ errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
+ return nil
+
+ case definedType:
+ pkg, name := r.qualifiedIdent()
+ r.p.doDecl(pkg, name)
+ return pkg.Scope().Lookup(name).(*types.TypeName).Type()
+ case pointerType:
+ return types.NewPointer(r.typ())
+ case sliceType:
+ return types.NewSlice(r.typ())
+ case arrayType:
+ n := r.uint64()
+ return types.NewArray(r.typ(), int64(n))
+ case chanType:
+ dir := chanDir(int(r.uint64()))
+ return types.NewChan(dir, r.typ())
+ case mapType:
+ return types.NewMap(r.typ(), r.typ())
+ case signatureType:
+ r.currPkg = r.pkg()
+ return r.signature(nil)
+
+ case structType:
+ r.currPkg = r.pkg()
+
+ fields := make([]*types.Var, r.uint64())
+ tags := make([]string, len(fields))
+ for i := range fields {
+ fpos := r.pos()
+ fname := r.ident()
+ ftyp := r.typ()
+ emb := r.bool()
+ tag := r.string()
+
+ fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
+ tags[i] = tag
+ }
+ return types.NewStruct(fields, tags)
+
+ case interfaceType:
+ r.currPkg = r.pkg()
+
+ embeddeds := make([]types.Type, r.uint64())
+ for i := range embeddeds {
+ _ = r.pos()
+ embeddeds[i] = r.typ()
+ }
+
+ methods := make([]*types.Func, r.uint64())
+ for i := range methods {
+ mpos := r.pos()
+ mname := r.ident()
+
+ // TODO(mdempsky): Matches bimport.go, but I
+ // don't agree with this.
+ var recv *types.Var
+ if base != nil {
+ recv = types.NewVar(token.NoPos, r.currPkg, "", base)
+ }
+
+ msig := r.signature(recv)
+ methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
+ }
+
+ typ := newInterface(methods, embeddeds)
+ r.p.interfaceList = append(r.p.interfaceList, typ)
+ return typ
+ }
+}
+
+func (r *importReader) kind() itag {
+ return itag(r.uint64())
+}
+
+func (r *importReader) signature(recv *types.Var) *types.Signature {
+ params := r.paramList()
+ results := r.paramList()
+ variadic := params.Len() > 0 && r.bool()
+ return types.NewSignature(recv, params, results, variadic)
+}
+
+func (r *importReader) paramList() *types.Tuple {
+ xs := make([]*types.Var, r.uint64())
+ for i := range xs {
+ xs[i] = r.param()
+ }
+ return types.NewTuple(xs...)
+}
+
+func (r *importReader) param() *types.Var {
+ pos := r.pos()
+ name := r.ident()
+ typ := r.typ()
+ return types.NewParam(pos, r.currPkg, name, typ)
+}
+
+func (r *importReader) bool() bool {
+ return r.uint64() != 0
+}
+
+func (r *importReader) int64() int64 {
+ n, err := binary.ReadVarint(&r.declReader)
+ if err != nil {
+ errorf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) uint64() uint64 {
+ n, err := binary.ReadUvarint(&r.declReader)
+ if err != nil {
+ errorf("readUvarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) byte() byte {
+ x, err := r.declReader.ReadByte()
+ if err != nil {
+ errorf("declReader.ReadByte: %v", err)
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
new file mode 100644
index 0000000..463f252
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ named := make([]*types.Named, len(embeddeds))
+ for i, e := range embeddeds {
+ var ok bool
+ named[i], ok = e.(*types.Named)
+ if !ok {
+ panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
+ }
+ }
+ return types.NewInterface(methods, named)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
new file mode 100644
index 0000000..ab28b95
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ return types.NewInterfaceType(methods, embeddeds)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
new file mode 100644
index 0000000..fdc7da0
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
@@ -0,0 +1,160 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package packagesdriver fetches type sizes for go/packages and go/analysis.
+package packagesdriver
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/types"
+ "log"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+)
+
+var debug = false
+
+// GetSizes returns the sizes used by the underlying driver with the given parameters.
+func GetSizes(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
+ // TODO(matloob): Clean this up. This code is mostly a copy of packages.findExternalDriver.
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ // We did not find the driver, so use "go list".
+ tool = "off"
+ }
+ }
+
+ if tool == "off" {
+ return GetSizesGolist(ctx, buildFlags, env, dir, usesExportData)
+ }
+
+ req, err := json.Marshal(struct {
+ Command string `json:"command"`
+ Env []string `json:"env"`
+ BuildFlags []string `json:"build_flags"`
+ }{
+ Command: "sizes",
+ Env: env,
+ BuildFlags: buildFlags,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
+ }
+
+ buf := new(bytes.Buffer)
+ cmd := exec.CommandContext(ctx, tool)
+ cmd.Dir = dir
+ cmd.Env = env
+ cmd.Stdin = bytes.NewReader(req)
+ cmd.Stdout = buf
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ var response struct {
+ // Sizes, if not nil, is the types.Sizes to use when type checking.
+ Sizes *types.StdSizes
+ }
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return response.Sizes, nil
+}
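+
+// Illustrative exchange (not part of the upstream file): the driver tool reads
+// a request like {"command":"sizes","env":[...],"build_flags":[...]} on stdin
+// and is expected to reply on stdout with something like
+// {"Sizes":{"WordSize":8,"MaxAlign":8}}, which unmarshals into *types.StdSizes.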
+
+func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
+ args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"}
+ args = append(args, buildFlags...)
+ args = append(args, "--", "unsafe")
+ stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...)
+ if err != nil {
+ return nil, err
+ }
+ fields := strings.Fields(stdout.String())
+ if len(fields) < 2 {
+ return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
+ }
+ goarch := fields[0]
+ compiler := fields[1]
+ return types.SizesFor(compiler, goarch), nil
+}
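+
+// Illustrative example (not part of the upstream file): on a typical
+// linux/amd64 installation the template prints "amd64 gc", so GetSizesGolist
+// returns the equivalent of types.SizesFor("gc", "amd64").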
+
+// InvokeGo returns the stdout of a go command invocation.
+func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, error) {
+ if debug {
+ defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now())
+ }
+ stdout := new(bytes.Buffer)
+ stderr := new(bytes.Buffer)
+ cmd := exec.CommandContext(ctx, "go", args...)
+ // On darwin the cwd gets resolved to the real path, which breaks anything that
+ // expects the working directory to keep the original path, including the
+ // go command when dealing with modules.
+ // The Go stdlib has a special feature where if the cwd and the PWD are the
+ // same node then it trusts the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go command.
+ cmd.Env = append(append([]string{}, env...), "PWD="+dir)
+ cmd.Dir = dir
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ exitErr, ok := err.(*exec.ExitError)
+ if !ok {
+ // Catastrophic error:
+ // - executable not found
+ // - context cancellation
+ return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
+ }
+
+ // Export mode entails a build.
+ // If that build fails, errors appear on stderr
+ // (despite the -e flag) and the Export field is blank.
+ // Do not fail in that case.
+ if !usesExportData {
+ return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
+ }
+ }
+
+ // As of writing, go list -export prints some non-fatal compilation
+ // errors to stderr, even with -e set. We would prefer that it put
+ // them in the Package.Error JSON (see https://golang.org/issue/26319).
+ // In the meantime, there's nowhere good to put them, but they can
+ // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
+ // is set.
+ if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
+ fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(env, args...), stderr)
+ }
+
+ // debugging
+ if false {
+ fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout)
+ }
+
+ return stdout, nil
+}
+
+func cmdDebugStr(envlist []string, args ...string) string {
+ env := make(map[string]string)
+ for _, kv := range envlist {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
+}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
new file mode 100644
index 0000000..3799f8e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -0,0 +1,222 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package packages loads Go packages for inspection and analysis.
+
+The Load function takes as input a list of patterns and returns a list of Package
+structs describing individual packages matched by those patterns.
+The LoadMode controls the amount of detail in the loaded packages.
+
+Load passes most patterns directly to the underlying build tool,
+but all patterns with the prefix "query=", where query is a
+non-empty string of letters from [a-z], are reserved and may be
+interpreted as query operators.
+
+Two query operators are currently supported: "file" and "pattern".
+
+The query "file=path/to/file.go" matches the package or packages enclosing
+the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go"
+might return the packages "fmt" and "fmt [fmt.test]".
+
+The query "pattern=string" causes "string" to be passed directly to
+the underlying build tool. In most cases this is unnecessary,
+but an application can use Load("pattern=" + x) as an escaping mechanism
+to ensure that x is not interpreted as a query operator if it contains '='.
+
+All other query operators are reserved for future use and currently
+cause Load to report an error.
+
+The Package struct provides basic information about the package, including
+
+ - ID, a unique identifier for the package in the returned set;
+ - GoFiles, the names of the package's Go source files;
+ - Imports, a map from source import strings to the Packages they name;
+ - Types, the type information for the package's exported symbols;
+ - Syntax, the parsed syntax trees for the package's source code; and
+ - TypeInfo, the result of a complete type-check of the package syntax trees.
+
+(See the documentation for type Package for the complete list of fields
+and more detailed descriptions.)
+
+For example,
+
+ Load(nil, "bytes", "unicode...")
+
+returns four Package structs describing the standard library packages
+bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern
+can match multiple packages and that a package might be matched by
+multiple patterns: in general it is not possible to determine which
+packages correspond to which patterns.
+
+Note that the list returned by Load contains only the packages matched
+by the patterns. Their dependencies can be found by walking the import
+graph using the Imports fields.
+
+The Load function can be configured by passing a pointer to a Config as
+the first argument. A nil Config is equivalent to the zero Config, which
+causes Load to run in LoadFiles mode, collecting minimal information.
+See the documentation for type Config for details.
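+
+For example, a tool that needs parsed and type-checked syntax for the packages
+named on its command line might do the following (a sketch only; see the
+Config and LoadMode documentation for the authoritative details):
+
+	cfg := &packages.Config{Mode: packages.LoadSyntax}
+	pkgs, err := packages.Load(cfg, flag.Args()...)
+	if err != nil {
+		log.Fatal(err)
+	}
+	for _, pkg := range pkgs {
+		fmt.Println(pkg.ID, len(pkg.Syntax), "files")
+	}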
+
+As noted earlier, the Config.Mode controls the amount of detail
+reported about the loaded packages, with each mode returning all the data of the
+previous mode with some extra added. See the documentation for type LoadMode
+for details.
+
+Most tools should pass their command-line arguments (after any flags)
+uninterpreted to the loader, so that the loader can interpret them
+according to the conventions of the underlying build system.
+See the Example function for typical usage.
+
+*/
+package packages // import "golang.org/x/tools/go/packages"
+
+/*
+
+Motivation and design considerations
+
+The new package's design solves problems addressed by two existing
+packages: go/build, which locates and describes packages, and
+golang.org/x/tools/go/loader, which loads, parses and type-checks them.
+The go/build.Package structure encodes too much of the 'go build' way
+of organizing projects, leaving us in need of a data type that describes a
+package of Go source code independent of the underlying build system.
+We wanted something that works equally well with go build and vgo, and
+also other build systems such as Bazel and Blaze, making it possible to
+construct analysis tools that work in all these environments.
+Tools such as errcheck and staticcheck were essentially unavailable to
+the Go community at Google, and some of Google's internal tools for Go
+are unavailable externally.
+This new package provides a uniform way to obtain package metadata by
+querying each of these build systems, optionally supporting their
+preferred command-line notations for packages, so that tools integrate
+neatly with users' build environments. The Metadata query function
+executes an external query tool appropriate to the current workspace.
+
+Loading packages always returns the complete import graph "all the way down",
+even if all you want is information about a single package, because the query
+mechanisms of all the build systems we currently support ({go,vgo} list, and
+blaze/bazel aspect-based query) cannot provide detailed information
+about one package without visiting all its dependencies too, so there is
+no additional asymptotic cost to providing transitive information.
+(This property might not be true of a hypothetical 5th build system.)
+
+In calls to TypeCheck, all initial packages, and any package that
+transitively depends on one of them, must be loaded from source.
+Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from
+source; D may be loaded from export data, and E may not be loaded at all
+(though it's possible that D's export data mentions it, so a
+types.Package may be created for it and exposed.)
+
+The old loader had a feature to suppress type-checking of function
+bodies on a per-package basis, primarily intended to reduce the work of
+obtaining type information for imported packages. Now that imports are
+satisfied by export data, the optimization no longer seems necessary.
+
+Despite some early attempts, the old loader did not exploit export data,
+instead always using the equivalent of WholeProgram mode. This was due
+to the complexity of mixing source and export data packages (now
+resolved by the upward traversal mentioned above), and because export data
+files were nearly always missing or stale. Now that 'go build' supports
+caching, all the underlying build systems can guarantee to produce
+export data in a reasonable (amortized) time.
+
+Test "main" packages synthesized by the build system are now reported as
+first-class packages, avoiding the need for clients (such as go/ssa) to
+reinvent this generation logic.
+
+One way in which go/packages is simpler than the old loader is in its
+treatment of in-package tests. In-package tests are packages that
+consist of all the files of the library under test, plus the test files.
+The old loader constructed in-package tests by a two-phase process of
+mutation called "augmentation": first it would construct and type check
+all the ordinary library packages and type-check the packages that
+depend on them; then it would add more (test) files to the package and
+type-check again. This two-phase approach had four major problems:
+1) in processing the tests, the loader modified the library package,
+ leaving no way for a client application to see both the test
+ package and the library package; one would mutate into the other.
+2) because test files can declare additional methods on types defined in
+ the library portion of the package, the dispatch of method calls in
+ the library portion was affected by the presence of the test files.
+ This should have been a clue that the packages were logically
+ different.
+3) this model of "augmentation" assumed at most one in-package test
+ per library package, which is true of projects using 'go build',
+ but not other build systems.
+4) because of the two-phase nature of test processing, all packages that
+ import the library package had to be processed before augmentation,
+ forcing a "one-shot" API and preventing the client from calling Load
+ several times in sequence, as is now possible in WholeProgram mode.
+ (TypeCheck mode has a similar one-shot restriction for a different reason.)
+
+Early drafts of this package supported "multi-shot" operation.
+Although it allowed clients to make a sequence of calls (or concurrent
+calls) to Load, building up the graph of Packages incrementally,
+it was of marginal value: it complicated the API
+(since it allowed some options to vary across calls but not others),
+it complicated the implementation,
+it cannot be made to work in Types mode, as explained above,
+and it was less efficient than making one combined call (when this is possible).
+Among the clients we have inspected, none made multiple calls to load
+but could not be easily and satisfactorily modified to make only a single call.
+However, application changes may be required.
+For example, the ssadump command loads the user-specified packages
+and in addition the runtime package. It is tempting to simply append
+"runtime" to the user-provided list, but that does not work if the user
+specified an ad-hoc package such as [a.go b.go].
+Instead, ssadump no longer requests the runtime package,
+but seeks it among the dependencies of the user-specified packages,
+and emits an error if it is not found.
+
+Overlays: The Overlay field in the Config allows providing alternate contents
+for Go source files, by providing a mapping from file path to contents.
+go/packages will pull in new imports added in overlay files when go/packages
+is run in LoadImports mode or greater.
+Overlay support for the go list driver isn't complete yet: if the file doesn't
+exist on disk, it will only be recognized in an overlay if it is a non-test file
+and the package would be reported even without the overlay.
+
+Questions & Tasks
+
+- Add GOARCH/GOOS?
+ They are not portable concepts, but could be made portable.
+ Our goal has been to allow users to express themselves using the conventions
+ of the underlying build system: if the build system honors GOARCH
+ during a build and during a metadata query, then so should
+ applications built atop that query mechanism.
+ Conversely, if the target architecture of the build is determined by
+ command-line flags, the application can pass the relevant
+ flags through to the build system using a command such as:
+ myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin"
+ However, this approach is low-level, unwieldy, and non-portable.
+ GOOS and GOARCH seem important enough to warrant a dedicated option.
+
+- How should we handle partial failures such as a mixture of good and
+ malformed patterns, existing and non-existent packages, successful and
+ failed builds, import failures, import cycles, and so on, in a call to
+ Load?
+
+- Support bazel, blaze, and go1.10 list, not just go1.11 list.
+
+- Handle (and test) various partial success cases, e.g.
+ a mixture of good packages and:
+ invalid patterns
+ nonexistent packages
+ empty packages
+ packages with malformed package or import declarations
+ unreadable files
+ import cycles
+ other parse errors
+ type errors
+ Make sure we record errors at the correct place in the graph.
+
+- Missing packages among initial arguments are not reported.
+ Return bogus packages for them, like golist does.
+
+- "undeclared name" errors (for example) are reported out of source file
+ order. I suspect this is due to the breadth-first resolution now used
+ by go/types. Is that a bug? Discuss with gri.
+
+*/
diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go
new file mode 100644
index 0000000..860c3ec
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/external.go
@@ -0,0 +1,79 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file enables an external tool to intercept package requests.
+// If the tool is present then its results are used in preference to
+// the go list command.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os/exec"
+ "strings"
+)
+
+// driverRequest is the JSON message sent on stdin to the external driver tool.
+type driverRequest struct {
+ Command string `json:"command"`
+ Mode LoadMode `json:"mode"`
+ Env []string `json:"env"`
+ BuildFlags []string `json:"build_flags"`
+ Tests bool `json:"tests"`
+ Overlay map[string][]byte `json:"overlay"`
+}
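+
+// Illustrative exchange (not part of the upstream file): the external tool
+// receives one JSON-encoded driverRequest on stdin, with the query patterns
+// passed as command-line arguments, and must reply with a JSON-encoded
+// driverResponse on stdout (see findExternalDriver below).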
+
+// findExternalDriver returns a driver function that invokes an external tool
+// to supply the build system package structure, or nil if no such tool is found.
+// If GOPACKAGESDRIVER is set in the environment, findExternalDriver uses its
+// value ("off" disables the external driver); otherwise it searches for a
+// binary named gopackagesdriver on the PATH.
+func findExternalDriver(cfg *Config) driver {
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range cfg.Env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+ if tool != "" && tool == "off" {
+ return nil
+ }
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ return nil
+ }
+ }
+ return func(cfg *Config, words ...string) (*driverResponse, error) {
+ req, err := json.Marshal(driverRequest{
+ Mode: cfg.Mode,
+ Env: cfg.Env,
+ BuildFlags: cfg.BuildFlags,
+ Tests: cfg.Tests,
+ Overlay: cfg.Overlay,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
+ }
+
+ buf := new(bytes.Buffer)
+ cmd := exec.CommandContext(cfg.Context, tool, words...)
+ cmd.Dir = cfg.Dir
+ cmd.Env = cfg.Env
+ cmd.Stdin = bytes.NewReader(req)
+ cmd.Stdout = buf
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ var response driverResponse
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return &response, nil
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
new file mode 100644
index 0000000..067b008
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -0,0 +1,825 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/internal/packagesdriver"
+ "golang.org/x/tools/internal/gopathwalk"
+ "golang.org/x/tools/internal/semver"
+)
+
+// debug controls verbose logging.
+var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG"))
+
+// A goTooOldError reports that the go command
+// found by exec.LookPath is too old to use the new go list behavior.
+type goTooOldError struct {
+ error
+}
+
+// responseDeduper wraps a driverResponse, deduplicating its contents.
+type responseDeduper struct {
+ seenRoots map[string]bool
+ seenPackages map[string]*Package
+ dr *driverResponse
+}
+
+// init fills in r with a driverResponse.
+func (r *responseDeduper) init(dr *driverResponse) {
+ r.dr = dr
+ r.seenRoots = map[string]bool{}
+ r.seenPackages = map[string]*Package{}
+ for _, pkg := range dr.Packages {
+ r.seenPackages[pkg.ID] = pkg
+ }
+ for _, root := range dr.Roots {
+ r.seenRoots[root] = true
+ }
+}
+
+func (r *responseDeduper) addPackage(p *Package) {
+ if r.seenPackages[p.ID] != nil {
+ return
+ }
+ r.seenPackages[p.ID] = p
+ r.dr.Packages = append(r.dr.Packages, p)
+}
+
+func (r *responseDeduper) addRoot(id string) {
+ if r.seenRoots[id] {
+ return
+ }
+ r.seenRoots[id] = true
+ r.dr.Roots = append(r.dr.Roots, id)
+}
+
+// goListDriver uses the go list command to interpret the patterns and produce
+// the build system package structure.
+// See driver for more details.
+func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+ var sizes types.Sizes
+ var sizeserr error
+ var sizeswg sync.WaitGroup
+ if cfg.Mode >= LoadTypes {
+ sizeswg.Add(1)
+ go func() {
+ sizes, sizeserr = getSizes(cfg)
+ sizeswg.Done()
+ }()
+ }
+
+ // Determine files requested in contains patterns
+ var containFiles []string
+ var packagesNamed []string
+ restPatterns := make([]string, 0, len(patterns))
+ // Extract file= and other [querytype]= patterns. Report an error if querytype
+ // doesn't exist.
+extractQueries:
+ for _, pattern := range patterns {
+ eqidx := strings.Index(pattern, "=")
+ if eqidx < 0 {
+ restPatterns = append(restPatterns, pattern)
+ } else {
+ query, value := pattern[:eqidx], pattern[eqidx+len("="):]
+ switch query {
+ case "file":
+ containFiles = append(containFiles, value)
+ case "pattern":
+ restPatterns = append(restPatterns, value)
+ case "iamashamedtousethedisabledqueryname":
+ packagesNamed = append(packagesNamed, value)
+ case "": // not a reserved query
+ restPatterns = append(restPatterns, pattern)
+ default:
+ for _, rune := range query {
+ if rune < 'a' || rune > 'z' { // not a reserved query
+ restPatterns = append(restPatterns, pattern)
+ continue extractQueries
+ }
+ }
+ // Reject all other patterns containing "="
+ return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern)
+ }
+ }
+ }
+
+ // TODO(matloob): Remove the definition of listfunc and just use golistPackages once go1.12 is released.
+ var listfunc driver
+ var isFallback bool
+ listfunc = func(cfg *Config, words ...string) (*driverResponse, error) {
+ response, err := golistDriverCurrent(cfg, words...)
+ if _, ok := err.(goTooOldError); ok {
+ isFallback = true
+ listfunc = golistDriverFallback
+ return listfunc(cfg, words...)
+ }
+ listfunc = golistDriverCurrent
+ return response, err
+ }
+
+ response := &responseDeduper{}
+ var err error
+
+ // See if we have any patterns to pass through to go list. Zero initial
+ // patterns also requires a go list call, since it's the equivalent of
+ // ".".
+ if len(restPatterns) > 0 || len(patterns) == 0 {
+ dr, err := listfunc(cfg, restPatterns...)
+ if err != nil {
+ return nil, err
+ }
+ response.init(dr)
+ } else {
+ response.init(&driverResponse{})
+ }
+
+ sizeswg.Wait()
+ if sizeserr != nil {
+ return nil, sizeserr
+ }
+ // types.SizesFor always returns nil or a *types.StdSizes
+ response.dr.Sizes, _ = sizes.(*types.StdSizes)
+
+ var containsCandidates []string
+
+ if len(containFiles) != 0 {
+ if err := runContainsQueries(cfg, listfunc, isFallback, response, containFiles); err != nil {
+ return nil, err
+ }
+ }
+
+ if len(packagesNamed) != 0 {
+ if err := runNamedQueries(cfg, listfunc, response, packagesNamed); err != nil {
+ return nil, err
+ }
+ }
+
+ modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response.dr)
+ if err != nil {
+ return nil, err
+ }
+ if len(containFiles) > 0 {
+ containsCandidates = append(containsCandidates, modifiedPkgs...)
+ containsCandidates = append(containsCandidates, needPkgs...)
+ }
+
+ if len(needPkgs) > 0 {
+ if err := addNeededOverlayPackages(cfg, listfunc, response, needPkgs); err != nil {
+ return nil, err
+ }
+ }
+ // Check candidate packages for containFiles.
+ if len(containFiles) > 0 {
+ for _, id := range containsCandidates {
+ pkg := response.seenPackages[id]
+ for _, f := range containFiles {
+ for _, g := range pkg.GoFiles {
+ if sameFile(f, g) {
+ response.addRoot(id)
+ }
+ }
+ }
+ }
+ }
+
+ return response.dr, nil
+}
+
+func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string) error {
+ dr, err := driver(cfg, pkgs...)
+ if err != nil {
+ return err
+ }
+ for _, pkg := range dr.Packages {
+ response.addPackage(pkg)
+ }
+ return nil
+}
+
+func runContainsQueries(cfg *Config, driver driver, isFallback bool, response *responseDeduper, queries []string) error {
+ for _, query := range queries {
+ // TODO(matloob): Do only one query per directory.
+ fdir := filepath.Dir(query)
+ // Pass absolute path of directory to go list so that it knows to treat it as a directory,
+ // not a package path.
+ pattern, err := filepath.Abs(fdir)
+ if err != nil {
+ return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
+ }
+ if isFallback {
+ pattern = "."
+ cfg.Dir = fdir
+ }
+
+ dirResponse, err := driver(cfg, pattern)
+ if err != nil {
+ return err
+ }
+ isRoot := make(map[string]bool, len(dirResponse.Roots))
+ for _, root := range dirResponse.Roots {
+ isRoot[root] = true
+ }
+ for _, pkg := range dirResponse.Packages {
+ // Add any new packages to the main set
+ // We don't bother to filter packages that will be dropped by the changes of roots,
+ // that will happen anyway during graph construction outside this function.
+ // Over-reporting packages is not a problem.
+ response.addPackage(pkg)
+ // if the package was not a root one, it cannot have the file
+ if !isRoot[pkg.ID] {
+ continue
+ }
+ for _, pkgFile := range pkg.GoFiles {
+ if filepath.Base(query) == filepath.Base(pkgFile) {
+ response.addRoot(pkg.ID)
+ break
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// modCacheRegexp splits a path in a module cache into module, module version, and package.
+var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
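+
+// For example (illustrative): "github.com/user/mod@v1.2.3/pkg/sub" splits into
+// module "github.com/user/mod", version "v1.2.3", and package suffix "/pkg/sub".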
+
+func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
+ // calling `go env` isn't free; bail out if there's nothing to do.
+ if len(queries) == 0 {
+ return nil
+ }
+ // Determine which directories are relevant to scan.
+ roots, modRoot, err := roots(cfg)
+ if err != nil {
+ return err
+ }
+
+ // Scan the selected directories. Simple matches, from GOPATH/GOROOT
+ // or the local module, can simply be "go list"ed. Matches from the
+ // module cache need special treatment.
+ var matchesMu sync.Mutex
+ var simpleMatches, modCacheMatches []string
+ add := func(root gopathwalk.Root, dir string) {
+ // Walk calls this concurrently; protect the result slices.
+ matchesMu.Lock()
+ defer matchesMu.Unlock()
+
+ path := dir
+ if dir != root.Path {
+ path = dir[len(root.Path)+1:]
+ }
+ if pathMatchesQueries(path, queries) {
+ switch root.Type {
+ case gopathwalk.RootModuleCache:
+ modCacheMatches = append(modCacheMatches, path)
+ case gopathwalk.RootCurrentModule:
+ // We'd need to read go.mod to find the full
+ // import path. Relative's easier.
+ rel, err := filepath.Rel(cfg.Dir, dir)
+ if err != nil {
+ // This ought to be impossible, since
+ // we found dir in the current module.
+ panic(err)
+ }
+ simpleMatches = append(simpleMatches, "./"+rel)
+ case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
+ simpleMatches = append(simpleMatches, path)
+ }
+ }
+ }
+
+ startWalk := time.Now()
+ gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
+ if debug {
+ log.Printf("%v for walk", time.Since(startWalk))
+ }
+
+ // Weird special case: the top-level package in a module will be in
+ // whatever directory the user checked the repository out into. It's
+ // more reasonable for that to not match the package name. So, if there
+ // are any Go files in the mod root, query it just to be safe.
+ if modRoot != "" {
+ rel, err := filepath.Rel(cfg.Dir, modRoot)
+ if err != nil {
+ panic(err) // See above.
+ }
+
+ files, err := ioutil.ReadDir(modRoot)
+ if err != nil {
+ return err
+ }
+ for _, f := range files {
+ if strings.HasSuffix(f.Name(), ".go") {
+ simpleMatches = append(simpleMatches, rel)
+ break
+ }
+ }
+ }
+
+ addResponse := func(r *driverResponse) {
+ for _, pkg := range r.Packages {
+ response.addPackage(pkg)
+ for _, name := range queries {
+ if pkg.Name == name {
+ response.addRoot(pkg.ID)
+ break
+ }
+ }
+ }
+ }
+
+ if len(simpleMatches) != 0 {
+ resp, err := driver(cfg, simpleMatches...)
+ if err != nil {
+ return err
+ }
+ addResponse(resp)
+ }
+
+ // Module cache matches are tricky. We want to avoid downloading new
+ // versions of things, so we need to use the ones present in the cache.
+ // go list doesn't accept version specifiers, so we have to write out a
+ // temporary module, and do the list in that module.
+ if len(modCacheMatches) != 0 {
+ // Collect all the matches, deduplicating by major version
+ // and preferring the newest.
+ type modInfo struct {
+ mod string
+ major string
+ }
+ mods := make(map[modInfo]string)
+ var imports []string
+ for _, modPath := range modCacheMatches {
+ matches := modCacheRegexp.FindStringSubmatch(modPath)
+ mod, ver := filepath.ToSlash(matches[1]), matches[2]
+ importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
+
+ major := semver.Major(ver)
+ if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
+ mods[modInfo{mod, major}] = ver
+ }
+
+ imports = append(imports, importPath)
+ }
+
+ // Build the temporary module.
+ var gomod bytes.Buffer
+ gomod.WriteString("module modquery\nrequire (\n")
+ for mod, version := range mods {
+ gomod.WriteString("\t" + mod.mod + " " + version + "\n")
+ }
+ gomod.WriteString(")\n")
+
+ tmpCfg := *cfg
+
+ // We're only trying to look at stuff in the module cache, so
+ // disable the network. This should speed things up, and has
+ // prevented errors in at least one case, #28518.
+ tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
+
+ var err error
+ tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
+ if err != nil {
+ return err
+ }
+ defer os.RemoveAll(tmpCfg.Dir)
+
+ if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
+ return fmt.Errorf("writing go.mod for module cache query: %v", err)
+ }
+
+ // Run the query, using the import paths calculated from the matches above.
+ resp, err := driver(&tmpCfg, imports...)
+ if err != nil {
+ return fmt.Errorf("querying module cache matches: %v", err)
+ }
+ addResponse(resp)
+ }
+
+ return nil
+}
+
+func getSizes(cfg *Config) (types.Sizes, error) {
+ return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
+}
+
+// roots selects the appropriate paths to walk based on the passed-in configuration,
+// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
+func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
+ stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
+ if err != nil {
+ return nil, "", err
+ }
+
+ fields := strings.Split(stdout.String(), "\n")
+ if len(fields) != 4 || len(fields[3]) != 0 {
+ return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
+ }
+ goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
+ var modDir string
+ if gomod != "" {
+ modDir = filepath.Dir(gomod)
+ }
+
+ var roots []gopathwalk.Root
+ // Always add GOROOT.
+ roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT})
+ // If modules are enabled, scan the module dir.
+ if modDir != "" {
+ roots = append(roots, gopathwalk.Root{modDir, gopathwalk.RootCurrentModule})
+ }
+ // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
+ for _, p := range gopath {
+ if modDir != "" {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
+ } else {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH})
+ }
+ }
+
+ return roots, modDir, nil
+}
+
+// These functions were copied from goimports. See further documentation there.
+
+// pathMatchesQueries is adapted from pkgIsCandidate.
+// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
+func pathMatchesQueries(path string, queries []string) bool {
+ lastTwo := lastTwoComponents(path)
+ for _, query := range queries {
+ if strings.Contains(lastTwo, query) {
+ return true
+ }
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, query) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// lastTwoComponents returns at most the last two path components
+// of v, using either / or \ as the path separator.
+func lastTwoComponents(v string) string {
+ nslash := 0
+ for i := len(v) - 1; i >= 0; i-- {
+ if v[i] == '/' || v[i] == '\\' {
+ nslash++
+ if nslash == 2 {
+ return v[i:]
+ }
+ }
+ }
+ return v
+}
+
+func hasHyphenOrUpperASCII(s string) bool {
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ if b == '-' || ('A' <= b && b <= 'Z') {
+ return true
+ }
+ }
+ return false
+}
+
+func lowerASCIIAndRemoveHyphen(s string) (ret string) {
+ buf := make([]byte, 0, len(s))
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case b == '-':
+ continue
+ case 'A' <= b && b <= 'Z':
+ buf = append(buf, b+('a'-'A'))
+ default:
+ buf = append(buf, b)
+ }
+ }
+ return string(buf)
+}
+
+// Fields must match go list;
+// see $GOROOT/src/cmd/go/internal/load/pkg.go.
+type jsonPackage struct {
+ ImportPath string
+ Dir string
+ Name string
+ Export string
+ GoFiles []string
+ CompiledGoFiles []string
+ CFiles []string
+ CgoFiles []string
+ CXXFiles []string
+ MFiles []string
+ HFiles []string
+ FFiles []string
+ SFiles []string
+ SwigFiles []string
+ SwigCXXFiles []string
+ SysoFiles []string
+ Imports []string
+ ImportMap map[string]string
+ Deps []string
+ TestGoFiles []string
+ TestImports []string
+ XTestGoFiles []string
+ XTestImports []string
+ ForTest string // q in a "p [q.test]" package, else ""
+ DepOnly bool
+
+ Error *jsonPackageError
+}
+
+type jsonPackageError struct {
+ ImportStack []string
+ Pos string
+ Err string
+}
+
+func otherFiles(p *jsonPackage) [][]string {
+ return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
+}
+
+// golistDriverCurrent uses the "go list" command to expand the
+// pattern words and return metadata for the specified packages.
+// cfg.Dir may be "" and cfg.Env may be nil, as per os/exec.Command.
+func golistDriverCurrent(cfg *Config, words ...string) (*driverResponse, error) {
+ // go list uses the following identifiers in ImportPath and Imports:
+ //
+ // "p" -- importable package or main (command)
+ // "q.test" -- q's test executable
+ // "p [q.test]" -- variant of p as built for q's test executable
+ // "q_test [q.test]" -- q's external test package
+ //
+ // The packages p that are built differently for a test q.test
+ // are q itself, plus any helpers used by the external test q_test,
+ // typically including "testing" and all its dependencies.
+
+ // Run "go list" for complete
+ // information on the specified packages.
+ buf, err := invokeGo(cfg, golistargs(cfg, words)...)
+ if err != nil {
+ return nil, err
+ }
+ seen := make(map[string]*jsonPackage)
+ // Decode the JSON and convert it to Package form.
+ var response driverResponse
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ if p.ImportPath == "" {
+ // The documentation for go list says that “[e]rroneous packages will have
+ // a non-empty ImportPath”. If for some reason it comes back empty, we
+ // prefer to error out rather than silently discarding data or handing
+ // back a package without any way to refer to it.
+ if p.Error != nil {
+ return nil, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ }
+ }
+ return nil, fmt.Errorf("package missing import path: %+v", p)
+ }
+
+ if old, found := seen[p.ImportPath]; found {
+ if !reflect.DeepEqual(p, old) {
+ return nil, fmt.Errorf("go list repeated package %v with different values", p.ImportPath)
+ }
+ // skip the duplicate
+ continue
+ }
+ seen[p.ImportPath] = p
+
+ pkg := &Package{
+ Name: p.Name,
+ ID: p.ImportPath,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ }
+
+ // Work around https://golang.org/issue/28749:
+ // cmd/go puts assembly, C, and C++ files in CompiledGoFiles.
+ // Filter out any elements of CompiledGoFiles that are also in OtherFiles.
+ // We have to keep this workaround in place until go1.12 is a distant memory.
+ if len(pkg.OtherFiles) > 0 {
+ other := make(map[string]bool, len(pkg.OtherFiles))
+ for _, f := range pkg.OtherFiles {
+ other[f] = true
+ }
+
+ out := pkg.CompiledGoFiles[:0]
+ for _, f := range pkg.CompiledGoFiles {
+ if other[f] {
+ continue
+ }
+ out = append(out, f)
+ }
+ pkg.CompiledGoFiles = out
+ }
+
+ // Extract the PkgPath from the package's ID.
+ if i := strings.IndexByte(pkg.ID, ' '); i >= 0 {
+ pkg.PkgPath = pkg.ID[:i]
+ } else {
+ pkg.PkgPath = pkg.ID
+ }
+
+ if pkg.PkgPath == "unsafe" {
+ pkg.GoFiles = nil // ignore fake unsafe.go file
+ }
+
+ // Assume go list emits only absolute paths for Dir.
+ if p.Dir != "" && !filepath.IsAbs(p.Dir) {
+ log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir)
+ }
+
+ if p.Export != "" && !filepath.IsAbs(p.Export) {
+ pkg.ExportFile = filepath.Join(p.Dir, p.Export)
+ } else {
+ pkg.ExportFile = p.Export
+ }
+
+ // imports
+ //
+ // Imports contains the IDs of all imported packages.
+ // ImportsMap records (path, ID) only where they differ.
+ ids := make(map[string]bool)
+ for _, id := range p.Imports {
+ ids[id] = true
+ }
+ pkg.Imports = make(map[string]*Package)
+ for path, id := range p.ImportMap {
+ pkg.Imports[path] = &Package{ID: id} // non-identity import
+ delete(ids, id)
+ }
+ for id := range ids {
+ if id == "C" {
+ continue
+ }
+
+ pkg.Imports[id] = &Package{ID: id} // identity import
+ }
+ if !p.DepOnly {
+ response.Roots = append(response.Roots, pkg.ID)
+ }
+
+ // Workaround for pre-Go 1.11 versions of go list.
+ // TODO(matloob): they should be handled by the fallback.
+ // Can we delete this?
+ if len(pkg.CompiledGoFiles) == 0 {
+ pkg.CompiledGoFiles = pkg.GoFiles
+ }
+
+ if p.Error != nil {
+ pkg.Errors = append(pkg.Errors, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ })
+ }
+
+ response.Packages = append(response.Packages, pkg)
+ }
+
+ return &response, nil
+}
+
+// absJoin absolutizes and flattens the lists of files.
+func absJoin(dir string, fileses ...[]string) (res []string) {
+ for _, files := range fileses {
+ for _, file := range files {
+ if !filepath.IsAbs(file) {
+ file = filepath.Join(dir, file)
+ }
+ res = append(res, file)
+ }
+ }
+ return res
+}
+
+func golistargs(cfg *Config, words []string) []string {
+ fullargs := []string{
+ "list", "-e", "-json", "-compiled",
+ fmt.Sprintf("-test=%t", cfg.Tests),
+ fmt.Sprintf("-export=%t", usesExportData(cfg)),
+ fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports),
+ // go list doesn't let you pass -test and -find together,
+ // probably because you'd just get the TestMain.
+ fmt.Sprintf("-find=%t", cfg.Mode < LoadImports && !cfg.Tests),
+ }
+ fullargs = append(fullargs, cfg.BuildFlags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+// invokeGo returns the stdout of a go command invocation.
+func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
+ stdout := new(bytes.Buffer)
+ stderr := new(bytes.Buffer)
+ cmd := exec.CommandContext(cfg.Context, "go", args...)
+ // On darwin the cwd gets resolved to the real path, which breaks anything that
+ // expects the working directory to keep the original path, including the
+ // go command when dealing with modules.
+ // The Go stdlib has a special feature where if the cwd and the PWD are the
+ // same node then it trusts the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go command.
+ cmd.Env = append(append([]string{}, cfg.Env...), "PWD="+cfg.Dir)
+ cmd.Dir = cfg.Dir
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if debug {
+ defer func(start time.Time) {
+ log.Printf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr)
+ }(time.Now())
+ }
+
+ if err := cmd.Run(); err != nil {
+ exitErr, ok := err.(*exec.ExitError)
+ if !ok {
+ // Catastrophic error:
+ // - executable not found
+ // - context cancellation
+ return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
+ }
+
+ // Old go version?
+ if strings.Contains(stderr.String(), "flag provided but not defined") {
+ return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)}
+ }
+
+ // Export mode entails a build.
+ // If that build fails, errors appear on stderr
+ // (despite the -e flag) and the Export field is blank.
+ // Do not fail in that case.
+ // The same is true if an ad-hoc package given to go list doesn't exist.
+ // TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when
+ // packages don't exist or a build fails.
+ if !usesExportData(cfg) && !containsGoFile(args) {
+ return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
+ }
+ }
+
+ // As of writing, go list -export prints some non-fatal compilation
+ // errors to stderr, even with -e set. We would prefer that it put
+ // them in the Package.Error JSON (see https://golang.org/issue/26319).
+ // In the meantime, there's nowhere good to put them, but they can
+ // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
+ // is set.
+ if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
+ fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, args...), stderr)
+ }
+
+ // debugging
+ if false {
+ fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(cmd, args...), stdout)
+ }
+
+ return stdout, nil
+}
+
+func containsGoFile(s []string) bool {
+ for _, f := range s {
+ if strings.HasSuffix(f, ".go") {
+ return true
+ }
+ }
+ return false
+}
+
+func cmdDebugStr(cmd *exec.Cmd, args ...string) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+ var quotedArgs []string
+ for _, arg := range args {
+ quotedArgs = append(quotedArgs, strconv.Quote(arg))
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %s", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], strings.Join(quotedArgs, " "))
+}
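
As a quick reference for the decoding loop in golistDriverCurrent above: "go list -json" prints one JSON object per package, back to back, and json.Decoder consumes that stream with More and Decode. The sketch below shows the technique in isolation; the jsonPkg type and the sample stream are hypothetical stand-ins for the real jsonPackage struct and go list output, not code from this package.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// jsonPkg is a reduced, hypothetical stand-in for the jsonPackage struct above.
type jsonPkg struct {
	ImportPath string
	Name       string
	DepOnly    bool
}

func main() {
	// Two concatenated objects, shaped like "go list -json" output.
	stream := `{"ImportPath":"fmt","Name":"fmt"}
{"ImportPath":"errors","Name":"errors","DepOnly":true}`

	// Same loop shape as the driver: keep decoding while the stream has more values.
	for dec := json.NewDecoder(strings.NewReader(stream)); dec.More(); {
		p := new(jsonPkg)
		if err := dec.Decode(p); err != nil {
			fmt.Println("decode error:", err)
			return
		}
		fmt.Printf("%s (root=%v)\n", p.ImportPath, !p.DepOnly)
	}
	// Prints:
	// fmt (root=true)
	// errors (root=false)
}
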
diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback.go b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
new file mode 100644
index 0000000..141fa19
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
@@ -0,0 +1,450 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/internal/cgo"
+)
+
+// TODO(matloob): Delete this file once Go 1.12 is released.
+
+// This file provides backwards compatibility support for
+// loading with versions of Go earlier than 1.11. This support is meant to
+// assist with migration to the Package API until there's
+// widespread adoption of these newer Go versions.
+// This support will be removed once Go 1.12 is released
+// in Q1 2019.
+
+func golistDriverFallback(cfg *Config, words ...string) (*driverResponse, error) {
+ // Turn absolute paths into GOROOT and GOPATH-relative paths to provide to go list.
+ // This will have surprising behavior if GOROOT or GOPATH contain multiple packages with the same
+ // path and a user provides an absolute path to a directory that's shadowed by an earlier
+ // directory in GOROOT or GOPATH with the same package path.
+ words = cleanAbsPaths(cfg, words)
+
+ original, deps, err := getDeps(cfg, words...)
+ if err != nil {
+ return nil, err
+ }
+
+ var tmpdir string // used for generated cgo files
+ var needsTestVariant []struct {
+ pkg, xtestPkg *Package
+ }
+
+ var response driverResponse
+ allPkgs := make(map[string]bool)
+ addPackage := func(p *jsonPackage, isRoot bool) {
+ id := p.ImportPath
+
+ if allPkgs[id] {
+ return
+ }
+ allPkgs[id] = true
+
+ pkgpath := id
+
+ if pkgpath == "unsafe" {
+ p.GoFiles = nil // ignore fake unsafe.go file
+ }
+
+ importMap := func(importlist []string) map[string]*Package {
+ importMap := make(map[string]*Package)
+ for _, id := range importlist {
+
+ if id == "C" {
+ for _, path := range []string{"unsafe", "syscall", "runtime/cgo"} {
+ if pkgpath != path && importMap[path] == nil {
+ importMap[path] = &Package{ID: path}
+ }
+ }
+ continue
+ }
+ importMap[vendorlessPath(id)] = &Package{ID: id}
+ }
+ return importMap
+ }
+ compiledGoFiles := absJoin(p.Dir, p.GoFiles)
+ // Use a function to simplify control flow. It's just a bunch of gotos.
+ var cgoErrors []error
+ var outdir string
+ getOutdir := func() (string, error) {
+ if outdir != "" {
+ return outdir, nil
+ }
+ if tmpdir == "" {
+ if tmpdir, err = ioutil.TempDir("", "gopackages"); err != nil {
+ return "", err
+ }
+ }
+ outdir = filepath.Join(tmpdir, strings.Replace(p.ImportPath, "/", "_", -1))
+ if err := os.MkdirAll(outdir, 0755); err != nil {
+ outdir = ""
+ return "", err
+ }
+ return outdir, nil
+ }
+ processCgo := func() bool {
+ // Suppress any cgo errors. Any relevant errors will show up in typechecking.
+ // TODO(matloob): Skip running cgo if Mode < LoadTypes.
+ outdir, err := getOutdir()
+ if err != nil {
+ cgoErrors = append(cgoErrors, err)
+ return false
+ }
+ files, _, err := runCgo(p.Dir, outdir, cfg.Env)
+ if err != nil {
+ cgoErrors = append(cgoErrors, err)
+ return false
+ }
+ compiledGoFiles = append(compiledGoFiles, files...)
+ return true
+ }
+ if len(p.CgoFiles) == 0 || !processCgo() {
+ compiledGoFiles = append(compiledGoFiles, absJoin(p.Dir, p.CgoFiles)...) // Punt to typechecker.
+ }
+ if isRoot {
+ response.Roots = append(response.Roots, id)
+ }
+ pkg := &Package{
+ ID: id,
+ Name: p.Name,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: compiledGoFiles,
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ PkgPath: pkgpath,
+ Imports: importMap(p.Imports),
+ // TODO(matloob): set errors on the Package to cgoErrors
+ }
+ if p.Error != nil {
+ pkg.Errors = append(pkg.Errors, Error{
+ Pos: p.Error.Pos,
+ Msg: p.Error.Err,
+ })
+ }
+ response.Packages = append(response.Packages, pkg)
+ if cfg.Tests && isRoot {
+ testID := fmt.Sprintf("%s [%s.test]", id, id)
+ if len(p.TestGoFiles) > 0 || len(p.XTestGoFiles) > 0 {
+ response.Roots = append(response.Roots, testID)
+ testPkg := &Package{
+ ID: testID,
+ Name: p.Name,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles, p.TestGoFiles),
+ CompiledGoFiles: append(compiledGoFiles, absJoin(p.Dir, p.TestGoFiles)...),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ PkgPath: pkgpath,
+ Imports: importMap(append(p.Imports, p.TestImports...)),
+ // TODO(matloob): set errors on the Package to cgoErrors
+ }
+ response.Packages = append(response.Packages, testPkg)
+ var xtestPkg *Package
+ if len(p.XTestGoFiles) > 0 {
+ xtestID := fmt.Sprintf("%s_test [%s.test]", id, id)
+ response.Roots = append(response.Roots, xtestID)
+ // Generate test variants for all packages q where a path exists
+ // such that xtestPkg -> ... -> q -> ... -> p (where p is the package under test)
+ // and rewrite all import map entries of p to point to testPkg (the test variant of
+ // p), and of each q to point to the test variant of that q.
+ xtestPkg = &Package{
+ ID: xtestID,
+ Name: p.Name + "_test",
+ GoFiles: absJoin(p.Dir, p.XTestGoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.XTestGoFiles),
+ PkgPath: pkgpath + "_test",
+ Imports: importMap(p.XTestImports),
+ }
+ // Add to list of packages we need to rewrite imports for to refer to test variants.
+ // We may need to create a test variant of a package that hasn't been loaded yet, so
+ // the test variants need to be created later.
+ needsTestVariant = append(needsTestVariant, struct{ pkg, xtestPkg *Package }{pkg, xtestPkg})
+ response.Packages = append(response.Packages, xtestPkg)
+ }
+ // testmain package
+ testmainID := id + ".test"
+ response.Roots = append(response.Roots, testmainID)
+ imports := map[string]*Package{}
+ imports[testPkg.PkgPath] = &Package{ID: testPkg.ID}
+ if xtestPkg != nil {
+ imports[xtestPkg.PkgPath] = &Package{ID: xtestPkg.ID}
+ }
+ testmainPkg := &Package{
+ ID: testmainID,
+ Name: "main",
+ PkgPath: testmainID,
+ Imports: imports,
+ }
+ response.Packages = append(response.Packages, testmainPkg)
+ outdir, err := getOutdir()
+ if err != nil {
+ testmainPkg.Errors = append(testmainPkg.Errors, Error{
+ Pos: "-",
+ Msg: fmt.Sprintf("failed to generate testmain: %v", err),
+ Kind: ListError,
+ })
+ return
+ }
+ // Don't use a .go extension on the file, so that the tests think the file is inside GOCACHE.
+ // This allows the same test to test the pre- and post-Go 1.11 go list logic because the Go 1.11
+ // go list generates test mains in the cache, and the test code knows not to rely on paths in the
+ // cache to stay stable.
+ testmain := filepath.Join(outdir, "testmain-go")
+ extraimports, extradeps, err := generateTestmain(testmain, testPkg, xtestPkg)
+ if err != nil {
+ testmainPkg.Errors = append(testmainPkg.Errors, Error{
+ Pos: "-",
+ Msg: fmt.Sprintf("failed to generate testmain: %v", err),
+ Kind: ListError,
+ })
+ }
+ deps = append(deps, extradeps...)
+ for _, imp := range extraimports { // testing, testing/internal/testdeps, and maybe os
+ imports[imp] = &Package{ID: imp}
+ }
+ testmainPkg.GoFiles = []string{testmain}
+ testmainPkg.CompiledGoFiles = []string{testmain}
+ }
+ }
+ }
+
+ for _, pkg := range original {
+ addPackage(pkg, true)
+ }
+ if cfg.Mode < LoadImports || len(deps) == 0 {
+ return &response, nil
+ }
+
+ buf, err := invokeGo(cfg, golistArgsFallback(cfg, deps)...)
+ if err != nil {
+ return nil, err
+ }
+
+ // Decode the JSON and convert it to Package form.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ addPackage(p, false)
+ }
+
+ for _, v := range needsTestVariant {
+ createTestVariants(&response, v.pkg, v.xtestPkg)
+ }
+
+ return &response, nil
+}
+
+func createTestVariants(response *driverResponse, pkgUnderTest, xtestPkg *Package) {
+ allPkgs := make(map[string]*Package)
+ for _, pkg := range response.Packages {
+ allPkgs[pkg.ID] = pkg
+ }
+ needsTestVariant := make(map[string]bool)
+ needsTestVariant[pkgUnderTest.ID] = true
+ var needsVariantRec func(p *Package) bool
+ needsVariantRec = func(p *Package) bool {
+ if needsTestVariant[p.ID] {
+ return true
+ }
+ for _, imp := range p.Imports {
+ if needsVariantRec(allPkgs[imp.ID]) {
+ // Don't break because we want to make sure all dependencies
+ // have been processed, and all required test variants of our dependencies
+ // exist.
+ needsTestVariant[p.ID] = true
+ }
+ }
+ if !needsTestVariant[p.ID] {
+ return false
+ }
+ // Create a clone of the package. It will share the same strings and lists of source files,
+ // but that's okay. It's only necessary for the Imports map to have a separate identity.
+ testVariant := *p
+ testVariant.ID = fmt.Sprintf("%s [%s.test]", p.ID, pkgUnderTest.ID)
+ testVariant.Imports = make(map[string]*Package)
+ for imp, pkg := range p.Imports {
+ testVariant.Imports[imp] = pkg
+ if needsTestVariant[pkg.ID] {
+ testVariant.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
+ }
+ }
+ response.Packages = append(response.Packages, &testVariant)
+ return needsTestVariant[p.ID]
+ }
+ // finally, update the xtest package's imports
+ for imp, pkg := range xtestPkg.Imports {
+ if allPkgs[pkg.ID] == nil {
+ fmt.Printf("for %s: package %s doesn't exist\n", xtestPkg.ID, pkg.ID)
+ }
+ if needsVariantRec(allPkgs[pkg.ID]) {
+ xtestPkg.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
+ }
+ }
+}
+
+// cleanAbsPaths replaces all absolute paths with GOPATH- and GOROOT-relative
+// paths. If an absolute path is not GOPATH- or GOROOT- relative, it is left as an
+// absolute path so an error can be returned later.
+func cleanAbsPaths(cfg *Config, words []string) []string {
+ var searchpaths []string
+ var cleaned = make([]string, len(words))
+ for i := range cleaned {
+ cleaned[i] = words[i]
+ // Ignore relative directory paths (they must already be goroot-relative) and Go source files
+ // (absolute source files are already allowed for ad-hoc packages).
+ // TODO(matloob): Can there be non-.go files in ad-hoc packages?
+ if !filepath.IsAbs(cleaned[i]) || strings.HasSuffix(cleaned[i], ".go") {
+ continue
+ }
+ // otherwise, it's an absolute path. Search GOPATH and GOROOT to find it.
+ if searchpaths == nil {
+ cmd := exec.Command("go", "env", "GOPATH", "GOROOT")
+ cmd.Env = cfg.Env
+ out, err := cmd.Output()
+ if err != nil {
+ searchpaths = []string{}
+ continue // suppress the error, it will show up again when running go list
+ }
+ lines := strings.Split(string(out), "\n")
+ if len(lines) != 3 || lines[0] == "" || lines[1] == "" || lines[2] != "" {
+ continue // suppress error
+ }
+ // first line is GOPATH
+ for _, path := range filepath.SplitList(lines[0]) {
+ searchpaths = append(searchpaths, filepath.Join(path, "src"))
+ }
+ // second line is GOROOT
+ searchpaths = append(searchpaths, filepath.Join(lines[1], "src"))
+ }
+ for _, sp := range searchpaths {
+ if strings.HasPrefix(cleaned[i], sp) {
+ cleaned[i] = strings.TrimPrefix(cleaned[i], sp)
+ cleaned[i] = strings.TrimLeft(cleaned[i], string(filepath.Separator))
+ }
+ }
+ }
+ return cleaned
+}
+
+// vendorlessPath returns the devendorized version of the import path ipath.
+// For example, vendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+// Copied from golang.org/x/tools/imports/fix.go.
+func vendorlessPath(ipath string) string {
+ // Devendorize for use in import statement.
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
+ return ipath[i+len("/vendor/"):]
+ }
+ if strings.HasPrefix(ipath, "vendor/") {
+ return ipath[len("vendor/"):]
+ }
+ return ipath
+}
+
+// getDeps runs an initial go list to determine all the dependency packages.
+func getDeps(cfg *Config, words ...string) (initial []*jsonPackage, deps []string, err error) {
+ buf, err := invokeGo(cfg, golistArgsFallback(cfg, words)...)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ depsSet := make(map[string]bool)
+ var testImports []string
+
+ // Extract deps from the JSON.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ initial = append(initial, p)
+ for _, dep := range p.Deps {
+ depsSet[dep] = true
+ }
+ if cfg.Tests {
+ // collect the additional imports of the test packages.
+ pkgTestImports := append(p.TestImports, p.XTestImports...)
+ for _, imp := range pkgTestImports {
+ if depsSet[imp] {
+ continue
+ }
+ depsSet[imp] = true
+ testImports = append(testImports, imp)
+ }
+ }
+ }
+ // Get the deps of the packages imported by tests.
+ if len(testImports) > 0 {
+ buf, err = invokeGo(cfg, golistArgsFallback(cfg, testImports)...)
+ if err != nil {
+ return nil, nil, err
+ }
+ // Extract deps from the JSON.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+ for _, dep := range p.Deps {
+ depsSet[dep] = true
+ }
+ }
+ }
+
+ for _, orig := range initial {
+ delete(depsSet, orig.ImportPath)
+ }
+
+ deps = make([]string, 0, len(depsSet))
+ for dep := range depsSet {
+ deps = append(deps, dep)
+ }
+ sort.Strings(deps) // ensure output is deterministic
+ return initial, deps, nil
+}
+
+func golistArgsFallback(cfg *Config, words []string) []string {
+ fullargs := []string{"list", "-e", "-json"}
+ fullargs = append(fullargs, cfg.BuildFlags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+func runCgo(pkgdir, tmpdir string, env []string) (files, displayfiles []string, err error) {
+ // Use go/build to open cgo files and determine the cgo flags, etc, from them.
+ // This is tricky so it's best to avoid reimplementing as much as we can, and
+ // we plan to delete this support once Go 1.12 is released anyways.
+ // TODO(matloob): This isn't completely correct because we're using the Default
+ // context. Perhaps we should more accurately fill in the context.
+ bp, err := build.ImportDir(pkgdir, build.ImportMode(0))
+ if err != nil {
+ return nil, nil, err
+ }
+ for _, ev := range env {
+ if v := strings.TrimPrefix(ev, "CGO_CPPFLAGS"); v != ev {
+ bp.CgoCPPFLAGS = append(bp.CgoCPPFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CFLAGS"); v != ev {
+ bp.CgoCFLAGS = append(bp.CgoCFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CXXFLAGS"); v != ev {
+ bp.CgoCXXFLAGS = append(bp.CgoCXXFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_LDFLAGS"); v != ev {
+ bp.CgoLDFLAGS = append(bp.CgoLDFLAGS, strings.Fields(v)...)
+ }
+ }
+ return cgo.Run(bp, pkgdir, tmpdir, true)
+}
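
The devendoring rule used by vendorlessPath above is easy to get wrong by hand, so here is a small standalone sketch of the same logic against a few sample paths. The paths are made up; the function body mirrors the vendored helper.

package main

import (
	"fmt"
	"strings"
)

// vendorlessPath mirrors the helper above: strip everything up to and
// including the last "/vendor/" segment, or a leading "vendor/" prefix.
func vendorlessPath(ipath string) string {
	if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
		return ipath[i+len("/vendor/"):]
	}
	if strings.HasPrefix(ipath, "vendor/") {
		return ipath[len("vendor/"):]
	}
	return ipath
}

func main() {
	for _, p := range []string{"foo/bar/vendor/a/b", "vendor/a/b", "a/b"} {
		fmt.Printf("%-20s -> %s\n", p, vendorlessPath(p))
	}
	// All three inputs map to "a/b".
}
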
diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go
new file mode 100644
index 0000000..128e00e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go
@@ -0,0 +1,318 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is largely based on the Go 1.10-era cmd/go/internal/test/test.go
+// testmain generation code.
+
+package packages
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/doc"
+ "go/parser"
+ "go/token"
+ "os"
+ "sort"
+ "strings"
+ "text/template"
+ "unicode"
+ "unicode/utf8"
+)
+
+// TODO(matloob): Delete this file once Go 1.12 is released.
+
+// This file complements golist_fallback.go by providing
+// support for generating testmains.
+
+func generateTestmain(out string, testPkg, xtestPkg *Package) (extraimports, extradeps []string, err error) {
+ testFuncs, err := loadTestFuncs(testPkg, xtestPkg)
+ if err != nil {
+ return nil, nil, err
+ }
+ extraimports = []string{"testing", "testing/internal/testdeps"}
+ if testFuncs.TestMain == nil {
+ extraimports = append(extraimports, "os")
+ }
+ // Transitive dependencies of ("testing", "testing/internal/testdeps").
+ // os is part of the transitive closure so it and its transitive dependencies are
+ // included regardless of whether it's imported in the template below.
+ extradeps = []string{
+ "errors",
+ "internal/cpu",
+ "unsafe",
+ "internal/bytealg",
+ "internal/race",
+ "runtime/internal/atomic",
+ "runtime/internal/sys",
+ "runtime",
+ "sync/atomic",
+ "sync",
+ "io",
+ "unicode",
+ "unicode/utf8",
+ "bytes",
+ "math",
+ "syscall",
+ "time",
+ "internal/poll",
+ "internal/syscall/unix",
+ "internal/testlog",
+ "os",
+ "math/bits",
+ "strconv",
+ "reflect",
+ "fmt",
+ "sort",
+ "strings",
+ "flag",
+ "runtime/debug",
+ "context",
+ "runtime/trace",
+ "testing",
+ "bufio",
+ "regexp/syntax",
+ "regexp",
+ "compress/flate",
+ "encoding/binary",
+ "hash",
+ "hash/crc32",
+ "compress/gzip",
+ "path/filepath",
+ "io/ioutil",
+ "text/tabwriter",
+ "runtime/pprof",
+ "testing/internal/testdeps",
+ }
+ return extraimports, extradeps, writeTestmain(out, testFuncs)
+}
+
+// The following is adapted from the cmd/go testmain generation code.
+
+// isTestFunc tells whether fn has the type of a testing function. arg
+// specifies the parameter type we look for: B, M or T.
+func isTestFunc(fn *ast.FuncDecl, arg string) bool {
+ if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
+ fn.Type.Params.List == nil ||
+ len(fn.Type.Params.List) != 1 ||
+ len(fn.Type.Params.List[0].Names) > 1 {
+ return false
+ }
+ ptr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr)
+ if !ok {
+ return false
+ }
+ // We can't easily check that the type is *testing.M
+ // because we don't know how testing has been imported,
+ // but at least check that it's *M or *something.M.
+ // Same applies for B and T.
+ if name, ok := ptr.X.(*ast.Ident); ok && name.Name == arg {
+ return true
+ }
+ if sel, ok := ptr.X.(*ast.SelectorExpr); ok && sel.Sel.Name == arg {
+ return true
+ }
+ return false
+}
+
+// isTest tells whether name looks like a test (or benchmark, according to prefix).
+// It is a Test (say) if there is a character after Test that is not a lower-case letter.
+// We don't want TesticularCancer.
+func isTest(name, prefix string) bool {
+ if !strings.HasPrefix(name, prefix) {
+ return false
+ }
+ if len(name) == len(prefix) { // "Test" is ok
+ return true
+ }
+ rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
+ return !unicode.IsLower(rune)
+}
+
+// loadTestFuncs returns the testFuncs describing the tests that will be run.
+func loadTestFuncs(ptest, pxtest *Package) (*testFuncs, error) {
+ t := &testFuncs{
+ TestPackage: ptest,
+ XTestPackage: pxtest,
+ }
+ for _, file := range ptest.GoFiles {
+ if !strings.HasSuffix(file, "_test.go") {
+ continue
+ }
+ if err := t.load(file, "_test", &t.ImportTest, &t.NeedTest); err != nil {
+ return nil, err
+ }
+ }
+ if pxtest != nil {
+ for _, file := range pxtest.GoFiles {
+ if err := t.load(file, "_xtest", &t.ImportXtest, &t.NeedXtest); err != nil {
+ return nil, err
+ }
+ }
+ }
+ return t, nil
+}
+
+// writeTestmain writes the _testmain.go file for t to the file named out.
+func writeTestmain(out string, t *testFuncs) error {
+ f, err := os.Create(out)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ if err := testmainTmpl.Execute(f, t); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+type testFuncs struct {
+ Tests []testFunc
+ Benchmarks []testFunc
+ Examples []testFunc
+ TestMain *testFunc
+ TestPackage *Package
+ XTestPackage *Package
+ ImportTest bool
+ NeedTest bool
+ ImportXtest bool
+ NeedXtest bool
+}
+
+// Tested returns the name of the package being tested.
+func (t *testFuncs) Tested() string {
+ return t.TestPackage.Name
+}
+
+type testFunc struct {
+ Package string // imported package name (_test or _xtest)
+ Name string // function name
+ Output string // output, for examples
+ Unordered bool // output is allowed to be unordered.
+}
+
+func (t *testFuncs) load(filename, pkg string, doImport, seen *bool) error {
+ var fset = token.NewFileSet()
+
+ f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
+ if err != nil {
+ return errors.New("failed to parse test file " + filename)
+ }
+ for _, d := range f.Decls {
+ n, ok := d.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+ if n.Recv != nil {
+ continue
+ }
+ name := n.Name.String()
+ switch {
+ case name == "TestMain":
+ if isTestFunc(n, "T") {
+ t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
+ *doImport, *seen = true, true
+ continue
+ }
+ err := checkTestFunc(fset, n, "M")
+ if err != nil {
+ return err
+ }
+ if t.TestMain != nil {
+ return errors.New("multiple definitions of TestMain")
+ }
+ t.TestMain = &testFunc{pkg, name, "", false}
+ *doImport, *seen = true, true
+ case isTest(name, "Test"):
+ err := checkTestFunc(fset, n, "T")
+ if err != nil {
+ return err
+ }
+ t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
+ *doImport, *seen = true, true
+ case isTest(name, "Benchmark"):
+ err := checkTestFunc(fset, n, "B")
+ if err != nil {
+ return err
+ }
+ t.Benchmarks = append(t.Benchmarks, testFunc{pkg, name, "", false})
+ *doImport, *seen = true, true
+ }
+ }
+ ex := doc.Examples(f)
+ sort.Slice(ex, func(i, j int) bool { return ex[i].Order < ex[j].Order })
+ for _, e := range ex {
+ *doImport = true // import test file whether executed or not
+ if e.Output == "" && !e.EmptyOutput {
+ // Don't run examples with no output.
+ continue
+ }
+ t.Examples = append(t.Examples, testFunc{pkg, "Example" + e.Name, e.Output, e.Unordered})
+ *seen = true
+ }
+ return nil
+}
+
+func checkTestFunc(fset *token.FileSet, fn *ast.FuncDecl, arg string) error {
+ if !isTestFunc(fn, arg) {
+ name := fn.Name.String()
+ pos := fset.Position(fn.Pos())
+ return fmt.Errorf("%s: wrong signature for %s, must be: func %s(%s *testing.%s)", pos, name, name, strings.ToLower(arg), arg)
+ }
+ return nil
+}
+
+var testmainTmpl = template.Must(template.New("main").Parse(`
+package main
+
+import (
+{{if not .TestMain}}
+ "os"
+{{end}}
+ "testing"
+ "testing/internal/testdeps"
+
+{{if .ImportTest}}
+ {{if .NeedTest}}_test{{else}}_{{end}} {{.TestPackage.PkgPath | printf "%q"}}
+{{end}}
+{{if .ImportXtest}}
+ {{if .NeedXtest}}_xtest{{else}}_{{end}} {{.XTestPackage.PkgPath | printf "%q"}}
+{{end}}
+)
+
+var tests = []testing.InternalTest{
+{{range .Tests}}
+ {"{{.Name}}", {{.Package}}.{{.Name}}},
+{{end}}
+}
+
+var benchmarks = []testing.InternalBenchmark{
+{{range .Benchmarks}}
+ {"{{.Name}}", {{.Package}}.{{.Name}}},
+{{end}}
+}
+
+var examples = []testing.InternalExample{
+{{range .Examples}}
+ {"{{.Name}}", {{.Package}}.{{.Name}}, {{.Output | printf "%q"}}, {{.Unordered}}},
+{{end}}
+}
+
+func init() {
+ testdeps.ImportPath = {{.TestPackage.PkgPath | printf "%q"}}
+}
+
+func main() {
+ m := testing.MainStart(testdeps.TestDeps{}, tests, benchmarks, examples)
+{{with .TestMain}}
+ {{.Package}}.{{.Name}}(m)
+{{else}}
+ os.Exit(m.Run())
+{{end}}
+}
+
+`))
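
The test-discovery rules above hinge on isTest: a name qualifies when it is exactly the prefix, or when the first rune after the prefix is not lower case. A short standalone sketch of that rule, using made-up function names, follows.

package main

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// isTest mirrors the helper above: the bare prefix is accepted, and so is any
// name whose first rune after the prefix is not lower case.
func isTest(name, prefix string) bool {
	if !strings.HasPrefix(name, prefix) {
		return false
	}
	if len(name) == len(prefix) { // "Test" alone is ok
		return true
	}
	r, _ := utf8.DecodeRuneInString(name[len(prefix):])
	return !unicode.IsLower(r)
}

func main() {
	for _, n := range []string{"Test", "TestFoo", "Test_foo", "Testify", "BenchmarkX"} {
		fmt.Printf("%-11s Test=%v Benchmark=%v\n", n, isTest(n, "Test"), isTest(n, "Benchmark"))
	}
	// "Testify" is rejected as a test (the 'i' after "Test" is lower case);
	// "Test", "TestFoo", and "Test_foo" are accepted; "BenchmarkX" is a benchmark.
}
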
diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
new file mode 100644
index 0000000..71ffcd9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
@@ -0,0 +1,104 @@
+package packages
+
+import (
+ "go/parser"
+ "go/token"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+// processGolistOverlay provides rudimentary support for adding
+// files that don't exist on disk to an overlay. The results can
+// sometimes be incorrect.
+// TODO(matloob): Handle unsupported cases, including the following:
+// - test files
+// - adding test and non-test files to test variants of packages
+// - determining the correct package to add given a new import path
+// - creating packages that don't exist
+func processGolistOverlay(cfg *Config, response *driverResponse) (modifiedPkgs, needPkgs []string, err error) {
+ havePkgs := make(map[string]string) // importPath -> non-test package ID
+ needPkgsSet := make(map[string]bool)
+ modifiedPkgsSet := make(map[string]bool)
+
+ for _, pkg := range response.Packages {
+ // This is an approximation of import path to id. This can be
+ // wrong for tests, vendored packages, and a number of other cases.
+ havePkgs[pkg.PkgPath] = pkg.ID
+ }
+
+outer:
+ for path, contents := range cfg.Overlay {
+ base := filepath.Base(path)
+ if strings.HasSuffix(path, "_test.go") {
+ // Overlays don't support adding new test files yet.
+ // TODO(matloob): support adding new test files.
+ continue
+ }
+ dir := filepath.Dir(path)
+ for _, pkg := range response.Packages {
+ var dirContains, fileExists bool
+ for _, f := range pkg.GoFiles {
+ if sameFile(filepath.Dir(f), dir) {
+ dirContains = true
+ }
+ if filepath.Base(f) == base {
+ fileExists = true
+ }
+ }
+ if dirContains {
+ if !fileExists {
+ pkg.GoFiles = append(pkg.GoFiles, path) // TODO(matloob): should the file just be added to GoFiles?
+ pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, path)
+ modifiedPkgsSet[pkg.ID] = true
+ }
+ imports, err := extractImports(path, contents)
+ if err != nil {
+ // Let the parser or type checker report errors later.
+ continue outer
+ }
+ for _, imp := range imports {
+ _, found := pkg.Imports[imp]
+ if !found {
+ needPkgsSet[imp] = true
+ // TODO(matloob): Handle cases when the following block isn't correct.
+ // These include imports of test variants, imports of vendored packages, etc.
+ id, ok := havePkgs[imp]
+ if !ok {
+ id = imp
+ }
+ pkg.Imports[imp] = &Package{ID: id}
+ }
+ }
+ continue outer
+ }
+ }
+ }
+
+ needPkgs = make([]string, 0, len(needPkgsSet))
+ for pkg := range needPkgsSet {
+ needPkgs = append(needPkgs, pkg)
+ }
+ modifiedPkgs = make([]string, 0, len(modifiedPkgsSet))
+ for pkg := range modifiedPkgsSet {
+ modifiedPkgs = append(modifiedPkgs, pkg)
+ }
+ return modifiedPkgs, needPkgs, err
+}
+
+func extractImports(filename string, contents []byte) ([]string, error) {
+ f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset?
+ if err != nil {
+ return nil, err
+ }
+ var res []string
+ for _, imp := range f.Imports {
+ quotedPath := imp.Path.Value
+ path, err := strconv.Unquote(quotedPath)
+ if err != nil {
+ return nil, err
+ }
+ res = append(res, path)
+ }
+ return res, nil
+}
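
extractImports above relies on parser.ImportsOnly, which stops parsing right after the import declarations. A self-contained sketch of the same approach on an in-memory file follows; the file name and source text are hypothetical.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strconv"
)

func main() {
	src := []byte(`package demo

import (
	"fmt"
	"net/http"
)

func unused() {} // not parsed in ImportsOnly mode
`)
	// ImportsOnly stops after the import declarations, which is all the
	// overlay processing above needs.
	f, err := parser.ParseFile(token.NewFileSet(), "demo.go", src, parser.ImportsOnly)
	if err != nil {
		panic(err)
	}
	for _, imp := range f.Imports {
		path, err := strconv.Unquote(imp.Path.Value)
		if err != nil {
			panic(err)
		}
		fmt.Println(path) // prints "fmt", then "net/http"
	}
}
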
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
new file mode 100644
index 0000000..1e5836c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -0,0 +1,956 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "golang.org/x/tools/go/gcexportdata"
+)
+
+// A LoadMode specifies the amount of detail to return when loading.
+// Higher-numbered modes cause Load to return more information,
+// but may be slower. Load may return more information than requested.
+type LoadMode int
+
+const (
+ // LoadFiles finds the packages and computes their source file lists.
+ // Package fields: ID, Name, Errors, GoFiles, and OtherFiles.
+ LoadFiles LoadMode = iota
+
+ // LoadImports adds import information for each package
+ // and its dependencies.
+ // Package fields added: Imports.
+ LoadImports
+
+ // LoadTypes adds type information for package-level
+ // declarations in the packages matching the patterns.
+ // Package fields added: Types, Fset, and IllTyped.
+ // This mode uses type information provided by the build system when
+ // possible, and may fill in the ExportFile field.
+ LoadTypes
+
+ // LoadSyntax adds typed syntax trees for the packages matching the patterns.
+ // Package fields added: Syntax, and TypesInfo, for direct pattern matches only.
+ LoadSyntax
+
+ // LoadAllSyntax adds typed syntax trees for the packages matching the patterns
+ // and all dependencies.
+ // Package fields added: Types, Fset, IllTyped, Syntax, and TypesInfo,
+ // for all packages in the import graph.
+ LoadAllSyntax
+)
+
+// A Config specifies details about how packages should be loaded.
+// The zero value is a valid configuration.
+// Calls to Load do not modify this struct.
+type Config struct {
+ // Mode controls the level of information returned for each package.
+ Mode LoadMode
+
+ // Context specifies the context for the load operation.
+ // If the context is cancelled, the loader may stop early
+ // and return an ErrCancelled error.
+ // If Context is nil, the load cannot be cancelled.
+ Context context.Context
+
+ // Dir is the directory in which to run the build system's query tool
+ // that provides information about the packages.
+ // If Dir is empty, the tool is run in the current directory.
+ Dir string
+
+ // Env is the environment to use when invoking the build system's query tool.
+ // If Env is nil, the current environment is used.
+ // As in os/exec's Cmd, only the last value in the slice for
+ // each environment key is used. To specify the setting of only
+ // a few variables, append to the current environment, as in:
+ //
+ // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
+ //
+ Env []string
+
+ // BuildFlags is a list of command-line flags to be passed through to
+ // the build system's query tool.
+ BuildFlags []string
+
+ // Fset provides source position information for syntax trees and types.
+ // If Fset is nil, the loader will create a new FileSet.
+ Fset *token.FileSet
+
+ // ParseFile is called to read and parse each file
+ // when preparing a package's type-checked syntax tree.
+ // It must be safe to call ParseFile simultaneously from multiple goroutines.
+ // If ParseFile is nil, the loader will use parser.ParseFile.
+ //
+ // ParseFile should parse the source from src and use filename only for
+ // recording position information.
+ //
+ // An application may supply a custom implementation of ParseFile
+ // to change the effective file contents or the behavior of the parser,
+ // or to modify the syntax tree. For example, selectively eliminating
+ // unwanted function bodies can significantly accelerate type checking.
+ ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error)
+
+ // If Tests is set, the loader includes not just the packages
+ // matching a particular pattern but also any related test packages,
+ // including test-only variants of the package and the test executable.
+ //
+ // For example, when using the go command, loading "fmt" with Tests=true
+ // returns four packages, with IDs "fmt" (the standard package),
+ // "fmt [fmt.test]" (the package as compiled for the test),
+ // "fmt_test" (the test functions from source files in package fmt_test),
+ // and "fmt.test" (the test binary).
+ //
+ // In build systems with explicit names for tests,
+ // setting Tests may have no effect.
+ Tests bool
+
+ // Overlay provides a mapping of absolute file paths to file contents.
+ // If the file with the given path already exists, the parser will use the
+ // alternative file contents provided by the map.
+ //
+ // Overlays provide incomplete support for when a given file doesn't
+ // already exist on disk. See the package doc above for more details.
+ Overlay map[string][]byte
+}
+
+// driver is the type for functions that query the build system for the
+// packages named by the patterns.
+type driver func(cfg *Config, patterns ...string) (*driverResponse, error)
+
+// driverResponse contains the results for a driver query.
+type driverResponse struct {
+ // Sizes, if not nil, is the types.Sizes to use when type checking.
+ Sizes *types.StdSizes
+
+ // Roots is the set of package IDs that make up the root packages.
+ // We have to encode this separately because when we encode a single package
+ // we cannot know if it is one of the roots as that requires knowledge of the
+ // graph it is part of.
+ Roots []string `json:",omitempty"`
+
+ // Packages is the full set of packages in the graph.
+ // The packages are not connected into a graph.
+ // The Imports, if populated, will be stubs that only have their ID set.
+ // Imports will be connected and then type and syntax information added in a
+ // later pass (see refine).
+ Packages []*Package
+}
+
+// Load loads and returns the Go packages named by the given patterns.
+//
+// Config specifies loading options;
+// nil behaves the same as an empty Config.
+//
+// Load returns an error if any of the patterns was invalid
+// as defined by the underlying build system.
+// It may return an empty list of packages without an error,
+// for instance for an empty expansion of a valid wildcard.
+// Errors associated with a particular package are recorded in the
+// corresponding Package's Errors list, and do not cause Load to
+// return an error. Clients may need to handle such errors before
+// proceeding with further analysis. The PrintErrors function is
+// provided for convenient display of all errors.
+func Load(cfg *Config, patterns ...string) ([]*Package, error) {
+ l := newLoader(cfg)
+ response, err := defaultDriver(&l.Config, patterns...)
+ if err != nil {
+ return nil, err
+ }
+ l.sizes = response.Sizes
+ return l.refine(response.Roots, response.Packages...)
+}
+
+// defaultDriver is a driver that looks for an external driver binary, and if
+// it does not find it falls back to the built in go list driver.
+func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+ driver := findExternalDriver(cfg)
+ if driver == nil {
+ driver = goListDriver
+ }
+ return driver(cfg, patterns...)
+}
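
From the caller's side, Load and the default driver above are typically used roughly as in the sketch below. The pattern, Mode choice, and error handling are illustrative only and are not taken from this repository; they assume the go/packages API exactly as vendored here.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// LoadSyntax asks for parsed and type-checked syntax for the matched packages.
	cfg := &packages.Config{Mode: packages.LoadSyntax}
	pkgs, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err) // invalid pattern, driver failure, etc.
	}
	for _, pkg := range pkgs {
		// Per-package problems are recorded on the package, not returned by Load.
		for _, e := range pkg.Errors {
			fmt.Println("package error:", e)
		}
		fmt.Println(pkg.ID, pkg.PkgPath, len(pkg.Syntax), "parsed files")
	}
}
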
+
+// A Package describes a loaded Go package.
+type Package struct {
+ // ID is a unique identifier for a package,
+ // in a syntax provided by the underlying build system.
+ //
+ // Because the syntax varies based on the build system,
+ // clients should treat IDs as opaque and not attempt to
+ // interpret them.
+ ID string
+
+ // Name is the package name as it appears in the package source code.
+ Name string
+
+ // PkgPath is the package path as used by the go/types package.
+ PkgPath string
+
+ // Errors contains any errors encountered querying the metadata
+ // of the package, or while parsing or type-checking its files.
+ Errors []Error
+
+ // GoFiles lists the absolute file paths of the package's Go source files.
+ GoFiles []string
+
+ // CompiledGoFiles lists the absolute file paths of the package's source
+ // files that were presented to the compiler.
+ // This may differ from GoFiles if files are processed before compilation.
+ CompiledGoFiles []string
+
+ // OtherFiles lists the absolute file paths of the package's non-Go source files,
+ // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
+ OtherFiles []string
+
+ // ExportFile is the absolute path to a file containing type
+ // information for the package as provided by the build system.
+ ExportFile string
+
+ // Imports maps import paths appearing in the package's Go source files
+ // to corresponding loaded Packages.
+ Imports map[string]*Package
+
+ // Types provides type information for the package.
+ // Modes LoadTypes and above set this field for packages matching the
+ // patterns; type information for dependencies may be missing or incomplete.
+ // Mode LoadAllSyntax sets this field for all packages, including dependencies.
+ Types *types.Package
+
+ // Fset provides position information for Types, TypesInfo, and Syntax.
+ // It is set only when Types is set.
+ Fset *token.FileSet
+
+ // IllTyped indicates whether the package or any dependency contains errors.
+ // It is set only when Types is set.
+ IllTyped bool
+
+ // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles.
+ //
+ // Mode LoadSyntax sets this field for packages matching the patterns.
+ // Mode LoadAllSyntax sets this field for all packages, including dependencies.
+ Syntax []*ast.File
+
+ // TypesInfo provides type information about the package's syntax trees.
+ // It is set only when Syntax is set.
+ TypesInfo *types.Info
+
+ // TypesSizes provides the effective size function for types in TypesInfo.
+ TypesSizes types.Sizes
+}
+
+// An Error describes a problem with a package's metadata, syntax, or types.
+type Error struct {
+ Pos string // "file:line:col" or "file:line" or "" or "-"
+ Msg string
+ Kind ErrorKind
+}
+
+// ErrorKind describes the source of the error, allowing the user to
+// differentiate between errors generated by the driver, the parser, or the
+// type-checker.
+type ErrorKind int
+
+const (
+ UnknownError ErrorKind = iota
+ ListError
+ ParseError
+ TypeError
+)
+
+func (err Error) Error() string {
+ pos := err.Pos
+ if pos == "" {
+ pos = "-" // like token.Position{}.String()
+ }
+ return pos + ": " + err.Msg
+}
+
+// flatPackage is the JSON form of Package.
+// It drops all the type and syntax fields, and transforms the Imports
+// into a map from import path to package ID.
+//
+// TODO(adonovan): identify this struct with Package, effectively
+// publishing the JSON protocol.
+type flatPackage struct {
+ ID string
+ Name string `json:",omitempty"`
+ PkgPath string `json:",omitempty"`
+ Errors []Error `json:",omitempty"`
+ GoFiles []string `json:",omitempty"`
+ CompiledGoFiles []string `json:",omitempty"`
+ OtherFiles []string `json:",omitempty"`
+ ExportFile string `json:",omitempty"`
+ Imports map[string]string `json:",omitempty"`
+}
+
+// MarshalJSON returns the Package in its JSON form.
+// For the most part, the structure fields are written out unmodified, and
+// the type and syntax fields are skipped.
+// The imports are written out as just a map of path to package id.
+// The errors are written using a custom type that tries to preserve the
+// structure of error types we know about.
+//
+// This method exists to enable support for additional build systems. It is
+// not intended for use by clients of the API and we may change the format.
+func (p *Package) MarshalJSON() ([]byte, error) {
+ flat := &flatPackage{
+ ID: p.ID,
+ Name: p.Name,
+ PkgPath: p.PkgPath,
+ Errors: p.Errors,
+ GoFiles: p.GoFiles,
+ CompiledGoFiles: p.CompiledGoFiles,
+ OtherFiles: p.OtherFiles,
+ ExportFile: p.ExportFile,
+ }
+ if len(p.Imports) > 0 {
+ flat.Imports = make(map[string]string, len(p.Imports))
+ for path, ipkg := range p.Imports {
+ flat.Imports[path] = ipkg.ID
+ }
+ }
+ return json.Marshal(flat)
+}
+
+// UnmarshalJSON reads in a Package from its JSON format.
+// See MarshalJSON for details about the format accepted.
+func (p *Package) UnmarshalJSON(b []byte) error {
+ flat := &flatPackage{}
+ if err := json.Unmarshal(b, &flat); err != nil {
+ return err
+ }
+ *p = Package{
+ ID: flat.ID,
+ Name: flat.Name,
+ PkgPath: flat.PkgPath,
+ Errors: flat.Errors,
+ GoFiles: flat.GoFiles,
+ CompiledGoFiles: flat.CompiledGoFiles,
+ OtherFiles: flat.OtherFiles,
+ ExportFile: flat.ExportFile,
+ }
+ if len(flat.Imports) > 0 {
+ p.Imports = make(map[string]*Package, len(flat.Imports))
+ for path, id := range flat.Imports {
+ p.Imports[path] = &Package{ID: id}
+ }
+ }
+ return nil
+}
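
To make the wire format of MarshalJSON/UnmarshalJSON above concrete, the sketch below marshals a hand-built Package. The package IDs and file path are invented, and the printed JSON is only indicative of the shape: zero-valued fields are omitted, and Imports flattens to a path-to-ID map.

package main

import (
	"encoding/json"
	"fmt"

	"golang.org/x/tools/go/packages"
)

func main() {
	p := &packages.Package{
		ID:      "example.com/a",
		Name:    "a",
		PkgPath: "example.com/a",
		GoFiles: []string{"/src/example.com/a/a.go"},
		Imports: map[string]*packages.Package{
			"fmt": {ID: "fmt"},
		},
	}
	b, err := json.Marshal(p) // goes through the MarshalJSON above
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
	// Prints something like:
	// {"ID":"example.com/a","Name":"a","PkgPath":"example.com/a","GoFiles":["/src/example.com/a/a.go"],"Imports":{"fmt":"fmt"}}
}
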
+
+func (p *Package) String() string { return p.ID }
+
+// loaderPackage augments Package with state used during the loading phase
+type loaderPackage struct {
+ *Package
+ importErrors map[string]error // maps each bad import to its error
+ loadOnce sync.Once
+ color uint8 // for cycle detection
+ needsrc bool // load from source (Mode >= LoadTypes)
+ needtypes bool // type information is either requested or depended on
+ initial bool // package was matched by a pattern
+}
+
+// loader holds the working state of a single call to load.
+type loader struct {
+ pkgs map[string]*loaderPackage
+ Config
+ sizes types.Sizes
+ exportMu sync.Mutex // enforces mutual exclusion of exportdata operations
+}
+
+func newLoader(cfg *Config) *loader {
+ ld := &loader{}
+ if cfg != nil {
+ ld.Config = *cfg
+ }
+ if ld.Config.Env == nil {
+ ld.Config.Env = os.Environ()
+ }
+ if ld.Context == nil {
+ ld.Context = context.Background()
+ }
+ if ld.Dir == "" {
+ if dir, err := os.Getwd(); err == nil {
+ ld.Dir = dir
+ }
+ }
+
+ if ld.Mode >= LoadTypes {
+ if ld.Fset == nil {
+ ld.Fset = token.NewFileSet()
+ }
+
+ // ParseFile is required even in LoadTypes mode
+ // because we load source if export data is missing.
+ if ld.ParseFile == nil {
+ ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
+ var isrc interface{}
+ if src != nil {
+ isrc = src
+ }
+ const mode = parser.AllErrors | parser.ParseComments
+ return parser.ParseFile(fset, filename, isrc, mode)
+ }
+ }
+ }
+ return ld
+}
+
+// refine connects the supplied packages into a graph and then adds type
+// and syntax information as requested by the LoadMode.
+func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
+ rootMap := make(map[string]int, len(roots))
+ for i, root := range roots {
+ rootMap[root] = i
+ }
+ ld.pkgs = make(map[string]*loaderPackage)
+ // first pass, fixup and build the map and roots
+ var initial = make([]*loaderPackage, len(roots))
+ for _, pkg := range list {
+ rootIndex := -1
+ if i, found := rootMap[pkg.ID]; found {
+ rootIndex = i
+ }
+ lpkg := &loaderPackage{
+ Package: pkg,
+ needtypes: ld.Mode >= LoadAllSyntax ||
+ ld.Mode >= LoadTypes && rootIndex >= 0,
+ needsrc: ld.Mode >= LoadAllSyntax ||
+ ld.Mode >= LoadSyntax && rootIndex >= 0 ||
+ len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
+ pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
+ }
+ ld.pkgs[lpkg.ID] = lpkg
+ if rootIndex >= 0 {
+ initial[rootIndex] = lpkg
+ lpkg.initial = true
+ }
+ }
+ for i, root := range roots {
+ if initial[i] == nil {
+ return nil, fmt.Errorf("root package %v is missing", root)
+ }
+ }
+
+ // Materialize the import graph.
+
+ const (
+ white = 0 // new
+ grey = 1 // in progress
+ black = 2 // complete
+ )
+
+ // visit traverses the import graph, depth-first,
+ // and materializes the graph as Packages.Imports.
+ //
+ // Valid imports are saved in the Packages.Import map.
+ // Invalid imports (cycles and missing nodes) are saved in the importErrors map.
+ // Thus, even in the presence of both kinds of errors, the Import graph remains a DAG.
+ //
+ // visit returns whether the package needs src or has a transitive
+ // dependency on a package that does. These are the only packages
+ // for which we load source code.
+ var stack []*loaderPackage
+ var visit func(lpkg *loaderPackage) bool
+ var srcPkgs []*loaderPackage
+ visit = func(lpkg *loaderPackage) bool {
+ switch lpkg.color {
+ case black:
+ return lpkg.needsrc
+ case grey:
+ panic("internal error: grey node")
+ }
+ lpkg.color = grey
+ stack = append(stack, lpkg) // push
+ stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
+ lpkg.Imports = make(map[string]*Package, len(stubs))
+ for importPath, ipkg := range stubs {
+ var importErr error
+ imp := ld.pkgs[ipkg.ID]
+ if imp == nil {
+ // (includes package "C" when DisableCgo)
+ importErr = fmt.Errorf("missing package: %q", ipkg.ID)
+ } else if imp.color == grey {
+ importErr = fmt.Errorf("import cycle: %s", stack)
+ }
+ if importErr != nil {
+ if lpkg.importErrors == nil {
+ lpkg.importErrors = make(map[string]error)
+ }
+ lpkg.importErrors[importPath] = importErr
+ continue
+ }
+
+ if visit(imp) {
+ lpkg.needsrc = true
+ }
+ lpkg.Imports[importPath] = imp.Package
+ }
+ if lpkg.needsrc {
+ srcPkgs = append(srcPkgs, lpkg)
+ }
+ stack = stack[:len(stack)-1] // pop
+ lpkg.color = black
+
+ return lpkg.needsrc
+ }
+
+ if ld.Mode < LoadImports {
+ // We do this to drop the stub import packages that we are not even going to try to resolve.
+ for _, lpkg := range initial {
+ lpkg.Imports = nil
+ }
+ } else {
+ // For each initial package, create its import DAG.
+ for _, lpkg := range initial {
+ visit(lpkg)
+ }
+ }
+ for _, lpkg := range srcPkgs {
+ // Complete type information is required for the
+ // immediate dependencies of each source package.
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ imp.needtypes = true
+ }
+ }
+ // Load type data if needed, starting at
+ // the initial packages (roots of the import DAG).
+ if ld.Mode >= LoadTypes {
+ var wg sync.WaitGroup
+ for _, lpkg := range initial {
+ wg.Add(1)
+ go func(lpkg *loaderPackage) {
+ ld.loadRecursive(lpkg)
+ wg.Done()
+ }(lpkg)
+ }
+ wg.Wait()
+ }
+
+ result := make([]*Package, len(initial))
+ for i, lpkg := range initial {
+ result[i] = lpkg.Package
+ }
+ return result, nil
+}
+
+// loadRecursive loads the specified package and its dependencies,
+// recursively, in parallel, in topological order.
+// It is atomic and idempotent.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadRecursive(lpkg *loaderPackage) {
+ lpkg.loadOnce.Do(func() {
+ // Load the direct dependencies, in parallel.
+ var wg sync.WaitGroup
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ wg.Add(1)
+ go func(imp *loaderPackage) {
+ ld.loadRecursive(imp)
+ wg.Done()
+ }(imp)
+ }
+ wg.Wait()
+
+ ld.loadPackage(lpkg)
+ })
+}
+
+// loadPackage loads the specified package.
+// It must be called only once per Package,
+// after immediate dependencies are loaded.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadPackage(lpkg *loaderPackage) {
+ if lpkg.PkgPath == "unsafe" {
+ // Fill in the blanks to avoid surprises.
+ lpkg.Types = types.Unsafe
+ lpkg.Fset = ld.Fset
+ lpkg.Syntax = []*ast.File{}
+ lpkg.TypesInfo = new(types.Info)
+ lpkg.TypesSizes = ld.sizes
+ return
+ }
+
+ // Call NewPackage directly with explicit name.
+ // This avoids skew between golist and go/types when the files'
+ // package declarations are inconsistent.
+ lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
+ lpkg.Fset = ld.Fset
+
+ // Subtle: we populate all Types fields with an empty Package
+ // before loading export data so that export data processing
+ // never has to create a types.Package for an indirect dependency,
+ // which would then require that such created packages be explicitly
+ // inserted back into the Import graph as a final step after export data loading.
+ // The Diamond test exercises this case.
+ if !lpkg.needtypes {
+ return
+ }
+ if !lpkg.needsrc {
+ ld.loadFromExportData(lpkg)
+ return // not a source package, don't get syntax trees
+ }
+
+ appendError := func(err error) {
+ // Convert various error types into the one true Error.
+ var errs []Error
+ switch err := err.(type) {
+ case Error:
+ // from driver
+ errs = append(errs, err)
+
+ case *os.PathError:
+ // from parser
+ errs = append(errs, Error{
+ Pos: err.Path + ":1",
+ Msg: err.Err.Error(),
+ Kind: ParseError,
+ })
+
+ case scanner.ErrorList:
+ // from parser
+ for _, err := range err {
+ errs = append(errs, Error{
+ Pos: err.Pos.String(),
+ Msg: err.Msg,
+ Kind: ParseError,
+ })
+ }
+
+ case types.Error:
+ // from type checker
+ errs = append(errs, Error{
+ Pos: err.Fset.Position(err.Pos).String(),
+ Msg: err.Msg,
+ Kind: TypeError,
+ })
+
+ default:
+ // unexpected impoverished error from parser?
+ errs = append(errs, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError,
+ })
+
+ // If you see this error message, please file a bug.
+ log.Printf("internal error: error %q (%T) without position", err, err)
+ }
+
+ lpkg.Errors = append(lpkg.Errors, errs...)
+ }
+
+ files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
+ for _, err := range errs {
+ appendError(err)
+ }
+
+ lpkg.Syntax = files
+
+ lpkg.TypesInfo = &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ lpkg.TypesSizes = ld.sizes
+
+ importer := importerFunc(func(path string) (*types.Package, error) {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // The imports map is keyed by import path.
+ ipkg := lpkg.Imports[path]
+ if ipkg == nil {
+ if err := lpkg.importErrors[path]; err != nil {
+ return nil, err
+ }
+ // There was skew between the metadata and the
+ // import declarations, likely due to an edit
+ // race, or because the ParseFile feature was
+ // used to supply alternative file contents.
+ return nil, fmt.Errorf("no metadata for %s", path)
+ }
+
+ if ipkg.Types != nil && ipkg.Types.Complete() {
+ return ipkg.Types, nil
+ }
+ log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg)
+ panic("unreachable")
+ })
+
+ // type-check
+ tc := &types.Config{
+ Importer: importer,
+
+ // Type-check bodies of functions only in non-initial packages.
+ // Example: for import graph A->B->C and initial packages {A,C},
+ // we can ignore function bodies in B.
+ IgnoreFuncBodies: ld.Mode < LoadAllSyntax && !lpkg.initial,
+
+ Error: appendError,
+ Sizes: ld.sizes,
+ }
+ types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
+
+ lpkg.importErrors = nil // no longer needed
+
+ // If !Cgo, the type-checker uses FakeImportC mode, so
+ // it doesn't invoke the importer for import "C",
+ // nor report an error for the import,
+ // or for any undefined C.f reference.
+ // We must detect this explicitly and correctly
+ // mark the package as IllTyped (by reporting an error).
+ // TODO(adonovan): if these errors are annoying,
+ // we could just set IllTyped quietly.
+ if tc.FakeImportC {
+ outer:
+ for _, f := range lpkg.Syntax {
+ for _, imp := range f.Imports {
+ if imp.Path.Value == `"C"` {
+ err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`}
+ appendError(err)
+ break outer
+ }
+ }
+ }
+ }
+
+ // Record accumulated errors.
+ illTyped := len(lpkg.Errors) > 0
+ if !illTyped {
+ for _, imp := range lpkg.Imports {
+ if imp.IllTyped {
+ illTyped = true
+ break
+ }
+ }
+ }
+ lpkg.IllTyped = illTyped
+}
+
+// An importFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
+type importerFunc func(path string) (*types.Package, error)
+
+func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 20)
+
+// parseFiles reads and parses the Go source files and returns the ASTs
+// of the ones that could be at least partially parsed, along with a
+// list of I/O and parse errors encountered.
+//
+// Because files are scanned in parallel, the token.Pos
+// positions of the resulting ast.Files are not ordered.
+//
+func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
+ var wg sync.WaitGroup
+ n := len(filenames)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range filenames {
+ if ld.Config.Context.Err() != nil {
+ parsed[i] = nil
+ errors[i] = ld.Config.Context.Err()
+ continue
+ }
+ wg.Add(1)
+ go func(i int, filename string) {
+ ioLimit <- true // wait
+ // ParseFile may return both an AST and an error.
+ var src []byte
+ for f, contents := range ld.Config.Overlay {
+ if sameFile(f, filename) {
+ src = contents
+ }
+ }
+ var err error
+ if src == nil {
+ src, err = ioutil.ReadFile(filename)
+ }
+ if err != nil {
+ parsed[i], errors[i] = nil, err
+ } else {
+ parsed[i], errors[i] = ld.ParseFile(ld.Fset, filename, src)
+ }
+ <-ioLimit // signal
+ wg.Done()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+//
+func sameFile(x, y string) bool {
+ if x == y {
+ // It could be the case that y doesn't exist.
+ // For instance, it may be an overlay file that
+ // hasn't been written to disk. To handle that case
+ // let x == y through. (We added the exact absolute path
+ // string to the CompiledGoFiles list, so the unwritten
+ // overlay case implies x==y.)
+ return true
+ }
+ if strings.EqualFold(filepath.Base(x), filepath.Base(y)) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
+
+// loadFromExportData returns type information for the specified
+// package, loading it from an export data file on the first request.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+ if lpkg.PkgPath == "" {
+ log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
+ }
+
+ // Because gcexportdata.Read has the potential to create or
+ // modify the types.Package for each node in the transitive
+ // closure of dependencies of lpkg, all exportdata operations
+ // must be sequential. (Finer-grained locking would require
+ // changes to the gcexportdata API.)
+ //
+ // The exportMu lock guards the Package.Pkg field and the
+ // types.Package it points to, for each Package in the graph.
+ //
+ // Not all accesses to Package.Pkg need to be protected by exportMu:
+ // graph ordering ensures that direct dependencies of source
+ // packages are fully loaded before the importer reads their Pkg field.
+ ld.exportMu.Lock()
+ defer ld.exportMu.Unlock()
+
+ if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
+ return tpkg, nil // cache hit
+ }
+
+ lpkg.IllTyped = true // fail safe
+
+ if lpkg.ExportFile == "" {
+ // Errors while building export data will have been printed to stderr.
+ return nil, fmt.Errorf("no export data file")
+ }
+ f, err := os.Open(lpkg.ExportFile)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ // Read gc export data.
+ //
+ // We don't currently support gccgo export data because all
+ // underlying workspaces use the gc toolchain. (Even build
+ // systems that support gccgo don't use it for workspace
+ // queries.)
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+
+ // Build the view.
+ //
+ // The gcexportdata machinery has no concept of package ID.
+ // It identifies packages by their PkgPath, which although not
+ // globally unique is unique within the scope of one invocation
+ // of the linker, type-checker, or gcexportdata.
+ //
+ // So, we must build a PkgPath-keyed view of the global
+ // (conceptually ID-keyed) cache of packages and pass it to
+ // gcexportdata. The view must contain every existing
+ // package that might possibly be mentioned by the
+ // current package---its transitive closure.
+ //
+ // In loadPackage, we unconditionally create a types.Package for
+ // each dependency so that export data loading does not
+ // create new ones.
+ //
+ // TODO(adonovan): it would be simpler and more efficient
+ // if the export data machinery invoked a callback to
+ // get-or-create a package instead of a map.
+ //
+ view := make(map[string]*types.Package) // view seen by gcexportdata
+ seen := make(map[*loaderPackage]bool) // all visited packages
+ var visit func(pkgs map[string]*Package)
+ visit = func(pkgs map[string]*Package) {
+ for _, p := range pkgs {
+ lpkg := ld.pkgs[p.ID]
+ if !seen[lpkg] {
+ seen[lpkg] = true
+ view[lpkg.PkgPath] = lpkg.Types
+ visit(lpkg.Imports)
+ }
+ }
+ }
+ visit(lpkg.Imports)
+
+ viewLen := len(view) + 1 // adding the self package
+ // Parse the export data.
+ // (May modify incomplete packages in view but not create new ones.)
+ tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+ if viewLen != len(view) {
+ log.Fatalf("Unexpected package creation during export data loading")
+ }
+
+ lpkg.Types = tpkg
+ lpkg.IllTyped = false
+
+ return tpkg, nil
+}
+
+func usesExportData(cfg *Config) bool {
+ return LoadTypes <= cfg.Mode && cfg.Mode < LoadAllSyntax
+}
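
For context, here is a minimal sketch (not part of the patch) of how the loader above is normally driven through this package's `Load` entry point. The `"fmt"` pattern and the printed fields are only illustrative; `LoadAllSyntax` is one of the mode constants used by the code above.

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// LoadAllSyntax asks the loader for parsed files and full type
	// information for the matched packages and all of their dependencies.
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	pkgs, err := packages.Load(cfg, "fmt") // "fmt" is just an example pattern
	if err != nil {
		log.Fatal(err) // Load itself failed; per-package problems land in pkg.Errors
	}
	for _, pkg := range pkgs {
		fmt.Println(pkg.PkgPath, "files:", len(pkg.Syntax), "ill-typed:", pkg.IllTyped)
	}
}
```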
diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go
new file mode 100644
index 0000000..b13cb08
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/visit.go
@@ -0,0 +1,55 @@
+package packages
+
+import (
+ "fmt"
+ "os"
+ "sort"
+)
+
+// Visit visits all the packages in the import graph whose roots are
+// pkgs, calling the optional pre function the first time each package
+// is encountered (preorder), and the optional post function after a
+// package's dependencies have been visited (postorder).
+// The boolean result of pre(pkg) determines whether
+// the imports of package pkg are visited.
+func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) {
+ seen := make(map[*Package]bool)
+ var visit func(*Package)
+ visit = func(pkg *Package) {
+ if !seen[pkg] {
+ seen[pkg] = true
+
+ if pre == nil || pre(pkg) {
+ paths := make([]string, 0, len(pkg.Imports))
+ for path := range pkg.Imports {
+ paths = append(paths, path)
+ }
+ sort.Strings(paths) // Imports is a map, this makes visit stable
+ for _, path := range paths {
+ visit(pkg.Imports[path])
+ }
+ }
+
+ if post != nil {
+ post(pkg)
+ }
+ }
+ }
+ for _, pkg := range pkgs {
+ visit(pkg)
+ }
+}
+
+// PrintErrors prints to os.Stderr the accumulated errors of all
+// packages in the import graph rooted at pkgs, dependencies first.
+// PrintErrors returns the number of errors printed.
+func PrintErrors(pkgs []*Package) int {
+ var n int
+ Visit(pkgs, nil, func(pkg *Package) {
+ for _, err := range pkg.Errors {
+ fmt.Fprintln(os.Stderr, err)
+ n++
+ }
+ })
+ return n
+}
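
A small usage sketch (outside the patch) for the two helpers above: `Visit` walks the import graph in dependency-first order, and `PrintErrors` reports every accumulated error. The `"./..."` pattern is illustrative.

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadImports}, "./...")
	if err != nil {
		log.Fatal(err)
	}
	// Postorder: a package is printed only after all of its imports.
	packages.Visit(pkgs, nil, func(p *packages.Package) {
		fmt.Println(p.ID)
	})
	if n := packages.PrintErrors(pkgs); n > 0 {
		log.Printf("%d errors in loaded packages", n)
	}
}
```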
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
new file mode 100644
index 0000000..38f596d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go
@@ -0,0 +1,46 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// Callee returns the named target of a function call, if any:
+// a function, method, builtin, or variable.
+func Callee(info *types.Info, call *ast.CallExpr) types.Object {
+ var obj types.Object
+ switch fun := astutil.Unparen(call.Fun).(type) {
+ case *ast.Ident:
+ obj = info.Uses[fun] // type, var, builtin, or declared func
+ case *ast.SelectorExpr:
+ if sel, ok := info.Selections[fun]; ok {
+ obj = sel.Obj() // method or field
+ } else {
+ obj = info.Uses[fun.Sel] // qualified identifier?
+ }
+ }
+ if _, ok := obj.(*types.TypeName); ok {
+ return nil // T(x) is a conversion, not a call
+ }
+ return obj
+}
+
+// StaticCallee returns the target (function or method) of a static
+// function call, if any. It returns nil for calls to builtins.
+func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func {
+ if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) {
+ return f
+ }
+ return nil
+}
+
+func interfaceMethod(f *types.Func) bool {
+ recv := f.Type().(*types.Signature).Recv()
+ return recv != nil && types.IsInterface(recv.Type())
+ recv := f.Type().(*types.Signature).Recv()
+ return recv != nil && types.IsInterface(recv.Type())
+}
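
A self-contained sketch (not part of the patch) showing `StaticCallee` on a type-checked snippet; the tiny source string and the `add` function are made up for the example, and the file has no imports so no `types.Importer` is needed.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/types/typeutil"
)

const src = `package p
func add(x, y int) int { return x + y }
var sum = add(1, 2)
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	// Callee consults the Uses and Selections maps, so populate both.
	info := &types.Info{
		Uses:       make(map[*ast.Ident]types.Object),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	if _, err := new(types.Config).Check("p", fset, []*ast.File{file}, info); err != nil {
		log.Fatal(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		if call, ok := n.(*ast.CallExpr); ok {
			if fn := typeutil.StaticCallee(info, call); fn != nil {
				fmt.Println("static call to", fn.FullName()) // p.add
			}
		}
		return true
	})
}
```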
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644
index 0000000..9c441db
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
@@ -0,0 +1,31 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+//
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+ var result []*types.Package
+ seen := make(map[*types.Package]bool)
+ var visit func(pkgs []*types.Package)
+ visit = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !seen[p] {
+ seen[p] = true
+ visit(p.Imports())
+ result = append(result, p)
+ }
+ }
+ }
+ visit(pkgs)
+ return result
+}
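
A quick sketch (outside the patch) of the ordering guarantee above, using synthetic packages wired together with `SetImports`; the paths are invented for the example.

```go
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	a := types.NewPackage("example.org/a", "a")
	b := types.NewPackage("example.org/b", "b")
	c := types.NewPackage("example.org/c", "c")
	a.SetImports([]*types.Package{b}) // a imports b
	b.SetImports([]*types.Package{c}) // b imports c

	for _, p := range typeutil.Dependencies(a) {
		fmt.Println(p.Path()) // c, b, a: dependencies appear before their importers
	}
}
```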
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644
index 0000000..c7f7545
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -0,0 +1,313 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to interface{} values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary interface{} values. The concrete types that implement
+// the Type interface are pointers. Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+//
+type Map struct {
+ hasher Hasher // shared by many Maps
+ table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+ length int // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+ key types.Type
+ value interface{}
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps. This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen. Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+// Hashers are not thread-safe, and even read-only operations such as
+// Map.At require updates to the hasher, so a full Mutex lock (not a
+// read-lock) is required around all Map operations if a shared
+// hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Set.
+//
+func (m *Map) SetHasher(hasher Hasher) {
+ m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+//
+func (m *Map) Delete(key types.Type) bool {
+ if m != nil && m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ for i, e := range bucket {
+ if e.key != nil && types.Identical(key, e.key) {
+ // We can't compact the bucket as it
+ // would disturb iterators.
+ bucket[i] = entry{}
+ m.length--
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+//
+func (m *Map) At(key types.Type) interface{} {
+ if m != nil && m.table != nil {
+ for _, e := range m.table[m.hasher.Hash(key)] {
+ if e.key != nil && types.Identical(key, e.key) {
+ return e.value
+ }
+ }
+ }
+ return nil
+}
+
+// Set sets the map entry for key to val,
+// and returns the previous entry, if any.
+func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+ if m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ var hole *entry
+ for i, e := range bucket {
+ if e.key == nil {
+ hole = &bucket[i]
+ } else if types.Identical(key, e.key) {
+ prev = e.value
+ bucket[i].value = value
+ return
+ }
+ }
+
+ if hole != nil {
+ *hole = entry{key, value} // overwrite deleted entry
+ } else {
+ m.table[hash] = append(bucket, entry{key, value})
+ }
+ } else {
+ if m.hasher.memo == nil {
+ m.hasher = MakeHasher()
+ }
+ hash := m.hasher.Hash(key)
+ m.table = map[uint32][]entry{hash: {entry{key, value}}}
+ }
+
+ m.length++
+ return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+ if m != nil {
+ return m.length
+ }
+ return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+//
+func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+ if m != nil {
+ for _, bucket := range m.table {
+ for _, e := range bucket {
+ if e.key != nil {
+ f(e.key, e.value)
+ }
+ }
+ }
+ }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+ keys := make([]types.Type, 0, m.Len())
+ m.Iterate(func(key types.Type, _ interface{}) {
+ keys = append(keys, key)
+ })
+ return keys
+}
+
+func (m *Map) toString(values bool) string {
+ if m == nil {
+ return "{}"
+ }
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "{")
+ sep := ""
+ m.Iterate(func(key types.Type, value interface{}) {
+ fmt.Fprint(&buf, sep)
+ sep = ", "
+ fmt.Fprint(&buf, key)
+ if values {
+ fmt.Fprintf(&buf, ": %q", value)
+ }
+ })
+ fmt.Fprint(&buf, "}")
+ return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+//
+func (m *Map) String() string {
+ return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+//
+func (m *Map) KeysString() string {
+ return m.toString(false)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+ memo map[types.Type]uint32
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+ return Hasher{make(map[types.Type]uint32)}
+}
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+ hash, ok := h.memo[t]
+ if !ok {
+ hash = h.hashFor(t)
+ h.memo[t] = hash
+ }
+ return hash
+}
+
+// hashString computes the Fowler–Noll–Vo hash of s.
+func hashString(s string) uint32 {
+ var h uint32
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+ // See Identical for rationale.
+ switch t := t.(type) {
+ case *types.Basic:
+ return uint32(t.Kind())
+
+ case *types.Array:
+ return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+ case *types.Slice:
+ return 9049 + 2*h.Hash(t.Elem())
+
+ case *types.Struct:
+ var hash uint32 = 9059
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ f := t.Field(i)
+ if f.Anonymous() {
+ hash += 8861
+ }
+ hash += hashString(t.Tag(i))
+ hash += hashString(f.Name()) // (ignore f.Pkg)
+ hash += h.Hash(f.Type())
+ }
+ return hash
+
+ case *types.Pointer:
+ return 9067 + 2*h.Hash(t.Elem())
+
+ case *types.Signature:
+ var hash uint32 = 9091
+ if t.Variadic() {
+ hash *= 8863
+ }
+ return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+ case *types.Interface:
+ var hash uint32 = 9103
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ // See go/types.identicalMethods for rationale.
+ // Method order is not significant.
+ // Ignore m.Pkg().
+ m := t.Method(i)
+ hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+ }
+ return hash
+
+ case *types.Map:
+ return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+ case *types.Chan:
+ return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+ case *types.Named:
+ // Not safe with a copying GC; objects may move.
+ return uint32(reflect.ValueOf(t.Obj()).Pointer())
+
+ case *types.Tuple:
+ return h.hashTuple(t)
+ }
+ panic(t)
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+ // See go/types.identicalTypes for rationale.
+ n := tuple.Len()
+ var hash uint32 = 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 3 * h.Hash(tuple.At(i).Type())
+ }
+ return hash
+}
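
A minimal sketch (not part of the patch) of what the Map above buys you over a plain Go map: two structurally identical but distinct `[]int` values hash and compare as the same key.

```go
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var m typeutil.Map // the zero Map is ready to use

	key1 := types.NewSlice(types.Typ[types.Int]) // []int
	key2 := types.NewSlice(types.Typ[types.Int]) // a distinct, but identical, []int

	m.Set(key1, "first")
	prev := m.Set(key2, "second") // same logical key: the types are identical
	fmt.Println(prev)             // first
	fmt.Println(m.Len())          // 1
	fmt.Println(m.At(key1))       // second
}
```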
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644
index 0000000..3208461
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+ "go/types"
+ "sync"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+ mu sync.Mutex
+ named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+ others map[types.Type]*types.MethodSet // all other types
+}
+
+// MethodSet returns the method set of type T. It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+//
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+ if cache == nil {
+ return types.NewMethodSet(T)
+ }
+ cache.mu.Lock()
+ defer cache.mu.Unlock()
+
+ switch T := T.(type) {
+ case *types.Named:
+ return cache.lookupNamed(T).value
+
+ case *types.Pointer:
+ if N, ok := T.Elem().(*types.Named); ok {
+ return cache.lookupNamed(N).pointer
+ }
+ }
+
+ // all other types
+ // (The map uses pointer equivalence, not type identity.)
+ mset := cache.others[T]
+ if mset == nil {
+ mset = types.NewMethodSet(T)
+ if cache.others == nil {
+ cache.others = make(map[types.Type]*types.MethodSet)
+ }
+ cache.others[T] = mset
+ }
+ return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+ if cache.named == nil {
+ cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+ }
+ // Avoid recomputing mset(*T) for each distinct Pointer
+ // instance whose underlying type is a named type.
+ msets, ok := cache.named[named]
+ if !ok {
+ msets.value = types.NewMethodSet(named)
+ msets.pointer = types.NewMethodSet(types.NewPointer(named))
+ cache.named[named] = msets
+ }
+ return msets
+}
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644
index 0000000..9849c24
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
@@ -0,0 +1,52 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import "go/types"
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+// The order of the result is as for types.MethodSet(T).
+//
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+ isPointerToConcrete := func(T types.Type) bool {
+ ptr, ok := T.(*types.Pointer)
+ return ok && !types.IsInterface(ptr.Elem())
+ }
+
+ var result []*types.Selection
+ mset := msets.MethodSet(T)
+ if types.IsInterface(T) || isPointerToConcrete(T) {
+ for i, n := 0, mset.Len(); i < n; i++ {
+ result = append(result, mset.At(i))
+ }
+ } else {
+ // T is some other concrete type.
+ // Report methods of T and *T, preferring those of T.
+ pmset := msets.MethodSet(types.NewPointer(T))
+ for i, n := 0, pmset.Len(); i < n; i++ {
+ meth := pmset.At(i)
+ if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+ meth = m
+ }
+ result = append(result, meth)
+ }
+
+ }
+ return result
+}
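
A sketch (outside the patch) exercising both `MethodSetCache` and `IntuitiveMethodSet` on a type-checked snippet; the type `T` and its methods are invented for the example.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/types/typeutil"
)

const src = `package p
type T struct{}
func (T) Value()    {}
func (*T) Pointer() {}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		log.Fatal(err)
	}
	T := pkg.Scope().Lookup("T").Type()

	var cache typeutil.MethodSetCache // the zero value is ready to use
	fmt.Println(cache.MethodSet(T).Len())                   // 1: only Value is in T's method set
	fmt.Println(cache.MethodSet(types.NewPointer(T)).Len()) // 2: *T also has Pointer

	// IntuitiveMethodSet reports what you can call on an addressable T.
	for _, sel := range typeutil.IntuitiveMethodSet(T, &cache) {
		fmt.Println(sel.Obj().Name()) // Pointer and Value
	}
}
```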
diff --git a/vendor/golang.org/x/tools/imports/fix.go b/vendor/golang.org/x/tools/imports/fix.go
new file mode 100644
index 0000000..4c03393
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/fix.go
@@ -0,0 +1,1259 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "os/exec"
+ "path"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/packages"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// Debug controls verbose logging.
+var Debug = false
+
+// LocalPrefix is a comma-separated string of import path prefixes, which, if
+// set, instructs Process to sort the import paths with the given prefixes
+// into another group after 3rd-party packages.
+var LocalPrefix string
+
+func localPrefixes() []string {
+ if LocalPrefix != "" {
+ return strings.Split(LocalPrefix, ",")
+ }
+ return nil
+}
+
+// importToGroup is a list of functions which map from an import path to
+// a group number.
+var importToGroup = []func(importPath string) (num int, ok bool){
+ func(importPath string) (num int, ok bool) {
+ for _, p := range localPrefixes() {
+ if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath {
+ return 3, true
+ }
+ }
+ return
+ },
+ func(importPath string) (num int, ok bool) {
+ if strings.HasPrefix(importPath, "appengine") {
+ return 2, true
+ }
+ return
+ },
+ func(importPath string) (num int, ok bool) {
+ if strings.Contains(importPath, ".") {
+ return 1, true
+ }
+ return
+ },
+}
+
+func importGroup(importPath string) int {
+ for _, fn := range importToGroup {
+ if n, ok := fn(importPath); ok {
+ return n
+ }
+ }
+ return 0
+}
+
+// An importInfo represents a single import statement.
+type importInfo struct {
+ importPath string // import path, e.g. "crypto/rand".
+ name string // import name, e.g. "crand", or "" if none.
+}
+
+// A packageInfo represents what's known about a package.
+type packageInfo struct {
+ name string // real package name, if known.
+ exports map[string]bool // known exports.
+}
+
+// parseOtherFiles parses all the Go files in srcDir except filename, including
+// test files if filename looks like a test.
+func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File {
+ // This could use go/packages but it doesn't buy much, and it fails
+ // with https://golang.org/issue/26296 in LoadFiles mode in some cases.
+ considerTests := strings.HasSuffix(filename, "_test.go")
+
+ fileBase := filepath.Base(filename)
+ packageFileInfos, err := ioutil.ReadDir(srcDir)
+ if err != nil {
+ return nil
+ }
+
+ var files []*ast.File
+ for _, fi := range packageFileInfos {
+ if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
+ continue
+ }
+ if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
+ continue
+ }
+
+ f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0)
+ if err != nil {
+ continue
+ }
+
+ files = append(files, f)
+ }
+
+ return files
+}
+
+// addGlobals puts the names of package vars into the provided map.
+func addGlobals(f *ast.File, globals map[string]bool) {
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, spec := range genDecl.Specs {
+ valueSpec, ok := spec.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ globals[valueSpec.Names[0].Name] = true
+ }
+ }
+}
+
+// collectReferences builds a map of selector expressions, from
+// left hand side (X) to a set of right hand sides (Sel).
+func collectReferences(f *ast.File) references {
+ refs := references{}
+
+ var visitor visitFn
+ visitor = func(node ast.Node) ast.Visitor {
+ if node == nil {
+ return visitor
+ }
+ switch v := node.(type) {
+ case *ast.SelectorExpr:
+ xident, ok := v.X.(*ast.Ident)
+ if !ok {
+ break
+ }
+ if xident.Obj != nil {
+ // If the parser can resolve it, it's not a package ref.
+ break
+ }
+ if !ast.IsExported(v.Sel.Name) {
+ // Whatever this is, it's not exported from a package.
+ break
+ }
+ pkgName := xident.Name
+ r := refs[pkgName]
+ if r == nil {
+ r = make(map[string]bool)
+ refs[pkgName] = r
+ }
+ r[v.Sel.Name] = true
+ }
+ return visitor
+ }
+ ast.Walk(visitor, f)
+ return refs
+}
+
+// collectImports returns all the imports in f as a slice of importInfo.
+// Blank (_), dot (.), and "C" imports are ignored.
+func collectImports(f *ast.File) []*importInfo {
+ var imports []*importInfo
+ for _, imp := range f.Imports {
+ var name string
+ if imp.Name != nil {
+ name = imp.Name.Name
+ }
+ if imp.Path.Value == `"C"` || name == "_" || name == "." {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `"`)
+ imports = append(imports, &importInfo{
+ name: name,
+ importPath: path,
+ })
+ }
+ return imports
+}
+
+// findMissingImport searches pass's candidates for an import that provides
+// pkg, containing all of syms.
+func (p *pass) findMissingImport(pkg string, syms map[string]bool) *importInfo {
+ for _, candidate := range p.candidates {
+ pkgInfo, ok := p.knownPackages[candidate.importPath]
+ if !ok {
+ continue
+ }
+ if p.importIdentifier(candidate) != pkg {
+ continue
+ }
+
+ allFound := true
+ for right := range syms {
+ if !pkgInfo.exports[right] {
+ allFound = false
+ break
+ }
+ }
+
+ if allFound {
+ return candidate
+ }
+ }
+ return nil
+}
+
+// references is a set of references found in a Go file. The first map key is the
+// left hand side of a selector expression, the second key is the right hand
+// side, and the value should always be true.
+type references map[string]map[string]bool
+
+// A pass contains all the inputs and state necessary to fix a file's imports.
+// It can be modified in some ways during use; see comments below.
+type pass struct {
+ // Inputs. These must be set before a call to load, and not modified after.
+ fset *token.FileSet // fset used to parse f and its siblings.
+ f *ast.File // the file being fixed.
+ srcDir string // the directory containing f.
+ fixEnv *fixEnv // the environment to use for go commands, etc.
+ loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
+ otherFiles []*ast.File // sibling files.
+
+ // Intermediate state, generated by load.
+ existingImports map[string]*importInfo
+ allRefs references
+ missingRefs references
+
+ // Inputs to fix. These can be augmented between successive fix calls.
+ lastTry bool // indicates that this is the last call and fix should clean up as best it can.
+ candidates []*importInfo // candidate imports in priority order.
+ knownPackages map[string]*packageInfo // information about all known packages.
+}
+
+// loadPackageNames saves the package names for everything referenced by imports.
+func (p *pass) loadPackageNames(imports []*importInfo) error {
+ var unknown []string
+ for _, imp := range imports {
+ if _, ok := p.knownPackages[imp.importPath]; ok {
+ continue
+ }
+ unknown = append(unknown, imp.importPath)
+ }
+
+ names, err := p.fixEnv.getResolver().loadPackageNames(unknown, p.srcDir)
+ if err != nil {
+ return err
+ }
+
+ for path, name := range names {
+ p.knownPackages[path] = &packageInfo{
+ name: name,
+ exports: map[string]bool{},
+ }
+ }
+ return nil
+}
+
+// importIdentifier returns the identifier that imp will introduce. It will
+// guess if the package name has not been loaded, e.g. because the source
+// is not available.
+func (p *pass) importIdentifier(imp *importInfo) string {
+ if imp.name != "" {
+ return imp.name
+ }
+ known := p.knownPackages[imp.importPath]
+ if known != nil && known.name != "" {
+ return known.name
+ }
+ return importPathToAssumedName(imp.importPath)
+}
+
+// load reads in everything necessary to run a pass, and reports whether the
+// file already has all the imports it needs. It fills in p.missingRefs with the
+// file's missing symbols, if any, or removes unused imports if not.
+func (p *pass) load() bool {
+ p.knownPackages = map[string]*packageInfo{}
+ p.missingRefs = references{}
+ p.existingImports = map[string]*importInfo{}
+
+ // Load basic information about the file in question.
+ p.allRefs = collectReferences(p.f)
+
+ // Load stuff from other files in the same package:
+ // global variables so we know they don't need resolving, and imports
+ // that we might want to mimic.
+ globals := map[string]bool{}
+ for _, otherFile := range p.otherFiles {
+ // Don't load globals from files that are in the same directory
+ // but a different package. Using them to suggest imports is OK.
+ if p.f.Name.Name == otherFile.Name.Name {
+ addGlobals(otherFile, globals)
+ }
+ p.candidates = append(p.candidates, collectImports(otherFile)...)
+ }
+
+ // Resolve all the import paths we've seen to package names, and store
+ // f's imports by the identifier they introduce.
+ imports := collectImports(p.f)
+ if p.loadRealPackageNames {
+ err := p.loadPackageNames(append(imports, p.candidates...))
+ if err != nil {
+ if Debug {
+ log.Printf("loading package names: %v", err)
+ }
+ return false
+ }
+ }
+ for _, imp := range imports {
+ p.existingImports[p.importIdentifier(imp)] = imp
+ }
+
+ // Find missing references.
+ for left, rights := range p.allRefs {
+ if globals[left] {
+ continue
+ }
+ _, ok := p.existingImports[left]
+ if !ok {
+ p.missingRefs[left] = rights
+ continue
+ }
+ }
+ if len(p.missingRefs) != 0 {
+ return false
+ }
+
+ return p.fix()
+}
+
+// fix attempts to satisfy missing imports using p.candidates. If it finds
+// everything, or if p.lastTry is true, it adds the imports it found,
+// removes anything unused, and returns true.
+func (p *pass) fix() bool {
+ // Find missing imports.
+ var selected []*importInfo
+ for left, rights := range p.missingRefs {
+ if imp := p.findMissingImport(left, rights); imp != nil {
+ selected = append(selected, imp)
+ }
+ }
+
+ if !p.lastTry && len(selected) != len(p.missingRefs) {
+ return false
+ }
+
+ // Found everything, or giving up. Add the new imports and remove any unused.
+ for _, imp := range p.existingImports {
+ // We deliberately ignore globals here, because we can't be sure
+ // they're in the same package. People do things like put multiple
+ // main packages in the same directory, and we don't want to
+ // remove imports if they happen to have the same name as a var in
+ // a different package.
+ if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
+ astutil.DeleteNamedImport(p.fset, p.f, imp.name, imp.importPath)
+ }
+ }
+
+ for _, imp := range selected {
+ astutil.AddNamedImport(p.fset, p.f, imp.name, imp.importPath)
+ }
+
+ if p.loadRealPackageNames {
+ for _, imp := range p.f.Imports {
+ if imp.Name != nil {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `""`)
+ ident := p.importIdentifier(&importInfo{importPath: path})
+ if ident != importPathToAssumedName(path) {
+ imp.Name = &ast.Ident{Name: ident, NamePos: imp.Pos()}
+ }
+ }
+ }
+
+ return true
+}
+
+// assumeSiblingImportsValid assumes that siblings' use of packages is valid,
+// adding the exports they use.
+func (p *pass) assumeSiblingImportsValid() {
+ for _, f := range p.otherFiles {
+ refs := collectReferences(f)
+ imports := collectImports(f)
+ importsByName := map[string]*importInfo{}
+ for _, imp := range imports {
+ importsByName[p.importIdentifier(imp)] = imp
+ }
+ for left, rights := range refs {
+ if imp, ok := importsByName[left]; ok {
+ if _, ok := stdlib[imp.importPath]; ok {
+ // We have the stdlib in memory; no need to guess.
+ rights = stdlib[imp.importPath]
+ }
+ p.addCandidate(imp, &packageInfo{
+ // no name; we already know it.
+ exports: rights,
+ })
+ }
+ }
+ }
+}
+
+// addCandidate adds a candidate import to p, and merges in the information
+// in pkg.
+func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) {
+ p.candidates = append(p.candidates, imp)
+ if existing, ok := p.knownPackages[imp.importPath]; ok {
+ if existing.name == "" {
+ existing.name = pkg.name
+ }
+ for export := range pkg.exports {
+ existing.exports[export] = true
+ }
+ } else {
+ p.knownPackages[imp.importPath] = pkg
+ }
+}
+
+// fixImports adds and removes imports from f so that all its references are
+// satisfied and there are no unused imports.
+//
+// This is declared as a variable rather than a function so goimports can
+// easily be extended by adding a file with an init function.
+var fixImports = fixImportsDefault
+
+func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *fixEnv) error {
+ abs, err := filepath.Abs(filename)
+ if err != nil {
+ return err
+ }
+ srcDir := filepath.Dir(abs)
+ if Debug {
+ log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
+ }
+
+ // First pass: looking only at f, and using the naive algorithm to
+ // derive package names from import paths, see if the file is already
+ // complete. We can't add any imports yet, because we don't know
+ // if missing references are actually package vars.
+ p := &pass{fset: fset, f: f, srcDir: srcDir}
+ if p.load() {
+ return nil
+ }
+
+ otherFiles := parseOtherFiles(fset, srcDir, filename)
+
+ // Second pass: add information from other files in the same package,
+ // like their package vars and imports.
+ p.otherFiles = otherFiles
+ if p.load() {
+ return nil
+ }
+
+ // Now we can try adding imports from the stdlib.
+ p.assumeSiblingImportsValid()
+ addStdlibCandidates(p, p.missingRefs)
+ if p.fix() {
+ return nil
+ }
+
+ // Third pass: get real package names where we had previously used
+ // the naive algorithm. This is the first step that will use the
+ // environment, so we provide it here for the first time.
+ p = &pass{fset: fset, f: f, srcDir: srcDir, fixEnv: env}
+ p.loadRealPackageNames = true
+ p.otherFiles = otherFiles
+ if p.load() {
+ return nil
+ }
+
+ addStdlibCandidates(p, p.missingRefs)
+ p.assumeSiblingImportsValid()
+ if p.fix() {
+ return nil
+ }
+
+ // Go look for candidates in $GOPATH, etc. We don't necessarily load
+ // the real exports of sibling imports, so keep assuming their contents.
+ if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
+ return err
+ }
+
+ p.lastTry = true
+ p.fix()
+ return nil
+}
+
+// fixEnv contains environment variables and settings that affect the use of
+// the go command, the go/build package, etc.
+type fixEnv struct {
+ // If non-empty, these will be used instead of the
+ // process-wide values.
+ GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS string
+ WorkingDir string
+
+ // If true, use go/packages regardless of the environment.
+ ForceGoPackages bool
+
+ resolver resolver
+}
+
+func (e *fixEnv) env() []string {
+ env := os.Environ()
+ add := func(k, v string) {
+ if v != "" {
+ env = append(env, k+"="+v)
+ }
+ }
+ add("GOPATH", e.GOPATH)
+ add("GOROOT", e.GOROOT)
+ add("GO111MODULE", e.GO111MODULE)
+ add("GOPROXY", e.GOPROXY)
+ add("GOFLAGS", e.GOFLAGS)
+ if e.WorkingDir != "" {
+ add("PWD", e.WorkingDir)
+ }
+ return env
+}
+
+func (e *fixEnv) getResolver() resolver {
+ if e.resolver != nil {
+ return e.resolver
+ }
+ if e.ForceGoPackages {
+ return &goPackagesResolver{env: e}
+ }
+
+ out, err := e.invokeGo("env", "GOMOD")
+ if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
+ return &gopathResolver{env: e}
+ }
+ return &moduleResolver{env: e}
+}
+
+func (e *fixEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
+ return &packages.Config{
+ Mode: mode,
+ Dir: e.WorkingDir,
+ Env: e.env(),
+ }
+}
+
+func (e *fixEnv) buildContext() *build.Context {
+ ctx := build.Default
+ ctx.GOROOT = e.GOROOT
+ ctx.GOPATH = e.GOPATH
+ return &ctx
+}
+
+func (e *fixEnv) invokeGo(args ...string) (*bytes.Buffer, error) {
+ cmd := exec.Command("go", args...)
+ stdout := &bytes.Buffer{}
+ stderr := &bytes.Buffer{}
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ cmd.Env = e.env()
+ cmd.Dir = e.WorkingDir
+
+ if Debug {
+ defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
+ }
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr)
+ }
+ return stdout, nil
+}
+
+func cmdDebugStr(cmd *exec.Cmd) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], cmd.Args)
+}
+
+func addStdlibCandidates(pass *pass, refs references) {
+ add := func(pkg string) {
+ pass.addCandidate(
+ &importInfo{importPath: pkg},
+ &packageInfo{name: path.Base(pkg), exports: stdlib[pkg]})
+ }
+ for left := range refs {
+ if left == "rand" {
+ // Make sure we try crypto/rand before math/rand.
+ add("crypto/rand")
+ add("math/rand")
+ continue
+ }
+ for importPath := range stdlib {
+ if path.Base(importPath) == left {
+ add(importPath)
+ }
+ }
+ }
+}
+
+// A resolver does the build-system-specific parts of goimports.
+type resolver interface {
+ // loadPackageNames loads the package names in importPaths.
+ loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
+ // scan finds (at least) the packages satisfying refs. The returned slice is unordered.
+ scan(refs references) ([]*pkg, error)
+}
+
+// goPackagesResolver implements resolver for GOPATH and module workspaces using go/packages.
+type goPackagesResolver struct {
+ env *fixEnv
+}
+
+func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ cfg := r.env.newPackagesConfig(packages.LoadFiles)
+ pkgs, err := packages.Load(cfg, importPaths...)
+ if err != nil {
+ return nil, err
+ }
+ names := map[string]string{}
+ for _, pkg := range pkgs {
+ names[VendorlessPath(pkg.PkgPath)] = pkg.Name
+ }
+ // We may not have found all the packages. Guess the rest.
+ for _, path := range importPaths {
+ if _, ok := names[path]; ok {
+ continue
+ }
+ names[path] = importPathToAssumedName(path)
+ }
+ return names, nil
+
+}
+
+func (r *goPackagesResolver) scan(refs references) ([]*pkg, error) {
+ var loadQueries []string
+ for pkgName := range refs {
+ loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName)
+ }
+ sort.Strings(loadQueries)
+ cfg := r.env.newPackagesConfig(packages.LoadFiles)
+ goPackages, err := packages.Load(cfg, loadQueries...)
+ if err != nil {
+ return nil, err
+ }
+
+ var scan []*pkg
+ for _, goPackage := range goPackages {
+ scan = append(scan, &pkg{
+ dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
+ importPathShort: VendorlessPath(goPackage.PkgPath),
+ goPackage: goPackage,
+ })
+ }
+ return scan, nil
+}
+
+func addExternalCandidates(pass *pass, refs references, filename string) error {
+ dirScan, err := pass.fixEnv.getResolver().scan(refs)
+ if err != nil {
+ return err
+ }
+
+ // Search for imports matching potential package references.
+ type result struct {
+ imp *importInfo
+ pkg *packageInfo
+ }
+ results := make(chan result, len(refs))
+
+ ctx, cancel := context.WithCancel(context.TODO())
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+ var (
+ firstErr error
+ firstErrOnce sync.Once
+ )
+ for pkgName, symbols := range refs {
+ wg.Add(1)
+ go func(pkgName string, symbols map[string]bool) {
+ defer wg.Done()
+
+ found, err := findImport(ctx, pass.fixEnv, dirScan, pkgName, symbols, filename)
+
+ if err != nil {
+ firstErrOnce.Do(func() {
+ firstErr = err
+ cancel()
+ })
+ return
+ }
+
+ if found == nil {
+ return // No matching package.
+ }
+
+ imp := &importInfo{
+ importPath: found.importPathShort,
+ }
+
+ pkg := &packageInfo{
+ name: pkgName,
+ exports: symbols,
+ }
+ results <- result{imp, pkg}
+ }(pkgName, symbols)
+ }
+ go func() {
+ wg.Wait()
+ close(results)
+ }()
+
+ for result := range results {
+ pass.addCandidate(result.imp, result.pkg)
+ }
+ return firstErr
+}
+
+// notIdentifier reports whether ch is an invalid identifier character.
+func notIdentifier(ch rune) bool {
+ return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' ||
+ '0' <= ch && ch <= '9' ||
+ ch == '_' ||
+ ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch)))
+}
+
+// importPathToAssumedName returns the assumed package name of an import path.
+// It does this using only string parsing of the import path.
+// It picks the last element of the path that does not look like a major
+// version, and then picks the valid identifier off the start of that element.
+// It is used to determine if a local rename should be added to an import for
+// clarity.
+// This function could be moved to a standard package and exported if we want
+// for use in other tools.
+func importPathToAssumedName(importPath string) string {
+ base := path.Base(importPath)
+ if strings.HasPrefix(base, "v") {
+ if _, err := strconv.Atoi(base[1:]); err == nil {
+ dir := path.Dir(importPath)
+ if dir != "." {
+ base = path.Base(dir)
+ }
+ }
+ }
+ base = strings.TrimPrefix(base, "go-")
+ if i := strings.IndexFunc(base, notIdentifier); i >= 0 {
+ base = base[:i]
+ }
+ return base
+}
+
+// gopathResolver implements resolver for GOPATH workspaces.
+type gopathResolver struct {
+ env *fixEnv
+}
+
+func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ names := map[string]string{}
+ for _, path := range importPaths {
+ names[path] = importPathToName(r.env, path, srcDir)
+ }
+ return names, nil
+}
+
+// importPathToName finds the actual package name, as declared in its .go files.
+// If there's a problem, it returns "".
+func importPathToName(env *fixEnv, importPath, srcDir string) (packageName string) {
+ // Fast path for standard library without going to disk.
+ if _, ok := stdlib[importPath]; ok {
+ return path.Base(importPath) // stdlib packages always match their paths.
+ }
+
+ buildPkg, err := env.buildContext().Import(importPath, srcDir, build.FindOnly)
+ if err != nil {
+ return ""
+ }
+ pkgName, err := packageDirToName(buildPkg.Dir)
+ if err != nil {
+ return ""
+ }
+ return pkgName
+}
+
+// packageDirToName is a faster version of build.Import if
+// the only thing desired is the package name. It uses build.FindOnly
+// to find the directory and then only parses one file in the package,
+// trusting that the files in the directory are consistent.
+func packageDirToName(dir string) (packageName string, err error) {
+ d, err := os.Open(dir)
+ if err != nil {
+ return "", err
+ }
+ names, err := d.Readdirnames(-1)
+ d.Close()
+ if err != nil {
+ return "", err
+ }
+ sort.Strings(names) // to have predictable behavior
+ var lastErr error
+ var nfile int
+ for _, name := range names {
+ if !strings.HasSuffix(name, ".go") {
+ continue
+ }
+ if strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ nfile++
+ fullFile := filepath.Join(dir, name)
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
+ if err != nil {
+ lastErr = err
+ continue
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by ctx.MatchFile.
+ continue
+ }
+ if pkgName == "main" {
+ // Also skip package main, assuming it's a +build ignore generator or example.
+ // Since you can't import a package main anyway, there's no harm here.
+ continue
+ }
+ return pkgName, nil
+ }
+ if lastErr != nil {
+ return "", lastErr
+ }
+ return "", fmt.Errorf("no importable package found in %d Go files", nfile)
+}
+
+type pkg struct {
+ goPackage *packages.Package
+ dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
+ importPathShort string // vendorless import path ("net/http", "a/b")
+}
+
+type pkgDistance struct {
+ pkg *pkg
+ distance int // relative distance to target
+}
+
+// byDistanceOrImportPathShortLength sorts by relative distance, breaking ties
+// by the short import path length and then by the import string itself.
+type byDistanceOrImportPathShortLength []pkgDistance
+
+func (s byDistanceOrImportPathShortLength) Len() int { return len(s) }
+func (s byDistanceOrImportPathShortLength) Less(i, j int) bool {
+ di, dj := s[i].distance, s[j].distance
+ if di == -1 {
+ return false
+ }
+ if dj == -1 {
+ return true
+ }
+ if di != dj {
+ return di < dj
+ }
+
+ vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort
+ if len(vi) != len(vj) {
+ return len(vi) < len(vj)
+ }
+ return vi < vj
+}
+func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+
+func distance(basepath, targetpath string) int {
+ p, err := filepath.Rel(basepath, targetpath)
+ if err != nil {
+ return -1
+ }
+ if p == "." {
+ return 0
+ }
+ return strings.Count(p, string(filepath.Separator)) + 1
+}
+
+func (r *gopathResolver) scan(_ references) ([]*pkg, error) {
+ dupCheck := make(map[string]bool)
+ var result []*pkg
+
+ var mu sync.Mutex
+
+ add := func(root gopathwalk.Root, dir string) {
+ mu.Lock()
+ defer mu.Unlock()
+
+ if _, dup := dupCheck[dir]; dup {
+ return
+ }
+ dupCheck[dir] = true
+ importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
+ result = append(result, &pkg{
+ importPathShort: VendorlessPath(importpath),
+ dir: dir,
+ })
+ }
+ gopathwalk.Walk(gopathwalk.SrcDirsRoots(r.env.buildContext()), add, gopathwalk.Options{Debug: Debug, ModulesEnabled: false})
+ return result, nil
+}
+
+// VendorlessPath returns the devendorized version of the import path ipath.
+// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+func VendorlessPath(ipath string) string {
+ // Devendorize for use in import statement.
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
+ return ipath[i+len("/vendor/"):]
+ }
+ if strings.HasPrefix(ipath, "vendor/") {
+ return ipath[len("vendor/"):]
+ }
+ return ipath
+}
+
+// loadExports returns the set of exported symbols in the package at dir.
+// It returns nil on error or if the package name in dir does not match expectPackage.
+func loadExports(ctx context.Context, env *fixEnv, expectPackage string, pkg *pkg) (map[string]bool, error) {
+ if Debug {
+ log.Printf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage)
+ }
+ if pkg.goPackage != nil {
+ exports := map[string]bool{}
+ fset := token.NewFileSet()
+ for _, fname := range pkg.goPackage.CompiledGoFiles {
+ f, err := parser.ParseFile(fset, fname, nil, 0)
+ if err != nil {
+ return nil, fmt.Errorf("parsing %s: %v", fname, err)
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+ return exports, nil
+ }
+
+ exports := make(map[string]bool)
+
+ // Look for non-test, buildable .go files which could provide exports.
+ all, err := ioutil.ReadDir(pkg.dir)
+ if err != nil {
+ return nil, err
+ }
+ var files []os.FileInfo
+ for _, fi := range all {
+ name := fi.Name()
+ if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ match, err := env.buildContext().MatchFile(pkg.dir, fi.Name())
+ if err != nil || !match {
+ continue
+ }
+ files = append(files, fi)
+ }
+
+ if len(files) == 0 {
+ return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", pkg.dir)
+ }
+
+ fset := token.NewFileSet()
+ for _, fi := range files {
+ select {
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ default:
+ }
+
+ fullFile := filepath.Join(pkg.dir, fi.Name())
+ f, err := parser.ParseFile(fset, fullFile, nil, 0)
+ if err != nil {
+ return nil, fmt.Errorf("parsing %s: %v", fullFile, err)
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by MatchFile above.
+ continue
+ }
+ if pkgName != expectPackage {
+ return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", pkg.dir, expectPackage, pkgName)
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+
+ if Debug {
+ exportList := make([]string, 0, len(exports))
+ for k := range exports {
+ exportList = append(exportList, k)
+ }
+ sort.Strings(exportList)
+ log.Printf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", "))
+ }
+ return exports, nil
+}
+
+// findImport searches for a package with the given symbols.
+// If no package is found, findImport returns (nil, nil).
+func findImport(ctx context.Context, env *fixEnv, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
+ pkgDir, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ pkgDir = filepath.Dir(pkgDir)
+
+ // Find candidate packages, looking only at their directory names first.
+ var candidates []pkgDistance
+ for _, pkg := range dirScan {
+ if pkgIsCandidate(filename, pkgName, pkg) {
+ candidates = append(candidates, pkgDistance{
+ pkg: pkg,
+ distance: distance(pkgDir, pkg.dir),
+ })
+ }
+ }
+
+ // Sort the candidates by their import package length,
+ // assuming that shorter package names are better than long
+ // ones. Note that this sorts by the de-vendored name, so
+ // there's no "penalty" for vendoring.
+ sort.Sort(byDistanceOrImportPathShortLength(candidates))
+ if Debug {
+ for i, c := range candidates {
+ log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
+ }
+ }
+
+ // Collect exports for packages with matching names.
+
+ rescv := make([]chan *pkg, len(candidates))
+ for i := range candidates {
+ rescv[i] = make(chan *pkg, 1)
+ }
+ const maxConcurrentPackageImport = 4
+ loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
+
+ ctx, cancel := context.WithCancel(ctx)
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for i, c := range candidates {
+ select {
+ case loadExportsSem <- struct{}{}:
+ case <-ctx.Done():
+ return
+ }
+
+ wg.Add(1)
+ go func(c pkgDistance, resc chan<- *pkg) {
+ defer func() {
+ <-loadExportsSem
+ wg.Done()
+ }()
+
+ exports, err := loadExports(ctx, env, pkgName, c.pkg)
+ if err != nil {
+ if Debug {
+ log.Printf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
+ }
+ resc <- nil
+ return
+ }
+
+ // If it doesn't have the right
+ // symbols, send nil to mean no match.
+ for symbol := range symbols {
+ if !exports[symbol] {
+ resc <- nil
+ return
+ }
+ }
+ resc <- c.pkg
+ }(c, rescv[i])
+ }
+ }()
+
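+	// Collect the results in candidate order: rescv is indexed by candidate,
+	// and we return the first non-nil result, so the best-ranked candidate
+	// that exports all the requested symbols wins.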
+ for _, resc := range rescv {
+ pkg := <-resc
+ if pkg == nil {
+ continue
+ }
+ return pkg, nil
+ }
+ return nil, nil
+}
+
+// pkgIsCandidate reports whether pkg is a candidate to satisfy the import
+// that the identifier pkgIdent in the file named by filename refers to.
+//
+// This check is purely lexical and is meant to be as fast as possible
+// because it's run over all $GOPATH directories to filter out poor
+// candidates in order to limit the CPU and I/O later parsing the
+// exports in candidate packages.
+//
+// filename is the file being formatted.
+// pkgIdent is the package being searched for, like "client" (if
+// searching for "client.New")
+func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
+ // Check "internal" and "vendor" visibility:
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+
+ // Speed optimization to minimize disk I/O:
+ // the last two components on disk must contain the
+ // package name somewhere.
+ //
+	// This permits mismatched naming, like directory
+ // "go-foo" being package "foo", or "pkg.v3" being "pkg",
+ // or directory "google.golang.org/api/cloudbilling/v1"
+ // being package "cloudbilling", but doesn't
+ // permit a directory "foo" to be package
+ // "bar", which is strongly discouraged
+ // anyway. There's no reason goimports needs
+ // to be slow just to accommodate that.
+ lastTwo := lastTwoComponents(pkg.importPathShort)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func hasHyphenOrUpperASCII(s string) bool {
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ if b == '-' || ('A' <= b && b <= 'Z') {
+ return true
+ }
+ }
+ return false
+}
+
+func lowerASCIIAndRemoveHyphen(s string) (ret string) {
+ buf := make([]byte, 0, len(s))
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case b == '-':
+ continue
+ case 'A' <= b && b <= 'Z':
+ buf = append(buf, b+('a'-'A'))
+ default:
+ buf = append(buf, b)
+ }
+ }
+ return string(buf)
+}
+
+// canUse reports whether the package in dir is usable from filename,
+// respecting the Go "internal" and "vendor" visibility rules.
+func canUse(filename, dir string) bool {
+ // Fast path check, before any allocations. If it doesn't contain vendor
+ // or internal, it's not tricky:
+ // Note that this can false-negative on directories like "notinternal",
+ // but we check it correctly below. This is just a fast path.
+ if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
+ return true
+ }
+
+ dirSlash := filepath.ToSlash(dir)
+ if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
+ return true
+ }
+ // Vendor or internal directory only visible from children of parent.
+ // That means the path from the current directory to the target directory
+ // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
+ // or bar/vendor or bar/internal.
+ // After stripping all the leading ../, the only okay place to see vendor or internal
+ // is at the very beginning of the path.
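+	// For example (illustrative paths), a file under /gopath/src/foo may use
+	// /gopath/src/foo/internal/bar, but a file under /gopath/src/other may not.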
+ absfile, err := filepath.Abs(filename)
+ if err != nil {
+ return false
+ }
+ absdir, err := filepath.Abs(dir)
+ if err != nil {
+ return false
+ }
+ rel, err := filepath.Rel(absfile, absdir)
+ if err != nil {
+ return false
+ }
+ relSlash := filepath.ToSlash(rel)
+ if i := strings.LastIndex(relSlash, "../"); i >= 0 {
+ relSlash = relSlash[i+len("../"):]
+ }
+ return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
+}
+
+// lastTwoComponents returns at most the last two path components
+// of v, using either / or \ as the path separator.
+func lastTwoComponents(v string) string {
+ nslash := 0
+ for i := len(v) - 1; i >= 0; i-- {
+ if v[i] == '/' || v[i] == '\\' {
+ nslash++
+ if nslash == 2 {
+ return v[i:]
+ }
+ }
+ }
+ return v
+}
+
+type visitFn func(node ast.Node) ast.Visitor
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ return fn(node)
+}
diff --git a/vendor/golang.org/x/tools/imports/imports.go b/vendor/golang.org/x/tools/imports/imports.go
new file mode 100644
index 0000000..07101cb
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/imports.go
@@ -0,0 +1,315 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run mkstdlib.go
+
+// Package imports implements a Go pretty-printer (like package "go/format")
+// that also adds or removes import statements as necessary.
+package imports // import "golang.org/x/tools/imports"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// Options specifies options for processing files.
+type Options struct {
+ Fragment bool // Accept fragment of a source file (no package statement)
+ AllErrors bool // Report all errors (not just the first 10 on different lines)
+
+ Comments bool // Print comments (true if nil *Options provided)
+ TabIndent bool // Use tabs for indent (true if nil *Options provided)
+ TabWidth int // Tab width (8 if nil *Options provided)
+
+ FormatOnly bool // Disable the insertion and deletion of imports
+}
+
+// Process formats and adjusts imports for the provided file.
+// If opt is nil the defaults are used.
+//
+// Note that filename's directory influences which imports can be chosen,
+// so it is important that filename be accurate.
+// To process data ``as if'' it were in filename, pass the data as a non-nil src.
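+//
+// A minimal usage sketch (the file name and source contents here are placeholders):
+//
+//	src := []byte("package main\n\nfunc main() { fmt.Println(\"hi\") }\n")
+//	out, err := Process("main.go", src, nil)
+//	// out is gofmt-formatted and has the missing "fmt" import added.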
+func Process(filename string, src []byte, opt *Options) ([]byte, error) {
+ env := &fixEnv{GOPATH: build.Default.GOPATH, GOROOT: build.Default.GOROOT}
+ return process(filename, src, opt, env)
+}
+
+func process(filename string, src []byte, opt *Options, env *fixEnv) ([]byte, error) {
+ if opt == nil {
+ opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
+ }
+ if src == nil {
+ b, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ src = b
+ }
+
+ fileSet := token.NewFileSet()
+ file, adjust, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ if !opt.FormatOnly {
+ if err := fixImports(fileSet, file, filename, env); err != nil {
+ return nil, err
+ }
+ }
+
+ sortImports(fileSet, file)
+ imps := astutil.Imports(fileSet, file)
+ var spacesBefore []string // import paths we need spaces before
+ for _, impSection := range imps {
+ // Within each block of contiguous imports, see if any
+ // import lines are in different group numbers. If so,
+ // we'll need to put a space between them so it's
+ // compatible with gofmt.
+ lastGroup := -1
+ for _, importSpec := range impSection {
+ importPath, _ := strconv.Unquote(importSpec.Path.Value)
+ groupNum := importGroup(importPath)
+ if groupNum != lastGroup && lastGroup != -1 {
+ spacesBefore = append(spacesBefore, importPath)
+ }
+ lastGroup = groupNum
+ }
+
+ }
+
+ printerMode := printer.UseSpaces
+ if opt.TabIndent {
+ printerMode |= printer.TabIndent
+ }
+ printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
+
+ var buf bytes.Buffer
+ err = printConfig.Fprint(&buf, fileSet, file)
+ if err != nil {
+ return nil, err
+ }
+ out := buf.Bytes()
+ if adjust != nil {
+ out = adjust(src, out)
+ }
+ if len(spacesBefore) > 0 {
+ out, err = addImportSpaces(bytes.NewReader(out), spacesBefore)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ out, err = format.Source(out)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// parse parses src, which was read from filename,
+// as a Go source file or statement list.
+func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+
+ // Try as whole source file.
+ file, err := parser.ParseFile(fset, filename, src, parserMode)
+ if err == nil {
+ return file, nil, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // package line and we accept fragmented input, fall through to
+ // try as a source fragment. Stop and return on any other error.
+ if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
+ return nil, nil, err
+ }
+
+ // If this is a declaration list, make it a source file
+ // by inserting a package clause.
+ // Insert using a ;, not a newline, so that parse errors are on
+ // the correct line.
+ const prefix = "package main;"
+ psrc := append([]byte(prefix), src...)
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode)
+ if err == nil {
+ // Gofmt will turn the ; into a \n.
+ // Do that ourselves now and update the file contents,
+ // so that positions and line numbers are correct going forward.
+ psrc[len(prefix)-1] = '\n'
+ fset.File(file.Package).SetLinesForContent(psrc)
+
+ // If a main function exists, we will assume this is a main
+ // package and leave the file.
+ if containsMainFunc(file) {
+ return file, nil, nil
+ }
+
+ adjust := func(orig, src []byte) []byte {
+ // Remove the package clause.
+ src = src[len(prefix):]
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // declaration, fall through to try as a statement list.
+ // Stop and return on any other error.
+ if !strings.Contains(err.Error(), "expected declaration") {
+ return nil, nil, err
+ }
+
+ // If this is a statement list, make it a source file
+ // by inserting a package clause and turning the list
+ // into a function body. This handles expressions too.
+ // Insert using a ;, not a newline, so that the line numbers
+ // in fsrc match the ones in src.
+ fsrc := append(append([]byte("package p; func _() {"), src...), '}')
+ file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
+ if err == nil {
+ adjust := func(orig, src []byte) []byte {
+ // Remove the wrapping.
+ // Gofmt has turned the ; into a \n\n.
+ src = src[len("package p\n\nfunc _() {"):]
+ src = src[:len(src)-len("}\n")]
+ // Gofmt has also indented the function body one level.
+ // Remove that indent.
+ src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+
+ // Failed, and out of options.
+ return nil, nil, err
+}
+
+// containsMainFunc reports whether the file contains a function declaration
+// with the signature 'func main()'.
+func containsMainFunc(file *ast.File) bool {
+ for _, decl := range file.Decls {
+ if f, ok := decl.(*ast.FuncDecl); ok {
+ if f.Name.Name != "main" {
+ continue
+ }
+
+ if len(f.Type.Params.List) != 0 {
+ continue
+ }
+
+ if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
+ continue
+ }
+
+ return true
+ }
+ }
+
+ return false
+}
+
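+// cutSpace splits b into its leading whitespace, the remaining content, and
+// its trailing whitespace, where whitespace is any run of spaces, tabs, or
+// newlines.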
+func cutSpace(b []byte) (before, middle, after []byte) {
+ i := 0
+ for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
+ i++
+ }
+ j := len(b)
+ for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
+ j--
+ }
+ if i <= j {
+ return b[:i], b[i:j], b[j:]
+ }
+ return nil, nil, b[j:]
+}
+
+// matchSpace reformats src to use the same space context as orig.
+// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
+// 2) matchSpace copies the indentation of the first non-blank line in orig
+// to every non-blank line in src.
+// 3) matchSpace copies the trailing space from orig and uses it in place
+// of src's trailing space.
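+//
+// For example (an illustrative sketch), with orig = "\n\tx := 1\n\n" and
+// src = "x := 1\n", matchSpace returns "\n\tx := 1\n\n": the leading blank
+// line, the tab indentation, and the trailing blank line are all taken
+// from orig.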
+func matchSpace(orig []byte, src []byte) []byte {
+ before, _, after := cutSpace(orig)
+ i := bytes.LastIndex(before, []byte{'\n'})
+ before, indent := before[:i+1], before[i+1:]
+
+ _, src, _ = cutSpace(src)
+
+ var b bytes.Buffer
+ b.Write(before)
+ for len(src) > 0 {
+ line := src
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, src = line[:i+1], line[i+1:]
+ } else {
+ src = nil
+ }
+ if len(line) > 0 && line[0] != '\n' { // not blank
+ b.Write(indent)
+ }
+ b.Write(line)
+ }
+ b.Write(after)
+ return b.Bytes()
+}
+
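+// impLine matches one import line inside an import block: leading whitespace,
+// an optional package alias, then a quoted path, which it captures. For
+// example, the line `foo "example.com/foo"` (indented inside an import block)
+// yields "example.com/foo".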
+var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`)
+
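+// addImportSpaces copies r to the returned buffer, inserting a blank line
+// before each import path listed in breaks. breaks must be ordered as the
+// paths appear in the source, since entries are consumed from the front.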
+func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
+ var out bytes.Buffer
+ in := bufio.NewReader(r)
+ inImports := false
+ done := false
+ for {
+ s, err := in.ReadString('\n')
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ if !inImports && !done && strings.HasPrefix(s, "import") {
+ inImports = true
+ }
+ if inImports && (strings.HasPrefix(s, "var") ||
+ strings.HasPrefix(s, "func") ||
+ strings.HasPrefix(s, "const") ||
+ strings.HasPrefix(s, "type")) {
+ done = true
+ inImports = false
+ }
+ if inImports && len(breaks) > 0 {
+ if m := impLine.FindStringSubmatch(s); m != nil {
+ if m[1] == breaks[0] {
+ out.WriteByte('\n')
+ breaks = breaks[1:]
+ }
+ }
+ }
+
+ fmt.Fprint(&out, s)
+ }
+ return out.Bytes(), nil
+}
diff --git a/vendor/golang.org/x/tools/imports/mkindex.go b/vendor/golang.org/x/tools/imports/mkindex.go
new file mode 100644
index 0000000..755e239
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/mkindex.go
@@ -0,0 +1,173 @@
+// +build ignore
+
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Command mkindex creates the file "pkgindex.go" containing an index of the Go
+// standard library. The file is intended to be built as part of the imports
+// package, so that the package may be used in environments where a GOROOT is
+// not available (such as App Engine).
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+var (
+ pkgIndex = make(map[string][]pkg)
+ exports = make(map[string]map[string]bool)
+)
+
+func main() {
+ // Don't use GOPATH.
+ ctx := build.Default
+ ctx.GOPATH = ""
+
+ // Populate pkgIndex global from GOROOT.
+ for _, path := range ctx.SrcDirs() {
+ f, err := os.Open(path)
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ children, err := f.Readdir(-1)
+ f.Close()
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ for _, child := range children {
+ if child.IsDir() {
+ loadPkg(path, child.Name())
+ }
+ }
+ }
+ // Populate exports global.
+ for _, ps := range pkgIndex {
+ for _, p := range ps {
+ e := loadExports(p.dir)
+ if e != nil {
+ exports[p.dir] = e
+ }
+ }
+ }
+
+ // Construct source file.
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, pkgIndexHead)
+ fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex)
+ fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports)
+ src := buf.Bytes()
+
+ // Replace main.pkg type name with pkg.
+ src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1)
+ // Replace actual GOROOT with "/go".
+ src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1)
+ // Add some line wrapping.
+ src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1)
+ src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1)
+
+ var err error
+ src, err = format.Source(src)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Write out source file.
+ err = ioutil.WriteFile("pkgindex.go", src, 0644)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
+
+const pkgIndexHead = `package imports
+
+func init() {
+ pkgIndexOnce.Do(func() {
+ pkgIndex.m = pkgIndexMaster
+ })
+ loadExports = func(dir string) map[string]bool {
+ return exportsMaster[dir]
+ }
+}
+`
+
+type pkg struct {
+ importpath string // full pkg import path, e.g. "net/http"
+ dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
+}
+
+var fset = token.NewFileSet()
+
+func loadPkg(root, importpath string) {
+ shortName := path.Base(importpath)
+ if shortName == "testdata" {
+ return
+ }
+
+ dir := filepath.Join(root, importpath)
+ pkgIndex[shortName] = append(pkgIndex[shortName], pkg{
+ importpath: importpath,
+ dir: dir,
+ })
+
+ pkgDir, err := os.Open(dir)
+ if err != nil {
+ return
+ }
+ children, err := pkgDir.Readdir(-1)
+ pkgDir.Close()
+ if err != nil {
+ return
+ }
+ for _, child := range children {
+ name := child.Name()
+ if name == "" {
+ continue
+ }
+ if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
+ continue
+ }
+ if child.IsDir() {
+ loadPkg(root, filepath.Join(importpath, name))
+ }
+ }
+}
+
+func loadExports(dir string) map[string]bool {
+ exports := make(map[string]bool)
+ buildPkg, err := build.ImportDir(dir, 0)
+ if err != nil {
+ if strings.Contains(err.Error(), "no buildable Go source files in") {
+ return nil
+ }
+ log.Printf("could not import %q: %v", dir, err)
+ return nil
+ }
+ for _, file := range buildPkg.GoFiles {
+ f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
+ if err != nil {
+ log.Printf("could not parse %q: %v", file, err)
+ continue
+ }
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports[name] = true
+ }
+ }
+ }
+ return exports
+}
diff --git a/vendor/golang.org/x/tools/imports/mkstdlib.go b/vendor/golang.org/x/tools/imports/mkstdlib.go
new file mode 100644
index 0000000..5059ad4
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/mkstdlib.go
@@ -0,0 +1,112 @@
+// +build ignore
+
+// mkstdlib generates the zstdlib.go file, containing the Go standard
+// library API symbols. It's baked into the binary to avoid scanning
+// GOPATH in the common case.
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/format"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strings"
+)
+
+func mustOpen(name string) io.Reader {
+ f, err := os.Open(name)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return f
+}
+
+func api(base string) string {
+ return filepath.Join(runtime.GOROOT(), "api", base)
+}
+
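+// sym matches lines in the GOROOT api files that declare an exported var,
+// func, type, or const, capturing the package path and the symbol name. For
+// example, "pkg bytes, func Contains([]uint8, []uint8) bool" yields "bytes"
+// and "Contains".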
+var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)
+
+var unsafeSyms = map[string]bool{"Alignof": true, "ArbitraryType": true, "Offsetof": true, "Pointer": true, "Sizeof": true}
+
+func main() {
+ var buf bytes.Buffer
+ outf := func(format string, args ...interface{}) {
+ fmt.Fprintf(&buf, format, args...)
+ }
+ outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n")
+ outf("package imports\n")
+ outf("var stdlib = map[string]map[string]bool{\n")
+ f := io.MultiReader(
+ mustOpen(api("go1.txt")),
+ mustOpen(api("go1.1.txt")),
+ mustOpen(api("go1.2.txt")),
+ mustOpen(api("go1.3.txt")),
+ mustOpen(api("go1.4.txt")),
+ mustOpen(api("go1.5.txt")),
+ mustOpen(api("go1.6.txt")),
+ mustOpen(api("go1.7.txt")),
+ mustOpen(api("go1.8.txt")),
+ mustOpen(api("go1.9.txt")),
+ mustOpen(api("go1.10.txt")),
+ mustOpen(api("go1.11.txt")),
+ mustOpen(api("go1.12.txt")),
+ )
+ sc := bufio.NewScanner(f)
+
+ pkgs := map[string]map[string]bool{
+ "unsafe": unsafeSyms,
+ }
+ paths := []string{"unsafe"}
+
+ for sc.Scan() {
+ l := sc.Text()
+ has := func(v string) bool { return strings.Contains(l, v) }
+ if has("struct, ") || has("interface, ") || has(", method (") {
+ continue
+ }
+ if m := sym.FindStringSubmatch(l); m != nil {
+ path, sym := m[1], m[2]
+
+ if _, ok := pkgs[path]; !ok {
+ pkgs[path] = map[string]bool{}
+ paths = append(paths, path)
+ }
+ pkgs[path][sym] = true
+ }
+ }
+ if err := sc.Err(); err != nil {
+ log.Fatal(err)
+ }
+ sort.Strings(paths)
+ for _, path := range paths {
+ outf("\t%q: map[string]bool{\n", path)
+ pkg := pkgs[path]
+ var syms []string
+ for sym := range pkg {
+ syms = append(syms, sym)
+ }
+ sort.Strings(syms)
+ for _, sym := range syms {
+ outf("\t\t%q: true,\n", sym)
+ }
+ outf("},\n")
+ }
+ outf("}\n")
+ fmtbuf, err := format.Source(buf.Bytes())
+ if err != nil {
+ log.Fatal(err)
+ }
+ err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/vendor/golang.org/x/tools/imports/mod.go b/vendor/golang.org/x/tools/imports/mod.go
new file mode 100644
index 0000000..018c43c
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/mod.go
@@ -0,0 +1,355 @@
+package imports
+
+import (
+ "bytes"
+ "encoding/json"
+ "io/ioutil"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/internal/gopathwalk"
+ "golang.org/x/tools/internal/module"
+)
+
+// moduleResolver implements a resolver for modules, using the go command as
+// little as feasible.
+type moduleResolver struct {
+ env *fixEnv
+
+ initialized bool
+ main *moduleJSON
+ modsByModPath []*moduleJSON // All modules, ordered by # of path components in module Path...
+ modsByDir []*moduleJSON // ...or Dir.
+}
+
+type moduleJSON struct {
+ Path string // module path
+ Version string // module version
+ Versions []string // available module versions (with -versions)
+ Replace *moduleJSON // replaced by this module
+ Time *time.Time // time version was created
+ Update *moduleJSON // available update, if any (with -u)
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file for this module, if any
+ Error *moduleErrorJSON // error loading module
+}
+
+type moduleErrorJSON struct {
+ Err string // the error itself
+}
+
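+// init runs `go list -m -json ...` once to discover the modules in scope and
+// caches the result, sorted so that module paths (and directories) with more
+// path components are considered first.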
+func (r *moduleResolver) init() error {
+ if r.initialized {
+ return nil
+ }
+ stdout, err := r.env.invokeGo("list", "-m", "-json", "...")
+ if err != nil {
+ return err
+ }
+ for dec := json.NewDecoder(stdout); dec.More(); {
+ mod := &moduleJSON{}
+ if err := dec.Decode(mod); err != nil {
+ return err
+ }
+ if mod.Dir == "" {
+ if Debug {
+ log.Printf("module %v has not been downloaded and will be ignored", mod.Path)
+ }
+ // Can't do anything with a module that's not downloaded.
+ continue
+ }
+ r.modsByModPath = append(r.modsByModPath, mod)
+ r.modsByDir = append(r.modsByDir, mod)
+ if mod.Main {
+ r.main = mod
+ }
+ }
+
+ sort.Slice(r.modsByModPath, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByModPath[x].Path, "/")
+ }
+ return count(j) < count(i) // descending order
+ })
+ sort.Slice(r.modsByDir, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByDir[x].Dir, "/")
+ }
+ return count(j) < count(i) // descending order
+ })
+
+ r.initialized = true
+ return nil
+}
+
+// findPackage returns the module and directory that contains the package at
+// the given import path, or returns nil, "" if no module is in scope.
+func (r *moduleResolver) findPackage(importPath string) (*moduleJSON, string) {
+ for _, m := range r.modsByModPath {
+ if !strings.HasPrefix(importPath, m.Path) {
+ continue
+ }
+ pathInModule := importPath[len(m.Path):]
+ pkgDir := filepath.Join(m.Dir, pathInModule)
+ if dirIsNestedModule(pkgDir, m) {
+ continue
+ }
+
+ pkgFiles, err := ioutil.ReadDir(pkgDir)
+ if err != nil {
+ continue
+ }
+
+ // A module only contains a package if it has buildable go
+ // files in that directory. If not, it could be provided by an
+ // outer module. See #29736.
+ for _, fi := range pkgFiles {
+ if ok, _ := r.env.buildContext().MatchFile(pkgDir, fi.Name()); ok {
+ return m, pkgDir
+ }
+ }
+ }
+ return nil, ""
+}
+
+// findModuleByDir returns the module that contains dir, or nil if no such
+// module is in scope.
+func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON {
+ // This is quite tricky and may not be correct. dir could be:
+ // - a package in the main module.
+ // - a replace target underneath the main module's directory.
+ // - a nested module in the above.
+ // - a replace target somewhere totally random.
+ // - a nested module in the above.
+ // - in the mod cache.
+ // - in /vendor/ in -mod=vendor mode.
+ // - nested module? Dunno.
+ // Rumor has it that replace targets cannot contain other replace targets.
+ for _, m := range r.modsByDir {
+ if !strings.HasPrefix(dir, m.Dir) {
+ continue
+ }
+
+ if dirIsNestedModule(dir, m) {
+ continue
+ }
+
+ return m
+ }
+ return nil
+}
+
+// dirIsNestedModule reports if dir is contained in a nested module underneath
+// mod, not actually in mod.
+func dirIsNestedModule(dir string, mod *moduleJSON) bool {
+ if !strings.HasPrefix(dir, mod.Dir) {
+ return false
+ }
+ mf := findModFile(dir)
+ if mf == "" {
+ return false
+ }
+ return filepath.Dir(mf) != mod.Dir
+}
+
+func findModFile(dir string) string {
+ for {
+ f := filepath.Join(dir, "go.mod")
+ info, err := os.Stat(f)
+ if err == nil && !info.IsDir() {
+ return f
+ }
+ d := filepath.Dir(dir)
+ if len(d) >= len(dir) {
+ return "" // reached top of file system, no go.mod
+ }
+ dir = d
+ }
+}
+
+func (r *moduleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ if err := r.init(); err != nil {
+ return nil, err
+ }
+ names := map[string]string{}
+ for _, path := range importPaths {
+ _, packageDir := r.findPackage(path)
+ if packageDir == "" {
+ continue
+ }
+ name, err := packageDirToName(packageDir)
+ if err != nil {
+ continue
+ }
+ names[path] = name
+ }
+ return names, nil
+}
+
+func (r *moduleResolver) scan(_ references) ([]*pkg, error) {
+ if err := r.init(); err != nil {
+ return nil, err
+ }
+
+ // Walk GOROOT, GOPATH/pkg/mod, and the main module.
+ roots := []gopathwalk.Root{
+ {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
+ }
+ if r.main != nil {
+ roots = append(roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
+ }
+ for _, p := range filepath.SplitList(r.env.GOPATH) {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
+ }
+
+ // Walk replace targets, just in case they're not in any of the above.
+ for _, mod := range r.modsByModPath {
+ if mod.Replace != nil {
+ roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
+ }
+ }
+
+ var result []*pkg
+ dupCheck := make(map[string]bool)
+ var mu sync.Mutex
+
+ gopathwalk.Walk(roots, func(root gopathwalk.Root, dir string) {
+ mu.Lock()
+ defer mu.Unlock()
+
+ if _, dup := dupCheck[dir]; dup {
+ return
+ }
+
+ dupCheck[dir] = true
+
+ subdir := ""
+ if dir != root.Path {
+ subdir = dir[len(root.Path)+len("/"):]
+ }
+ importPath := filepath.ToSlash(subdir)
+ if strings.HasPrefix(importPath, "vendor/") {
+ // Ignore vendor dirs. If -mod=vendor is on, then things
+			// should mostly just work, but when it's not,
+			// vendor/ is a mess. There's no easy way to tell if it's on.
+ // We can still find things in the mod cache and
+ // map them into /vendor when -mod=vendor is on.
+ return
+ }
+ switch root.Type {
+ case gopathwalk.RootCurrentModule:
+ importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
+ case gopathwalk.RootModuleCache:
+ matches := modCacheRegexp.FindStringSubmatch(subdir)
+ modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
+ if err != nil {
+ if Debug {
+ log.Printf("decoding module cache path %q: %v", subdir, err)
+ }
+ return
+ }
+ importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
+ case gopathwalk.RootGOROOT:
+ importPath = subdir
+ }
+
+ // Check if the directory is underneath a module that's in scope.
+ if mod := r.findModuleByDir(dir); mod != nil {
+ // It is. If dir is the target of a replace directive,
+ // our guessed import path is wrong. Use the real one.
+ if mod.Dir == dir {
+ importPath = mod.Path
+ } else {
+ dirInMod := dir[len(mod.Dir)+len("/"):]
+ importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
+ }
+ } else {
+ // The package is in an unknown module. Check that it's
+ // not obviously impossible to import.
+ var modFile string
+ switch root.Type {
+ case gopathwalk.RootModuleCache:
+ matches := modCacheRegexp.FindStringSubmatch(subdir)
+ modFile = filepath.Join(matches[1], "@", matches[2], "go.mod")
+ default:
+ modFile = findModFile(dir)
+ }
+
+ modBytes, err := ioutil.ReadFile(modFile)
+ if err == nil && !strings.HasPrefix(importPath, modulePath(modBytes)) {
+ // The module's declared path does not match
+ // its expected path. It probably needs a
+ // replace directive we don't have.
+ return
+ }
+ }
+ // We may have discovered a package that has a different version
+ // in scope already. Canonicalize to that one if possible.
+ if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
+ dir = canonicalDir
+ }
+
+ result = append(result, &pkg{
+ importPathShort: VendorlessPath(importPath),
+ dir: dir,
+ })
+ }, gopathwalk.Options{Debug: Debug, ModulesEnabled: true})
+ return result, nil
+}
+
+// modCacheRegexp splits a path in a module cache into module, module version, and package.
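+// For example, "github.com/foo/bar@v1.2.3/baz" splits into "github.com/foo/bar",
+// "v1.2.3", and "/baz".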
+var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
+
+var (
+ slashSlash = []byte("//")
+ moduleStr = []byte("module")
+)
+
+// modulePath returns the module path from the gomod file text.
+// If it cannot find a module path, it returns an empty string.
+// It is tolerant of unrelated problems in the go.mod file.
+//
+// Copied from cmd/go/internal/modfile.
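+//
+// For example, modulePath([]byte("module example.com/m\n")) returns
+// "example.com/m".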
+func modulePath(mod []byte) string {
+ for len(mod) > 0 {
+ line := mod
+ mod = nil
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, mod = line[:i], line[i+1:]
+ }
+ if i := bytes.Index(line, slashSlash); i >= 0 {
+ line = line[:i]
+ }
+ line = bytes.TrimSpace(line)
+ if !bytes.HasPrefix(line, moduleStr) {
+ continue
+ }
+ line = line[len(moduleStr):]
+ n := len(line)
+ line = bytes.TrimSpace(line)
+ if len(line) == n || len(line) == 0 {
+ continue
+ }
+
+ if line[0] == '"' || line[0] == '`' {
+ p, err := strconv.Unquote(string(line))
+ if err != nil {
+ return "" // malformed quoted string or multiline module path
+ }
+ return p
+ }
+
+ return string(line)
+ }
+ return "" // missing module path
+}
diff --git a/vendor/golang.org/x/tools/imports/sortimports.go b/vendor/golang.org/x/tools/imports/sortimports.go
new file mode 100644
index 0000000..f3dd56c
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/sortimports.go
@@ -0,0 +1,230 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Hacked up copy of go/ast/import.go
+
+package imports
+
+import (
+ "go/ast"
+ "go/token"
+ "sort"
+ "strconv"
+)
+
+// sortImports sorts runs of consecutive import lines in import blocks in f.
+// It also removes duplicate imports when it is possible to do so without data loss.
+func sortImports(fset *token.FileSet, f *ast.File) {
+ for i, d := range f.Decls {
+ d, ok := d.(*ast.GenDecl)
+ if !ok || d.Tok != token.IMPORT {
+ // Not an import declaration, so we're done.
+ // Imports are always first.
+ break
+ }
+
+ if len(d.Specs) == 0 {
+ // Empty import block, remove it.
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ }
+
+ if !d.Lparen.IsValid() {
+ // Not a block: sorted by default.
+ continue
+ }
+
+ // Identify and sort runs of specs on successive lines.
+ i := 0
+ specs := d.Specs[:0]
+ for j, s := range d.Specs {
+ if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+ // j begins a new run. End this one.
+ specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
+ i = j
+ }
+ }
+ specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
+ d.Specs = specs
+
+ // Deduping can leave a blank line before the rparen; clean that up.
+ if len(d.Specs) > 0 {
+ lastSpec := d.Specs[len(d.Specs)-1]
+ lastLine := fset.Position(lastSpec.Pos()).Line
+ if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
+ fset.File(d.Rparen).MergeLine(rParenLine - 1)
+ }
+ }
+ }
+}
+
+func importPath(s ast.Spec) string {
+ t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
+ if err == nil {
+ return t
+ }
+ return ""
+}
+
+func importName(s ast.Spec) string {
+ n := s.(*ast.ImportSpec).Name
+ if n == nil {
+ return ""
+ }
+ return n.Name
+}
+
+func importComment(s ast.Spec) string {
+ c := s.(*ast.ImportSpec).Comment
+ if c == nil {
+ return ""
+ }
+ return c.Text()
+}
+
+// collapse indicates whether prev may be removed, leaving only next.
+func collapse(prev, next ast.Spec) bool {
+ if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
+ return false
+ }
+ return prev.(*ast.ImportSpec).Comment == nil
+}
+
+type posSpan struct {
+ Start token.Pos
+ End token.Pos
+}
+
+func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
+ // Can't short-circuit here even if specs are already sorted,
+ // since they might yet need deduplication.
+ // A lone import, however, may be safely ignored.
+ if len(specs) <= 1 {
+ return specs
+ }
+
+ // Record positions for specs.
+ pos := make([]posSpan, len(specs))
+ for i, s := range specs {
+ pos[i] = posSpan{s.Pos(), s.End()}
+ }
+
+ // Identify comments in this range.
+ // Any comment from pos[0].Start to the final line counts.
+ lastLine := fset.Position(pos[len(pos)-1].End).Line
+ cstart := len(f.Comments)
+ cend := len(f.Comments)
+ for i, g := range f.Comments {
+ if g.Pos() < pos[0].Start {
+ continue
+ }
+ if i < cstart {
+ cstart = i
+ }
+ if fset.Position(g.End()).Line > lastLine {
+ cend = i
+ break
+ }
+ }
+ comments := f.Comments[cstart:cend]
+
+ // Assign each comment to the import spec preceding it.
+ importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
+ specIndex := 0
+ for _, g := range comments {
+ for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
+ specIndex++
+ }
+ s := specs[specIndex].(*ast.ImportSpec)
+ importComment[s] = append(importComment[s], g)
+ }
+
+ // Sort the import specs by import path.
+ // Remove duplicates, when possible without data loss.
+ // Reassign the import paths to have the same position sequence.
+ // Reassign each comment to abut the end of its spec.
+ // Sort the comments by new position.
+ sort.Sort(byImportSpec(specs))
+
+ // Dedup. Thanks to our sorting, we can just consider
+ // adjacent pairs of imports.
+ deduped := specs[:0]
+ for i, s := range specs {
+ if i == len(specs)-1 || !collapse(s, specs[i+1]) {
+ deduped = append(deduped, s)
+ } else {
+ p := s.Pos()
+ fset.File(p).MergeLine(fset.Position(p).Line)
+ }
+ }
+ specs = deduped
+
+ // Fix up comment positions
+ for i, s := range specs {
+ s := s.(*ast.ImportSpec)
+ if s.Name != nil {
+ s.Name.NamePos = pos[i].Start
+ }
+ s.Path.ValuePos = pos[i].Start
+ s.EndPos = pos[i].End
+ nextSpecPos := pos[i].End
+
+ for _, g := range importComment[s] {
+ for _, c := range g.List {
+ c.Slash = pos[i].End
+ nextSpecPos = c.End()
+ }
+ }
+ if i < len(specs)-1 {
+ pos[i+1].Start = nextSpecPos
+ pos[i+1].End = nextSpecPos
+ }
+ }
+
+ sort.Sort(byCommentPos(comments))
+
+	// Fixing up the comments can insert blank lines, because the import specs
+	// are now on different lines. We remove those blank lines here by merging
+	// the line of each import spec into the line of the first import spec.
+ firstSpecLine := fset.Position(specs[0].Pos()).Line
+ for _, s := range specs[1:] {
+ p := s.Pos()
+ line := fset.File(p).Line(p)
+ for previousLine := line - 1; previousLine >= firstSpecLine; {
+ fset.File(p).MergeLine(previousLine)
+ previousLine--
+ }
+ }
+ return specs
+}
+
+type byImportSpec []ast.Spec // slice of *ast.ImportSpec
+
+func (x byImportSpec) Len() int { return len(x) }
+func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byImportSpec) Less(i, j int) bool {
+ ipath := importPath(x[i])
+ jpath := importPath(x[j])
+
+ igroup := importGroup(ipath)
+ jgroup := importGroup(jpath)
+ if igroup != jgroup {
+ return igroup < jgroup
+ }
+
+ if ipath != jpath {
+ return ipath < jpath
+ }
+ iname := importName(x[i])
+ jname := importName(x[j])
+
+ if iname != jname {
+ return iname < jname
+ }
+ return importComment(x[i]) < importComment(x[j])
+}
+
+type byCommentPos []*ast.CommentGroup
+
+func (x byCommentPos) Len() int { return len(x) }
+func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
diff --git a/vendor/golang.org/x/tools/imports/zstdlib.go b/vendor/golang.org/x/tools/imports/zstdlib.go
new file mode 100644
index 0000000..c18a009
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/zstdlib.go
@@ -0,0 +1,10302 @@
+// Code generated by mkstdlib.go. DO NOT EDIT.
+
+package imports
+
+var stdlib = map[string]map[string]bool{
+ "archive/tar": map[string]bool{
+ "ErrFieldTooLong": true,
+ "ErrHeader": true,
+ "ErrWriteAfterClose": true,
+ "ErrWriteTooLong": true,
+ "FileInfoHeader": true,
+ "Format": true,
+ "FormatGNU": true,
+ "FormatPAX": true,
+ "FormatUSTAR": true,
+ "FormatUnknown": true,
+ "Header": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Reader": true,
+ "TypeBlock": true,
+ "TypeChar": true,
+ "TypeCont": true,
+ "TypeDir": true,
+ "TypeFifo": true,
+ "TypeGNULongLink": true,
+ "TypeGNULongName": true,
+ "TypeGNUSparse": true,
+ "TypeLink": true,
+ "TypeReg": true,
+ "TypeRegA": true,
+ "TypeSymlink": true,
+ "TypeXGlobalHeader": true,
+ "TypeXHeader": true,
+ "Writer": true,
+ },
+ "archive/zip": map[string]bool{
+ "Compressor": true,
+ "Decompressor": true,
+ "Deflate": true,
+ "ErrAlgorithm": true,
+ "ErrChecksum": true,
+ "ErrFormat": true,
+ "File": true,
+ "FileHeader": true,
+ "FileInfoHeader": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "OpenReader": true,
+ "ReadCloser": true,
+ "Reader": true,
+ "RegisterCompressor": true,
+ "RegisterDecompressor": true,
+ "Store": true,
+ "Writer": true,
+ },
+ "bufio": map[string]bool{
+ "ErrAdvanceTooFar": true,
+ "ErrBufferFull": true,
+ "ErrFinalToken": true,
+ "ErrInvalidUnreadByte": true,
+ "ErrInvalidUnreadRune": true,
+ "ErrNegativeAdvance": true,
+ "ErrNegativeCount": true,
+ "ErrTooLong": true,
+ "MaxScanTokenSize": true,
+ "NewReadWriter": true,
+ "NewReader": true,
+ "NewReaderSize": true,
+ "NewScanner": true,
+ "NewWriter": true,
+ "NewWriterSize": true,
+ "ReadWriter": true,
+ "Reader": true,
+ "ScanBytes": true,
+ "ScanLines": true,
+ "ScanRunes": true,
+ "ScanWords": true,
+ "Scanner": true,
+ "SplitFunc": true,
+ "Writer": true,
+ },
+ "bytes": map[string]bool{
+ "Buffer": true,
+ "Compare": true,
+ "Contains": true,
+ "ContainsAny": true,
+ "ContainsRune": true,
+ "Count": true,
+ "Equal": true,
+ "EqualFold": true,
+ "ErrTooLarge": true,
+ "Fields": true,
+ "FieldsFunc": true,
+ "HasPrefix": true,
+ "HasSuffix": true,
+ "Index": true,
+ "IndexAny": true,
+ "IndexByte": true,
+ "IndexFunc": true,
+ "IndexRune": true,
+ "Join": true,
+ "LastIndex": true,
+ "LastIndexAny": true,
+ "LastIndexByte": true,
+ "LastIndexFunc": true,
+ "Map": true,
+ "MinRead": true,
+ "NewBuffer": true,
+ "NewBufferString": true,
+ "NewReader": true,
+ "Reader": true,
+ "Repeat": true,
+ "Replace": true,
+ "ReplaceAll": true,
+ "Runes": true,
+ "Split": true,
+ "SplitAfter": true,
+ "SplitAfterN": true,
+ "SplitN": true,
+ "Title": true,
+ "ToLower": true,
+ "ToLowerSpecial": true,
+ "ToTitle": true,
+ "ToTitleSpecial": true,
+ "ToUpper": true,
+ "ToUpperSpecial": true,
+ "Trim": true,
+ "TrimFunc": true,
+ "TrimLeft": true,
+ "TrimLeftFunc": true,
+ "TrimPrefix": true,
+ "TrimRight": true,
+ "TrimRightFunc": true,
+ "TrimSpace": true,
+ "TrimSuffix": true,
+ },
+ "compress/bzip2": map[string]bool{
+ "NewReader": true,
+ "StructuralError": true,
+ },
+ "compress/flate": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "CorruptInputError": true,
+ "DefaultCompression": true,
+ "HuffmanOnly": true,
+ "InternalError": true,
+ "NewReader": true,
+ "NewReaderDict": true,
+ "NewWriter": true,
+ "NewWriterDict": true,
+ "NoCompression": true,
+ "ReadError": true,
+ "Reader": true,
+ "Resetter": true,
+ "WriteError": true,
+ "Writer": true,
+ },
+ "compress/gzip": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "DefaultCompression": true,
+ "ErrChecksum": true,
+ "ErrHeader": true,
+ "Header": true,
+ "HuffmanOnly": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "NewWriterLevel": true,
+ "NoCompression": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "compress/lzw": map[string]bool{
+ "LSB": true,
+ "MSB": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Order": true,
+ },
+ "compress/zlib": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "DefaultCompression": true,
+ "ErrChecksum": true,
+ "ErrDictionary": true,
+ "ErrHeader": true,
+ "HuffmanOnly": true,
+ "NewReader": true,
+ "NewReaderDict": true,
+ "NewWriter": true,
+ "NewWriterLevel": true,
+ "NewWriterLevelDict": true,
+ "NoCompression": true,
+ "Resetter": true,
+ "Writer": true,
+ },
+ "container/heap": map[string]bool{
+ "Fix": true,
+ "Init": true,
+ "Interface": true,
+ "Pop": true,
+ "Push": true,
+ "Remove": true,
+ },
+ "container/list": map[string]bool{
+ "Element": true,
+ "List": true,
+ "New": true,
+ },
+ "container/ring": map[string]bool{
+ "New": true,
+ "Ring": true,
+ },
+ "context": map[string]bool{
+ "Background": true,
+ "CancelFunc": true,
+ "Canceled": true,
+ "Context": true,
+ "DeadlineExceeded": true,
+ "TODO": true,
+ "WithCancel": true,
+ "WithDeadline": true,
+ "WithTimeout": true,
+ "WithValue": true,
+ },
+ "crypto": map[string]bool{
+ "BLAKE2b_256": true,
+ "BLAKE2b_384": true,
+ "BLAKE2b_512": true,
+ "BLAKE2s_256": true,
+ "Decrypter": true,
+ "DecrypterOpts": true,
+ "Hash": true,
+ "MD4": true,
+ "MD5": true,
+ "MD5SHA1": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "RIPEMD160": true,
+ "RegisterHash": true,
+ "SHA1": true,
+ "SHA224": true,
+ "SHA256": true,
+ "SHA384": true,
+ "SHA3_224": true,
+ "SHA3_256": true,
+ "SHA3_384": true,
+ "SHA3_512": true,
+ "SHA512": true,
+ "SHA512_224": true,
+ "SHA512_256": true,
+ "Signer": true,
+ "SignerOpts": true,
+ },
+ "crypto/aes": map[string]bool{
+ "BlockSize": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ },
+ "crypto/cipher": map[string]bool{
+ "AEAD": true,
+ "Block": true,
+ "BlockMode": true,
+ "NewCBCDecrypter": true,
+ "NewCBCEncrypter": true,
+ "NewCFBDecrypter": true,
+ "NewCFBEncrypter": true,
+ "NewCTR": true,
+ "NewGCM": true,
+ "NewGCMWithNonceSize": true,
+ "NewGCMWithTagSize": true,
+ "NewOFB": true,
+ "Stream": true,
+ "StreamReader": true,
+ "StreamWriter": true,
+ },
+ "crypto/des": map[string]bool{
+ "BlockSize": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ "NewTripleDESCipher": true,
+ },
+ "crypto/dsa": map[string]bool{
+ "ErrInvalidPublicKey": true,
+ "GenerateKey": true,
+ "GenerateParameters": true,
+ "L1024N160": true,
+ "L2048N224": true,
+ "L2048N256": true,
+ "L3072N256": true,
+ "ParameterSizes": true,
+ "Parameters": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "Sign": true,
+ "Verify": true,
+ },
+ "crypto/ecdsa": map[string]bool{
+ "GenerateKey": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "Sign": true,
+ "Verify": true,
+ },
+ "crypto/elliptic": map[string]bool{
+ "Curve": true,
+ "CurveParams": true,
+ "GenerateKey": true,
+ "Marshal": true,
+ "P224": true,
+ "P256": true,
+ "P384": true,
+ "P521": true,
+ "Unmarshal": true,
+ },
+ "crypto/hmac": map[string]bool{
+ "Equal": true,
+ "New": true,
+ },
+ "crypto/md5": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "Size": true,
+ "Sum": true,
+ },
+ "crypto/rand": map[string]bool{
+ "Int": true,
+ "Prime": true,
+ "Read": true,
+ "Reader": true,
+ },
+ "crypto/rc4": map[string]bool{
+ "Cipher": true,
+ "KeySizeError": true,
+ "NewCipher": true,
+ },
+ "crypto/rsa": map[string]bool{
+ "CRTValue": true,
+ "DecryptOAEP": true,
+ "DecryptPKCS1v15": true,
+ "DecryptPKCS1v15SessionKey": true,
+ "EncryptOAEP": true,
+ "EncryptPKCS1v15": true,
+ "ErrDecryption": true,
+ "ErrMessageTooLong": true,
+ "ErrVerification": true,
+ "GenerateKey": true,
+ "GenerateMultiPrimeKey": true,
+ "OAEPOptions": true,
+ "PKCS1v15DecryptOptions": true,
+ "PSSOptions": true,
+ "PSSSaltLengthAuto": true,
+ "PSSSaltLengthEqualsHash": true,
+ "PrecomputedValues": true,
+ "PrivateKey": true,
+ "PublicKey": true,
+ "SignPKCS1v15": true,
+ "SignPSS": true,
+ "VerifyPKCS1v15": true,
+ "VerifyPSS": true,
+ },
+ "crypto/sha1": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "Size": true,
+ "Sum": true,
+ },
+ "crypto/sha256": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "New224": true,
+ "Size": true,
+ "Size224": true,
+ "Sum224": true,
+ "Sum256": true,
+ },
+ "crypto/sha512": map[string]bool{
+ "BlockSize": true,
+ "New": true,
+ "New384": true,
+ "New512_224": true,
+ "New512_256": true,
+ "Size": true,
+ "Size224": true,
+ "Size256": true,
+ "Size384": true,
+ "Sum384": true,
+ "Sum512": true,
+ "Sum512_224": true,
+ "Sum512_256": true,
+ },
+ "crypto/subtle": map[string]bool{
+ "ConstantTimeByteEq": true,
+ "ConstantTimeCompare": true,
+ "ConstantTimeCopy": true,
+ "ConstantTimeEq": true,
+ "ConstantTimeLessOrEq": true,
+ "ConstantTimeSelect": true,
+ },
+ "crypto/tls": map[string]bool{
+ "Certificate": true,
+ "CertificateRequestInfo": true,
+ "Client": true,
+ "ClientAuthType": true,
+ "ClientHelloInfo": true,
+ "ClientSessionCache": true,
+ "ClientSessionState": true,
+ "Config": true,
+ "Conn": true,
+ "ConnectionState": true,
+ "CurveID": true,
+ "CurveP256": true,
+ "CurveP384": true,
+ "CurveP521": true,
+ "Dial": true,
+ "DialWithDialer": true,
+ "ECDSAWithP256AndSHA256": true,
+ "ECDSAWithP384AndSHA384": true,
+ "ECDSAWithP521AndSHA512": true,
+ "ECDSAWithSHA1": true,
+ "Listen": true,
+ "LoadX509KeyPair": true,
+ "NewLRUClientSessionCache": true,
+ "NewListener": true,
+ "NoClientCert": true,
+ "PKCS1WithSHA1": true,
+ "PKCS1WithSHA256": true,
+ "PKCS1WithSHA384": true,
+ "PKCS1WithSHA512": true,
+ "PSSWithSHA256": true,
+ "PSSWithSHA384": true,
+ "PSSWithSHA512": true,
+ "RecordHeaderError": true,
+ "RenegotiateFreelyAsClient": true,
+ "RenegotiateNever": true,
+ "RenegotiateOnceAsClient": true,
+ "RenegotiationSupport": true,
+ "RequestClientCert": true,
+ "RequireAndVerifyClientCert": true,
+ "RequireAnyClientCert": true,
+ "Server": true,
+ "SignatureScheme": true,
+ "TLS_AES_128_GCM_SHA256": true,
+ "TLS_AES_256_GCM_SHA384": true,
+ "TLS_CHACHA20_POLY1305_SHA256": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": true,
+ "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": true,
+ "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": true,
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA": true,
+ "TLS_FALLBACK_SCSV": true,
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_128_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_128_CBC_SHA256": true,
+ "TLS_RSA_WITH_AES_128_GCM_SHA256": true,
+ "TLS_RSA_WITH_AES_256_CBC_SHA": true,
+ "TLS_RSA_WITH_AES_256_GCM_SHA384": true,
+ "TLS_RSA_WITH_RC4_128_SHA": true,
+ "VerifyClientCertIfGiven": true,
+ "VersionSSL30": true,
+ "VersionTLS10": true,
+ "VersionTLS11": true,
+ "VersionTLS12": true,
+ "VersionTLS13": true,
+ "X25519": true,
+ "X509KeyPair": true,
+ },
+ "crypto/x509": map[string]bool{
+ "CANotAuthorizedForExtKeyUsage": true,
+ "CANotAuthorizedForThisName": true,
+ "CertPool": true,
+ "Certificate": true,
+ "CertificateInvalidError": true,
+ "CertificateRequest": true,
+ "ConstraintViolationError": true,
+ "CreateCertificate": true,
+ "CreateCertificateRequest": true,
+ "DSA": true,
+ "DSAWithSHA1": true,
+ "DSAWithSHA256": true,
+ "DecryptPEMBlock": true,
+ "ECDSA": true,
+ "ECDSAWithSHA1": true,
+ "ECDSAWithSHA256": true,
+ "ECDSAWithSHA384": true,
+ "ECDSAWithSHA512": true,
+ "EncryptPEMBlock": true,
+ "ErrUnsupportedAlgorithm": true,
+ "Expired": true,
+ "ExtKeyUsage": true,
+ "ExtKeyUsageAny": true,
+ "ExtKeyUsageClientAuth": true,
+ "ExtKeyUsageCodeSigning": true,
+ "ExtKeyUsageEmailProtection": true,
+ "ExtKeyUsageIPSECEndSystem": true,
+ "ExtKeyUsageIPSECTunnel": true,
+ "ExtKeyUsageIPSECUser": true,
+ "ExtKeyUsageMicrosoftCommercialCodeSigning": true,
+ "ExtKeyUsageMicrosoftKernelCodeSigning": true,
+ "ExtKeyUsageMicrosoftServerGatedCrypto": true,
+ "ExtKeyUsageNetscapeServerGatedCrypto": true,
+ "ExtKeyUsageOCSPSigning": true,
+ "ExtKeyUsageServerAuth": true,
+ "ExtKeyUsageTimeStamping": true,
+ "HostnameError": true,
+ "IncompatibleUsage": true,
+ "IncorrectPasswordError": true,
+ "InsecureAlgorithmError": true,
+ "InvalidReason": true,
+ "IsEncryptedPEMBlock": true,
+ "KeyUsage": true,
+ "KeyUsageCRLSign": true,
+ "KeyUsageCertSign": true,
+ "KeyUsageContentCommitment": true,
+ "KeyUsageDataEncipherment": true,
+ "KeyUsageDecipherOnly": true,
+ "KeyUsageDigitalSignature": true,
+ "KeyUsageEncipherOnly": true,
+ "KeyUsageKeyAgreement": true,
+ "KeyUsageKeyEncipherment": true,
+ "MD2WithRSA": true,
+ "MD5WithRSA": true,
+ "MarshalECPrivateKey": true,
+ "MarshalPKCS1PrivateKey": true,
+ "MarshalPKCS1PublicKey": true,
+ "MarshalPKCS8PrivateKey": true,
+ "MarshalPKIXPublicKey": true,
+ "NameConstraintsWithoutSANs": true,
+ "NameMismatch": true,
+ "NewCertPool": true,
+ "NotAuthorizedToSign": true,
+ "PEMCipher": true,
+ "PEMCipher3DES": true,
+ "PEMCipherAES128": true,
+ "PEMCipherAES192": true,
+ "PEMCipherAES256": true,
+ "PEMCipherDES": true,
+ "ParseCRL": true,
+ "ParseCertificate": true,
+ "ParseCertificateRequest": true,
+ "ParseCertificates": true,
+ "ParseDERCRL": true,
+ "ParseECPrivateKey": true,
+ "ParsePKCS1PrivateKey": true,
+ "ParsePKCS1PublicKey": true,
+ "ParsePKCS8PrivateKey": true,
+ "ParsePKIXPublicKey": true,
+ "PublicKeyAlgorithm": true,
+ "RSA": true,
+ "SHA1WithRSA": true,
+ "SHA256WithRSA": true,
+ "SHA256WithRSAPSS": true,
+ "SHA384WithRSA": true,
+ "SHA384WithRSAPSS": true,
+ "SHA512WithRSA": true,
+ "SHA512WithRSAPSS": true,
+ "SignatureAlgorithm": true,
+ "SystemCertPool": true,
+ "SystemRootsError": true,
+ "TooManyConstraints": true,
+ "TooManyIntermediates": true,
+ "UnconstrainedName": true,
+ "UnhandledCriticalExtension": true,
+ "UnknownAuthorityError": true,
+ "UnknownPublicKeyAlgorithm": true,
+ "UnknownSignatureAlgorithm": true,
+ "VerifyOptions": true,
+ },
+ "crypto/x509/pkix": map[string]bool{
+ "AlgorithmIdentifier": true,
+ "AttributeTypeAndValue": true,
+ "AttributeTypeAndValueSET": true,
+ "CertificateList": true,
+ "Extension": true,
+ "Name": true,
+ "RDNSequence": true,
+ "RelativeDistinguishedNameSET": true,
+ "RevokedCertificate": true,
+ "TBSCertificateList": true,
+ },
+ "database/sql": map[string]bool{
+ "ColumnType": true,
+ "Conn": true,
+ "DB": true,
+ "DBStats": true,
+ "Drivers": true,
+ "ErrConnDone": true,
+ "ErrNoRows": true,
+ "ErrTxDone": true,
+ "IsolationLevel": true,
+ "LevelDefault": true,
+ "LevelLinearizable": true,
+ "LevelReadCommitted": true,
+ "LevelReadUncommitted": true,
+ "LevelRepeatableRead": true,
+ "LevelSerializable": true,
+ "LevelSnapshot": true,
+ "LevelWriteCommitted": true,
+ "Named": true,
+ "NamedArg": true,
+ "NullBool": true,
+ "NullFloat64": true,
+ "NullInt64": true,
+ "NullString": true,
+ "Open": true,
+ "OpenDB": true,
+ "Out": true,
+ "RawBytes": true,
+ "Register": true,
+ "Result": true,
+ "Row": true,
+ "Rows": true,
+ "Scanner": true,
+ "Stmt": true,
+ "Tx": true,
+ "TxOptions": true,
+ },
+ "database/sql/driver": map[string]bool{
+ "Bool": true,
+ "ColumnConverter": true,
+ "Conn": true,
+ "ConnBeginTx": true,
+ "ConnPrepareContext": true,
+ "Connector": true,
+ "DefaultParameterConverter": true,
+ "Driver": true,
+ "DriverContext": true,
+ "ErrBadConn": true,
+ "ErrRemoveArgument": true,
+ "ErrSkip": true,
+ "Execer": true,
+ "ExecerContext": true,
+ "Int32": true,
+ "IsScanValue": true,
+ "IsValue": true,
+ "IsolationLevel": true,
+ "NamedValue": true,
+ "NamedValueChecker": true,
+ "NotNull": true,
+ "Null": true,
+ "Pinger": true,
+ "Queryer": true,
+ "QueryerContext": true,
+ "Result": true,
+ "ResultNoRows": true,
+ "Rows": true,
+ "RowsAffected": true,
+ "RowsColumnTypeDatabaseTypeName": true,
+ "RowsColumnTypeLength": true,
+ "RowsColumnTypeNullable": true,
+ "RowsColumnTypePrecisionScale": true,
+ "RowsColumnTypeScanType": true,
+ "RowsNextResultSet": true,
+ "SessionResetter": true,
+ "Stmt": true,
+ "StmtExecContext": true,
+ "StmtQueryContext": true,
+ "String": true,
+ "Tx": true,
+ "TxOptions": true,
+ "Value": true,
+ "ValueConverter": true,
+ "Valuer": true,
+ },
+ "debug/dwarf": map[string]bool{
+ "AddrType": true,
+ "ArrayType": true,
+ "Attr": true,
+ "AttrAbstractOrigin": true,
+ "AttrAccessibility": true,
+ "AttrAddrClass": true,
+ "AttrAllocated": true,
+ "AttrArtificial": true,
+ "AttrAssociated": true,
+ "AttrBaseTypes": true,
+ "AttrBitOffset": true,
+ "AttrBitSize": true,
+ "AttrByteSize": true,
+ "AttrCallColumn": true,
+ "AttrCallFile": true,
+ "AttrCallLine": true,
+ "AttrCalling": true,
+ "AttrCommonRef": true,
+ "AttrCompDir": true,
+ "AttrConstValue": true,
+ "AttrContainingType": true,
+ "AttrCount": true,
+ "AttrDataLocation": true,
+ "AttrDataMemberLoc": true,
+ "AttrDeclColumn": true,
+ "AttrDeclFile": true,
+ "AttrDeclLine": true,
+ "AttrDeclaration": true,
+ "AttrDefaultValue": true,
+ "AttrDescription": true,
+ "AttrDiscr": true,
+ "AttrDiscrList": true,
+ "AttrDiscrValue": true,
+ "AttrEncoding": true,
+ "AttrEntrypc": true,
+ "AttrExtension": true,
+ "AttrExternal": true,
+ "AttrFrameBase": true,
+ "AttrFriend": true,
+ "AttrHighpc": true,
+ "AttrIdentifierCase": true,
+ "AttrImport": true,
+ "AttrInline": true,
+ "AttrIsOptional": true,
+ "AttrLanguage": true,
+ "AttrLocation": true,
+ "AttrLowerBound": true,
+ "AttrLowpc": true,
+ "AttrMacroInfo": true,
+ "AttrName": true,
+ "AttrNamelistItem": true,
+ "AttrOrdering": true,
+ "AttrPriority": true,
+ "AttrProducer": true,
+ "AttrPrototyped": true,
+ "AttrRanges": true,
+ "AttrReturnAddr": true,
+ "AttrSegment": true,
+ "AttrSibling": true,
+ "AttrSpecification": true,
+ "AttrStartScope": true,
+ "AttrStaticLink": true,
+ "AttrStmtList": true,
+ "AttrStride": true,
+ "AttrStrideSize": true,
+ "AttrStringLength": true,
+ "AttrTrampoline": true,
+ "AttrType": true,
+ "AttrUpperBound": true,
+ "AttrUseLocation": true,
+ "AttrUseUTF8": true,
+ "AttrVarParam": true,
+ "AttrVirtuality": true,
+ "AttrVisibility": true,
+ "AttrVtableElemLoc": true,
+ "BasicType": true,
+ "BoolType": true,
+ "CharType": true,
+ "Class": true,
+ "ClassAddress": true,
+ "ClassBlock": true,
+ "ClassConstant": true,
+ "ClassExprLoc": true,
+ "ClassFlag": true,
+ "ClassLinePtr": true,
+ "ClassLocListPtr": true,
+ "ClassMacPtr": true,
+ "ClassRangeListPtr": true,
+ "ClassReference": true,
+ "ClassReferenceAlt": true,
+ "ClassReferenceSig": true,
+ "ClassString": true,
+ "ClassStringAlt": true,
+ "ClassUnknown": true,
+ "CommonType": true,
+ "ComplexType": true,
+ "Data": true,
+ "DecodeError": true,
+ "DotDotDotType": true,
+ "Entry": true,
+ "EnumType": true,
+ "EnumValue": true,
+ "ErrUnknownPC": true,
+ "Field": true,
+ "FloatType": true,
+ "FuncType": true,
+ "IntType": true,
+ "LineEntry": true,
+ "LineFile": true,
+ "LineReader": true,
+ "LineReaderPos": true,
+ "New": true,
+ "Offset": true,
+ "PtrType": true,
+ "QualType": true,
+ "Reader": true,
+ "StructField": true,
+ "StructType": true,
+ "Tag": true,
+ "TagAccessDeclaration": true,
+ "TagArrayType": true,
+ "TagBaseType": true,
+ "TagCatchDwarfBlock": true,
+ "TagClassType": true,
+ "TagCommonDwarfBlock": true,
+ "TagCommonInclusion": true,
+ "TagCompileUnit": true,
+ "TagCondition": true,
+ "TagConstType": true,
+ "TagConstant": true,
+ "TagDwarfProcedure": true,
+ "TagEntryPoint": true,
+ "TagEnumerationType": true,
+ "TagEnumerator": true,
+ "TagFileType": true,
+ "TagFormalParameter": true,
+ "TagFriend": true,
+ "TagImportedDeclaration": true,
+ "TagImportedModule": true,
+ "TagImportedUnit": true,
+ "TagInheritance": true,
+ "TagInlinedSubroutine": true,
+ "TagInterfaceType": true,
+ "TagLabel": true,
+ "TagLexDwarfBlock": true,
+ "TagMember": true,
+ "TagModule": true,
+ "TagMutableType": true,
+ "TagNamelist": true,
+ "TagNamelistItem": true,
+ "TagNamespace": true,
+ "TagPackedType": true,
+ "TagPartialUnit": true,
+ "TagPointerType": true,
+ "TagPtrToMemberType": true,
+ "TagReferenceType": true,
+ "TagRestrictType": true,
+ "TagRvalueReferenceType": true,
+ "TagSetType": true,
+ "TagSharedType": true,
+ "TagStringType": true,
+ "TagStructType": true,
+ "TagSubprogram": true,
+ "TagSubrangeType": true,
+ "TagSubroutineType": true,
+ "TagTemplateAlias": true,
+ "TagTemplateTypeParameter": true,
+ "TagTemplateValueParameter": true,
+ "TagThrownType": true,
+ "TagTryDwarfBlock": true,
+ "TagTypeUnit": true,
+ "TagTypedef": true,
+ "TagUnionType": true,
+ "TagUnspecifiedParameters": true,
+ "TagUnspecifiedType": true,
+ "TagVariable": true,
+ "TagVariant": true,
+ "TagVariantPart": true,
+ "TagVolatileType": true,
+ "TagWithStmt": true,
+ "Type": true,
+ "TypedefType": true,
+ "UcharType": true,
+ "UintType": true,
+ "UnspecifiedType": true,
+ "VoidType": true,
+ },
+ "debug/elf": map[string]bool{
+ "ARM_MAGIC_TRAMP_NUMBER": true,
+ "COMPRESS_HIOS": true,
+ "COMPRESS_HIPROC": true,
+ "COMPRESS_LOOS": true,
+ "COMPRESS_LOPROC": true,
+ "COMPRESS_ZLIB": true,
+ "Chdr32": true,
+ "Chdr64": true,
+ "Class": true,
+ "CompressionType": true,
+ "DF_BIND_NOW": true,
+ "DF_ORIGIN": true,
+ "DF_STATIC_TLS": true,
+ "DF_SYMBOLIC": true,
+ "DF_TEXTREL": true,
+ "DT_BIND_NOW": true,
+ "DT_DEBUG": true,
+ "DT_ENCODING": true,
+ "DT_FINI": true,
+ "DT_FINI_ARRAY": true,
+ "DT_FINI_ARRAYSZ": true,
+ "DT_FLAGS": true,
+ "DT_HASH": true,
+ "DT_HIOS": true,
+ "DT_HIPROC": true,
+ "DT_INIT": true,
+ "DT_INIT_ARRAY": true,
+ "DT_INIT_ARRAYSZ": true,
+ "DT_JMPREL": true,
+ "DT_LOOS": true,
+ "DT_LOPROC": true,
+ "DT_NEEDED": true,
+ "DT_NULL": true,
+ "DT_PLTGOT": true,
+ "DT_PLTREL": true,
+ "DT_PLTRELSZ": true,
+ "DT_PREINIT_ARRAY": true,
+ "DT_PREINIT_ARRAYSZ": true,
+ "DT_REL": true,
+ "DT_RELA": true,
+ "DT_RELAENT": true,
+ "DT_RELASZ": true,
+ "DT_RELENT": true,
+ "DT_RELSZ": true,
+ "DT_RPATH": true,
+ "DT_RUNPATH": true,
+ "DT_SONAME": true,
+ "DT_STRSZ": true,
+ "DT_STRTAB": true,
+ "DT_SYMBOLIC": true,
+ "DT_SYMENT": true,
+ "DT_SYMTAB": true,
+ "DT_TEXTREL": true,
+ "DT_VERNEED": true,
+ "DT_VERNEEDNUM": true,
+ "DT_VERSYM": true,
+ "Data": true,
+ "Dyn32": true,
+ "Dyn64": true,
+ "DynFlag": true,
+ "DynTag": true,
+ "EI_ABIVERSION": true,
+ "EI_CLASS": true,
+ "EI_DATA": true,
+ "EI_NIDENT": true,
+ "EI_OSABI": true,
+ "EI_PAD": true,
+ "EI_VERSION": true,
+ "ELFCLASS32": true,
+ "ELFCLASS64": true,
+ "ELFCLASSNONE": true,
+ "ELFDATA2LSB": true,
+ "ELFDATA2MSB": true,
+ "ELFDATANONE": true,
+ "ELFMAG": true,
+ "ELFOSABI_86OPEN": true,
+ "ELFOSABI_AIX": true,
+ "ELFOSABI_ARM": true,
+ "ELFOSABI_AROS": true,
+ "ELFOSABI_CLOUDABI": true,
+ "ELFOSABI_FENIXOS": true,
+ "ELFOSABI_FREEBSD": true,
+ "ELFOSABI_HPUX": true,
+ "ELFOSABI_HURD": true,
+ "ELFOSABI_IRIX": true,
+ "ELFOSABI_LINUX": true,
+ "ELFOSABI_MODESTO": true,
+ "ELFOSABI_NETBSD": true,
+ "ELFOSABI_NONE": true,
+ "ELFOSABI_NSK": true,
+ "ELFOSABI_OPENBSD": true,
+ "ELFOSABI_OPENVMS": true,
+ "ELFOSABI_SOLARIS": true,
+ "ELFOSABI_STANDALONE": true,
+ "ELFOSABI_TRU64": true,
+ "EM_386": true,
+ "EM_486": true,
+ "EM_56800EX": true,
+ "EM_68HC05": true,
+ "EM_68HC08": true,
+ "EM_68HC11": true,
+ "EM_68HC12": true,
+ "EM_68HC16": true,
+ "EM_68K": true,
+ "EM_78KOR": true,
+ "EM_8051": true,
+ "EM_860": true,
+ "EM_88K": true,
+ "EM_960": true,
+ "EM_AARCH64": true,
+ "EM_ALPHA": true,
+ "EM_ALPHA_STD": true,
+ "EM_ALTERA_NIOS2": true,
+ "EM_AMDGPU": true,
+ "EM_ARC": true,
+ "EM_ARCA": true,
+ "EM_ARC_COMPACT": true,
+ "EM_ARC_COMPACT2": true,
+ "EM_ARM": true,
+ "EM_AVR": true,
+ "EM_AVR32": true,
+ "EM_BA1": true,
+ "EM_BA2": true,
+ "EM_BLACKFIN": true,
+ "EM_BPF": true,
+ "EM_C166": true,
+ "EM_CDP": true,
+ "EM_CE": true,
+ "EM_CLOUDSHIELD": true,
+ "EM_COGE": true,
+ "EM_COLDFIRE": true,
+ "EM_COOL": true,
+ "EM_COREA_1ST": true,
+ "EM_COREA_2ND": true,
+ "EM_CR": true,
+ "EM_CR16": true,
+ "EM_CRAYNV2": true,
+ "EM_CRIS": true,
+ "EM_CRX": true,
+ "EM_CSR_KALIMBA": true,
+ "EM_CUDA": true,
+ "EM_CYPRESS_M8C": true,
+ "EM_D10V": true,
+ "EM_D30V": true,
+ "EM_DSP24": true,
+ "EM_DSPIC30F": true,
+ "EM_DXP": true,
+ "EM_ECOG1": true,
+ "EM_ECOG16": true,
+ "EM_ECOG1X": true,
+ "EM_ECOG2": true,
+ "EM_ETPU": true,
+ "EM_EXCESS": true,
+ "EM_F2MC16": true,
+ "EM_FIREPATH": true,
+ "EM_FR20": true,
+ "EM_FR30": true,
+ "EM_FT32": true,
+ "EM_FX66": true,
+ "EM_H8S": true,
+ "EM_H8_300": true,
+ "EM_H8_300H": true,
+ "EM_H8_500": true,
+ "EM_HUANY": true,
+ "EM_IA_64": true,
+ "EM_INTEL205": true,
+ "EM_INTEL206": true,
+ "EM_INTEL207": true,
+ "EM_INTEL208": true,
+ "EM_INTEL209": true,
+ "EM_IP2K": true,
+ "EM_JAVELIN": true,
+ "EM_K10M": true,
+ "EM_KM32": true,
+ "EM_KMX16": true,
+ "EM_KMX32": true,
+ "EM_KMX8": true,
+ "EM_KVARC": true,
+ "EM_L10M": true,
+ "EM_LANAI": true,
+ "EM_LATTICEMICO32": true,
+ "EM_M16C": true,
+ "EM_M32": true,
+ "EM_M32C": true,
+ "EM_M32R": true,
+ "EM_MANIK": true,
+ "EM_MAX": true,
+ "EM_MAXQ30": true,
+ "EM_MCHP_PIC": true,
+ "EM_MCST_ELBRUS": true,
+ "EM_ME16": true,
+ "EM_METAG": true,
+ "EM_MICROBLAZE": true,
+ "EM_MIPS": true,
+ "EM_MIPS_RS3_LE": true,
+ "EM_MIPS_RS4_BE": true,
+ "EM_MIPS_X": true,
+ "EM_MMA": true,
+ "EM_MMDSP_PLUS": true,
+ "EM_MMIX": true,
+ "EM_MN10200": true,
+ "EM_MN10300": true,
+ "EM_MOXIE": true,
+ "EM_MSP430": true,
+ "EM_NCPU": true,
+ "EM_NDR1": true,
+ "EM_NDS32": true,
+ "EM_NONE": true,
+ "EM_NORC": true,
+ "EM_NS32K": true,
+ "EM_OPEN8": true,
+ "EM_OPENRISC": true,
+ "EM_PARISC": true,
+ "EM_PCP": true,
+ "EM_PDP10": true,
+ "EM_PDP11": true,
+ "EM_PDSP": true,
+ "EM_PJ": true,
+ "EM_PPC": true,
+ "EM_PPC64": true,
+ "EM_PRISM": true,
+ "EM_QDSP6": true,
+ "EM_R32C": true,
+ "EM_RCE": true,
+ "EM_RH32": true,
+ "EM_RISCV": true,
+ "EM_RL78": true,
+ "EM_RS08": true,
+ "EM_RX": true,
+ "EM_S370": true,
+ "EM_S390": true,
+ "EM_SCORE7": true,
+ "EM_SEP": true,
+ "EM_SE_C17": true,
+ "EM_SE_C33": true,
+ "EM_SH": true,
+ "EM_SHARC": true,
+ "EM_SLE9X": true,
+ "EM_SNP1K": true,
+ "EM_SPARC": true,
+ "EM_SPARC32PLUS": true,
+ "EM_SPARCV9": true,
+ "EM_ST100": true,
+ "EM_ST19": true,
+ "EM_ST200": true,
+ "EM_ST7": true,
+ "EM_ST9PLUS": true,
+ "EM_STARCORE": true,
+ "EM_STM8": true,
+ "EM_STXP7X": true,
+ "EM_SVX": true,
+ "EM_TILE64": true,
+ "EM_TILEGX": true,
+ "EM_TILEPRO": true,
+ "EM_TINYJ": true,
+ "EM_TI_ARP32": true,
+ "EM_TI_C2000": true,
+ "EM_TI_C5500": true,
+ "EM_TI_C6000": true,
+ "EM_TI_PRU": true,
+ "EM_TMM_GPP": true,
+ "EM_TPC": true,
+ "EM_TRICORE": true,
+ "EM_TRIMEDIA": true,
+ "EM_TSK3000": true,
+ "EM_UNICORE": true,
+ "EM_V800": true,
+ "EM_V850": true,
+ "EM_VAX": true,
+ "EM_VIDEOCORE": true,
+ "EM_VIDEOCORE3": true,
+ "EM_VIDEOCORE5": true,
+ "EM_VISIUM": true,
+ "EM_VPP500": true,
+ "EM_X86_64": true,
+ "EM_XCORE": true,
+ "EM_XGATE": true,
+ "EM_XIMO16": true,
+ "EM_XTENSA": true,
+ "EM_Z80": true,
+ "EM_ZSP": true,
+ "ET_CORE": true,
+ "ET_DYN": true,
+ "ET_EXEC": true,
+ "ET_HIOS": true,
+ "ET_HIPROC": true,
+ "ET_LOOS": true,
+ "ET_LOPROC": true,
+ "ET_NONE": true,
+ "ET_REL": true,
+ "EV_CURRENT": true,
+ "EV_NONE": true,
+ "ErrNoSymbols": true,
+ "File": true,
+ "FileHeader": true,
+ "FormatError": true,
+ "Header32": true,
+ "Header64": true,
+ "ImportedSymbol": true,
+ "Machine": true,
+ "NT_FPREGSET": true,
+ "NT_PRPSINFO": true,
+ "NT_PRSTATUS": true,
+ "NType": true,
+ "NewFile": true,
+ "OSABI": true,
+ "Open": true,
+ "PF_MASKOS": true,
+ "PF_MASKPROC": true,
+ "PF_R": true,
+ "PF_W": true,
+ "PF_X": true,
+ "PT_DYNAMIC": true,
+ "PT_HIOS": true,
+ "PT_HIPROC": true,
+ "PT_INTERP": true,
+ "PT_LOAD": true,
+ "PT_LOOS": true,
+ "PT_LOPROC": true,
+ "PT_NOTE": true,
+ "PT_NULL": true,
+ "PT_PHDR": true,
+ "PT_SHLIB": true,
+ "PT_TLS": true,
+ "Prog": true,
+ "Prog32": true,
+ "Prog64": true,
+ "ProgFlag": true,
+ "ProgHeader": true,
+ "ProgType": true,
+ "R_386": true,
+ "R_386_16": true,
+ "R_386_32": true,
+ "R_386_32PLT": true,
+ "R_386_8": true,
+ "R_386_COPY": true,
+ "R_386_GLOB_DAT": true,
+ "R_386_GOT32": true,
+ "R_386_GOT32X": true,
+ "R_386_GOTOFF": true,
+ "R_386_GOTPC": true,
+ "R_386_IRELATIVE": true,
+ "R_386_JMP_SLOT": true,
+ "R_386_NONE": true,
+ "R_386_PC16": true,
+ "R_386_PC32": true,
+ "R_386_PC8": true,
+ "R_386_PLT32": true,
+ "R_386_RELATIVE": true,
+ "R_386_SIZE32": true,
+ "R_386_TLS_DESC": true,
+ "R_386_TLS_DESC_CALL": true,
+ "R_386_TLS_DTPMOD32": true,
+ "R_386_TLS_DTPOFF32": true,
+ "R_386_TLS_GD": true,
+ "R_386_TLS_GD_32": true,
+ "R_386_TLS_GD_CALL": true,
+ "R_386_TLS_GD_POP": true,
+ "R_386_TLS_GD_PUSH": true,
+ "R_386_TLS_GOTDESC": true,
+ "R_386_TLS_GOTIE": true,
+ "R_386_TLS_IE": true,
+ "R_386_TLS_IE_32": true,
+ "R_386_TLS_LDM": true,
+ "R_386_TLS_LDM_32": true,
+ "R_386_TLS_LDM_CALL": true,
+ "R_386_TLS_LDM_POP": true,
+ "R_386_TLS_LDM_PUSH": true,
+ "R_386_TLS_LDO_32": true,
+ "R_386_TLS_LE": true,
+ "R_386_TLS_LE_32": true,
+ "R_386_TLS_TPOFF": true,
+ "R_386_TLS_TPOFF32": true,
+ "R_390": true,
+ "R_390_12": true,
+ "R_390_16": true,
+ "R_390_20": true,
+ "R_390_32": true,
+ "R_390_64": true,
+ "R_390_8": true,
+ "R_390_COPY": true,
+ "R_390_GLOB_DAT": true,
+ "R_390_GOT12": true,
+ "R_390_GOT16": true,
+ "R_390_GOT20": true,
+ "R_390_GOT32": true,
+ "R_390_GOT64": true,
+ "R_390_GOTENT": true,
+ "R_390_GOTOFF": true,
+ "R_390_GOTOFF16": true,
+ "R_390_GOTOFF64": true,
+ "R_390_GOTPC": true,
+ "R_390_GOTPCDBL": true,
+ "R_390_GOTPLT12": true,
+ "R_390_GOTPLT16": true,
+ "R_390_GOTPLT20": true,
+ "R_390_GOTPLT32": true,
+ "R_390_GOTPLT64": true,
+ "R_390_GOTPLTENT": true,
+ "R_390_GOTPLTOFF16": true,
+ "R_390_GOTPLTOFF32": true,
+ "R_390_GOTPLTOFF64": true,
+ "R_390_JMP_SLOT": true,
+ "R_390_NONE": true,
+ "R_390_PC16": true,
+ "R_390_PC16DBL": true,
+ "R_390_PC32": true,
+ "R_390_PC32DBL": true,
+ "R_390_PC64": true,
+ "R_390_PLT16DBL": true,
+ "R_390_PLT32": true,
+ "R_390_PLT32DBL": true,
+ "R_390_PLT64": true,
+ "R_390_RELATIVE": true,
+ "R_390_TLS_DTPMOD": true,
+ "R_390_TLS_DTPOFF": true,
+ "R_390_TLS_GD32": true,
+ "R_390_TLS_GD64": true,
+ "R_390_TLS_GDCALL": true,
+ "R_390_TLS_GOTIE12": true,
+ "R_390_TLS_GOTIE20": true,
+ "R_390_TLS_GOTIE32": true,
+ "R_390_TLS_GOTIE64": true,
+ "R_390_TLS_IE32": true,
+ "R_390_TLS_IE64": true,
+ "R_390_TLS_IEENT": true,
+ "R_390_TLS_LDCALL": true,
+ "R_390_TLS_LDM32": true,
+ "R_390_TLS_LDM64": true,
+ "R_390_TLS_LDO32": true,
+ "R_390_TLS_LDO64": true,
+ "R_390_TLS_LE32": true,
+ "R_390_TLS_LE64": true,
+ "R_390_TLS_LOAD": true,
+ "R_390_TLS_TPOFF": true,
+ "R_AARCH64": true,
+ "R_AARCH64_ABS16": true,
+ "R_AARCH64_ABS32": true,
+ "R_AARCH64_ABS64": true,
+ "R_AARCH64_ADD_ABS_LO12_NC": true,
+ "R_AARCH64_ADR_GOT_PAGE": true,
+ "R_AARCH64_ADR_PREL_LO21": true,
+ "R_AARCH64_ADR_PREL_PG_HI21": true,
+ "R_AARCH64_ADR_PREL_PG_HI21_NC": true,
+ "R_AARCH64_CALL26": true,
+ "R_AARCH64_CONDBR19": true,
+ "R_AARCH64_COPY": true,
+ "R_AARCH64_GLOB_DAT": true,
+ "R_AARCH64_GOT_LD_PREL19": true,
+ "R_AARCH64_IRELATIVE": true,
+ "R_AARCH64_JUMP26": true,
+ "R_AARCH64_JUMP_SLOT": true,
+ "R_AARCH64_LD64_GOTOFF_LO15": true,
+ "R_AARCH64_LD64_GOTPAGE_LO15": true,
+ "R_AARCH64_LD64_GOT_LO12_NC": true,
+ "R_AARCH64_LDST128_ABS_LO12_NC": true,
+ "R_AARCH64_LDST16_ABS_LO12_NC": true,
+ "R_AARCH64_LDST32_ABS_LO12_NC": true,
+ "R_AARCH64_LDST64_ABS_LO12_NC": true,
+ "R_AARCH64_LDST8_ABS_LO12_NC": true,
+ "R_AARCH64_LD_PREL_LO19": true,
+ "R_AARCH64_MOVW_SABS_G0": true,
+ "R_AARCH64_MOVW_SABS_G1": true,
+ "R_AARCH64_MOVW_SABS_G2": true,
+ "R_AARCH64_MOVW_UABS_G0": true,
+ "R_AARCH64_MOVW_UABS_G0_NC": true,
+ "R_AARCH64_MOVW_UABS_G1": true,
+ "R_AARCH64_MOVW_UABS_G1_NC": true,
+ "R_AARCH64_MOVW_UABS_G2": true,
+ "R_AARCH64_MOVW_UABS_G2_NC": true,
+ "R_AARCH64_MOVW_UABS_G3": true,
+ "R_AARCH64_NONE": true,
+ "R_AARCH64_NULL": true,
+ "R_AARCH64_P32_ABS16": true,
+ "R_AARCH64_P32_ABS32": true,
+ "R_AARCH64_P32_ADD_ABS_LO12_NC": true,
+ "R_AARCH64_P32_ADR_GOT_PAGE": true,
+ "R_AARCH64_P32_ADR_PREL_LO21": true,
+ "R_AARCH64_P32_ADR_PREL_PG_HI21": true,
+ "R_AARCH64_P32_CALL26": true,
+ "R_AARCH64_P32_CONDBR19": true,
+ "R_AARCH64_P32_COPY": true,
+ "R_AARCH64_P32_GLOB_DAT": true,
+ "R_AARCH64_P32_GOT_LD_PREL19": true,
+ "R_AARCH64_P32_IRELATIVE": true,
+ "R_AARCH64_P32_JUMP26": true,
+ "R_AARCH64_P32_JUMP_SLOT": true,
+ "R_AARCH64_P32_LD32_GOT_LO12_NC": true,
+ "R_AARCH64_P32_LDST128_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST16_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST32_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST64_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LDST8_ABS_LO12_NC": true,
+ "R_AARCH64_P32_LD_PREL_LO19": true,
+ "R_AARCH64_P32_MOVW_SABS_G0": true,
+ "R_AARCH64_P32_MOVW_UABS_G0": true,
+ "R_AARCH64_P32_MOVW_UABS_G0_NC": true,
+ "R_AARCH64_P32_MOVW_UABS_G1": true,
+ "R_AARCH64_P32_PREL16": true,
+ "R_AARCH64_P32_PREL32": true,
+ "R_AARCH64_P32_RELATIVE": true,
+ "R_AARCH64_P32_TLSDESC": true,
+ "R_AARCH64_P32_TLSDESC_ADD_LO12_NC": true,
+ "R_AARCH64_P32_TLSDESC_ADR_PAGE21": true,
+ "R_AARCH64_P32_TLSDESC_ADR_PREL21": true,
+ "R_AARCH64_P32_TLSDESC_CALL": true,
+ "R_AARCH64_P32_TLSDESC_LD32_LO12_NC": true,
+ "R_AARCH64_P32_TLSDESC_LD_PREL19": true,
+ "R_AARCH64_P32_TLSGD_ADD_LO12_NC": true,
+ "R_AARCH64_P32_TLSGD_ADR_PAGE21": true,
+ "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21": true,
+ "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC": true,
+ "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12": true,
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC": true,
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1": true,
+ "R_AARCH64_P32_TLS_DTPMOD": true,
+ "R_AARCH64_P32_TLS_DTPREL": true,
+ "R_AARCH64_P32_TLS_TPREL": true,
+ "R_AARCH64_P32_TSTBR14": true,
+ "R_AARCH64_PREL16": true,
+ "R_AARCH64_PREL32": true,
+ "R_AARCH64_PREL64": true,
+ "R_AARCH64_RELATIVE": true,
+ "R_AARCH64_TLSDESC": true,
+ "R_AARCH64_TLSDESC_ADD": true,
+ "R_AARCH64_TLSDESC_ADD_LO12_NC": true,
+ "R_AARCH64_TLSDESC_ADR_PAGE21": true,
+ "R_AARCH64_TLSDESC_ADR_PREL21": true,
+ "R_AARCH64_TLSDESC_CALL": true,
+ "R_AARCH64_TLSDESC_LD64_LO12_NC": true,
+ "R_AARCH64_TLSDESC_LDR": true,
+ "R_AARCH64_TLSDESC_LD_PREL19": true,
+ "R_AARCH64_TLSDESC_OFF_G0_NC": true,
+ "R_AARCH64_TLSDESC_OFF_G1": true,
+ "R_AARCH64_TLSGD_ADD_LO12_NC": true,
+ "R_AARCH64_TLSGD_ADR_PAGE21": true,
+ "R_AARCH64_TLSGD_ADR_PREL21": true,
+ "R_AARCH64_TLSGD_MOVW_G0_NC": true,
+ "R_AARCH64_TLSGD_MOVW_G1": true,
+ "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21": true,
+ "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC": true,
+ "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19": true,
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC": true,
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1": true,
+ "R_AARCH64_TLSLD_ADR_PAGE21": true,
+ "R_AARCH64_TLSLD_ADR_PREL21": true,
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12": true,
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_HI12": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12": true,
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12": true,
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC": true,
+ "R_AARCH64_TLSLE_MOVW_TPREL_G2": true,
+ "R_AARCH64_TLS_DTPMOD64": true,
+ "R_AARCH64_TLS_DTPREL64": true,
+ "R_AARCH64_TLS_TPREL64": true,
+ "R_AARCH64_TSTBR14": true,
+ "R_ALPHA": true,
+ "R_ALPHA_BRADDR": true,
+ "R_ALPHA_COPY": true,
+ "R_ALPHA_GLOB_DAT": true,
+ "R_ALPHA_GPDISP": true,
+ "R_ALPHA_GPREL32": true,
+ "R_ALPHA_GPRELHIGH": true,
+ "R_ALPHA_GPRELLOW": true,
+ "R_ALPHA_GPVALUE": true,
+ "R_ALPHA_HINT": true,
+ "R_ALPHA_IMMED_BR_HI32": true,
+ "R_ALPHA_IMMED_GP_16": true,
+ "R_ALPHA_IMMED_GP_HI32": true,
+ "R_ALPHA_IMMED_LO32": true,
+ "R_ALPHA_IMMED_SCN_HI32": true,
+ "R_ALPHA_JMP_SLOT": true,
+ "R_ALPHA_LITERAL": true,
+ "R_ALPHA_LITUSE": true,
+ "R_ALPHA_NONE": true,
+ "R_ALPHA_OP_PRSHIFT": true,
+ "R_ALPHA_OP_PSUB": true,
+ "R_ALPHA_OP_PUSH": true,
+ "R_ALPHA_OP_STORE": true,
+ "R_ALPHA_REFLONG": true,
+ "R_ALPHA_REFQUAD": true,
+ "R_ALPHA_RELATIVE": true,
+ "R_ALPHA_SREL16": true,
+ "R_ALPHA_SREL32": true,
+ "R_ALPHA_SREL64": true,
+ "R_ARM": true,
+ "R_ARM_ABS12": true,
+ "R_ARM_ABS16": true,
+ "R_ARM_ABS32": true,
+ "R_ARM_ABS32_NOI": true,
+ "R_ARM_ABS8": true,
+ "R_ARM_ALU_PCREL_15_8": true,
+ "R_ARM_ALU_PCREL_23_15": true,
+ "R_ARM_ALU_PCREL_7_0": true,
+ "R_ARM_ALU_PC_G0": true,
+ "R_ARM_ALU_PC_G0_NC": true,
+ "R_ARM_ALU_PC_G1": true,
+ "R_ARM_ALU_PC_G1_NC": true,
+ "R_ARM_ALU_PC_G2": true,
+ "R_ARM_ALU_SBREL_19_12_NC": true,
+ "R_ARM_ALU_SBREL_27_20_CK": true,
+ "R_ARM_ALU_SB_G0": true,
+ "R_ARM_ALU_SB_G0_NC": true,
+ "R_ARM_ALU_SB_G1": true,
+ "R_ARM_ALU_SB_G1_NC": true,
+ "R_ARM_ALU_SB_G2": true,
+ "R_ARM_AMP_VCALL9": true,
+ "R_ARM_BASE_ABS": true,
+ "R_ARM_CALL": true,
+ "R_ARM_COPY": true,
+ "R_ARM_GLOB_DAT": true,
+ "R_ARM_GNU_VTENTRY": true,
+ "R_ARM_GNU_VTINHERIT": true,
+ "R_ARM_GOT32": true,
+ "R_ARM_GOTOFF": true,
+ "R_ARM_GOTOFF12": true,
+ "R_ARM_GOTPC": true,
+ "R_ARM_GOTRELAX": true,
+ "R_ARM_GOT_ABS": true,
+ "R_ARM_GOT_BREL12": true,
+ "R_ARM_GOT_PREL": true,
+ "R_ARM_IRELATIVE": true,
+ "R_ARM_JUMP24": true,
+ "R_ARM_JUMP_SLOT": true,
+ "R_ARM_LDC_PC_G0": true,
+ "R_ARM_LDC_PC_G1": true,
+ "R_ARM_LDC_PC_G2": true,
+ "R_ARM_LDC_SB_G0": true,
+ "R_ARM_LDC_SB_G1": true,
+ "R_ARM_LDC_SB_G2": true,
+ "R_ARM_LDRS_PC_G0": true,
+ "R_ARM_LDRS_PC_G1": true,
+ "R_ARM_LDRS_PC_G2": true,
+ "R_ARM_LDRS_SB_G0": true,
+ "R_ARM_LDRS_SB_G1": true,
+ "R_ARM_LDRS_SB_G2": true,
+ "R_ARM_LDR_PC_G1": true,
+ "R_ARM_LDR_PC_G2": true,
+ "R_ARM_LDR_SBREL_11_10_NC": true,
+ "R_ARM_LDR_SB_G0": true,
+ "R_ARM_LDR_SB_G1": true,
+ "R_ARM_LDR_SB_G2": true,
+ "R_ARM_ME_TOO": true,
+ "R_ARM_MOVT_ABS": true,
+ "R_ARM_MOVT_BREL": true,
+ "R_ARM_MOVT_PREL": true,
+ "R_ARM_MOVW_ABS_NC": true,
+ "R_ARM_MOVW_BREL": true,
+ "R_ARM_MOVW_BREL_NC": true,
+ "R_ARM_MOVW_PREL_NC": true,
+ "R_ARM_NONE": true,
+ "R_ARM_PC13": true,
+ "R_ARM_PC24": true,
+ "R_ARM_PLT32": true,
+ "R_ARM_PLT32_ABS": true,
+ "R_ARM_PREL31": true,
+ "R_ARM_PRIVATE_0": true,
+ "R_ARM_PRIVATE_1": true,
+ "R_ARM_PRIVATE_10": true,
+ "R_ARM_PRIVATE_11": true,
+ "R_ARM_PRIVATE_12": true,
+ "R_ARM_PRIVATE_13": true,
+ "R_ARM_PRIVATE_14": true,
+ "R_ARM_PRIVATE_15": true,
+ "R_ARM_PRIVATE_2": true,
+ "R_ARM_PRIVATE_3": true,
+ "R_ARM_PRIVATE_4": true,
+ "R_ARM_PRIVATE_5": true,
+ "R_ARM_PRIVATE_6": true,
+ "R_ARM_PRIVATE_7": true,
+ "R_ARM_PRIVATE_8": true,
+ "R_ARM_PRIVATE_9": true,
+ "R_ARM_RABS32": true,
+ "R_ARM_RBASE": true,
+ "R_ARM_REL32": true,
+ "R_ARM_REL32_NOI": true,
+ "R_ARM_RELATIVE": true,
+ "R_ARM_RPC24": true,
+ "R_ARM_RREL32": true,
+ "R_ARM_RSBREL32": true,
+ "R_ARM_RXPC25": true,
+ "R_ARM_SBREL31": true,
+ "R_ARM_SBREL32": true,
+ "R_ARM_SWI24": true,
+ "R_ARM_TARGET1": true,
+ "R_ARM_TARGET2": true,
+ "R_ARM_THM_ABS5": true,
+ "R_ARM_THM_ALU_ABS_G0_NC": true,
+ "R_ARM_THM_ALU_ABS_G1_NC": true,
+ "R_ARM_THM_ALU_ABS_G2_NC": true,
+ "R_ARM_THM_ALU_ABS_G3": true,
+ "R_ARM_THM_ALU_PREL_11_0": true,
+ "R_ARM_THM_GOT_BREL12": true,
+ "R_ARM_THM_JUMP11": true,
+ "R_ARM_THM_JUMP19": true,
+ "R_ARM_THM_JUMP24": true,
+ "R_ARM_THM_JUMP6": true,
+ "R_ARM_THM_JUMP8": true,
+ "R_ARM_THM_MOVT_ABS": true,
+ "R_ARM_THM_MOVT_BREL": true,
+ "R_ARM_THM_MOVT_PREL": true,
+ "R_ARM_THM_MOVW_ABS_NC": true,
+ "R_ARM_THM_MOVW_BREL": true,
+ "R_ARM_THM_MOVW_BREL_NC": true,
+ "R_ARM_THM_MOVW_PREL_NC": true,
+ "R_ARM_THM_PC12": true,
+ "R_ARM_THM_PC22": true,
+ "R_ARM_THM_PC8": true,
+ "R_ARM_THM_RPC22": true,
+ "R_ARM_THM_SWI8": true,
+ "R_ARM_THM_TLS_CALL": true,
+ "R_ARM_THM_TLS_DESCSEQ16": true,
+ "R_ARM_THM_TLS_DESCSEQ32": true,
+ "R_ARM_THM_XPC22": true,
+ "R_ARM_TLS_CALL": true,
+ "R_ARM_TLS_DESCSEQ": true,
+ "R_ARM_TLS_DTPMOD32": true,
+ "R_ARM_TLS_DTPOFF32": true,
+ "R_ARM_TLS_GD32": true,
+ "R_ARM_TLS_GOTDESC": true,
+ "R_ARM_TLS_IE12GP": true,
+ "R_ARM_TLS_IE32": true,
+ "R_ARM_TLS_LDM32": true,
+ "R_ARM_TLS_LDO12": true,
+ "R_ARM_TLS_LDO32": true,
+ "R_ARM_TLS_LE12": true,
+ "R_ARM_TLS_LE32": true,
+ "R_ARM_TLS_TPOFF32": true,
+ "R_ARM_V4BX": true,
+ "R_ARM_XPC25": true,
+ "R_INFO": true,
+ "R_INFO32": true,
+ "R_MIPS": true,
+ "R_MIPS_16": true,
+ "R_MIPS_26": true,
+ "R_MIPS_32": true,
+ "R_MIPS_64": true,
+ "R_MIPS_ADD_IMMEDIATE": true,
+ "R_MIPS_CALL16": true,
+ "R_MIPS_CALL_HI16": true,
+ "R_MIPS_CALL_LO16": true,
+ "R_MIPS_DELETE": true,
+ "R_MIPS_GOT16": true,
+ "R_MIPS_GOT_DISP": true,
+ "R_MIPS_GOT_HI16": true,
+ "R_MIPS_GOT_LO16": true,
+ "R_MIPS_GOT_OFST": true,
+ "R_MIPS_GOT_PAGE": true,
+ "R_MIPS_GPREL16": true,
+ "R_MIPS_GPREL32": true,
+ "R_MIPS_HI16": true,
+ "R_MIPS_HIGHER": true,
+ "R_MIPS_HIGHEST": true,
+ "R_MIPS_INSERT_A": true,
+ "R_MIPS_INSERT_B": true,
+ "R_MIPS_JALR": true,
+ "R_MIPS_LITERAL": true,
+ "R_MIPS_LO16": true,
+ "R_MIPS_NONE": true,
+ "R_MIPS_PC16": true,
+ "R_MIPS_PJUMP": true,
+ "R_MIPS_REL16": true,
+ "R_MIPS_REL32": true,
+ "R_MIPS_RELGOT": true,
+ "R_MIPS_SCN_DISP": true,
+ "R_MIPS_SHIFT5": true,
+ "R_MIPS_SHIFT6": true,
+ "R_MIPS_SUB": true,
+ "R_MIPS_TLS_DTPMOD32": true,
+ "R_MIPS_TLS_DTPMOD64": true,
+ "R_MIPS_TLS_DTPREL32": true,
+ "R_MIPS_TLS_DTPREL64": true,
+ "R_MIPS_TLS_DTPREL_HI16": true,
+ "R_MIPS_TLS_DTPREL_LO16": true,
+ "R_MIPS_TLS_GD": true,
+ "R_MIPS_TLS_GOTTPREL": true,
+ "R_MIPS_TLS_LDM": true,
+ "R_MIPS_TLS_TPREL32": true,
+ "R_MIPS_TLS_TPREL64": true,
+ "R_MIPS_TLS_TPREL_HI16": true,
+ "R_MIPS_TLS_TPREL_LO16": true,
+ "R_PPC": true,
+ "R_PPC64": true,
+ "R_PPC64_ADDR14": true,
+ "R_PPC64_ADDR14_BRNTAKEN": true,
+ "R_PPC64_ADDR14_BRTAKEN": true,
+ "R_PPC64_ADDR16": true,
+ "R_PPC64_ADDR16_DS": true,
+ "R_PPC64_ADDR16_HA": true,
+ "R_PPC64_ADDR16_HI": true,
+ "R_PPC64_ADDR16_HIGH": true,
+ "R_PPC64_ADDR16_HIGHA": true,
+ "R_PPC64_ADDR16_HIGHER": true,
+ "R_PPC64_ADDR16_HIGHERA": true,
+ "R_PPC64_ADDR16_HIGHEST": true,
+ "R_PPC64_ADDR16_HIGHESTA": true,
+ "R_PPC64_ADDR16_LO": true,
+ "R_PPC64_ADDR16_LO_DS": true,
+ "R_PPC64_ADDR24": true,
+ "R_PPC64_ADDR32": true,
+ "R_PPC64_ADDR64": true,
+ "R_PPC64_ADDR64_LOCAL": true,
+ "R_PPC64_DTPMOD64": true,
+ "R_PPC64_DTPREL16": true,
+ "R_PPC64_DTPREL16_DS": true,
+ "R_PPC64_DTPREL16_HA": true,
+ "R_PPC64_DTPREL16_HI": true,
+ "R_PPC64_DTPREL16_HIGH": true,
+ "R_PPC64_DTPREL16_HIGHA": true,
+ "R_PPC64_DTPREL16_HIGHER": true,
+ "R_PPC64_DTPREL16_HIGHERA": true,
+ "R_PPC64_DTPREL16_HIGHEST": true,
+ "R_PPC64_DTPREL16_HIGHESTA": true,
+ "R_PPC64_DTPREL16_LO": true,
+ "R_PPC64_DTPREL16_LO_DS": true,
+ "R_PPC64_DTPREL64": true,
+ "R_PPC64_ENTRY": true,
+ "R_PPC64_GOT16": true,
+ "R_PPC64_GOT16_DS": true,
+ "R_PPC64_GOT16_HA": true,
+ "R_PPC64_GOT16_HI": true,
+ "R_PPC64_GOT16_LO": true,
+ "R_PPC64_GOT16_LO_DS": true,
+ "R_PPC64_GOT_DTPREL16_DS": true,
+ "R_PPC64_GOT_DTPREL16_HA": true,
+ "R_PPC64_GOT_DTPREL16_HI": true,
+ "R_PPC64_GOT_DTPREL16_LO_DS": true,
+ "R_PPC64_GOT_TLSGD16": true,
+ "R_PPC64_GOT_TLSGD16_HA": true,
+ "R_PPC64_GOT_TLSGD16_HI": true,
+ "R_PPC64_GOT_TLSGD16_LO": true,
+ "R_PPC64_GOT_TLSLD16": true,
+ "R_PPC64_GOT_TLSLD16_HA": true,
+ "R_PPC64_GOT_TLSLD16_HI": true,
+ "R_PPC64_GOT_TLSLD16_LO": true,
+ "R_PPC64_GOT_TPREL16_DS": true,
+ "R_PPC64_GOT_TPREL16_HA": true,
+ "R_PPC64_GOT_TPREL16_HI": true,
+ "R_PPC64_GOT_TPREL16_LO_DS": true,
+ "R_PPC64_IRELATIVE": true,
+ "R_PPC64_JMP_IREL": true,
+ "R_PPC64_JMP_SLOT": true,
+ "R_PPC64_NONE": true,
+ "R_PPC64_PLT16_LO_DS": true,
+ "R_PPC64_PLTGOT16": true,
+ "R_PPC64_PLTGOT16_DS": true,
+ "R_PPC64_PLTGOT16_HA": true,
+ "R_PPC64_PLTGOT16_HI": true,
+ "R_PPC64_PLTGOT16_LO": true,
+ "R_PPC64_PLTGOT_LO_DS": true,
+ "R_PPC64_REL14": true,
+ "R_PPC64_REL14_BRNTAKEN": true,
+ "R_PPC64_REL14_BRTAKEN": true,
+ "R_PPC64_REL16": true,
+ "R_PPC64_REL16DX_HA": true,
+ "R_PPC64_REL16_HA": true,
+ "R_PPC64_REL16_HI": true,
+ "R_PPC64_REL16_LO": true,
+ "R_PPC64_REL24": true,
+ "R_PPC64_REL24_NOTOC": true,
+ "R_PPC64_REL32": true,
+ "R_PPC64_REL64": true,
+ "R_PPC64_SECTOFF_DS": true,
+ "R_PPC64_SECTOFF_LO_DS": true,
+ "R_PPC64_TLS": true,
+ "R_PPC64_TLSGD": true,
+ "R_PPC64_TLSLD": true,
+ "R_PPC64_TOC": true,
+ "R_PPC64_TOC16": true,
+ "R_PPC64_TOC16_DS": true,
+ "R_PPC64_TOC16_HA": true,
+ "R_PPC64_TOC16_HI": true,
+ "R_PPC64_TOC16_LO": true,
+ "R_PPC64_TOC16_LO_DS": true,
+ "R_PPC64_TOCSAVE": true,
+ "R_PPC64_TPREL16": true,
+ "R_PPC64_TPREL16_DS": true,
+ "R_PPC64_TPREL16_HA": true,
+ "R_PPC64_TPREL16_HI": true,
+ "R_PPC64_TPREL16_HIGH": true,
+ "R_PPC64_TPREL16_HIGHA": true,
+ "R_PPC64_TPREL16_HIGHER": true,
+ "R_PPC64_TPREL16_HIGHERA": true,
+ "R_PPC64_TPREL16_HIGHEST": true,
+ "R_PPC64_TPREL16_HIGHESTA": true,
+ "R_PPC64_TPREL16_LO": true,
+ "R_PPC64_TPREL16_LO_DS": true,
+ "R_PPC64_TPREL64": true,
+ "R_PPC_ADDR14": true,
+ "R_PPC_ADDR14_BRNTAKEN": true,
+ "R_PPC_ADDR14_BRTAKEN": true,
+ "R_PPC_ADDR16": true,
+ "R_PPC_ADDR16_HA": true,
+ "R_PPC_ADDR16_HI": true,
+ "R_PPC_ADDR16_LO": true,
+ "R_PPC_ADDR24": true,
+ "R_PPC_ADDR32": true,
+ "R_PPC_COPY": true,
+ "R_PPC_DTPMOD32": true,
+ "R_PPC_DTPREL16": true,
+ "R_PPC_DTPREL16_HA": true,
+ "R_PPC_DTPREL16_HI": true,
+ "R_PPC_DTPREL16_LO": true,
+ "R_PPC_DTPREL32": true,
+ "R_PPC_EMB_BIT_FLD": true,
+ "R_PPC_EMB_MRKREF": true,
+ "R_PPC_EMB_NADDR16": true,
+ "R_PPC_EMB_NADDR16_HA": true,
+ "R_PPC_EMB_NADDR16_HI": true,
+ "R_PPC_EMB_NADDR16_LO": true,
+ "R_PPC_EMB_NADDR32": true,
+ "R_PPC_EMB_RELSDA": true,
+ "R_PPC_EMB_RELSEC16": true,
+ "R_PPC_EMB_RELST_HA": true,
+ "R_PPC_EMB_RELST_HI": true,
+ "R_PPC_EMB_RELST_LO": true,
+ "R_PPC_EMB_SDA21": true,
+ "R_PPC_EMB_SDA2I16": true,
+ "R_PPC_EMB_SDA2REL": true,
+ "R_PPC_EMB_SDAI16": true,
+ "R_PPC_GLOB_DAT": true,
+ "R_PPC_GOT16": true,
+ "R_PPC_GOT16_HA": true,
+ "R_PPC_GOT16_HI": true,
+ "R_PPC_GOT16_LO": true,
+ "R_PPC_GOT_TLSGD16": true,
+ "R_PPC_GOT_TLSGD16_HA": true,
+ "R_PPC_GOT_TLSGD16_HI": true,
+ "R_PPC_GOT_TLSGD16_LO": true,
+ "R_PPC_GOT_TLSLD16": true,
+ "R_PPC_GOT_TLSLD16_HA": true,
+ "R_PPC_GOT_TLSLD16_HI": true,
+ "R_PPC_GOT_TLSLD16_LO": true,
+ "R_PPC_GOT_TPREL16": true,
+ "R_PPC_GOT_TPREL16_HA": true,
+ "R_PPC_GOT_TPREL16_HI": true,
+ "R_PPC_GOT_TPREL16_LO": true,
+ "R_PPC_JMP_SLOT": true,
+ "R_PPC_LOCAL24PC": true,
+ "R_PPC_NONE": true,
+ "R_PPC_PLT16_HA": true,
+ "R_PPC_PLT16_HI": true,
+ "R_PPC_PLT16_LO": true,
+ "R_PPC_PLT32": true,
+ "R_PPC_PLTREL24": true,
+ "R_PPC_PLTREL32": true,
+ "R_PPC_REL14": true,
+ "R_PPC_REL14_BRNTAKEN": true,
+ "R_PPC_REL14_BRTAKEN": true,
+ "R_PPC_REL24": true,
+ "R_PPC_REL32": true,
+ "R_PPC_RELATIVE": true,
+ "R_PPC_SDAREL16": true,
+ "R_PPC_SECTOFF": true,
+ "R_PPC_SECTOFF_HA": true,
+ "R_PPC_SECTOFF_HI": true,
+ "R_PPC_SECTOFF_LO": true,
+ "R_PPC_TLS": true,
+ "R_PPC_TPREL16": true,
+ "R_PPC_TPREL16_HA": true,
+ "R_PPC_TPREL16_HI": true,
+ "R_PPC_TPREL16_LO": true,
+ "R_PPC_TPREL32": true,
+ "R_PPC_UADDR16": true,
+ "R_PPC_UADDR32": true,
+ "R_RISCV": true,
+ "R_RISCV_32": true,
+ "R_RISCV_32_PCREL": true,
+ "R_RISCV_64": true,
+ "R_RISCV_ADD16": true,
+ "R_RISCV_ADD32": true,
+ "R_RISCV_ADD64": true,
+ "R_RISCV_ADD8": true,
+ "R_RISCV_ALIGN": true,
+ "R_RISCV_BRANCH": true,
+ "R_RISCV_CALL": true,
+ "R_RISCV_CALL_PLT": true,
+ "R_RISCV_COPY": true,
+ "R_RISCV_GNU_VTENTRY": true,
+ "R_RISCV_GNU_VTINHERIT": true,
+ "R_RISCV_GOT_HI20": true,
+ "R_RISCV_GPREL_I": true,
+ "R_RISCV_GPREL_S": true,
+ "R_RISCV_HI20": true,
+ "R_RISCV_JAL": true,
+ "R_RISCV_JUMP_SLOT": true,
+ "R_RISCV_LO12_I": true,
+ "R_RISCV_LO12_S": true,
+ "R_RISCV_NONE": true,
+ "R_RISCV_PCREL_HI20": true,
+ "R_RISCV_PCREL_LO12_I": true,
+ "R_RISCV_PCREL_LO12_S": true,
+ "R_RISCV_RELATIVE": true,
+ "R_RISCV_RELAX": true,
+ "R_RISCV_RVC_BRANCH": true,
+ "R_RISCV_RVC_JUMP": true,
+ "R_RISCV_RVC_LUI": true,
+ "R_RISCV_SET16": true,
+ "R_RISCV_SET32": true,
+ "R_RISCV_SET6": true,
+ "R_RISCV_SET8": true,
+ "R_RISCV_SUB16": true,
+ "R_RISCV_SUB32": true,
+ "R_RISCV_SUB6": true,
+ "R_RISCV_SUB64": true,
+ "R_RISCV_SUB8": true,
+ "R_RISCV_TLS_DTPMOD32": true,
+ "R_RISCV_TLS_DTPMOD64": true,
+ "R_RISCV_TLS_DTPREL32": true,
+ "R_RISCV_TLS_DTPREL64": true,
+ "R_RISCV_TLS_GD_HI20": true,
+ "R_RISCV_TLS_GOT_HI20": true,
+ "R_RISCV_TLS_TPREL32": true,
+ "R_RISCV_TLS_TPREL64": true,
+ "R_RISCV_TPREL_ADD": true,
+ "R_RISCV_TPREL_HI20": true,
+ "R_RISCV_TPREL_I": true,
+ "R_RISCV_TPREL_LO12_I": true,
+ "R_RISCV_TPREL_LO12_S": true,
+ "R_RISCV_TPREL_S": true,
+ "R_SPARC": true,
+ "R_SPARC_10": true,
+ "R_SPARC_11": true,
+ "R_SPARC_13": true,
+ "R_SPARC_16": true,
+ "R_SPARC_22": true,
+ "R_SPARC_32": true,
+ "R_SPARC_5": true,
+ "R_SPARC_6": true,
+ "R_SPARC_64": true,
+ "R_SPARC_7": true,
+ "R_SPARC_8": true,
+ "R_SPARC_COPY": true,
+ "R_SPARC_DISP16": true,
+ "R_SPARC_DISP32": true,
+ "R_SPARC_DISP64": true,
+ "R_SPARC_DISP8": true,
+ "R_SPARC_GLOB_DAT": true,
+ "R_SPARC_GLOB_JMP": true,
+ "R_SPARC_GOT10": true,
+ "R_SPARC_GOT13": true,
+ "R_SPARC_GOT22": true,
+ "R_SPARC_H44": true,
+ "R_SPARC_HH22": true,
+ "R_SPARC_HI22": true,
+ "R_SPARC_HIPLT22": true,
+ "R_SPARC_HIX22": true,
+ "R_SPARC_HM10": true,
+ "R_SPARC_JMP_SLOT": true,
+ "R_SPARC_L44": true,
+ "R_SPARC_LM22": true,
+ "R_SPARC_LO10": true,
+ "R_SPARC_LOPLT10": true,
+ "R_SPARC_LOX10": true,
+ "R_SPARC_M44": true,
+ "R_SPARC_NONE": true,
+ "R_SPARC_OLO10": true,
+ "R_SPARC_PC10": true,
+ "R_SPARC_PC22": true,
+ "R_SPARC_PCPLT10": true,
+ "R_SPARC_PCPLT22": true,
+ "R_SPARC_PCPLT32": true,
+ "R_SPARC_PC_HH22": true,
+ "R_SPARC_PC_HM10": true,
+ "R_SPARC_PC_LM22": true,
+ "R_SPARC_PLT32": true,
+ "R_SPARC_PLT64": true,
+ "R_SPARC_REGISTER": true,
+ "R_SPARC_RELATIVE": true,
+ "R_SPARC_UA16": true,
+ "R_SPARC_UA32": true,
+ "R_SPARC_UA64": true,
+ "R_SPARC_WDISP16": true,
+ "R_SPARC_WDISP19": true,
+ "R_SPARC_WDISP22": true,
+ "R_SPARC_WDISP30": true,
+ "R_SPARC_WPLT30": true,
+ "R_SYM32": true,
+ "R_SYM64": true,
+ "R_TYPE32": true,
+ "R_TYPE64": true,
+ "R_X86_64": true,
+ "R_X86_64_16": true,
+ "R_X86_64_32": true,
+ "R_X86_64_32S": true,
+ "R_X86_64_64": true,
+ "R_X86_64_8": true,
+ "R_X86_64_COPY": true,
+ "R_X86_64_DTPMOD64": true,
+ "R_X86_64_DTPOFF32": true,
+ "R_X86_64_DTPOFF64": true,
+ "R_X86_64_GLOB_DAT": true,
+ "R_X86_64_GOT32": true,
+ "R_X86_64_GOT64": true,
+ "R_X86_64_GOTOFF64": true,
+ "R_X86_64_GOTPC32": true,
+ "R_X86_64_GOTPC32_TLSDESC": true,
+ "R_X86_64_GOTPC64": true,
+ "R_X86_64_GOTPCREL": true,
+ "R_X86_64_GOTPCREL64": true,
+ "R_X86_64_GOTPCRELX": true,
+ "R_X86_64_GOTPLT64": true,
+ "R_X86_64_GOTTPOFF": true,
+ "R_X86_64_IRELATIVE": true,
+ "R_X86_64_JMP_SLOT": true,
+ "R_X86_64_NONE": true,
+ "R_X86_64_PC16": true,
+ "R_X86_64_PC32": true,
+ "R_X86_64_PC32_BND": true,
+ "R_X86_64_PC64": true,
+ "R_X86_64_PC8": true,
+ "R_X86_64_PLT32": true,
+ "R_X86_64_PLT32_BND": true,
+ "R_X86_64_PLTOFF64": true,
+ "R_X86_64_RELATIVE": true,
+ "R_X86_64_RELATIVE64": true,
+ "R_X86_64_REX_GOTPCRELX": true,
+ "R_X86_64_SIZE32": true,
+ "R_X86_64_SIZE64": true,
+ "R_X86_64_TLSDESC": true,
+ "R_X86_64_TLSDESC_CALL": true,
+ "R_X86_64_TLSGD": true,
+ "R_X86_64_TLSLD": true,
+ "R_X86_64_TPOFF32": true,
+ "R_X86_64_TPOFF64": true,
+ "Rel32": true,
+ "Rel64": true,
+ "Rela32": true,
+ "Rela64": true,
+ "SHF_ALLOC": true,
+ "SHF_COMPRESSED": true,
+ "SHF_EXECINSTR": true,
+ "SHF_GROUP": true,
+ "SHF_INFO_LINK": true,
+ "SHF_LINK_ORDER": true,
+ "SHF_MASKOS": true,
+ "SHF_MASKPROC": true,
+ "SHF_MERGE": true,
+ "SHF_OS_NONCONFORMING": true,
+ "SHF_STRINGS": true,
+ "SHF_TLS": true,
+ "SHF_WRITE": true,
+ "SHN_ABS": true,
+ "SHN_COMMON": true,
+ "SHN_HIOS": true,
+ "SHN_HIPROC": true,
+ "SHN_HIRESERVE": true,
+ "SHN_LOOS": true,
+ "SHN_LOPROC": true,
+ "SHN_LORESERVE": true,
+ "SHN_UNDEF": true,
+ "SHN_XINDEX": true,
+ "SHT_DYNAMIC": true,
+ "SHT_DYNSYM": true,
+ "SHT_FINI_ARRAY": true,
+ "SHT_GNU_ATTRIBUTES": true,
+ "SHT_GNU_HASH": true,
+ "SHT_GNU_LIBLIST": true,
+ "SHT_GNU_VERDEF": true,
+ "SHT_GNU_VERNEED": true,
+ "SHT_GNU_VERSYM": true,
+ "SHT_GROUP": true,
+ "SHT_HASH": true,
+ "SHT_HIOS": true,
+ "SHT_HIPROC": true,
+ "SHT_HIUSER": true,
+ "SHT_INIT_ARRAY": true,
+ "SHT_LOOS": true,
+ "SHT_LOPROC": true,
+ "SHT_LOUSER": true,
+ "SHT_NOBITS": true,
+ "SHT_NOTE": true,
+ "SHT_NULL": true,
+ "SHT_PREINIT_ARRAY": true,
+ "SHT_PROGBITS": true,
+ "SHT_REL": true,
+ "SHT_RELA": true,
+ "SHT_SHLIB": true,
+ "SHT_STRTAB": true,
+ "SHT_SYMTAB": true,
+ "SHT_SYMTAB_SHNDX": true,
+ "STB_GLOBAL": true,
+ "STB_HIOS": true,
+ "STB_HIPROC": true,
+ "STB_LOCAL": true,
+ "STB_LOOS": true,
+ "STB_LOPROC": true,
+ "STB_WEAK": true,
+ "STT_COMMON": true,
+ "STT_FILE": true,
+ "STT_FUNC": true,
+ "STT_HIOS": true,
+ "STT_HIPROC": true,
+ "STT_LOOS": true,
+ "STT_LOPROC": true,
+ "STT_NOTYPE": true,
+ "STT_OBJECT": true,
+ "STT_SECTION": true,
+ "STT_TLS": true,
+ "STV_DEFAULT": true,
+ "STV_HIDDEN": true,
+ "STV_INTERNAL": true,
+ "STV_PROTECTED": true,
+ "ST_BIND": true,
+ "ST_INFO": true,
+ "ST_TYPE": true,
+ "ST_VISIBILITY": true,
+ "Section": true,
+ "Section32": true,
+ "Section64": true,
+ "SectionFlag": true,
+ "SectionHeader": true,
+ "SectionIndex": true,
+ "SectionType": true,
+ "Sym32": true,
+ "Sym32Size": true,
+ "Sym64": true,
+ "Sym64Size": true,
+ "SymBind": true,
+ "SymType": true,
+ "SymVis": true,
+ "Symbol": true,
+ "Type": true,
+ "Version": true,
+ },
+ "debug/gosym": map[string]bool{
+ "DecodingError": true,
+ "Func": true,
+ "LineTable": true,
+ "NewLineTable": true,
+ "NewTable": true,
+ "Obj": true,
+ "Sym": true,
+ "Table": true,
+ "UnknownFileError": true,
+ "UnknownLineError": true,
+ },
+ "debug/macho": map[string]bool{
+ "ARM64_RELOC_ADDEND": true,
+ "ARM64_RELOC_BRANCH26": true,
+ "ARM64_RELOC_GOT_LOAD_PAGE21": true,
+ "ARM64_RELOC_GOT_LOAD_PAGEOFF12": true,
+ "ARM64_RELOC_PAGE21": true,
+ "ARM64_RELOC_PAGEOFF12": true,
+ "ARM64_RELOC_POINTER_TO_GOT": true,
+ "ARM64_RELOC_SUBTRACTOR": true,
+ "ARM64_RELOC_TLVP_LOAD_PAGE21": true,
+ "ARM64_RELOC_TLVP_LOAD_PAGEOFF12": true,
+ "ARM64_RELOC_UNSIGNED": true,
+ "ARM_RELOC_BR24": true,
+ "ARM_RELOC_HALF": true,
+ "ARM_RELOC_HALF_SECTDIFF": true,
+ "ARM_RELOC_LOCAL_SECTDIFF": true,
+ "ARM_RELOC_PAIR": true,
+ "ARM_RELOC_PB_LA_PTR": true,
+ "ARM_RELOC_SECTDIFF": true,
+ "ARM_RELOC_VANILLA": true,
+ "ARM_THUMB_32BIT_BRANCH": true,
+ "ARM_THUMB_RELOC_BR22": true,
+ "Cpu": true,
+ "Cpu386": true,
+ "CpuAmd64": true,
+ "CpuArm": true,
+ "CpuArm64": true,
+ "CpuPpc": true,
+ "CpuPpc64": true,
+ "Dylib": true,
+ "DylibCmd": true,
+ "Dysymtab": true,
+ "DysymtabCmd": true,
+ "ErrNotFat": true,
+ "FatArch": true,
+ "FatArchHeader": true,
+ "FatFile": true,
+ "File": true,
+ "FileHeader": true,
+ "FlagAllModsBound": true,
+ "FlagAllowStackExecution": true,
+ "FlagAppExtensionSafe": true,
+ "FlagBindAtLoad": true,
+ "FlagBindsToWeak": true,
+ "FlagCanonical": true,
+ "FlagDeadStrippableDylib": true,
+ "FlagDyldLink": true,
+ "FlagForceFlat": true,
+ "FlagHasTLVDescriptors": true,
+ "FlagIncrLink": true,
+ "FlagLazyInit": true,
+ "FlagNoFixPrebinding": true,
+ "FlagNoHeapExecution": true,
+ "FlagNoMultiDefs": true,
+ "FlagNoReexportedDylibs": true,
+ "FlagNoUndefs": true,
+ "FlagPIE": true,
+ "FlagPrebindable": true,
+ "FlagPrebound": true,
+ "FlagRootSafe": true,
+ "FlagSetuidSafe": true,
+ "FlagSplitSegs": true,
+ "FlagSubsectionsViaSymbols": true,
+ "FlagTwoLevel": true,
+ "FlagWeakDefines": true,
+ "FormatError": true,
+ "GENERIC_RELOC_LOCAL_SECTDIFF": true,
+ "GENERIC_RELOC_PAIR": true,
+ "GENERIC_RELOC_PB_LA_PTR": true,
+ "GENERIC_RELOC_SECTDIFF": true,
+ "GENERIC_RELOC_TLV": true,
+ "GENERIC_RELOC_VANILLA": true,
+ "Load": true,
+ "LoadBytes": true,
+ "LoadCmd": true,
+ "LoadCmdDylib": true,
+ "LoadCmdDylinker": true,
+ "LoadCmdDysymtab": true,
+ "LoadCmdRpath": true,
+ "LoadCmdSegment": true,
+ "LoadCmdSegment64": true,
+ "LoadCmdSymtab": true,
+ "LoadCmdThread": true,
+ "LoadCmdUnixThread": true,
+ "Magic32": true,
+ "Magic64": true,
+ "MagicFat": true,
+ "NewFatFile": true,
+ "NewFile": true,
+ "Nlist32": true,
+ "Nlist64": true,
+ "Open": true,
+ "OpenFat": true,
+ "Regs386": true,
+ "RegsAMD64": true,
+ "Reloc": true,
+ "RelocTypeARM": true,
+ "RelocTypeARM64": true,
+ "RelocTypeGeneric": true,
+ "RelocTypeX86_64": true,
+ "Rpath": true,
+ "RpathCmd": true,
+ "Section": true,
+ "Section32": true,
+ "Section64": true,
+ "SectionHeader": true,
+ "Segment": true,
+ "Segment32": true,
+ "Segment64": true,
+ "SegmentHeader": true,
+ "Symbol": true,
+ "Symtab": true,
+ "SymtabCmd": true,
+ "Thread": true,
+ "Type": true,
+ "TypeBundle": true,
+ "TypeDylib": true,
+ "TypeExec": true,
+ "TypeObj": true,
+ "X86_64_RELOC_BRANCH": true,
+ "X86_64_RELOC_GOT": true,
+ "X86_64_RELOC_GOT_LOAD": true,
+ "X86_64_RELOC_SIGNED": true,
+ "X86_64_RELOC_SIGNED_1": true,
+ "X86_64_RELOC_SIGNED_2": true,
+ "X86_64_RELOC_SIGNED_4": true,
+ "X86_64_RELOC_SUBTRACTOR": true,
+ "X86_64_RELOC_TLV": true,
+ "X86_64_RELOC_UNSIGNED": true,
+ },
+ "debug/pe": map[string]bool{
+ "COFFSymbol": true,
+ "COFFSymbolSize": true,
+ "DataDirectory": true,
+ "File": true,
+ "FileHeader": true,
+ "FormatError": true,
+ "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE": true,
+ "IMAGE_DIRECTORY_ENTRY_BASERELOC": true,
+ "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR": true,
+ "IMAGE_DIRECTORY_ENTRY_DEBUG": true,
+ "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_EXCEPTION": true,
+ "IMAGE_DIRECTORY_ENTRY_EXPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_GLOBALPTR": true,
+ "IMAGE_DIRECTORY_ENTRY_IAT": true,
+ "IMAGE_DIRECTORY_ENTRY_IMPORT": true,
+ "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG": true,
+ "IMAGE_DIRECTORY_ENTRY_RESOURCE": true,
+ "IMAGE_DIRECTORY_ENTRY_SECURITY": true,
+ "IMAGE_DIRECTORY_ENTRY_TLS": true,
+ "IMAGE_FILE_MACHINE_AM33": true,
+ "IMAGE_FILE_MACHINE_AMD64": true,
+ "IMAGE_FILE_MACHINE_ARM": true,
+ "IMAGE_FILE_MACHINE_ARM64": true,
+ "IMAGE_FILE_MACHINE_ARMNT": true,
+ "IMAGE_FILE_MACHINE_EBC": true,
+ "IMAGE_FILE_MACHINE_I386": true,
+ "IMAGE_FILE_MACHINE_IA64": true,
+ "IMAGE_FILE_MACHINE_M32R": true,
+ "IMAGE_FILE_MACHINE_MIPS16": true,
+ "IMAGE_FILE_MACHINE_MIPSFPU": true,
+ "IMAGE_FILE_MACHINE_MIPSFPU16": true,
+ "IMAGE_FILE_MACHINE_POWERPC": true,
+ "IMAGE_FILE_MACHINE_POWERPCFP": true,
+ "IMAGE_FILE_MACHINE_R4000": true,
+ "IMAGE_FILE_MACHINE_SH3": true,
+ "IMAGE_FILE_MACHINE_SH3DSP": true,
+ "IMAGE_FILE_MACHINE_SH4": true,
+ "IMAGE_FILE_MACHINE_SH5": true,
+ "IMAGE_FILE_MACHINE_THUMB": true,
+ "IMAGE_FILE_MACHINE_UNKNOWN": true,
+ "IMAGE_FILE_MACHINE_WCEMIPSV2": true,
+ "ImportDirectory": true,
+ "NewFile": true,
+ "Open": true,
+ "OptionalHeader32": true,
+ "OptionalHeader64": true,
+ "Reloc": true,
+ "Section": true,
+ "SectionHeader": true,
+ "SectionHeader32": true,
+ "StringTable": true,
+ "Symbol": true,
+ },
+ "debug/plan9obj": map[string]bool{
+ "File": true,
+ "FileHeader": true,
+ "Magic386": true,
+ "Magic64": true,
+ "MagicAMD64": true,
+ "MagicARM": true,
+ "NewFile": true,
+ "Open": true,
+ "Section": true,
+ "SectionHeader": true,
+ "Sym": true,
+ },
+ "encoding": map[string]bool{
+ "BinaryMarshaler": true,
+ "BinaryUnmarshaler": true,
+ "TextMarshaler": true,
+ "TextUnmarshaler": true,
+ },
+ "encoding/ascii85": map[string]bool{
+ "CorruptInputError": true,
+ "Decode": true,
+ "Encode": true,
+ "MaxEncodedLen": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ },
+ "encoding/asn1": map[string]bool{
+ "BitString": true,
+ "ClassApplication": true,
+ "ClassContextSpecific": true,
+ "ClassPrivate": true,
+ "ClassUniversal": true,
+ "Enumerated": true,
+ "Flag": true,
+ "Marshal": true,
+ "MarshalWithParams": true,
+ "NullBytes": true,
+ "NullRawValue": true,
+ "ObjectIdentifier": true,
+ "RawContent": true,
+ "RawValue": true,
+ "StructuralError": true,
+ "SyntaxError": true,
+ "TagBitString": true,
+ "TagBoolean": true,
+ "TagEnum": true,
+ "TagGeneralString": true,
+ "TagGeneralizedTime": true,
+ "TagIA5String": true,
+ "TagInteger": true,
+ "TagNull": true,
+ "TagNumericString": true,
+ "TagOID": true,
+ "TagOctetString": true,
+ "TagPrintableString": true,
+ "TagSequence": true,
+ "TagSet": true,
+ "TagT61String": true,
+ "TagUTCTime": true,
+ "TagUTF8String": true,
+ "Unmarshal": true,
+ "UnmarshalWithParams": true,
+ },
+ "encoding/base32": map[string]bool{
+ "CorruptInputError": true,
+ "Encoding": true,
+ "HexEncoding": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewEncoding": true,
+ "NoPadding": true,
+ "StdEncoding": true,
+ "StdPadding": true,
+ },
+ "encoding/base64": map[string]bool{
+ "CorruptInputError": true,
+ "Encoding": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewEncoding": true,
+ "NoPadding": true,
+ "RawStdEncoding": true,
+ "RawURLEncoding": true,
+ "StdEncoding": true,
+ "StdPadding": true,
+ "URLEncoding": true,
+ },
+ "encoding/binary": map[string]bool{
+ "BigEndian": true,
+ "ByteOrder": true,
+ "LittleEndian": true,
+ "MaxVarintLen16": true,
+ "MaxVarintLen32": true,
+ "MaxVarintLen64": true,
+ "PutUvarint": true,
+ "PutVarint": true,
+ "Read": true,
+ "ReadUvarint": true,
+ "ReadVarint": true,
+ "Size": true,
+ "Uvarint": true,
+ "Varint": true,
+ "Write": true,
+ },
+ "encoding/csv": map[string]bool{
+ "ErrBareQuote": true,
+ "ErrFieldCount": true,
+ "ErrQuote": true,
+ "ErrTrailingComma": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "ParseError": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "encoding/gob": map[string]bool{
+ "CommonType": true,
+ "Decoder": true,
+ "Encoder": true,
+ "GobDecoder": true,
+ "GobEncoder": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "Register": true,
+ "RegisterName": true,
+ },
+ "encoding/hex": map[string]bool{
+ "Decode": true,
+ "DecodeString": true,
+ "DecodedLen": true,
+ "Dump": true,
+ "Dumper": true,
+ "Encode": true,
+ "EncodeToString": true,
+ "EncodedLen": true,
+ "ErrLength": true,
+ "InvalidByteError": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ },
+ "encoding/json": map[string]bool{
+ "Compact": true,
+ "Decoder": true,
+ "Delim": true,
+ "Encoder": true,
+ "HTMLEscape": true,
+ "Indent": true,
+ "InvalidUTF8Error": true,
+ "InvalidUnmarshalError": true,
+ "Marshal": true,
+ "MarshalIndent": true,
+ "Marshaler": true,
+ "MarshalerError": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "Number": true,
+ "RawMessage": true,
+ "SyntaxError": true,
+ "Token": true,
+ "Unmarshal": true,
+ "UnmarshalFieldError": true,
+ "UnmarshalTypeError": true,
+ "Unmarshaler": true,
+ "UnsupportedTypeError": true,
+ "UnsupportedValueError": true,
+ "Valid": true,
+ },
+ "encoding/pem": map[string]bool{
+ "Block": true,
+ "Decode": true,
+ "Encode": true,
+ "EncodeToMemory": true,
+ },
+ "encoding/xml": map[string]bool{
+ "Attr": true,
+ "CharData": true,
+ "Comment": true,
+ "CopyToken": true,
+ "Decoder": true,
+ "Directive": true,
+ "Encoder": true,
+ "EndElement": true,
+ "Escape": true,
+ "EscapeText": true,
+ "HTMLAutoClose": true,
+ "HTMLEntity": true,
+ "Header": true,
+ "Marshal": true,
+ "MarshalIndent": true,
+ "Marshaler": true,
+ "MarshalerAttr": true,
+ "Name": true,
+ "NewDecoder": true,
+ "NewEncoder": true,
+ "NewTokenDecoder": true,
+ "ProcInst": true,
+ "StartElement": true,
+ "SyntaxError": true,
+ "TagPathError": true,
+ "Token": true,
+ "TokenReader": true,
+ "Unmarshal": true,
+ "UnmarshalError": true,
+ "Unmarshaler": true,
+ "UnmarshalerAttr": true,
+ "UnsupportedTypeError": true,
+ },
+ "errors": map[string]bool{
+ "New": true,
+ },
+ "expvar": map[string]bool{
+ "Do": true,
+ "Float": true,
+ "Func": true,
+ "Get": true,
+ "Handler": true,
+ "Int": true,
+ "KeyValue": true,
+ "Map": true,
+ "NewFloat": true,
+ "NewInt": true,
+ "NewMap": true,
+ "NewString": true,
+ "Publish": true,
+ "String": true,
+ "Var": true,
+ },
+ "flag": map[string]bool{
+ "Arg": true,
+ "Args": true,
+ "Bool": true,
+ "BoolVar": true,
+ "CommandLine": true,
+ "ContinueOnError": true,
+ "Duration": true,
+ "DurationVar": true,
+ "ErrHelp": true,
+ "ErrorHandling": true,
+ "ExitOnError": true,
+ "Flag": true,
+ "FlagSet": true,
+ "Float64": true,
+ "Float64Var": true,
+ "Getter": true,
+ "Int": true,
+ "Int64": true,
+ "Int64Var": true,
+ "IntVar": true,
+ "Lookup": true,
+ "NArg": true,
+ "NFlag": true,
+ "NewFlagSet": true,
+ "PanicOnError": true,
+ "Parse": true,
+ "Parsed": true,
+ "PrintDefaults": true,
+ "Set": true,
+ "String": true,
+ "StringVar": true,
+ "Uint": true,
+ "Uint64": true,
+ "Uint64Var": true,
+ "UintVar": true,
+ "UnquoteUsage": true,
+ "Usage": true,
+ "Value": true,
+ "Var": true,
+ "Visit": true,
+ "VisitAll": true,
+ },
+ "fmt": map[string]bool{
+ "Errorf": true,
+ "Formatter": true,
+ "Fprint": true,
+ "Fprintf": true,
+ "Fprintln": true,
+ "Fscan": true,
+ "Fscanf": true,
+ "Fscanln": true,
+ "GoStringer": true,
+ "Print": true,
+ "Printf": true,
+ "Println": true,
+ "Scan": true,
+ "ScanState": true,
+ "Scanf": true,
+ "Scanln": true,
+ "Scanner": true,
+ "Sprint": true,
+ "Sprintf": true,
+ "Sprintln": true,
+ "Sscan": true,
+ "Sscanf": true,
+ "Sscanln": true,
+ "State": true,
+ "Stringer": true,
+ },
+ "go/ast": map[string]bool{
+ "ArrayType": true,
+ "AssignStmt": true,
+ "Bad": true,
+ "BadDecl": true,
+ "BadExpr": true,
+ "BadStmt": true,
+ "BasicLit": true,
+ "BinaryExpr": true,
+ "BlockStmt": true,
+ "BranchStmt": true,
+ "CallExpr": true,
+ "CaseClause": true,
+ "ChanDir": true,
+ "ChanType": true,
+ "CommClause": true,
+ "Comment": true,
+ "CommentGroup": true,
+ "CommentMap": true,
+ "CompositeLit": true,
+ "Con": true,
+ "DeclStmt": true,
+ "DeferStmt": true,
+ "Ellipsis": true,
+ "EmptyStmt": true,
+ "ExprStmt": true,
+ "Field": true,
+ "FieldFilter": true,
+ "FieldList": true,
+ "File": true,
+ "FileExports": true,
+ "Filter": true,
+ "FilterDecl": true,
+ "FilterFile": true,
+ "FilterFuncDuplicates": true,
+ "FilterImportDuplicates": true,
+ "FilterPackage": true,
+ "FilterUnassociatedComments": true,
+ "ForStmt": true,
+ "Fprint": true,
+ "Fun": true,
+ "FuncDecl": true,
+ "FuncLit": true,
+ "FuncType": true,
+ "GenDecl": true,
+ "GoStmt": true,
+ "Ident": true,
+ "IfStmt": true,
+ "ImportSpec": true,
+ "Importer": true,
+ "IncDecStmt": true,
+ "IndexExpr": true,
+ "Inspect": true,
+ "InterfaceType": true,
+ "IsExported": true,
+ "KeyValueExpr": true,
+ "LabeledStmt": true,
+ "Lbl": true,
+ "MapType": true,
+ "MergeMode": true,
+ "MergePackageFiles": true,
+ "NewCommentMap": true,
+ "NewIdent": true,
+ "NewObj": true,
+ "NewPackage": true,
+ "NewScope": true,
+ "Node": true,
+ "NotNilFilter": true,
+ "ObjKind": true,
+ "Object": true,
+ "Package": true,
+ "PackageExports": true,
+ "ParenExpr": true,
+ "Pkg": true,
+ "Print": true,
+ "RECV": true,
+ "RangeStmt": true,
+ "ReturnStmt": true,
+ "SEND": true,
+ "Scope": true,
+ "SelectStmt": true,
+ "SelectorExpr": true,
+ "SendStmt": true,
+ "SliceExpr": true,
+ "SortImports": true,
+ "StarExpr": true,
+ "StructType": true,
+ "SwitchStmt": true,
+ "Typ": true,
+ "TypeAssertExpr": true,
+ "TypeSpec": true,
+ "TypeSwitchStmt": true,
+ "UnaryExpr": true,
+ "ValueSpec": true,
+ "Var": true,
+ "Visitor": true,
+ "Walk": true,
+ },
+ "go/build": map[string]bool{
+ "AllowBinary": true,
+ "ArchChar": true,
+ "Context": true,
+ "Default": true,
+ "FindOnly": true,
+ "IgnoreVendor": true,
+ "Import": true,
+ "ImportComment": true,
+ "ImportDir": true,
+ "ImportMode": true,
+ "IsLocalImport": true,
+ "MultiplePackageError": true,
+ "NoGoError": true,
+ "Package": true,
+ "ToolDir": true,
+ },
+ "go/constant": map[string]bool{
+ "BinaryOp": true,
+ "BitLen": true,
+ "Bool": true,
+ "BoolVal": true,
+ "Bytes": true,
+ "Compare": true,
+ "Complex": true,
+ "Denom": true,
+ "Float": true,
+ "Float32Val": true,
+ "Float64Val": true,
+ "Imag": true,
+ "Int": true,
+ "Int64Val": true,
+ "Kind": true,
+ "MakeBool": true,
+ "MakeFloat64": true,
+ "MakeFromBytes": true,
+ "MakeFromLiteral": true,
+ "MakeImag": true,
+ "MakeInt64": true,
+ "MakeString": true,
+ "MakeUint64": true,
+ "MakeUnknown": true,
+ "Num": true,
+ "Real": true,
+ "Shift": true,
+ "Sign": true,
+ "String": true,
+ "StringVal": true,
+ "ToComplex": true,
+ "ToFloat": true,
+ "ToInt": true,
+ "Uint64Val": true,
+ "UnaryOp": true,
+ "Unknown": true,
+ },
+ "go/doc": map[string]bool{
+ "AllDecls": true,
+ "AllMethods": true,
+ "Example": true,
+ "Examples": true,
+ "Filter": true,
+ "Func": true,
+ "IllegalPrefixes": true,
+ "IsPredeclared": true,
+ "Mode": true,
+ "New": true,
+ "Note": true,
+ "Package": true,
+ "PreserveAST": true,
+ "Synopsis": true,
+ "ToHTML": true,
+ "ToText": true,
+ "Type": true,
+ "Value": true,
+ },
+ "go/format": map[string]bool{
+ "Node": true,
+ "Source": true,
+ },
+ "go/importer": map[string]bool{
+ "Default": true,
+ "For": true,
+ "ForCompiler": true,
+ "Lookup": true,
+ },
+ "go/parser": map[string]bool{
+ "AllErrors": true,
+ "DeclarationErrors": true,
+ "ImportsOnly": true,
+ "Mode": true,
+ "PackageClauseOnly": true,
+ "ParseComments": true,
+ "ParseDir": true,
+ "ParseExpr": true,
+ "ParseExprFrom": true,
+ "ParseFile": true,
+ "SpuriousErrors": true,
+ "Trace": true,
+ },
+ "go/printer": map[string]bool{
+ "CommentedNode": true,
+ "Config": true,
+ "Fprint": true,
+ "Mode": true,
+ "RawFormat": true,
+ "SourcePos": true,
+ "TabIndent": true,
+ "UseSpaces": true,
+ },
+ "go/scanner": map[string]bool{
+ "Error": true,
+ "ErrorHandler": true,
+ "ErrorList": true,
+ "Mode": true,
+ "PrintError": true,
+ "ScanComments": true,
+ "Scanner": true,
+ },
+ "go/token": map[string]bool{
+ "ADD": true,
+ "ADD_ASSIGN": true,
+ "AND": true,
+ "AND_ASSIGN": true,
+ "AND_NOT": true,
+ "AND_NOT_ASSIGN": true,
+ "ARROW": true,
+ "ASSIGN": true,
+ "BREAK": true,
+ "CASE": true,
+ "CHAN": true,
+ "CHAR": true,
+ "COLON": true,
+ "COMMA": true,
+ "COMMENT": true,
+ "CONST": true,
+ "CONTINUE": true,
+ "DEC": true,
+ "DEFAULT": true,
+ "DEFER": true,
+ "DEFINE": true,
+ "ELLIPSIS": true,
+ "ELSE": true,
+ "EOF": true,
+ "EQL": true,
+ "FALLTHROUGH": true,
+ "FLOAT": true,
+ "FOR": true,
+ "FUNC": true,
+ "File": true,
+ "FileSet": true,
+ "GEQ": true,
+ "GO": true,
+ "GOTO": true,
+ "GTR": true,
+ "HighestPrec": true,
+ "IDENT": true,
+ "IF": true,
+ "ILLEGAL": true,
+ "IMAG": true,
+ "IMPORT": true,
+ "INC": true,
+ "INT": true,
+ "INTERFACE": true,
+ "LAND": true,
+ "LBRACE": true,
+ "LBRACK": true,
+ "LEQ": true,
+ "LOR": true,
+ "LPAREN": true,
+ "LSS": true,
+ "Lookup": true,
+ "LowestPrec": true,
+ "MAP": true,
+ "MUL": true,
+ "MUL_ASSIGN": true,
+ "NEQ": true,
+ "NOT": true,
+ "NewFileSet": true,
+ "NoPos": true,
+ "OR": true,
+ "OR_ASSIGN": true,
+ "PACKAGE": true,
+ "PERIOD": true,
+ "Pos": true,
+ "Position": true,
+ "QUO": true,
+ "QUO_ASSIGN": true,
+ "RANGE": true,
+ "RBRACE": true,
+ "RBRACK": true,
+ "REM": true,
+ "REM_ASSIGN": true,
+ "RETURN": true,
+ "RPAREN": true,
+ "SELECT": true,
+ "SEMICOLON": true,
+ "SHL": true,
+ "SHL_ASSIGN": true,
+ "SHR": true,
+ "SHR_ASSIGN": true,
+ "STRING": true,
+ "STRUCT": true,
+ "SUB": true,
+ "SUB_ASSIGN": true,
+ "SWITCH": true,
+ "TYPE": true,
+ "Token": true,
+ "UnaryPrec": true,
+ "VAR": true,
+ "XOR": true,
+ "XOR_ASSIGN": true,
+ },
+ "go/types": map[string]bool{
+ "Array": true,
+ "AssertableTo": true,
+ "AssignableTo": true,
+ "Basic": true,
+ "BasicInfo": true,
+ "BasicKind": true,
+ "Bool": true,
+ "Builtin": true,
+ "Byte": true,
+ "Chan": true,
+ "ChanDir": true,
+ "Checker": true,
+ "Comparable": true,
+ "Complex128": true,
+ "Complex64": true,
+ "Config": true,
+ "Const": true,
+ "ConvertibleTo": true,
+ "DefPredeclaredTestFuncs": true,
+ "Default": true,
+ "Error": true,
+ "Eval": true,
+ "ExprString": true,
+ "FieldVal": true,
+ "Float32": true,
+ "Float64": true,
+ "Func": true,
+ "Id": true,
+ "Identical": true,
+ "IdenticalIgnoreTags": true,
+ "Implements": true,
+ "ImportMode": true,
+ "Importer": true,
+ "ImporterFrom": true,
+ "Info": true,
+ "Initializer": true,
+ "Int": true,
+ "Int16": true,
+ "Int32": true,
+ "Int64": true,
+ "Int8": true,
+ "Interface": true,
+ "Invalid": true,
+ "IsBoolean": true,
+ "IsComplex": true,
+ "IsConstType": true,
+ "IsFloat": true,
+ "IsInteger": true,
+ "IsInterface": true,
+ "IsNumeric": true,
+ "IsOrdered": true,
+ "IsString": true,
+ "IsUnsigned": true,
+ "IsUntyped": true,
+ "Label": true,
+ "LookupFieldOrMethod": true,
+ "Map": true,
+ "MethodExpr": true,
+ "MethodSet": true,
+ "MethodVal": true,
+ "MissingMethod": true,
+ "Named": true,
+ "NewArray": true,
+ "NewChan": true,
+ "NewChecker": true,
+ "NewConst": true,
+ "NewField": true,
+ "NewFunc": true,
+ "NewInterface": true,
+ "NewInterfaceType": true,
+ "NewLabel": true,
+ "NewMap": true,
+ "NewMethodSet": true,
+ "NewNamed": true,
+ "NewPackage": true,
+ "NewParam": true,
+ "NewPkgName": true,
+ "NewPointer": true,
+ "NewScope": true,
+ "NewSignature": true,
+ "NewSlice": true,
+ "NewStruct": true,
+ "NewTuple": true,
+ "NewTypeName": true,
+ "NewVar": true,
+ "Nil": true,
+ "ObjectString": true,
+ "Package": true,
+ "PkgName": true,
+ "Pointer": true,
+ "Qualifier": true,
+ "RecvOnly": true,
+ "RelativeTo": true,
+ "Rune": true,
+ "Scope": true,
+ "Selection": true,
+ "SelectionKind": true,
+ "SelectionString": true,
+ "SendOnly": true,
+ "SendRecv": true,
+ "Signature": true,
+ "Sizes": true,
+ "SizesFor": true,
+ "Slice": true,
+ "StdSizes": true,
+ "String": true,
+ "Struct": true,
+ "Tuple": true,
+ "Typ": true,
+ "Type": true,
+ "TypeAndValue": true,
+ "TypeName": true,
+ "TypeString": true,
+ "Uint": true,
+ "Uint16": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Uint8": true,
+ "Uintptr": true,
+ "Universe": true,
+ "Unsafe": true,
+ "UnsafePointer": true,
+ "UntypedBool": true,
+ "UntypedComplex": true,
+ "UntypedFloat": true,
+ "UntypedInt": true,
+ "UntypedNil": true,
+ "UntypedRune": true,
+ "UntypedString": true,
+ "Var": true,
+ "WriteExpr": true,
+ "WriteSignature": true,
+ "WriteType": true,
+ },
+ "hash": map[string]bool{
+ "Hash": true,
+ "Hash32": true,
+ "Hash64": true,
+ },
+ "hash/adler32": map[string]bool{
+ "Checksum": true,
+ "New": true,
+ "Size": true,
+ },
+ "hash/crc32": map[string]bool{
+ "Castagnoli": true,
+ "Checksum": true,
+ "ChecksumIEEE": true,
+ "IEEE": true,
+ "IEEETable": true,
+ "Koopman": true,
+ "MakeTable": true,
+ "New": true,
+ "NewIEEE": true,
+ "Size": true,
+ "Table": true,
+ "Update": true,
+ },
+ "hash/crc64": map[string]bool{
+ "Checksum": true,
+ "ECMA": true,
+ "ISO": true,
+ "MakeTable": true,
+ "New": true,
+ "Size": true,
+ "Table": true,
+ "Update": true,
+ },
+ "hash/fnv": map[string]bool{
+ "New128": true,
+ "New128a": true,
+ "New32": true,
+ "New32a": true,
+ "New64": true,
+ "New64a": true,
+ },
+ "html": map[string]bool{
+ "EscapeString": true,
+ "UnescapeString": true,
+ },
+ "html/template": map[string]bool{
+ "CSS": true,
+ "ErrAmbigContext": true,
+ "ErrBadHTML": true,
+ "ErrBranchEnd": true,
+ "ErrEndContext": true,
+ "ErrNoSuchTemplate": true,
+ "ErrOutputContext": true,
+ "ErrPartialCharset": true,
+ "ErrPartialEscape": true,
+ "ErrPredefinedEscaper": true,
+ "ErrRangeLoopReentry": true,
+ "ErrSlashAmbig": true,
+ "Error": true,
+ "ErrorCode": true,
+ "FuncMap": true,
+ "HTML": true,
+ "HTMLAttr": true,
+ "HTMLEscape": true,
+ "HTMLEscapeString": true,
+ "HTMLEscaper": true,
+ "IsTrue": true,
+ "JS": true,
+ "JSEscape": true,
+ "JSEscapeString": true,
+ "JSEscaper": true,
+ "JSStr": true,
+ "Must": true,
+ "New": true,
+ "OK": true,
+ "ParseFiles": true,
+ "ParseGlob": true,
+ "Srcset": true,
+ "Template": true,
+ "URL": true,
+ "URLQueryEscaper": true,
+ },
+ "image": map[string]bool{
+ "Alpha": true,
+ "Alpha16": true,
+ "Black": true,
+ "CMYK": true,
+ "Config": true,
+ "Decode": true,
+ "DecodeConfig": true,
+ "ErrFormat": true,
+ "Gray": true,
+ "Gray16": true,
+ "Image": true,
+ "NRGBA": true,
+ "NRGBA64": true,
+ "NYCbCrA": true,
+ "NewAlpha": true,
+ "NewAlpha16": true,
+ "NewCMYK": true,
+ "NewGray": true,
+ "NewGray16": true,
+ "NewNRGBA": true,
+ "NewNRGBA64": true,
+ "NewNYCbCrA": true,
+ "NewPaletted": true,
+ "NewRGBA": true,
+ "NewRGBA64": true,
+ "NewUniform": true,
+ "NewYCbCr": true,
+ "Opaque": true,
+ "Paletted": true,
+ "PalettedImage": true,
+ "Point": true,
+ "Pt": true,
+ "RGBA": true,
+ "RGBA64": true,
+ "Rect": true,
+ "Rectangle": true,
+ "RegisterFormat": true,
+ "Transparent": true,
+ "Uniform": true,
+ "White": true,
+ "YCbCr": true,
+ "YCbCrSubsampleRatio": true,
+ "YCbCrSubsampleRatio410": true,
+ "YCbCrSubsampleRatio411": true,
+ "YCbCrSubsampleRatio420": true,
+ "YCbCrSubsampleRatio422": true,
+ "YCbCrSubsampleRatio440": true,
+ "YCbCrSubsampleRatio444": true,
+ "ZP": true,
+ "ZR": true,
+ },
+ "image/color": map[string]bool{
+ "Alpha": true,
+ "Alpha16": true,
+ "Alpha16Model": true,
+ "AlphaModel": true,
+ "Black": true,
+ "CMYK": true,
+ "CMYKModel": true,
+ "CMYKToRGB": true,
+ "Color": true,
+ "Gray": true,
+ "Gray16": true,
+ "Gray16Model": true,
+ "GrayModel": true,
+ "Model": true,
+ "ModelFunc": true,
+ "NRGBA": true,
+ "NRGBA64": true,
+ "NRGBA64Model": true,
+ "NRGBAModel": true,
+ "NYCbCrA": true,
+ "NYCbCrAModel": true,
+ "Opaque": true,
+ "Palette": true,
+ "RGBA": true,
+ "RGBA64": true,
+ "RGBA64Model": true,
+ "RGBAModel": true,
+ "RGBToCMYK": true,
+ "RGBToYCbCr": true,
+ "Transparent": true,
+ "White": true,
+ "YCbCr": true,
+ "YCbCrModel": true,
+ "YCbCrToRGB": true,
+ },
+ "image/color/palette": map[string]bool{
+ "Plan9": true,
+ "WebSafe": true,
+ },
+ "image/draw": map[string]bool{
+ "Draw": true,
+ "DrawMask": true,
+ "Drawer": true,
+ "FloydSteinberg": true,
+ "Image": true,
+ "Op": true,
+ "Over": true,
+ "Quantizer": true,
+ "Src": true,
+ },
+ "image/gif": map[string]bool{
+ "Decode": true,
+ "DecodeAll": true,
+ "DecodeConfig": true,
+ "DisposalBackground": true,
+ "DisposalNone": true,
+ "DisposalPrevious": true,
+ "Encode": true,
+ "EncodeAll": true,
+ "GIF": true,
+ "Options": true,
+ },
+ "image/jpeg": map[string]bool{
+ "Decode": true,
+ "DecodeConfig": true,
+ "DefaultQuality": true,
+ "Encode": true,
+ "FormatError": true,
+ "Options": true,
+ "Reader": true,
+ "UnsupportedError": true,
+ },
+ "image/png": map[string]bool{
+ "BestCompression": true,
+ "BestSpeed": true,
+ "CompressionLevel": true,
+ "Decode": true,
+ "DecodeConfig": true,
+ "DefaultCompression": true,
+ "Encode": true,
+ "Encoder": true,
+ "EncoderBuffer": true,
+ "EncoderBufferPool": true,
+ "FormatError": true,
+ "NoCompression": true,
+ "UnsupportedError": true,
+ },
+ "index/suffixarray": map[string]bool{
+ "Index": true,
+ "New": true,
+ },
+ "io": map[string]bool{
+ "ByteReader": true,
+ "ByteScanner": true,
+ "ByteWriter": true,
+ "Closer": true,
+ "Copy": true,
+ "CopyBuffer": true,
+ "CopyN": true,
+ "EOF": true,
+ "ErrClosedPipe": true,
+ "ErrNoProgress": true,
+ "ErrShortBuffer": true,
+ "ErrShortWrite": true,
+ "ErrUnexpectedEOF": true,
+ "LimitReader": true,
+ "LimitedReader": true,
+ "MultiReader": true,
+ "MultiWriter": true,
+ "NewSectionReader": true,
+ "Pipe": true,
+ "PipeReader": true,
+ "PipeWriter": true,
+ "ReadAtLeast": true,
+ "ReadCloser": true,
+ "ReadFull": true,
+ "ReadSeeker": true,
+ "ReadWriteCloser": true,
+ "ReadWriteSeeker": true,
+ "ReadWriter": true,
+ "Reader": true,
+ "ReaderAt": true,
+ "ReaderFrom": true,
+ "RuneReader": true,
+ "RuneScanner": true,
+ "SectionReader": true,
+ "SeekCurrent": true,
+ "SeekEnd": true,
+ "SeekStart": true,
+ "Seeker": true,
+ "StringWriter": true,
+ "TeeReader": true,
+ "WriteCloser": true,
+ "WriteSeeker": true,
+ "WriteString": true,
+ "Writer": true,
+ "WriterAt": true,
+ "WriterTo": true,
+ },
+ "io/ioutil": map[string]bool{
+ "Discard": true,
+ "NopCloser": true,
+ "ReadAll": true,
+ "ReadDir": true,
+ "ReadFile": true,
+ "TempDir": true,
+ "TempFile": true,
+ "WriteFile": true,
+ },
+ "log": map[string]bool{
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Flags": true,
+ "LUTC": true,
+ "Ldate": true,
+ "Llongfile": true,
+ "Lmicroseconds": true,
+ "Logger": true,
+ "Lshortfile": true,
+ "LstdFlags": true,
+ "Ltime": true,
+ "New": true,
+ "Output": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ "Prefix": true,
+ "Print": true,
+ "Printf": true,
+ "Println": true,
+ "SetFlags": true,
+ "SetOutput": true,
+ "SetPrefix": true,
+ },
+ "log/syslog": map[string]bool{
+ "Dial": true,
+ "LOG_ALERT": true,
+ "LOG_AUTH": true,
+ "LOG_AUTHPRIV": true,
+ "LOG_CRIT": true,
+ "LOG_CRON": true,
+ "LOG_DAEMON": true,
+ "LOG_DEBUG": true,
+ "LOG_EMERG": true,
+ "LOG_ERR": true,
+ "LOG_FTP": true,
+ "LOG_INFO": true,
+ "LOG_KERN": true,
+ "LOG_LOCAL0": true,
+ "LOG_LOCAL1": true,
+ "LOG_LOCAL2": true,
+ "LOG_LOCAL3": true,
+ "LOG_LOCAL4": true,
+ "LOG_LOCAL5": true,
+ "LOG_LOCAL6": true,
+ "LOG_LOCAL7": true,
+ "LOG_LPR": true,
+ "LOG_MAIL": true,
+ "LOG_NEWS": true,
+ "LOG_NOTICE": true,
+ "LOG_SYSLOG": true,
+ "LOG_USER": true,
+ "LOG_UUCP": true,
+ "LOG_WARNING": true,
+ "New": true,
+ "NewLogger": true,
+ "Priority": true,
+ "Writer": true,
+ },
+ "math": map[string]bool{
+ "Abs": true,
+ "Acos": true,
+ "Acosh": true,
+ "Asin": true,
+ "Asinh": true,
+ "Atan": true,
+ "Atan2": true,
+ "Atanh": true,
+ "Cbrt": true,
+ "Ceil": true,
+ "Copysign": true,
+ "Cos": true,
+ "Cosh": true,
+ "Dim": true,
+ "E": true,
+ "Erf": true,
+ "Erfc": true,
+ "Erfcinv": true,
+ "Erfinv": true,
+ "Exp": true,
+ "Exp2": true,
+ "Expm1": true,
+ "Float32bits": true,
+ "Float32frombits": true,
+ "Float64bits": true,
+ "Float64frombits": true,
+ "Floor": true,
+ "Frexp": true,
+ "Gamma": true,
+ "Hypot": true,
+ "Ilogb": true,
+ "Inf": true,
+ "IsInf": true,
+ "IsNaN": true,
+ "J0": true,
+ "J1": true,
+ "Jn": true,
+ "Ldexp": true,
+ "Lgamma": true,
+ "Ln10": true,
+ "Ln2": true,
+ "Log": true,
+ "Log10": true,
+ "Log10E": true,
+ "Log1p": true,
+ "Log2": true,
+ "Log2E": true,
+ "Logb": true,
+ "Max": true,
+ "MaxFloat32": true,
+ "MaxFloat64": true,
+ "MaxInt16": true,
+ "MaxInt32": true,
+ "MaxInt64": true,
+ "MaxInt8": true,
+ "MaxUint16": true,
+ "MaxUint32": true,
+ "MaxUint64": true,
+ "MaxUint8": true,
+ "Min": true,
+ "MinInt16": true,
+ "MinInt32": true,
+ "MinInt64": true,
+ "MinInt8": true,
+ "Mod": true,
+ "Modf": true,
+ "NaN": true,
+ "Nextafter": true,
+ "Nextafter32": true,
+ "Phi": true,
+ "Pi": true,
+ "Pow": true,
+ "Pow10": true,
+ "Remainder": true,
+ "Round": true,
+ "RoundToEven": true,
+ "Signbit": true,
+ "Sin": true,
+ "Sincos": true,
+ "Sinh": true,
+ "SmallestNonzeroFloat32": true,
+ "SmallestNonzeroFloat64": true,
+ "Sqrt": true,
+ "Sqrt2": true,
+ "SqrtE": true,
+ "SqrtPhi": true,
+ "SqrtPi": true,
+ "Tan": true,
+ "Tanh": true,
+ "Trunc": true,
+ "Y0": true,
+ "Y1": true,
+ "Yn": true,
+ },
+ "math/big": map[string]bool{
+ "Above": true,
+ "Accuracy": true,
+ "AwayFromZero": true,
+ "Below": true,
+ "ErrNaN": true,
+ "Exact": true,
+ "Float": true,
+ "Int": true,
+ "Jacobi": true,
+ "MaxBase": true,
+ "MaxExp": true,
+ "MaxPrec": true,
+ "MinExp": true,
+ "NewFloat": true,
+ "NewInt": true,
+ "NewRat": true,
+ "ParseFloat": true,
+ "Rat": true,
+ "RoundingMode": true,
+ "ToNearestAway": true,
+ "ToNearestEven": true,
+ "ToNegativeInf": true,
+ "ToPositiveInf": true,
+ "ToZero": true,
+ "Word": true,
+ },
+ "math/bits": map[string]bool{
+ "Add": true,
+ "Add32": true,
+ "Add64": true,
+ "Div": true,
+ "Div32": true,
+ "Div64": true,
+ "LeadingZeros": true,
+ "LeadingZeros16": true,
+ "LeadingZeros32": true,
+ "LeadingZeros64": true,
+ "LeadingZeros8": true,
+ "Len": true,
+ "Len16": true,
+ "Len32": true,
+ "Len64": true,
+ "Len8": true,
+ "Mul": true,
+ "Mul32": true,
+ "Mul64": true,
+ "OnesCount": true,
+ "OnesCount16": true,
+ "OnesCount32": true,
+ "OnesCount64": true,
+ "OnesCount8": true,
+ "Reverse": true,
+ "Reverse16": true,
+ "Reverse32": true,
+ "Reverse64": true,
+ "Reverse8": true,
+ "ReverseBytes": true,
+ "ReverseBytes16": true,
+ "ReverseBytes32": true,
+ "ReverseBytes64": true,
+ "RotateLeft": true,
+ "RotateLeft16": true,
+ "RotateLeft32": true,
+ "RotateLeft64": true,
+ "RotateLeft8": true,
+ "Sub": true,
+ "Sub32": true,
+ "Sub64": true,
+ "TrailingZeros": true,
+ "TrailingZeros16": true,
+ "TrailingZeros32": true,
+ "TrailingZeros64": true,
+ "TrailingZeros8": true,
+ "UintSize": true,
+ },
+ "math/cmplx": map[string]bool{
+ "Abs": true,
+ "Acos": true,
+ "Acosh": true,
+ "Asin": true,
+ "Asinh": true,
+ "Atan": true,
+ "Atanh": true,
+ "Conj": true,
+ "Cos": true,
+ "Cosh": true,
+ "Cot": true,
+ "Exp": true,
+ "Inf": true,
+ "IsInf": true,
+ "IsNaN": true,
+ "Log": true,
+ "Log10": true,
+ "NaN": true,
+ "Phase": true,
+ "Polar": true,
+ "Pow": true,
+ "Rect": true,
+ "Sin": true,
+ "Sinh": true,
+ "Sqrt": true,
+ "Tan": true,
+ "Tanh": true,
+ },
+ "math/rand": map[string]bool{
+ "ExpFloat64": true,
+ "Float32": true,
+ "Float64": true,
+ "Int": true,
+ "Int31": true,
+ "Int31n": true,
+ "Int63": true,
+ "Int63n": true,
+ "Intn": true,
+ "New": true,
+ "NewSource": true,
+ "NewZipf": true,
+ "NormFloat64": true,
+ "Perm": true,
+ "Rand": true,
+ "Read": true,
+ "Seed": true,
+ "Shuffle": true,
+ "Source": true,
+ "Source64": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Zipf": true,
+ },
+ "mime": map[string]bool{
+ "AddExtensionType": true,
+ "BEncoding": true,
+ "ErrInvalidMediaParameter": true,
+ "ExtensionsByType": true,
+ "FormatMediaType": true,
+ "ParseMediaType": true,
+ "QEncoding": true,
+ "TypeByExtension": true,
+ "WordDecoder": true,
+ "WordEncoder": true,
+ },
+ "mime/multipart": map[string]bool{
+ "ErrMessageTooLarge": true,
+ "File": true,
+ "FileHeader": true,
+ "Form": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Part": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "mime/quotedprintable": map[string]bool{
+ "NewReader": true,
+ "NewWriter": true,
+ "Reader": true,
+ "Writer": true,
+ },
+ "net": map[string]bool{
+ "Addr": true,
+ "AddrError": true,
+ "Buffers": true,
+ "CIDRMask": true,
+ "Conn": true,
+ "DNSConfigError": true,
+ "DNSError": true,
+ "DefaultResolver": true,
+ "Dial": true,
+ "DialIP": true,
+ "DialTCP": true,
+ "DialTimeout": true,
+ "DialUDP": true,
+ "DialUnix": true,
+ "Dialer": true,
+ "ErrWriteToConnected": true,
+ "Error": true,
+ "FileConn": true,
+ "FileListener": true,
+ "FilePacketConn": true,
+ "FlagBroadcast": true,
+ "FlagLoopback": true,
+ "FlagMulticast": true,
+ "FlagPointToPoint": true,
+ "FlagUp": true,
+ "Flags": true,
+ "HardwareAddr": true,
+ "IP": true,
+ "IPAddr": true,
+ "IPConn": true,
+ "IPMask": true,
+ "IPNet": true,
+ "IPv4": true,
+ "IPv4Mask": true,
+ "IPv4allrouter": true,
+ "IPv4allsys": true,
+ "IPv4bcast": true,
+ "IPv4len": true,
+ "IPv4zero": true,
+ "IPv6interfacelocalallnodes": true,
+ "IPv6len": true,
+ "IPv6linklocalallnodes": true,
+ "IPv6linklocalallrouters": true,
+ "IPv6loopback": true,
+ "IPv6unspecified": true,
+ "IPv6zero": true,
+ "Interface": true,
+ "InterfaceAddrs": true,
+ "InterfaceByIndex": true,
+ "InterfaceByName": true,
+ "Interfaces": true,
+ "InvalidAddrError": true,
+ "JoinHostPort": true,
+ "Listen": true,
+ "ListenConfig": true,
+ "ListenIP": true,
+ "ListenMulticastUDP": true,
+ "ListenPacket": true,
+ "ListenTCP": true,
+ "ListenUDP": true,
+ "ListenUnix": true,
+ "ListenUnixgram": true,
+ "Listener": true,
+ "LookupAddr": true,
+ "LookupCNAME": true,
+ "LookupHost": true,
+ "LookupIP": true,
+ "LookupMX": true,
+ "LookupNS": true,
+ "LookupPort": true,
+ "LookupSRV": true,
+ "LookupTXT": true,
+ "MX": true,
+ "NS": true,
+ "OpError": true,
+ "PacketConn": true,
+ "ParseCIDR": true,
+ "ParseError": true,
+ "ParseIP": true,
+ "ParseMAC": true,
+ "Pipe": true,
+ "ResolveIPAddr": true,
+ "ResolveTCPAddr": true,
+ "ResolveUDPAddr": true,
+ "ResolveUnixAddr": true,
+ "Resolver": true,
+ "SRV": true,
+ "SplitHostPort": true,
+ "TCPAddr": true,
+ "TCPConn": true,
+ "TCPListener": true,
+ "UDPAddr": true,
+ "UDPConn": true,
+ "UnixAddr": true,
+ "UnixConn": true,
+ "UnixListener": true,
+ "UnknownNetworkError": true,
+ },
+ "net/http": map[string]bool{
+ "CanonicalHeaderKey": true,
+ "Client": true,
+ "CloseNotifier": true,
+ "ConnState": true,
+ "Cookie": true,
+ "CookieJar": true,
+ "DefaultClient": true,
+ "DefaultMaxHeaderBytes": true,
+ "DefaultMaxIdleConnsPerHost": true,
+ "DefaultServeMux": true,
+ "DefaultTransport": true,
+ "DetectContentType": true,
+ "Dir": true,
+ "ErrAbortHandler": true,
+ "ErrBodyNotAllowed": true,
+ "ErrBodyReadAfterClose": true,
+ "ErrContentLength": true,
+ "ErrHandlerTimeout": true,
+ "ErrHeaderTooLong": true,
+ "ErrHijacked": true,
+ "ErrLineTooLong": true,
+ "ErrMissingBoundary": true,
+ "ErrMissingContentLength": true,
+ "ErrMissingFile": true,
+ "ErrNoCookie": true,
+ "ErrNoLocation": true,
+ "ErrNotMultipart": true,
+ "ErrNotSupported": true,
+ "ErrServerClosed": true,
+ "ErrShortBody": true,
+ "ErrSkipAltProtocol": true,
+ "ErrUnexpectedTrailer": true,
+ "ErrUseLastResponse": true,
+ "ErrWriteAfterFlush": true,
+ "Error": true,
+ "File": true,
+ "FileServer": true,
+ "FileSystem": true,
+ "Flusher": true,
+ "Get": true,
+ "Handle": true,
+ "HandleFunc": true,
+ "Handler": true,
+ "HandlerFunc": true,
+ "Head": true,
+ "Header": true,
+ "Hijacker": true,
+ "ListenAndServe": true,
+ "ListenAndServeTLS": true,
+ "LocalAddrContextKey": true,
+ "MaxBytesReader": true,
+ "MethodConnect": true,
+ "MethodDelete": true,
+ "MethodGet": true,
+ "MethodHead": true,
+ "MethodOptions": true,
+ "MethodPatch": true,
+ "MethodPost": true,
+ "MethodPut": true,
+ "MethodTrace": true,
+ "NewFileTransport": true,
+ "NewRequest": true,
+ "NewServeMux": true,
+ "NoBody": true,
+ "NotFound": true,
+ "NotFoundHandler": true,
+ "ParseHTTPVersion": true,
+ "ParseTime": true,
+ "Post": true,
+ "PostForm": true,
+ "ProtocolError": true,
+ "ProxyFromEnvironment": true,
+ "ProxyURL": true,
+ "PushOptions": true,
+ "Pusher": true,
+ "ReadRequest": true,
+ "ReadResponse": true,
+ "Redirect": true,
+ "RedirectHandler": true,
+ "Request": true,
+ "Response": true,
+ "ResponseWriter": true,
+ "RoundTripper": true,
+ "SameSite": true,
+ "SameSiteDefaultMode": true,
+ "SameSiteLaxMode": true,
+ "SameSiteStrictMode": true,
+ "Serve": true,
+ "ServeContent": true,
+ "ServeFile": true,
+ "ServeMux": true,
+ "ServeTLS": true,
+ "Server": true,
+ "ServerContextKey": true,
+ "SetCookie": true,
+ "StateActive": true,
+ "StateClosed": true,
+ "StateHijacked": true,
+ "StateIdle": true,
+ "StateNew": true,
+ "StatusAccepted": true,
+ "StatusAlreadyReported": true,
+ "StatusBadGateway": true,
+ "StatusBadRequest": true,
+ "StatusConflict": true,
+ "StatusContinue": true,
+ "StatusCreated": true,
+ "StatusExpectationFailed": true,
+ "StatusFailedDependency": true,
+ "StatusForbidden": true,
+ "StatusFound": true,
+ "StatusGatewayTimeout": true,
+ "StatusGone": true,
+ "StatusHTTPVersionNotSupported": true,
+ "StatusIMUsed": true,
+ "StatusInsufficientStorage": true,
+ "StatusInternalServerError": true,
+ "StatusLengthRequired": true,
+ "StatusLocked": true,
+ "StatusLoopDetected": true,
+ "StatusMethodNotAllowed": true,
+ "StatusMisdirectedRequest": true,
+ "StatusMovedPermanently": true,
+ "StatusMultiStatus": true,
+ "StatusMultipleChoices": true,
+ "StatusNetworkAuthenticationRequired": true,
+ "StatusNoContent": true,
+ "StatusNonAuthoritativeInfo": true,
+ "StatusNotAcceptable": true,
+ "StatusNotExtended": true,
+ "StatusNotFound": true,
+ "StatusNotImplemented": true,
+ "StatusNotModified": true,
+ "StatusOK": true,
+ "StatusPartialContent": true,
+ "StatusPaymentRequired": true,
+ "StatusPermanentRedirect": true,
+ "StatusPreconditionFailed": true,
+ "StatusPreconditionRequired": true,
+ "StatusProcessing": true,
+ "StatusProxyAuthRequired": true,
+ "StatusRequestEntityTooLarge": true,
+ "StatusRequestHeaderFieldsTooLarge": true,
+ "StatusRequestTimeout": true,
+ "StatusRequestURITooLong": true,
+ "StatusRequestedRangeNotSatisfiable": true,
+ "StatusResetContent": true,
+ "StatusSeeOther": true,
+ "StatusServiceUnavailable": true,
+ "StatusSwitchingProtocols": true,
+ "StatusTeapot": true,
+ "StatusTemporaryRedirect": true,
+ "StatusText": true,
+ "StatusTooEarly": true,
+ "StatusTooManyRequests": true,
+ "StatusUnauthorized": true,
+ "StatusUnavailableForLegalReasons": true,
+ "StatusUnprocessableEntity": true,
+ "StatusUnsupportedMediaType": true,
+ "StatusUpgradeRequired": true,
+ "StatusUseProxy": true,
+ "StatusVariantAlsoNegotiates": true,
+ "StripPrefix": true,
+ "TimeFormat": true,
+ "TimeoutHandler": true,
+ "TrailerPrefix": true,
+ "Transport": true,
+ },
+ "net/http/cgi": map[string]bool{
+ "Handler": true,
+ "Request": true,
+ "RequestFromMap": true,
+ "Serve": true,
+ },
+ "net/http/cookiejar": map[string]bool{
+ "Jar": true,
+ "New": true,
+ "Options": true,
+ "PublicSuffixList": true,
+ },
+ "net/http/fcgi": map[string]bool{
+ "ErrConnClosed": true,
+ "ErrRequestAborted": true,
+ "ProcessEnv": true,
+ "Serve": true,
+ },
+ "net/http/httptest": map[string]bool{
+ "DefaultRemoteAddr": true,
+ "NewRecorder": true,
+ "NewRequest": true,
+ "NewServer": true,
+ "NewTLSServer": true,
+ "NewUnstartedServer": true,
+ "ResponseRecorder": true,
+ "Server": true,
+ },
+ "net/http/httptrace": map[string]bool{
+ "ClientTrace": true,
+ "ContextClientTrace": true,
+ "DNSDoneInfo": true,
+ "DNSStartInfo": true,
+ "GotConnInfo": true,
+ "WithClientTrace": true,
+ "WroteRequestInfo": true,
+ },
+ "net/http/httputil": map[string]bool{
+ "BufferPool": true,
+ "ClientConn": true,
+ "DumpRequest": true,
+ "DumpRequestOut": true,
+ "DumpResponse": true,
+ "ErrClosed": true,
+ "ErrLineTooLong": true,
+ "ErrPersistEOF": true,
+ "ErrPipeline": true,
+ "NewChunkedReader": true,
+ "NewChunkedWriter": true,
+ "NewClientConn": true,
+ "NewProxyClientConn": true,
+ "NewServerConn": true,
+ "NewSingleHostReverseProxy": true,
+ "ReverseProxy": true,
+ "ServerConn": true,
+ },
+ "net/http/pprof": map[string]bool{
+ "Cmdline": true,
+ "Handler": true,
+ "Index": true,
+ "Profile": true,
+ "Symbol": true,
+ "Trace": true,
+ },
+ "net/mail": map[string]bool{
+ "Address": true,
+ "AddressParser": true,
+ "ErrHeaderNotPresent": true,
+ "Header": true,
+ "Message": true,
+ "ParseAddress": true,
+ "ParseAddressList": true,
+ "ParseDate": true,
+ "ReadMessage": true,
+ },
+ "net/rpc": map[string]bool{
+ "Accept": true,
+ "Call": true,
+ "Client": true,
+ "ClientCodec": true,
+ "DefaultDebugPath": true,
+ "DefaultRPCPath": true,
+ "DefaultServer": true,
+ "Dial": true,
+ "DialHTTP": true,
+ "DialHTTPPath": true,
+ "ErrShutdown": true,
+ "HandleHTTP": true,
+ "NewClient": true,
+ "NewClientWithCodec": true,
+ "NewServer": true,
+ "Register": true,
+ "RegisterName": true,
+ "Request": true,
+ "Response": true,
+ "ServeCodec": true,
+ "ServeConn": true,
+ "ServeRequest": true,
+ "Server": true,
+ "ServerCodec": true,
+ "ServerError": true,
+ },
+ "net/rpc/jsonrpc": map[string]bool{
+ "Dial": true,
+ "NewClient": true,
+ "NewClientCodec": true,
+ "NewServerCodec": true,
+ "ServeConn": true,
+ },
+ "net/smtp": map[string]bool{
+ "Auth": true,
+ "CRAMMD5Auth": true,
+ "Client": true,
+ "Dial": true,
+ "NewClient": true,
+ "PlainAuth": true,
+ "SendMail": true,
+ "ServerInfo": true,
+ },
+ "net/textproto": map[string]bool{
+ "CanonicalMIMEHeaderKey": true,
+ "Conn": true,
+ "Dial": true,
+ "Error": true,
+ "MIMEHeader": true,
+ "NewConn": true,
+ "NewReader": true,
+ "NewWriter": true,
+ "Pipeline": true,
+ "ProtocolError": true,
+ "Reader": true,
+ "TrimBytes": true,
+ "TrimString": true,
+ "Writer": true,
+ },
+ "net/url": map[string]bool{
+ "Error": true,
+ "EscapeError": true,
+ "InvalidHostError": true,
+ "Parse": true,
+ "ParseQuery": true,
+ "ParseRequestURI": true,
+ "PathEscape": true,
+ "PathUnescape": true,
+ "QueryEscape": true,
+ "QueryUnescape": true,
+ "URL": true,
+ "User": true,
+ "UserPassword": true,
+ "Userinfo": true,
+ "Values": true,
+ },
+ "os": map[string]bool{
+ "Args": true,
+ "Chdir": true,
+ "Chmod": true,
+ "Chown": true,
+ "Chtimes": true,
+ "Clearenv": true,
+ "Create": true,
+ "DevNull": true,
+ "Environ": true,
+ "ErrClosed": true,
+ "ErrExist": true,
+ "ErrInvalid": true,
+ "ErrNoDeadline": true,
+ "ErrNotExist": true,
+ "ErrPermission": true,
+ "Executable": true,
+ "Exit": true,
+ "Expand": true,
+ "ExpandEnv": true,
+ "File": true,
+ "FileInfo": true,
+ "FileMode": true,
+ "FindProcess": true,
+ "Getegid": true,
+ "Getenv": true,
+ "Geteuid": true,
+ "Getgid": true,
+ "Getgroups": true,
+ "Getpagesize": true,
+ "Getpid": true,
+ "Getppid": true,
+ "Getuid": true,
+ "Getwd": true,
+ "Hostname": true,
+ "Interrupt": true,
+ "IsExist": true,
+ "IsNotExist": true,
+ "IsPathSeparator": true,
+ "IsPermission": true,
+ "IsTimeout": true,
+ "Kill": true,
+ "Lchown": true,
+ "Link": true,
+ "LinkError": true,
+ "LookupEnv": true,
+ "Lstat": true,
+ "Mkdir": true,
+ "MkdirAll": true,
+ "ModeAppend": true,
+ "ModeCharDevice": true,
+ "ModeDevice": true,
+ "ModeDir": true,
+ "ModeExclusive": true,
+ "ModeIrregular": true,
+ "ModeNamedPipe": true,
+ "ModePerm": true,
+ "ModeSetgid": true,
+ "ModeSetuid": true,
+ "ModeSocket": true,
+ "ModeSticky": true,
+ "ModeSymlink": true,
+ "ModeTemporary": true,
+ "ModeType": true,
+ "NewFile": true,
+ "NewSyscallError": true,
+ "O_APPEND": true,
+ "O_CREATE": true,
+ "O_EXCL": true,
+ "O_RDONLY": true,
+ "O_RDWR": true,
+ "O_SYNC": true,
+ "O_TRUNC": true,
+ "O_WRONLY": true,
+ "Open": true,
+ "OpenFile": true,
+ "PathError": true,
+ "PathListSeparator": true,
+ "PathSeparator": true,
+ "Pipe": true,
+ "ProcAttr": true,
+ "Process": true,
+ "ProcessState": true,
+ "Readlink": true,
+ "Remove": true,
+ "RemoveAll": true,
+ "Rename": true,
+ "SEEK_CUR": true,
+ "SEEK_END": true,
+ "SEEK_SET": true,
+ "SameFile": true,
+ "Setenv": true,
+ "Signal": true,
+ "StartProcess": true,
+ "Stat": true,
+ "Stderr": true,
+ "Stdin": true,
+ "Stdout": true,
+ "Symlink": true,
+ "SyscallError": true,
+ "TempDir": true,
+ "Truncate": true,
+ "Unsetenv": true,
+ "UserCacheDir": true,
+ "UserHomeDir": true,
+ },
+ "os/exec": map[string]bool{
+ "Cmd": true,
+ "Command": true,
+ "CommandContext": true,
+ "ErrNotFound": true,
+ "Error": true,
+ "ExitError": true,
+ "LookPath": true,
+ },
+ "os/signal": map[string]bool{
+ "Ignore": true,
+ "Ignored": true,
+ "Notify": true,
+ "Reset": true,
+ "Stop": true,
+ },
+ "os/user": map[string]bool{
+ "Current": true,
+ "Group": true,
+ "Lookup": true,
+ "LookupGroup": true,
+ "LookupGroupId": true,
+ "LookupId": true,
+ "UnknownGroupError": true,
+ "UnknownGroupIdError": true,
+ "UnknownUserError": true,
+ "UnknownUserIdError": true,
+ "User": true,
+ },
+ "path": map[string]bool{
+ "Base": true,
+ "Clean": true,
+ "Dir": true,
+ "ErrBadPattern": true,
+ "Ext": true,
+ "IsAbs": true,
+ "Join": true,
+ "Match": true,
+ "Split": true,
+ },
+ "path/filepath": map[string]bool{
+ "Abs": true,
+ "Base": true,
+ "Clean": true,
+ "Dir": true,
+ "ErrBadPattern": true,
+ "EvalSymlinks": true,
+ "Ext": true,
+ "FromSlash": true,
+ "Glob": true,
+ "HasPrefix": true,
+ "IsAbs": true,
+ "Join": true,
+ "ListSeparator": true,
+ "Match": true,
+ "Rel": true,
+ "Separator": true,
+ "SkipDir": true,
+ "Split": true,
+ "SplitList": true,
+ "ToSlash": true,
+ "VolumeName": true,
+ "Walk": true,
+ "WalkFunc": true,
+ },
+ "plugin": map[string]bool{
+ "Open": true,
+ "Plugin": true,
+ "Symbol": true,
+ },
+ "reflect": map[string]bool{
+ "Append": true,
+ "AppendSlice": true,
+ "Array": true,
+ "ArrayOf": true,
+ "Bool": true,
+ "BothDir": true,
+ "Chan": true,
+ "ChanDir": true,
+ "ChanOf": true,
+ "Complex128": true,
+ "Complex64": true,
+ "Copy": true,
+ "DeepEqual": true,
+ "Float32": true,
+ "Float64": true,
+ "Func": true,
+ "FuncOf": true,
+ "Indirect": true,
+ "Int": true,
+ "Int16": true,
+ "Int32": true,
+ "Int64": true,
+ "Int8": true,
+ "Interface": true,
+ "Invalid": true,
+ "Kind": true,
+ "MakeChan": true,
+ "MakeFunc": true,
+ "MakeMap": true,
+ "MakeMapWithSize": true,
+ "MakeSlice": true,
+ "Map": true,
+ "MapIter": true,
+ "MapOf": true,
+ "Method": true,
+ "New": true,
+ "NewAt": true,
+ "Ptr": true,
+ "PtrTo": true,
+ "RecvDir": true,
+ "Select": true,
+ "SelectCase": true,
+ "SelectDefault": true,
+ "SelectDir": true,
+ "SelectRecv": true,
+ "SelectSend": true,
+ "SendDir": true,
+ "Slice": true,
+ "SliceHeader": true,
+ "SliceOf": true,
+ "String": true,
+ "StringHeader": true,
+ "Struct": true,
+ "StructField": true,
+ "StructOf": true,
+ "StructTag": true,
+ "Swapper": true,
+ "TypeOf": true,
+ "Uint": true,
+ "Uint16": true,
+ "Uint32": true,
+ "Uint64": true,
+ "Uint8": true,
+ "Uintptr": true,
+ "UnsafePointer": true,
+ "Value": true,
+ "ValueError": true,
+ "ValueOf": true,
+ "Zero": true,
+ },
+ "regexp": map[string]bool{
+ "Compile": true,
+ "CompilePOSIX": true,
+ "Match": true,
+ "MatchReader": true,
+ "MatchString": true,
+ "MustCompile": true,
+ "MustCompilePOSIX": true,
+ "QuoteMeta": true,
+ "Regexp": true,
+ },
+ "regexp/syntax": map[string]bool{
+ "ClassNL": true,
+ "Compile": true,
+ "DotNL": true,
+ "EmptyBeginLine": true,
+ "EmptyBeginText": true,
+ "EmptyEndLine": true,
+ "EmptyEndText": true,
+ "EmptyNoWordBoundary": true,
+ "EmptyOp": true,
+ "EmptyOpContext": true,
+ "EmptyWordBoundary": true,
+ "ErrInternalError": true,
+ "ErrInvalidCharClass": true,
+ "ErrInvalidCharRange": true,
+ "ErrInvalidEscape": true,
+ "ErrInvalidNamedCapture": true,
+ "ErrInvalidPerlOp": true,
+ "ErrInvalidRepeatOp": true,
+ "ErrInvalidRepeatSize": true,
+ "ErrInvalidUTF8": true,
+ "ErrMissingBracket": true,
+ "ErrMissingParen": true,
+ "ErrMissingRepeatArgument": true,
+ "ErrTrailingBackslash": true,
+ "ErrUnexpectedParen": true,
+ "Error": true,
+ "ErrorCode": true,
+ "Flags": true,
+ "FoldCase": true,
+ "Inst": true,
+ "InstAlt": true,
+ "InstAltMatch": true,
+ "InstCapture": true,
+ "InstEmptyWidth": true,
+ "InstFail": true,
+ "InstMatch": true,
+ "InstNop": true,
+ "InstOp": true,
+ "InstRune": true,
+ "InstRune1": true,
+ "InstRuneAny": true,
+ "InstRuneAnyNotNL": true,
+ "IsWordChar": true,
+ "Literal": true,
+ "MatchNL": true,
+ "NonGreedy": true,
+ "OneLine": true,
+ "Op": true,
+ "OpAlternate": true,
+ "OpAnyChar": true,
+ "OpAnyCharNotNL": true,
+ "OpBeginLine": true,
+ "OpBeginText": true,
+ "OpCapture": true,
+ "OpCharClass": true,
+ "OpConcat": true,
+ "OpEmptyMatch": true,
+ "OpEndLine": true,
+ "OpEndText": true,
+ "OpLiteral": true,
+ "OpNoMatch": true,
+ "OpNoWordBoundary": true,
+ "OpPlus": true,
+ "OpQuest": true,
+ "OpRepeat": true,
+ "OpStar": true,
+ "OpWordBoundary": true,
+ "POSIX": true,
+ "Parse": true,
+ "Perl": true,
+ "PerlX": true,
+ "Prog": true,
+ "Regexp": true,
+ "Simple": true,
+ "UnicodeGroups": true,
+ "WasDollar": true,
+ },
+ "runtime": map[string]bool{
+ "BlockProfile": true,
+ "BlockProfileRecord": true,
+ "Breakpoint": true,
+ "CPUProfile": true,
+ "Caller": true,
+ "Callers": true,
+ "CallersFrames": true,
+ "Compiler": true,
+ "Error": true,
+ "Frame": true,
+ "Frames": true,
+ "Func": true,
+ "FuncForPC": true,
+ "GC": true,
+ "GOARCH": true,
+ "GOMAXPROCS": true,
+ "GOOS": true,
+ "GOROOT": true,
+ "Goexit": true,
+ "GoroutineProfile": true,
+ "Gosched": true,
+ "KeepAlive": true,
+ "LockOSThread": true,
+ "MemProfile": true,
+ "MemProfileRate": true,
+ "MemProfileRecord": true,
+ "MemStats": true,
+ "MutexProfile": true,
+ "NumCPU": true,
+ "NumCgoCall": true,
+ "NumGoroutine": true,
+ "ReadMemStats": true,
+ "ReadTrace": true,
+ "SetBlockProfileRate": true,
+ "SetCPUProfileRate": true,
+ "SetCgoTraceback": true,
+ "SetFinalizer": true,
+ "SetMutexProfileFraction": true,
+ "Stack": true,
+ "StackRecord": true,
+ "StartTrace": true,
+ "StopTrace": true,
+ "ThreadCreateProfile": true,
+ "TypeAssertionError": true,
+ "UnlockOSThread": true,
+ "Version": true,
+ },
+ "runtime/debug": map[string]bool{
+ "BuildInfo": true,
+ "FreeOSMemory": true,
+ "GCStats": true,
+ "Module": true,
+ "PrintStack": true,
+ "ReadBuildInfo": true,
+ "ReadGCStats": true,
+ "SetGCPercent": true,
+ "SetMaxStack": true,
+ "SetMaxThreads": true,
+ "SetPanicOnFault": true,
+ "SetTraceback": true,
+ "Stack": true,
+ "WriteHeapDump": true,
+ },
+ "runtime/pprof": map[string]bool{
+ "Do": true,
+ "ForLabels": true,
+ "Label": true,
+ "LabelSet": true,
+ "Labels": true,
+ "Lookup": true,
+ "NewProfile": true,
+ "Profile": true,
+ "Profiles": true,
+ "SetGoroutineLabels": true,
+ "StartCPUProfile": true,
+ "StopCPUProfile": true,
+ "WithLabels": true,
+ "WriteHeapProfile": true,
+ },
+ "runtime/trace": map[string]bool{
+ "IsEnabled": true,
+ "Log": true,
+ "Logf": true,
+ "NewTask": true,
+ "Region": true,
+ "Start": true,
+ "StartRegion": true,
+ "Stop": true,
+ "Task": true,
+ "WithRegion": true,
+ },
+ "sort": map[string]bool{
+ "Float64Slice": true,
+ "Float64s": true,
+ "Float64sAreSorted": true,
+ "IntSlice": true,
+ "Interface": true,
+ "Ints": true,
+ "IntsAreSorted": true,
+ "IsSorted": true,
+ "Reverse": true,
+ "Search": true,
+ "SearchFloat64s": true,
+ "SearchInts": true,
+ "SearchStrings": true,
+ "Slice": true,
+ "SliceIsSorted": true,
+ "SliceStable": true,
+ "Sort": true,
+ "Stable": true,
+ "StringSlice": true,
+ "Strings": true,
+ "StringsAreSorted": true,
+ },
+ "strconv": map[string]bool{
+ "AppendBool": true,
+ "AppendFloat": true,
+ "AppendInt": true,
+ "AppendQuote": true,
+ "AppendQuoteRune": true,
+ "AppendQuoteRuneToASCII": true,
+ "AppendQuoteRuneToGraphic": true,
+ "AppendQuoteToASCII": true,
+ "AppendQuoteToGraphic": true,
+ "AppendUint": true,
+ "Atoi": true,
+ "CanBackquote": true,
+ "ErrRange": true,
+ "ErrSyntax": true,
+ "FormatBool": true,
+ "FormatFloat": true,
+ "FormatInt": true,
+ "FormatUint": true,
+ "IntSize": true,
+ "IsGraphic": true,
+ "IsPrint": true,
+ "Itoa": true,
+ "NumError": true,
+ "ParseBool": true,
+ "ParseFloat": true,
+ "ParseInt": true,
+ "ParseUint": true,
+ "Quote": true,
+ "QuoteRune": true,
+ "QuoteRuneToASCII": true,
+ "QuoteRuneToGraphic": true,
+ "QuoteToASCII": true,
+ "QuoteToGraphic": true,
+ "Unquote": true,
+ "UnquoteChar": true,
+ },
+ "strings": map[string]bool{
+ "Builder": true,
+ "Compare": true,
+ "Contains": true,
+ "ContainsAny": true,
+ "ContainsRune": true,
+ "Count": true,
+ "EqualFold": true,
+ "Fields": true,
+ "FieldsFunc": true,
+ "HasPrefix": true,
+ "HasSuffix": true,
+ "Index": true,
+ "IndexAny": true,
+ "IndexByte": true,
+ "IndexFunc": true,
+ "IndexRune": true,
+ "Join": true,
+ "LastIndex": true,
+ "LastIndexAny": true,
+ "LastIndexByte": true,
+ "LastIndexFunc": true,
+ "Map": true,
+ "NewReader": true,
+ "NewReplacer": true,
+ "Reader": true,
+ "Repeat": true,
+ "Replace": true,
+ "ReplaceAll": true,
+ "Replacer": true,
+ "Split": true,
+ "SplitAfter": true,
+ "SplitAfterN": true,
+ "SplitN": true,
+ "Title": true,
+ "ToLower": true,
+ "ToLowerSpecial": true,
+ "ToTitle": true,
+ "ToTitleSpecial": true,
+ "ToUpper": true,
+ "ToUpperSpecial": true,
+ "Trim": true,
+ "TrimFunc": true,
+ "TrimLeft": true,
+ "TrimLeftFunc": true,
+ "TrimPrefix": true,
+ "TrimRight": true,
+ "TrimRightFunc": true,
+ "TrimSpace": true,
+ "TrimSuffix": true,
+ },
+ "sync": map[string]bool{
+ "Cond": true,
+ "Locker": true,
+ "Map": true,
+ "Mutex": true,
+ "NewCond": true,
+ "Once": true,
+ "Pool": true,
+ "RWMutex": true,
+ "WaitGroup": true,
+ },
+ "sync/atomic": map[string]bool{
+ "AddInt32": true,
+ "AddInt64": true,
+ "AddUint32": true,
+ "AddUint64": true,
+ "AddUintptr": true,
+ "CompareAndSwapInt32": true,
+ "CompareAndSwapInt64": true,
+ "CompareAndSwapPointer": true,
+ "CompareAndSwapUint32": true,
+ "CompareAndSwapUint64": true,
+ "CompareAndSwapUintptr": true,
+ "LoadInt32": true,
+ "LoadInt64": true,
+ "LoadPointer": true,
+ "LoadUint32": true,
+ "LoadUint64": true,
+ "LoadUintptr": true,
+ "StoreInt32": true,
+ "StoreInt64": true,
+ "StorePointer": true,
+ "StoreUint32": true,
+ "StoreUint64": true,
+ "StoreUintptr": true,
+ "SwapInt32": true,
+ "SwapInt64": true,
+ "SwapPointer": true,
+ "SwapUint32": true,
+ "SwapUint64": true,
+ "SwapUintptr": true,
+ "Value": true,
+ },
+ "syscall": map[string]bool{
+ "AF_ALG": true,
+ "AF_APPLETALK": true,
+ "AF_ARP": true,
+ "AF_ASH": true,
+ "AF_ATM": true,
+ "AF_ATMPVC": true,
+ "AF_ATMSVC": true,
+ "AF_AX25": true,
+ "AF_BLUETOOTH": true,
+ "AF_BRIDGE": true,
+ "AF_CAIF": true,
+ "AF_CAN": true,
+ "AF_CCITT": true,
+ "AF_CHAOS": true,
+ "AF_CNT": true,
+ "AF_COIP": true,
+ "AF_DATAKIT": true,
+ "AF_DECnet": true,
+ "AF_DLI": true,
+ "AF_E164": true,
+ "AF_ECMA": true,
+ "AF_ECONET": true,
+ "AF_ENCAP": true,
+ "AF_FILE": true,
+ "AF_HYLINK": true,
+ "AF_IEEE80211": true,
+ "AF_IEEE802154": true,
+ "AF_IMPLINK": true,
+ "AF_INET": true,
+ "AF_INET6": true,
+ "AF_INET6_SDP": true,
+ "AF_INET_SDP": true,
+ "AF_IPX": true,
+ "AF_IRDA": true,
+ "AF_ISDN": true,
+ "AF_ISO": true,
+ "AF_IUCV": true,
+ "AF_KEY": true,
+ "AF_LAT": true,
+ "AF_LINK": true,
+ "AF_LLC": true,
+ "AF_LOCAL": true,
+ "AF_MAX": true,
+ "AF_MPLS": true,
+ "AF_NATM": true,
+ "AF_NDRV": true,
+ "AF_NETBEUI": true,
+ "AF_NETBIOS": true,
+ "AF_NETGRAPH": true,
+ "AF_NETLINK": true,
+ "AF_NETROM": true,
+ "AF_NS": true,
+ "AF_OROUTE": true,
+ "AF_OSI": true,
+ "AF_PACKET": true,
+ "AF_PHONET": true,
+ "AF_PPP": true,
+ "AF_PPPOX": true,
+ "AF_PUP": true,
+ "AF_RDS": true,
+ "AF_RESERVED_36": true,
+ "AF_ROSE": true,
+ "AF_ROUTE": true,
+ "AF_RXRPC": true,
+ "AF_SCLUSTER": true,
+ "AF_SECURITY": true,
+ "AF_SIP": true,
+ "AF_SLOW": true,
+ "AF_SNA": true,
+ "AF_SYSTEM": true,
+ "AF_TIPC": true,
+ "AF_UNIX": true,
+ "AF_UNSPEC": true,
+ "AF_VENDOR00": true,
+ "AF_VENDOR01": true,
+ "AF_VENDOR02": true,
+ "AF_VENDOR03": true,
+ "AF_VENDOR04": true,
+ "AF_VENDOR05": true,
+ "AF_VENDOR06": true,
+ "AF_VENDOR07": true,
+ "AF_VENDOR08": true,
+ "AF_VENDOR09": true,
+ "AF_VENDOR10": true,
+ "AF_VENDOR11": true,
+ "AF_VENDOR12": true,
+ "AF_VENDOR13": true,
+ "AF_VENDOR14": true,
+ "AF_VENDOR15": true,
+ "AF_VENDOR16": true,
+ "AF_VENDOR17": true,
+ "AF_VENDOR18": true,
+ "AF_VENDOR19": true,
+ "AF_VENDOR20": true,
+ "AF_VENDOR21": true,
+ "AF_VENDOR22": true,
+ "AF_VENDOR23": true,
+ "AF_VENDOR24": true,
+ "AF_VENDOR25": true,
+ "AF_VENDOR26": true,
+ "AF_VENDOR27": true,
+ "AF_VENDOR28": true,
+ "AF_VENDOR29": true,
+ "AF_VENDOR30": true,
+ "AF_VENDOR31": true,
+ "AF_VENDOR32": true,
+ "AF_VENDOR33": true,
+ "AF_VENDOR34": true,
+ "AF_VENDOR35": true,
+ "AF_VENDOR36": true,
+ "AF_VENDOR37": true,
+ "AF_VENDOR38": true,
+ "AF_VENDOR39": true,
+ "AF_VENDOR40": true,
+ "AF_VENDOR41": true,
+ "AF_VENDOR42": true,
+ "AF_VENDOR43": true,
+ "AF_VENDOR44": true,
+ "AF_VENDOR45": true,
+ "AF_VENDOR46": true,
+ "AF_VENDOR47": true,
+ "AF_WANPIPE": true,
+ "AF_X25": true,
+ "AI_CANONNAME": true,
+ "AI_NUMERICHOST": true,
+ "AI_PASSIVE": true,
+ "APPLICATION_ERROR": true,
+ "ARPHRD_ADAPT": true,
+ "ARPHRD_APPLETLK": true,
+ "ARPHRD_ARCNET": true,
+ "ARPHRD_ASH": true,
+ "ARPHRD_ATM": true,
+ "ARPHRD_AX25": true,
+ "ARPHRD_BIF": true,
+ "ARPHRD_CHAOS": true,
+ "ARPHRD_CISCO": true,
+ "ARPHRD_CSLIP": true,
+ "ARPHRD_CSLIP6": true,
+ "ARPHRD_DDCMP": true,
+ "ARPHRD_DLCI": true,
+ "ARPHRD_ECONET": true,
+ "ARPHRD_EETHER": true,
+ "ARPHRD_ETHER": true,
+ "ARPHRD_EUI64": true,
+ "ARPHRD_FCAL": true,
+ "ARPHRD_FCFABRIC": true,
+ "ARPHRD_FCPL": true,
+ "ARPHRD_FCPP": true,
+ "ARPHRD_FDDI": true,
+ "ARPHRD_FRAD": true,
+ "ARPHRD_FRELAY": true,
+ "ARPHRD_HDLC": true,
+ "ARPHRD_HIPPI": true,
+ "ARPHRD_HWX25": true,
+ "ARPHRD_IEEE1394": true,
+ "ARPHRD_IEEE802": true,
+ "ARPHRD_IEEE80211": true,
+ "ARPHRD_IEEE80211_PRISM": true,
+ "ARPHRD_IEEE80211_RADIOTAP": true,
+ "ARPHRD_IEEE802154": true,
+ "ARPHRD_IEEE802154_PHY": true,
+ "ARPHRD_IEEE802_TR": true,
+ "ARPHRD_INFINIBAND": true,
+ "ARPHRD_IPDDP": true,
+ "ARPHRD_IPGRE": true,
+ "ARPHRD_IRDA": true,
+ "ARPHRD_LAPB": true,
+ "ARPHRD_LOCALTLK": true,
+ "ARPHRD_LOOPBACK": true,
+ "ARPHRD_METRICOM": true,
+ "ARPHRD_NETROM": true,
+ "ARPHRD_NONE": true,
+ "ARPHRD_PIMREG": true,
+ "ARPHRD_PPP": true,
+ "ARPHRD_PRONET": true,
+ "ARPHRD_RAWHDLC": true,
+ "ARPHRD_ROSE": true,
+ "ARPHRD_RSRVD": true,
+ "ARPHRD_SIT": true,
+ "ARPHRD_SKIP": true,
+ "ARPHRD_SLIP": true,
+ "ARPHRD_SLIP6": true,
+ "ARPHRD_STRIP": true,
+ "ARPHRD_TUNNEL": true,
+ "ARPHRD_TUNNEL6": true,
+ "ARPHRD_VOID": true,
+ "ARPHRD_X25": true,
+ "AUTHTYPE_CLIENT": true,
+ "AUTHTYPE_SERVER": true,
+ "Accept": true,
+ "Accept4": true,
+ "AcceptEx": true,
+ "Access": true,
+ "Acct": true,
+ "AddrinfoW": true,
+ "Adjtime": true,
+ "Adjtimex": true,
+ "AttachLsf": true,
+ "B0": true,
+ "B1000000": true,
+ "B110": true,
+ "B115200": true,
+ "B1152000": true,
+ "B1200": true,
+ "B134": true,
+ "B14400": true,
+ "B150": true,
+ "B1500000": true,
+ "B1800": true,
+ "B19200": true,
+ "B200": true,
+ "B2000000": true,
+ "B230400": true,
+ "B2400": true,
+ "B2500000": true,
+ "B28800": true,
+ "B300": true,
+ "B3000000": true,
+ "B3500000": true,
+ "B38400": true,
+ "B4000000": true,
+ "B460800": true,
+ "B4800": true,
+ "B50": true,
+ "B500000": true,
+ "B57600": true,
+ "B576000": true,
+ "B600": true,
+ "B7200": true,
+ "B75": true,
+ "B76800": true,
+ "B921600": true,
+ "B9600": true,
+ "BASE_PROTOCOL": true,
+ "BIOCFEEDBACK": true,
+ "BIOCFLUSH": true,
+ "BIOCGBLEN": true,
+ "BIOCGDIRECTION": true,
+ "BIOCGDIRFILT": true,
+ "BIOCGDLT": true,
+ "BIOCGDLTLIST": true,
+ "BIOCGETBUFMODE": true,
+ "BIOCGETIF": true,
+ "BIOCGETZMAX": true,
+ "BIOCGFEEDBACK": true,
+ "BIOCGFILDROP": true,
+ "BIOCGHDRCMPLT": true,
+ "BIOCGRSIG": true,
+ "BIOCGRTIMEOUT": true,
+ "BIOCGSEESENT": true,
+ "BIOCGSTATS": true,
+ "BIOCGSTATSOLD": true,
+ "BIOCGTSTAMP": true,
+ "BIOCIMMEDIATE": true,
+ "BIOCLOCK": true,
+ "BIOCPROMISC": true,
+ "BIOCROTZBUF": true,
+ "BIOCSBLEN": true,
+ "BIOCSDIRECTION": true,
+ "BIOCSDIRFILT": true,
+ "BIOCSDLT": true,
+ "BIOCSETBUFMODE": true,
+ "BIOCSETF": true,
+ "BIOCSETFNR": true,
+ "BIOCSETIF": true,
+ "BIOCSETWF": true,
+ "BIOCSETZBUF": true,
+ "BIOCSFEEDBACK": true,
+ "BIOCSFILDROP": true,
+ "BIOCSHDRCMPLT": true,
+ "BIOCSRSIG": true,
+ "BIOCSRTIMEOUT": true,
+ "BIOCSSEESENT": true,
+ "BIOCSTCPF": true,
+ "BIOCSTSTAMP": true,
+ "BIOCSUDPF": true,
+ "BIOCVERSION": true,
+ "BPF_A": true,
+ "BPF_ABS": true,
+ "BPF_ADD": true,
+ "BPF_ALIGNMENT": true,
+ "BPF_ALIGNMENT32": true,
+ "BPF_ALU": true,
+ "BPF_AND": true,
+ "BPF_B": true,
+ "BPF_BUFMODE_BUFFER": true,
+ "BPF_BUFMODE_ZBUF": true,
+ "BPF_DFLTBUFSIZE": true,
+ "BPF_DIRECTION_IN": true,
+ "BPF_DIRECTION_OUT": true,
+ "BPF_DIV": true,
+ "BPF_H": true,
+ "BPF_IMM": true,
+ "BPF_IND": true,
+ "BPF_JA": true,
+ "BPF_JEQ": true,
+ "BPF_JGE": true,
+ "BPF_JGT": true,
+ "BPF_JMP": true,
+ "BPF_JSET": true,
+ "BPF_K": true,
+ "BPF_LD": true,
+ "BPF_LDX": true,
+ "BPF_LEN": true,
+ "BPF_LSH": true,
+ "BPF_MAJOR_VERSION": true,
+ "BPF_MAXBUFSIZE": true,
+ "BPF_MAXINSNS": true,
+ "BPF_MEM": true,
+ "BPF_MEMWORDS": true,
+ "BPF_MINBUFSIZE": true,
+ "BPF_MINOR_VERSION": true,
+ "BPF_MISC": true,
+ "BPF_MSH": true,
+ "BPF_MUL": true,
+ "BPF_NEG": true,
+ "BPF_OR": true,
+ "BPF_RELEASE": true,
+ "BPF_RET": true,
+ "BPF_RSH": true,
+ "BPF_ST": true,
+ "BPF_STX": true,
+ "BPF_SUB": true,
+ "BPF_TAX": true,
+ "BPF_TXA": true,
+ "BPF_T_BINTIME": true,
+ "BPF_T_BINTIME_FAST": true,
+ "BPF_T_BINTIME_MONOTONIC": true,
+ "BPF_T_BINTIME_MONOTONIC_FAST": true,
+ "BPF_T_FAST": true,
+ "BPF_T_FLAG_MASK": true,
+ "BPF_T_FORMAT_MASK": true,
+ "BPF_T_MICROTIME": true,
+ "BPF_T_MICROTIME_FAST": true,
+ "BPF_T_MICROTIME_MONOTONIC": true,
+ "BPF_T_MICROTIME_MONOTONIC_FAST": true,
+ "BPF_T_MONOTONIC": true,
+ "BPF_T_MONOTONIC_FAST": true,
+ "BPF_T_NANOTIME": true,
+ "BPF_T_NANOTIME_FAST": true,
+ "BPF_T_NANOTIME_MONOTONIC": true,
+ "BPF_T_NANOTIME_MONOTONIC_FAST": true,
+ "BPF_T_NONE": true,
+ "BPF_T_NORMAL": true,
+ "BPF_W": true,
+ "BPF_X": true,
+ "BRKINT": true,
+ "Bind": true,
+ "BindToDevice": true,
+ "BpfBuflen": true,
+ "BpfDatalink": true,
+ "BpfHdr": true,
+ "BpfHeadercmpl": true,
+ "BpfInsn": true,
+ "BpfInterface": true,
+ "BpfJump": true,
+ "BpfProgram": true,
+ "BpfStat": true,
+ "BpfStats": true,
+ "BpfStmt": true,
+ "BpfTimeout": true,
+ "BpfTimeval": true,
+ "BpfVersion": true,
+ "BpfZbuf": true,
+ "BpfZbufHeader": true,
+ "ByHandleFileInformation": true,
+ "BytePtrFromString": true,
+ "ByteSliceFromString": true,
+ "CCR0_FLUSH": true,
+ "CERT_CHAIN_POLICY_AUTHENTICODE": true,
+ "CERT_CHAIN_POLICY_AUTHENTICODE_TS": true,
+ "CERT_CHAIN_POLICY_BASE": true,
+ "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS": true,
+ "CERT_CHAIN_POLICY_EV": true,
+ "CERT_CHAIN_POLICY_MICROSOFT_ROOT": true,
+ "CERT_CHAIN_POLICY_NT_AUTH": true,
+ "CERT_CHAIN_POLICY_SSL": true,
+ "CERT_E_CN_NO_MATCH": true,
+ "CERT_E_EXPIRED": true,
+ "CERT_E_PURPOSE": true,
+ "CERT_E_ROLE": true,
+ "CERT_E_UNTRUSTEDROOT": true,
+ "CERT_STORE_ADD_ALWAYS": true,
+ "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG": true,
+ "CERT_STORE_PROV_MEMORY": true,
+ "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT": true,
+ "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT": true,
+ "CERT_TRUST_INVALID_BASIC_CONSTRAINTS": true,
+ "CERT_TRUST_INVALID_EXTENSION": true,
+ "CERT_TRUST_INVALID_NAME_CONSTRAINTS": true,
+ "CERT_TRUST_INVALID_POLICY_CONSTRAINTS": true,
+ "CERT_TRUST_IS_CYCLIC": true,
+ "CERT_TRUST_IS_EXPLICIT_DISTRUST": true,
+ "CERT_TRUST_IS_NOT_SIGNATURE_VALID": true,
+ "CERT_TRUST_IS_NOT_TIME_VALID": true,
+ "CERT_TRUST_IS_NOT_VALID_FOR_USAGE": true,
+ "CERT_TRUST_IS_OFFLINE_REVOCATION": true,
+ "CERT_TRUST_IS_REVOKED": true,
+ "CERT_TRUST_IS_UNTRUSTED_ROOT": true,
+ "CERT_TRUST_NO_ERROR": true,
+ "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY": true,
+ "CERT_TRUST_REVOCATION_STATUS_UNKNOWN": true,
+ "CFLUSH": true,
+ "CLOCAL": true,
+ "CLONE_CHILD_CLEARTID": true,
+ "CLONE_CHILD_SETTID": true,
+ "CLONE_CSIGNAL": true,
+ "CLONE_DETACHED": true,
+ "CLONE_FILES": true,
+ "CLONE_FS": true,
+ "CLONE_IO": true,
+ "CLONE_NEWIPC": true,
+ "CLONE_NEWNET": true,
+ "CLONE_NEWNS": true,
+ "CLONE_NEWPID": true,
+ "CLONE_NEWUSER": true,
+ "CLONE_NEWUTS": true,
+ "CLONE_PARENT": true,
+ "CLONE_PARENT_SETTID": true,
+ "CLONE_PID": true,
+ "CLONE_PTRACE": true,
+ "CLONE_SETTLS": true,
+ "CLONE_SIGHAND": true,
+ "CLONE_SYSVSEM": true,
+ "CLONE_THREAD": true,
+ "CLONE_UNTRACED": true,
+ "CLONE_VFORK": true,
+ "CLONE_VM": true,
+ "CPUID_CFLUSH": true,
+ "CREAD": true,
+ "CREATE_ALWAYS": true,
+ "CREATE_NEW": true,
+ "CREATE_NEW_PROCESS_GROUP": true,
+ "CREATE_UNICODE_ENVIRONMENT": true,
+ "CRYPT_DEFAULT_CONTAINER_OPTIONAL": true,
+ "CRYPT_DELETEKEYSET": true,
+ "CRYPT_MACHINE_KEYSET": true,
+ "CRYPT_NEWKEYSET": true,
+ "CRYPT_SILENT": true,
+ "CRYPT_VERIFYCONTEXT": true,
+ "CS5": true,
+ "CS6": true,
+ "CS7": true,
+ "CS8": true,
+ "CSIZE": true,
+ "CSTART": true,
+ "CSTATUS": true,
+ "CSTOP": true,
+ "CSTOPB": true,
+ "CSUSP": true,
+ "CTL_MAXNAME": true,
+ "CTL_NET": true,
+ "CTL_QUERY": true,
+ "CTRL_BREAK_EVENT": true,
+ "CTRL_C_EVENT": true,
+ "CancelIo": true,
+ "CancelIoEx": true,
+ "CertAddCertificateContextToStore": true,
+ "CertChainContext": true,
+ "CertChainElement": true,
+ "CertChainPara": true,
+ "CertChainPolicyPara": true,
+ "CertChainPolicyStatus": true,
+ "CertCloseStore": true,
+ "CertContext": true,
+ "CertCreateCertificateContext": true,
+ "CertEnhKeyUsage": true,
+ "CertEnumCertificatesInStore": true,
+ "CertFreeCertificateChain": true,
+ "CertFreeCertificateContext": true,
+ "CertGetCertificateChain": true,
+ "CertInfo": true,
+ "CertOpenStore": true,
+ "CertOpenSystemStore": true,
+ "CertRevocationCrlInfo": true,
+ "CertRevocationInfo": true,
+ "CertSimpleChain": true,
+ "CertTrustListInfo": true,
+ "CertTrustStatus": true,
+ "CertUsageMatch": true,
+ "CertVerifyCertificateChainPolicy": true,
+ "Chdir": true,
+ "CheckBpfVersion": true,
+ "Chflags": true,
+ "Chmod": true,
+ "Chown": true,
+ "Chroot": true,
+ "Clearenv": true,
+ "Close": true,
+ "CloseHandle": true,
+ "CloseOnExec": true,
+ "Closesocket": true,
+ "CmsgLen": true,
+ "CmsgSpace": true,
+ "Cmsghdr": true,
+ "CommandLineToArgv": true,
+ "ComputerName": true,
+ "Conn": true,
+ "Connect": true,
+ "ConnectEx": true,
+ "ConvertSidToStringSid": true,
+ "ConvertStringSidToSid": true,
+ "CopySid": true,
+ "Creat": true,
+ "CreateDirectory": true,
+ "CreateFile": true,
+ "CreateFileMapping": true,
+ "CreateHardLink": true,
+ "CreateIoCompletionPort": true,
+ "CreatePipe": true,
+ "CreateProcess": true,
+ "CreateProcessAsUser": true,
+ "CreateSymbolicLink": true,
+ "CreateToolhelp32Snapshot": true,
+ "Credential": true,
+ "CryptAcquireContext": true,
+ "CryptGenRandom": true,
+ "CryptReleaseContext": true,
+ "DIOCBSFLUSH": true,
+ "DIOCOSFPFLUSH": true,
+ "DLL": true,
+ "DLLError": true,
+ "DLT_A429": true,
+ "DLT_A653_ICM": true,
+ "DLT_AIRONET_HEADER": true,
+ "DLT_AOS": true,
+ "DLT_APPLE_IP_OVER_IEEE1394": true,
+ "DLT_ARCNET": true,
+ "DLT_ARCNET_LINUX": true,
+ "DLT_ATM_CLIP": true,
+ "DLT_ATM_RFC1483": true,
+ "DLT_AURORA": true,
+ "DLT_AX25": true,
+ "DLT_AX25_KISS": true,
+ "DLT_BACNET_MS_TP": true,
+ "DLT_BLUETOOTH_HCI_H4": true,
+ "DLT_BLUETOOTH_HCI_H4_WITH_PHDR": true,
+ "DLT_CAN20B": true,
+ "DLT_CAN_SOCKETCAN": true,
+ "DLT_CHAOS": true,
+ "DLT_CHDLC": true,
+ "DLT_CISCO_IOS": true,
+ "DLT_C_HDLC": true,
+ "DLT_C_HDLC_WITH_DIR": true,
+ "DLT_DBUS": true,
+ "DLT_DECT": true,
+ "DLT_DOCSIS": true,
+ "DLT_DVB_CI": true,
+ "DLT_ECONET": true,
+ "DLT_EN10MB": true,
+ "DLT_EN3MB": true,
+ "DLT_ENC": true,
+ "DLT_ERF": true,
+ "DLT_ERF_ETH": true,
+ "DLT_ERF_POS": true,
+ "DLT_FC_2": true,
+ "DLT_FC_2_WITH_FRAME_DELIMS": true,
+ "DLT_FDDI": true,
+ "DLT_FLEXRAY": true,
+ "DLT_FRELAY": true,
+ "DLT_FRELAY_WITH_DIR": true,
+ "DLT_GCOM_SERIAL": true,
+ "DLT_GCOM_T1E1": true,
+ "DLT_GPF_F": true,
+ "DLT_GPF_T": true,
+ "DLT_GPRS_LLC": true,
+ "DLT_GSMTAP_ABIS": true,
+ "DLT_GSMTAP_UM": true,
+ "DLT_HDLC": true,
+ "DLT_HHDLC": true,
+ "DLT_HIPPI": true,
+ "DLT_IBM_SN": true,
+ "DLT_IBM_SP": true,
+ "DLT_IEEE802": true,
+ "DLT_IEEE802_11": true,
+ "DLT_IEEE802_11_RADIO": true,
+ "DLT_IEEE802_11_RADIO_AVS": true,
+ "DLT_IEEE802_15_4": true,
+ "DLT_IEEE802_15_4_LINUX": true,
+ "DLT_IEEE802_15_4_NOFCS": true,
+ "DLT_IEEE802_15_4_NONASK_PHY": true,
+ "DLT_IEEE802_16_MAC_CPS": true,
+ "DLT_IEEE802_16_MAC_CPS_RADIO": true,
+ "DLT_IPFILTER": true,
+ "DLT_IPMB": true,
+ "DLT_IPMB_LINUX": true,
+ "DLT_IPNET": true,
+ "DLT_IPOIB": true,
+ "DLT_IPV4": true,
+ "DLT_IPV6": true,
+ "DLT_IP_OVER_FC": true,
+ "DLT_JUNIPER_ATM1": true,
+ "DLT_JUNIPER_ATM2": true,
+ "DLT_JUNIPER_ATM_CEMIC": true,
+ "DLT_JUNIPER_CHDLC": true,
+ "DLT_JUNIPER_ES": true,
+ "DLT_JUNIPER_ETHER": true,
+ "DLT_JUNIPER_FIBRECHANNEL": true,
+ "DLT_JUNIPER_FRELAY": true,
+ "DLT_JUNIPER_GGSN": true,
+ "DLT_JUNIPER_ISM": true,
+ "DLT_JUNIPER_MFR": true,
+ "DLT_JUNIPER_MLFR": true,
+ "DLT_JUNIPER_MLPPP": true,
+ "DLT_JUNIPER_MONITOR": true,
+ "DLT_JUNIPER_PIC_PEER": true,
+ "DLT_JUNIPER_PPP": true,
+ "DLT_JUNIPER_PPPOE": true,
+ "DLT_JUNIPER_PPPOE_ATM": true,
+ "DLT_JUNIPER_SERVICES": true,
+ "DLT_JUNIPER_SRX_E2E": true,
+ "DLT_JUNIPER_ST": true,
+ "DLT_JUNIPER_VP": true,
+ "DLT_JUNIPER_VS": true,
+ "DLT_LAPB_WITH_DIR": true,
+ "DLT_LAPD": true,
+ "DLT_LIN": true,
+ "DLT_LINUX_EVDEV": true,
+ "DLT_LINUX_IRDA": true,
+ "DLT_LINUX_LAPD": true,
+ "DLT_LINUX_PPP_WITHDIRECTION": true,
+ "DLT_LINUX_SLL": true,
+ "DLT_LOOP": true,
+ "DLT_LTALK": true,
+ "DLT_MATCHING_MAX": true,
+ "DLT_MATCHING_MIN": true,
+ "DLT_MFR": true,
+ "DLT_MOST": true,
+ "DLT_MPEG_2_TS": true,
+ "DLT_MPLS": true,
+ "DLT_MTP2": true,
+ "DLT_MTP2_WITH_PHDR": true,
+ "DLT_MTP3": true,
+ "DLT_MUX27010": true,
+ "DLT_NETANALYZER": true,
+ "DLT_NETANALYZER_TRANSPARENT": true,
+ "DLT_NFC_LLCP": true,
+ "DLT_NFLOG": true,
+ "DLT_NG40": true,
+ "DLT_NULL": true,
+ "DLT_PCI_EXP": true,
+ "DLT_PFLOG": true,
+ "DLT_PFSYNC": true,
+ "DLT_PPI": true,
+ "DLT_PPP": true,
+ "DLT_PPP_BSDOS": true,
+ "DLT_PPP_ETHER": true,
+ "DLT_PPP_PPPD": true,
+ "DLT_PPP_SERIAL": true,
+ "DLT_PPP_WITH_DIR": true,
+ "DLT_PPP_WITH_DIRECTION": true,
+ "DLT_PRISM_HEADER": true,
+ "DLT_PRONET": true,
+ "DLT_RAIF1": true,
+ "DLT_RAW": true,
+ "DLT_RAWAF_MASK": true,
+ "DLT_RIO": true,
+ "DLT_SCCP": true,
+ "DLT_SITA": true,
+ "DLT_SLIP": true,
+ "DLT_SLIP_BSDOS": true,
+ "DLT_STANAG_5066_D_PDU": true,
+ "DLT_SUNATM": true,
+ "DLT_SYMANTEC_FIREWALL": true,
+ "DLT_TZSP": true,
+ "DLT_USB": true,
+ "DLT_USB_LINUX": true,
+ "DLT_USB_LINUX_MMAPPED": true,
+ "DLT_USER0": true,
+ "DLT_USER1": true,
+ "DLT_USER10": true,
+ "DLT_USER11": true,
+ "DLT_USER12": true,
+ "DLT_USER13": true,
+ "DLT_USER14": true,
+ "DLT_USER15": true,
+ "DLT_USER2": true,
+ "DLT_USER3": true,
+ "DLT_USER4": true,
+ "DLT_USER5": true,
+ "DLT_USER6": true,
+ "DLT_USER7": true,
+ "DLT_USER8": true,
+ "DLT_USER9": true,
+ "DLT_WIHART": true,
+ "DLT_X2E_SERIAL": true,
+ "DLT_X2E_XORAYA": true,
+ "DNSMXData": true,
+ "DNSPTRData": true,
+ "DNSRecord": true,
+ "DNSSRVData": true,
+ "DNSTXTData": true,
+ "DNS_INFO_NO_RECORDS": true,
+ "DNS_TYPE_A": true,
+ "DNS_TYPE_A6": true,
+ "DNS_TYPE_AAAA": true,
+ "DNS_TYPE_ADDRS": true,
+ "DNS_TYPE_AFSDB": true,
+ "DNS_TYPE_ALL": true,
+ "DNS_TYPE_ANY": true,
+ "DNS_TYPE_ATMA": true,
+ "DNS_TYPE_AXFR": true,
+ "DNS_TYPE_CERT": true,
+ "DNS_TYPE_CNAME": true,
+ "DNS_TYPE_DHCID": true,
+ "DNS_TYPE_DNAME": true,
+ "DNS_TYPE_DNSKEY": true,
+ "DNS_TYPE_DS": true,
+ "DNS_TYPE_EID": true,
+ "DNS_TYPE_GID": true,
+ "DNS_TYPE_GPOS": true,
+ "DNS_TYPE_HINFO": true,
+ "DNS_TYPE_ISDN": true,
+ "DNS_TYPE_IXFR": true,
+ "DNS_TYPE_KEY": true,
+ "DNS_TYPE_KX": true,
+ "DNS_TYPE_LOC": true,
+ "DNS_TYPE_MAILA": true,
+ "DNS_TYPE_MAILB": true,
+ "DNS_TYPE_MB": true,
+ "DNS_TYPE_MD": true,
+ "DNS_TYPE_MF": true,
+ "DNS_TYPE_MG": true,
+ "DNS_TYPE_MINFO": true,
+ "DNS_TYPE_MR": true,
+ "DNS_TYPE_MX": true,
+ "DNS_TYPE_NAPTR": true,
+ "DNS_TYPE_NBSTAT": true,
+ "DNS_TYPE_NIMLOC": true,
+ "DNS_TYPE_NS": true,
+ "DNS_TYPE_NSAP": true,
+ "DNS_TYPE_NSAPPTR": true,
+ "DNS_TYPE_NSEC": true,
+ "DNS_TYPE_NULL": true,
+ "DNS_TYPE_NXT": true,
+ "DNS_TYPE_OPT": true,
+ "DNS_TYPE_PTR": true,
+ "DNS_TYPE_PX": true,
+ "DNS_TYPE_RP": true,
+ "DNS_TYPE_RRSIG": true,
+ "DNS_TYPE_RT": true,
+ "DNS_TYPE_SIG": true,
+ "DNS_TYPE_SINK": true,
+ "DNS_TYPE_SOA": true,
+ "DNS_TYPE_SRV": true,
+ "DNS_TYPE_TEXT": true,
+ "DNS_TYPE_TKEY": true,
+ "DNS_TYPE_TSIG": true,
+ "DNS_TYPE_UID": true,
+ "DNS_TYPE_UINFO": true,
+ "DNS_TYPE_UNSPEC": true,
+ "DNS_TYPE_WINS": true,
+ "DNS_TYPE_WINSR": true,
+ "DNS_TYPE_WKS": true,
+ "DNS_TYPE_X25": true,
+ "DT_BLK": true,
+ "DT_CHR": true,
+ "DT_DIR": true,
+ "DT_FIFO": true,
+ "DT_LNK": true,
+ "DT_REG": true,
+ "DT_SOCK": true,
+ "DT_UNKNOWN": true,
+ "DT_WHT": true,
+ "DUPLICATE_CLOSE_SOURCE": true,
+ "DUPLICATE_SAME_ACCESS": true,
+ "DeleteFile": true,
+ "DetachLsf": true,
+ "DeviceIoControl": true,
+ "Dirent": true,
+ "DnsNameCompare": true,
+ "DnsQuery": true,
+ "DnsRecordListFree": true,
+ "DnsSectionAdditional": true,
+ "DnsSectionAnswer": true,
+ "DnsSectionAuthority": true,
+ "DnsSectionQuestion": true,
+ "Dup": true,
+ "Dup2": true,
+ "Dup3": true,
+ "DuplicateHandle": true,
+ "E2BIG": true,
+ "EACCES": true,
+ "EADDRINUSE": true,
+ "EADDRNOTAVAIL": true,
+ "EADV": true,
+ "EAFNOSUPPORT": true,
+ "EAGAIN": true,
+ "EALREADY": true,
+ "EAUTH": true,
+ "EBADARCH": true,
+ "EBADE": true,
+ "EBADEXEC": true,
+ "EBADF": true,
+ "EBADFD": true,
+ "EBADMACHO": true,
+ "EBADMSG": true,
+ "EBADR": true,
+ "EBADRPC": true,
+ "EBADRQC": true,
+ "EBADSLT": true,
+ "EBFONT": true,
+ "EBUSY": true,
+ "ECANCELED": true,
+ "ECAPMODE": true,
+ "ECHILD": true,
+ "ECHO": true,
+ "ECHOCTL": true,
+ "ECHOE": true,
+ "ECHOK": true,
+ "ECHOKE": true,
+ "ECHONL": true,
+ "ECHOPRT": true,
+ "ECHRNG": true,
+ "ECOMM": true,
+ "ECONNABORTED": true,
+ "ECONNREFUSED": true,
+ "ECONNRESET": true,
+ "EDEADLK": true,
+ "EDEADLOCK": true,
+ "EDESTADDRREQ": true,
+ "EDEVERR": true,
+ "EDOM": true,
+ "EDOOFUS": true,
+ "EDOTDOT": true,
+ "EDQUOT": true,
+ "EEXIST": true,
+ "EFAULT": true,
+ "EFBIG": true,
+ "EFER_LMA": true,
+ "EFER_LME": true,
+ "EFER_NXE": true,
+ "EFER_SCE": true,
+ "EFTYPE": true,
+ "EHOSTDOWN": true,
+ "EHOSTUNREACH": true,
+ "EHWPOISON": true,
+ "EIDRM": true,
+ "EILSEQ": true,
+ "EINPROGRESS": true,
+ "EINTR": true,
+ "EINVAL": true,
+ "EIO": true,
+ "EIPSEC": true,
+ "EISCONN": true,
+ "EISDIR": true,
+ "EISNAM": true,
+ "EKEYEXPIRED": true,
+ "EKEYREJECTED": true,
+ "EKEYREVOKED": true,
+ "EL2HLT": true,
+ "EL2NSYNC": true,
+ "EL3HLT": true,
+ "EL3RST": true,
+ "ELAST": true,
+ "ELF_NGREG": true,
+ "ELF_PRARGSZ": true,
+ "ELIBACC": true,
+ "ELIBBAD": true,
+ "ELIBEXEC": true,
+ "ELIBMAX": true,
+ "ELIBSCN": true,
+ "ELNRNG": true,
+ "ELOOP": true,
+ "EMEDIUMTYPE": true,
+ "EMFILE": true,
+ "EMLINK": true,
+ "EMSGSIZE": true,
+ "EMT_TAGOVF": true,
+ "EMULTIHOP": true,
+ "EMUL_ENABLED": true,
+ "EMUL_LINUX": true,
+ "EMUL_LINUX32": true,
+ "EMUL_MAXID": true,
+ "EMUL_NATIVE": true,
+ "ENAMETOOLONG": true,
+ "ENAVAIL": true,
+ "ENDRUNDISC": true,
+ "ENEEDAUTH": true,
+ "ENETDOWN": true,
+ "ENETRESET": true,
+ "ENETUNREACH": true,
+ "ENFILE": true,
+ "ENOANO": true,
+ "ENOATTR": true,
+ "ENOBUFS": true,
+ "ENOCSI": true,
+ "ENODATA": true,
+ "ENODEV": true,
+ "ENOENT": true,
+ "ENOEXEC": true,
+ "ENOKEY": true,
+ "ENOLCK": true,
+ "ENOLINK": true,
+ "ENOMEDIUM": true,
+ "ENOMEM": true,
+ "ENOMSG": true,
+ "ENONET": true,
+ "ENOPKG": true,
+ "ENOPOLICY": true,
+ "ENOPROTOOPT": true,
+ "ENOSPC": true,
+ "ENOSR": true,
+ "ENOSTR": true,
+ "ENOSYS": true,
+ "ENOTBLK": true,
+ "ENOTCAPABLE": true,
+ "ENOTCONN": true,
+ "ENOTDIR": true,
+ "ENOTEMPTY": true,
+ "ENOTNAM": true,
+ "ENOTRECOVERABLE": true,
+ "ENOTSOCK": true,
+ "ENOTSUP": true,
+ "ENOTTY": true,
+ "ENOTUNIQ": true,
+ "ENXIO": true,
+ "EN_SW_CTL_INF": true,
+ "EN_SW_CTL_PREC": true,
+ "EN_SW_CTL_ROUND": true,
+ "EN_SW_DATACHAIN": true,
+ "EN_SW_DENORM": true,
+ "EN_SW_INVOP": true,
+ "EN_SW_OVERFLOW": true,
+ "EN_SW_PRECLOSS": true,
+ "EN_SW_UNDERFLOW": true,
+ "EN_SW_ZERODIV": true,
+ "EOPNOTSUPP": true,
+ "EOVERFLOW": true,
+ "EOWNERDEAD": true,
+ "EPERM": true,
+ "EPFNOSUPPORT": true,
+ "EPIPE": true,
+ "EPOLLERR": true,
+ "EPOLLET": true,
+ "EPOLLHUP": true,
+ "EPOLLIN": true,
+ "EPOLLMSG": true,
+ "EPOLLONESHOT": true,
+ "EPOLLOUT": true,
+ "EPOLLPRI": true,
+ "EPOLLRDBAND": true,
+ "EPOLLRDHUP": true,
+ "EPOLLRDNORM": true,
+ "EPOLLWRBAND": true,
+ "EPOLLWRNORM": true,
+ "EPOLL_CLOEXEC": true,
+ "EPOLL_CTL_ADD": true,
+ "EPOLL_CTL_DEL": true,
+ "EPOLL_CTL_MOD": true,
+ "EPOLL_NONBLOCK": true,
+ "EPROCLIM": true,
+ "EPROCUNAVAIL": true,
+ "EPROGMISMATCH": true,
+ "EPROGUNAVAIL": true,
+ "EPROTO": true,
+ "EPROTONOSUPPORT": true,
+ "EPROTOTYPE": true,
+ "EPWROFF": true,
+ "ERANGE": true,
+ "EREMCHG": true,
+ "EREMOTE": true,
+ "EREMOTEIO": true,
+ "ERESTART": true,
+ "ERFKILL": true,
+ "EROFS": true,
+ "ERPCMISMATCH": true,
+ "ERROR_ACCESS_DENIED": true,
+ "ERROR_ALREADY_EXISTS": true,
+ "ERROR_BROKEN_PIPE": true,
+ "ERROR_BUFFER_OVERFLOW": true,
+ "ERROR_DIR_NOT_EMPTY": true,
+ "ERROR_ENVVAR_NOT_FOUND": true,
+ "ERROR_FILE_EXISTS": true,
+ "ERROR_FILE_NOT_FOUND": true,
+ "ERROR_HANDLE_EOF": true,
+ "ERROR_INSUFFICIENT_BUFFER": true,
+ "ERROR_IO_PENDING": true,
+ "ERROR_MOD_NOT_FOUND": true,
+ "ERROR_MORE_DATA": true,
+ "ERROR_NETNAME_DELETED": true,
+ "ERROR_NOT_FOUND": true,
+ "ERROR_NO_MORE_FILES": true,
+ "ERROR_OPERATION_ABORTED": true,
+ "ERROR_PATH_NOT_FOUND": true,
+ "ERROR_PRIVILEGE_NOT_HELD": true,
+ "ERROR_PROC_NOT_FOUND": true,
+ "ESHLIBVERS": true,
+ "ESHUTDOWN": true,
+ "ESOCKTNOSUPPORT": true,
+ "ESPIPE": true,
+ "ESRCH": true,
+ "ESRMNT": true,
+ "ESTALE": true,
+ "ESTRPIPE": true,
+ "ETHERCAP_JUMBO_MTU": true,
+ "ETHERCAP_VLAN_HWTAGGING": true,
+ "ETHERCAP_VLAN_MTU": true,
+ "ETHERMIN": true,
+ "ETHERMTU": true,
+ "ETHERMTU_JUMBO": true,
+ "ETHERTYPE_8023": true,
+ "ETHERTYPE_AARP": true,
+ "ETHERTYPE_ACCTON": true,
+ "ETHERTYPE_AEONIC": true,
+ "ETHERTYPE_ALPHA": true,
+ "ETHERTYPE_AMBER": true,
+ "ETHERTYPE_AMOEBA": true,
+ "ETHERTYPE_AOE": true,
+ "ETHERTYPE_APOLLO": true,
+ "ETHERTYPE_APOLLODOMAIN": true,
+ "ETHERTYPE_APPLETALK": true,
+ "ETHERTYPE_APPLITEK": true,
+ "ETHERTYPE_ARGONAUT": true,
+ "ETHERTYPE_ARP": true,
+ "ETHERTYPE_AT": true,
+ "ETHERTYPE_ATALK": true,
+ "ETHERTYPE_ATOMIC": true,
+ "ETHERTYPE_ATT": true,
+ "ETHERTYPE_ATTSTANFORD": true,
+ "ETHERTYPE_AUTOPHON": true,
+ "ETHERTYPE_AXIS": true,
+ "ETHERTYPE_BCLOOP": true,
+ "ETHERTYPE_BOFL": true,
+ "ETHERTYPE_CABLETRON": true,
+ "ETHERTYPE_CHAOS": true,
+ "ETHERTYPE_COMDESIGN": true,
+ "ETHERTYPE_COMPUGRAPHIC": true,
+ "ETHERTYPE_COUNTERPOINT": true,
+ "ETHERTYPE_CRONUS": true,
+ "ETHERTYPE_CRONUSVLN": true,
+ "ETHERTYPE_DCA": true,
+ "ETHERTYPE_DDE": true,
+ "ETHERTYPE_DEBNI": true,
+ "ETHERTYPE_DECAM": true,
+ "ETHERTYPE_DECCUST": true,
+ "ETHERTYPE_DECDIAG": true,
+ "ETHERTYPE_DECDNS": true,
+ "ETHERTYPE_DECDTS": true,
+ "ETHERTYPE_DECEXPER": true,
+ "ETHERTYPE_DECLAST": true,
+ "ETHERTYPE_DECLTM": true,
+ "ETHERTYPE_DECMUMPS": true,
+ "ETHERTYPE_DECNETBIOS": true,
+ "ETHERTYPE_DELTACON": true,
+ "ETHERTYPE_DIDDLE": true,
+ "ETHERTYPE_DLOG1": true,
+ "ETHERTYPE_DLOG2": true,
+ "ETHERTYPE_DN": true,
+ "ETHERTYPE_DOGFIGHT": true,
+ "ETHERTYPE_DSMD": true,
+ "ETHERTYPE_ECMA": true,
+ "ETHERTYPE_ENCRYPT": true,
+ "ETHERTYPE_ES": true,
+ "ETHERTYPE_EXCELAN": true,
+ "ETHERTYPE_EXPERDATA": true,
+ "ETHERTYPE_FLIP": true,
+ "ETHERTYPE_FLOWCONTROL": true,
+ "ETHERTYPE_FRARP": true,
+ "ETHERTYPE_GENDYN": true,
+ "ETHERTYPE_HAYES": true,
+ "ETHERTYPE_HIPPI_FP": true,
+ "ETHERTYPE_HITACHI": true,
+ "ETHERTYPE_HP": true,
+ "ETHERTYPE_IEEEPUP": true,
+ "ETHERTYPE_IEEEPUPAT": true,
+ "ETHERTYPE_IMLBL": true,
+ "ETHERTYPE_IMLBLDIAG": true,
+ "ETHERTYPE_IP": true,
+ "ETHERTYPE_IPAS": true,
+ "ETHERTYPE_IPV6": true,
+ "ETHERTYPE_IPX": true,
+ "ETHERTYPE_IPXNEW": true,
+ "ETHERTYPE_KALPANA": true,
+ "ETHERTYPE_LANBRIDGE": true,
+ "ETHERTYPE_LANPROBE": true,
+ "ETHERTYPE_LAT": true,
+ "ETHERTYPE_LBACK": true,
+ "ETHERTYPE_LITTLE": true,
+ "ETHERTYPE_LLDP": true,
+ "ETHERTYPE_LOGICRAFT": true,
+ "ETHERTYPE_LOOPBACK": true,
+ "ETHERTYPE_MATRA": true,
+ "ETHERTYPE_MAX": true,
+ "ETHERTYPE_MERIT": true,
+ "ETHERTYPE_MICP": true,
+ "ETHERTYPE_MOPDL": true,
+ "ETHERTYPE_MOPRC": true,
+ "ETHERTYPE_MOTOROLA": true,
+ "ETHERTYPE_MPLS": true,
+ "ETHERTYPE_MPLS_MCAST": true,
+ "ETHERTYPE_MUMPS": true,
+ "ETHERTYPE_NBPCC": true,
+ "ETHERTYPE_NBPCLAIM": true,
+ "ETHERTYPE_NBPCLREQ": true,
+ "ETHERTYPE_NBPCLRSP": true,
+ "ETHERTYPE_NBPCREQ": true,
+ "ETHERTYPE_NBPCRSP": true,
+ "ETHERTYPE_NBPDG": true,
+ "ETHERTYPE_NBPDGB": true,
+ "ETHERTYPE_NBPDLTE": true,
+ "ETHERTYPE_NBPRAR": true,
+ "ETHERTYPE_NBPRAS": true,
+ "ETHERTYPE_NBPRST": true,
+ "ETHERTYPE_NBPSCD": true,
+ "ETHERTYPE_NBPVCD": true,
+ "ETHERTYPE_NBS": true,
+ "ETHERTYPE_NCD": true,
+ "ETHERTYPE_NESTAR": true,
+ "ETHERTYPE_NETBEUI": true,
+ "ETHERTYPE_NOVELL": true,
+ "ETHERTYPE_NS": true,
+ "ETHERTYPE_NSAT": true,
+ "ETHERTYPE_NSCOMPAT": true,
+ "ETHERTYPE_NTRAILER": true,
+ "ETHERTYPE_OS9": true,
+ "ETHERTYPE_OS9NET": true,
+ "ETHERTYPE_PACER": true,
+ "ETHERTYPE_PAE": true,
+ "ETHERTYPE_PCS": true,
+ "ETHERTYPE_PLANNING": true,
+ "ETHERTYPE_PPP": true,
+ "ETHERTYPE_PPPOE": true,
+ "ETHERTYPE_PPPOEDISC": true,
+ "ETHERTYPE_PRIMENTS": true,
+ "ETHERTYPE_PUP": true,
+ "ETHERTYPE_PUPAT": true,
+ "ETHERTYPE_QINQ": true,
+ "ETHERTYPE_RACAL": true,
+ "ETHERTYPE_RATIONAL": true,
+ "ETHERTYPE_RAWFR": true,
+ "ETHERTYPE_RCL": true,
+ "ETHERTYPE_RDP": true,
+ "ETHERTYPE_RETIX": true,
+ "ETHERTYPE_REVARP": true,
+ "ETHERTYPE_SCA": true,
+ "ETHERTYPE_SECTRA": true,
+ "ETHERTYPE_SECUREDATA": true,
+ "ETHERTYPE_SGITW": true,
+ "ETHERTYPE_SG_BOUNCE": true,
+ "ETHERTYPE_SG_DIAG": true,
+ "ETHERTYPE_SG_NETGAMES": true,
+ "ETHERTYPE_SG_RESV": true,
+ "ETHERTYPE_SIMNET": true,
+ "ETHERTYPE_SLOW": true,
+ "ETHERTYPE_SLOWPROTOCOLS": true,
+ "ETHERTYPE_SNA": true,
+ "ETHERTYPE_SNMP": true,
+ "ETHERTYPE_SONIX": true,
+ "ETHERTYPE_SPIDER": true,
+ "ETHERTYPE_SPRITE": true,
+ "ETHERTYPE_STP": true,
+ "ETHERTYPE_TALARIS": true,
+ "ETHERTYPE_TALARISMC": true,
+ "ETHERTYPE_TCPCOMP": true,
+ "ETHERTYPE_TCPSM": true,
+ "ETHERTYPE_TEC": true,
+ "ETHERTYPE_TIGAN": true,
+ "ETHERTYPE_TRAIL": true,
+ "ETHERTYPE_TRANSETHER": true,
+ "ETHERTYPE_TYMSHARE": true,
+ "ETHERTYPE_UBBST": true,
+ "ETHERTYPE_UBDEBUG": true,
+ "ETHERTYPE_UBDIAGLOOP": true,
+ "ETHERTYPE_UBDL": true,
+ "ETHERTYPE_UBNIU": true,
+ "ETHERTYPE_UBNMC": true,
+ "ETHERTYPE_VALID": true,
+ "ETHERTYPE_VARIAN": true,
+ "ETHERTYPE_VAXELN": true,
+ "ETHERTYPE_VEECO": true,
+ "ETHERTYPE_VEXP": true,
+ "ETHERTYPE_VGLAB": true,
+ "ETHERTYPE_VINES": true,
+ "ETHERTYPE_VINESECHO": true,
+ "ETHERTYPE_VINESLOOP": true,
+ "ETHERTYPE_VITAL": true,
+ "ETHERTYPE_VLAN": true,
+ "ETHERTYPE_VLTLMAN": true,
+ "ETHERTYPE_VPROD": true,
+ "ETHERTYPE_VURESERVED": true,
+ "ETHERTYPE_WATERLOO": true,
+ "ETHERTYPE_WELLFLEET": true,
+ "ETHERTYPE_X25": true,
+ "ETHERTYPE_X75": true,
+ "ETHERTYPE_XNSSM": true,
+ "ETHERTYPE_XTP": true,
+ "ETHER_ADDR_LEN": true,
+ "ETHER_ALIGN": true,
+ "ETHER_CRC_LEN": true,
+ "ETHER_CRC_POLY_BE": true,
+ "ETHER_CRC_POLY_LE": true,
+ "ETHER_HDR_LEN": true,
+ "ETHER_MAX_DIX_LEN": true,
+ "ETHER_MAX_LEN": true,
+ "ETHER_MAX_LEN_JUMBO": true,
+ "ETHER_MIN_LEN": true,
+ "ETHER_PPPOE_ENCAP_LEN": true,
+ "ETHER_TYPE_LEN": true,
+ "ETHER_VLAN_ENCAP_LEN": true,
+ "ETH_P_1588": true,
+ "ETH_P_8021Q": true,
+ "ETH_P_802_2": true,
+ "ETH_P_802_3": true,
+ "ETH_P_AARP": true,
+ "ETH_P_ALL": true,
+ "ETH_P_AOE": true,
+ "ETH_P_ARCNET": true,
+ "ETH_P_ARP": true,
+ "ETH_P_ATALK": true,
+ "ETH_P_ATMFATE": true,
+ "ETH_P_ATMMPOA": true,
+ "ETH_P_AX25": true,
+ "ETH_P_BPQ": true,
+ "ETH_P_CAIF": true,
+ "ETH_P_CAN": true,
+ "ETH_P_CONTROL": true,
+ "ETH_P_CUST": true,
+ "ETH_P_DDCMP": true,
+ "ETH_P_DEC": true,
+ "ETH_P_DIAG": true,
+ "ETH_P_DNA_DL": true,
+ "ETH_P_DNA_RC": true,
+ "ETH_P_DNA_RT": true,
+ "ETH_P_DSA": true,
+ "ETH_P_ECONET": true,
+ "ETH_P_EDSA": true,
+ "ETH_P_FCOE": true,
+ "ETH_P_FIP": true,
+ "ETH_P_HDLC": true,
+ "ETH_P_IEEE802154": true,
+ "ETH_P_IEEEPUP": true,
+ "ETH_P_IEEEPUPAT": true,
+ "ETH_P_IP": true,
+ "ETH_P_IPV6": true,
+ "ETH_P_IPX": true,
+ "ETH_P_IRDA": true,
+ "ETH_P_LAT": true,
+ "ETH_P_LINK_CTL": true,
+ "ETH_P_LOCALTALK": true,
+ "ETH_P_LOOP": true,
+ "ETH_P_MOBITEX": true,
+ "ETH_P_MPLS_MC": true,
+ "ETH_P_MPLS_UC": true,
+ "ETH_P_PAE": true,
+ "ETH_P_PAUSE": true,
+ "ETH_P_PHONET": true,
+ "ETH_P_PPPTALK": true,
+ "ETH_P_PPP_DISC": true,
+ "ETH_P_PPP_MP": true,
+ "ETH_P_PPP_SES": true,
+ "ETH_P_PUP": true,
+ "ETH_P_PUPAT": true,
+ "ETH_P_RARP": true,
+ "ETH_P_SCA": true,
+ "ETH_P_SLOW": true,
+ "ETH_P_SNAP": true,
+ "ETH_P_TEB": true,
+ "ETH_P_TIPC": true,
+ "ETH_P_TRAILER": true,
+ "ETH_P_TR_802_2": true,
+ "ETH_P_WAN_PPP": true,
+ "ETH_P_WCCP": true,
+ "ETH_P_X25": true,
+ "ETIME": true,
+ "ETIMEDOUT": true,
+ "ETOOMANYREFS": true,
+ "ETXTBSY": true,
+ "EUCLEAN": true,
+ "EUNATCH": true,
+ "EUSERS": true,
+ "EVFILT_AIO": true,
+ "EVFILT_FS": true,
+ "EVFILT_LIO": true,
+ "EVFILT_MACHPORT": true,
+ "EVFILT_PROC": true,
+ "EVFILT_READ": true,
+ "EVFILT_SIGNAL": true,
+ "EVFILT_SYSCOUNT": true,
+ "EVFILT_THREADMARKER": true,
+ "EVFILT_TIMER": true,
+ "EVFILT_USER": true,
+ "EVFILT_VM": true,
+ "EVFILT_VNODE": true,
+ "EVFILT_WRITE": true,
+ "EV_ADD": true,
+ "EV_CLEAR": true,
+ "EV_DELETE": true,
+ "EV_DISABLE": true,
+ "EV_DISPATCH": true,
+ "EV_DROP": true,
+ "EV_ENABLE": true,
+ "EV_EOF": true,
+ "EV_ERROR": true,
+ "EV_FLAG0": true,
+ "EV_FLAG1": true,
+ "EV_ONESHOT": true,
+ "EV_OOBAND": true,
+ "EV_POLL": true,
+ "EV_RECEIPT": true,
+ "EV_SYSFLAGS": true,
+ "EWINDOWS": true,
+ "EWOULDBLOCK": true,
+ "EXDEV": true,
+ "EXFULL": true,
+ "EXTA": true,
+ "EXTB": true,
+ "EXTPROC": true,
+ "Environ": true,
+ "EpollCreate": true,
+ "EpollCreate1": true,
+ "EpollCtl": true,
+ "EpollEvent": true,
+ "EpollWait": true,
+ "Errno": true,
+ "EscapeArg": true,
+ "Exchangedata": true,
+ "Exec": true,
+ "Exit": true,
+ "ExitProcess": true,
+ "FD_CLOEXEC": true,
+ "FD_SETSIZE": true,
+ "FILE_ACTION_ADDED": true,
+ "FILE_ACTION_MODIFIED": true,
+ "FILE_ACTION_REMOVED": true,
+ "FILE_ACTION_RENAMED_NEW_NAME": true,
+ "FILE_ACTION_RENAMED_OLD_NAME": true,
+ "FILE_APPEND_DATA": true,
+ "FILE_ATTRIBUTE_ARCHIVE": true,
+ "FILE_ATTRIBUTE_DIRECTORY": true,
+ "FILE_ATTRIBUTE_HIDDEN": true,
+ "FILE_ATTRIBUTE_NORMAL": true,
+ "FILE_ATTRIBUTE_READONLY": true,
+ "FILE_ATTRIBUTE_REPARSE_POINT": true,
+ "FILE_ATTRIBUTE_SYSTEM": true,
+ "FILE_BEGIN": true,
+ "FILE_CURRENT": true,
+ "FILE_END": true,
+ "FILE_FLAG_BACKUP_SEMANTICS": true,
+ "FILE_FLAG_OPEN_REPARSE_POINT": true,
+ "FILE_FLAG_OVERLAPPED": true,
+ "FILE_LIST_DIRECTORY": true,
+ "FILE_MAP_COPY": true,
+ "FILE_MAP_EXECUTE": true,
+ "FILE_MAP_READ": true,
+ "FILE_MAP_WRITE": true,
+ "FILE_NOTIFY_CHANGE_ATTRIBUTES": true,
+ "FILE_NOTIFY_CHANGE_CREATION": true,
+ "FILE_NOTIFY_CHANGE_DIR_NAME": true,
+ "FILE_NOTIFY_CHANGE_FILE_NAME": true,
+ "FILE_NOTIFY_CHANGE_LAST_ACCESS": true,
+ "FILE_NOTIFY_CHANGE_LAST_WRITE": true,
+ "FILE_NOTIFY_CHANGE_SIZE": true,
+ "FILE_SHARE_DELETE": true,
+ "FILE_SHARE_READ": true,
+ "FILE_SHARE_WRITE": true,
+ "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS": true,
+ "FILE_SKIP_SET_EVENT_ON_HANDLE": true,
+ "FILE_TYPE_CHAR": true,
+ "FILE_TYPE_DISK": true,
+ "FILE_TYPE_PIPE": true,
+ "FILE_TYPE_REMOTE": true,
+ "FILE_TYPE_UNKNOWN": true,
+ "FILE_WRITE_ATTRIBUTES": true,
+ "FLUSHO": true,
+ "FORMAT_MESSAGE_ALLOCATE_BUFFER": true,
+ "FORMAT_MESSAGE_ARGUMENT_ARRAY": true,
+ "FORMAT_MESSAGE_FROM_HMODULE": true,
+ "FORMAT_MESSAGE_FROM_STRING": true,
+ "FORMAT_MESSAGE_FROM_SYSTEM": true,
+ "FORMAT_MESSAGE_IGNORE_INSERTS": true,
+ "FORMAT_MESSAGE_MAX_WIDTH_MASK": true,
+ "FSCTL_GET_REPARSE_POINT": true,
+ "F_ADDFILESIGS": true,
+ "F_ADDSIGS": true,
+ "F_ALLOCATEALL": true,
+ "F_ALLOCATECONTIG": true,
+ "F_CANCEL": true,
+ "F_CHKCLEAN": true,
+ "F_CLOSEM": true,
+ "F_DUP2FD": true,
+ "F_DUP2FD_CLOEXEC": true,
+ "F_DUPFD": true,
+ "F_DUPFD_CLOEXEC": true,
+ "F_EXLCK": true,
+ "F_FLUSH_DATA": true,
+ "F_FREEZE_FS": true,
+ "F_FSCTL": true,
+ "F_FSDIRMASK": true,
+ "F_FSIN": true,
+ "F_FSINOUT": true,
+ "F_FSOUT": true,
+ "F_FSPRIV": true,
+ "F_FSVOID": true,
+ "F_FULLFSYNC": true,
+ "F_GETFD": true,
+ "F_GETFL": true,
+ "F_GETLEASE": true,
+ "F_GETLK": true,
+ "F_GETLK64": true,
+ "F_GETLKPID": true,
+ "F_GETNOSIGPIPE": true,
+ "F_GETOWN": true,
+ "F_GETOWN_EX": true,
+ "F_GETPATH": true,
+ "F_GETPATH_MTMINFO": true,
+ "F_GETPIPE_SZ": true,
+ "F_GETPROTECTIONCLASS": true,
+ "F_GETSIG": true,
+ "F_GLOBAL_NOCACHE": true,
+ "F_LOCK": true,
+ "F_LOG2PHYS": true,
+ "F_LOG2PHYS_EXT": true,
+ "F_MARKDEPENDENCY": true,
+ "F_MAXFD": true,
+ "F_NOCACHE": true,
+ "F_NODIRECT": true,
+ "F_NOTIFY": true,
+ "F_OGETLK": true,
+ "F_OK": true,
+ "F_OSETLK": true,
+ "F_OSETLKW": true,
+ "F_PARAM_MASK": true,
+ "F_PARAM_MAX": true,
+ "F_PATHPKG_CHECK": true,
+ "F_PEOFPOSMODE": true,
+ "F_PREALLOCATE": true,
+ "F_RDADVISE": true,
+ "F_RDAHEAD": true,
+ "F_RDLCK": true,
+ "F_READAHEAD": true,
+ "F_READBOOTSTRAP": true,
+ "F_SETBACKINGSTORE": true,
+ "F_SETFD": true,
+ "F_SETFL": true,
+ "F_SETLEASE": true,
+ "F_SETLK": true,
+ "F_SETLK64": true,
+ "F_SETLKW": true,
+ "F_SETLKW64": true,
+ "F_SETLK_REMOTE": true,
+ "F_SETNOSIGPIPE": true,
+ "F_SETOWN": true,
+ "F_SETOWN_EX": true,
+ "F_SETPIPE_SZ": true,
+ "F_SETPROTECTIONCLASS": true,
+ "F_SETSIG": true,
+ "F_SETSIZE": true,
+ "F_SHLCK": true,
+ "F_TEST": true,
+ "F_THAW_FS": true,
+ "F_TLOCK": true,
+ "F_ULOCK": true,
+ "F_UNLCK": true,
+ "F_UNLCKSYS": true,
+ "F_VOLPOSMODE": true,
+ "F_WRITEBOOTSTRAP": true,
+ "F_WRLCK": true,
+ "Faccessat": true,
+ "Fallocate": true,
+ "Fbootstraptransfer_t": true,
+ "Fchdir": true,
+ "Fchflags": true,
+ "Fchmod": true,
+ "Fchmodat": true,
+ "Fchown": true,
+ "Fchownat": true,
+ "FcntlFlock": true,
+ "FdSet": true,
+ "Fdatasync": true,
+ "FileNotifyInformation": true,
+ "Filetime": true,
+ "FindClose": true,
+ "FindFirstFile": true,
+ "FindNextFile": true,
+ "Flock": true,
+ "Flock_t": true,
+ "FlushBpf": true,
+ "FlushFileBuffers": true,
+ "FlushViewOfFile": true,
+ "ForkExec": true,
+ "ForkLock": true,
+ "FormatMessage": true,
+ "Fpathconf": true,
+ "FreeAddrInfoW": true,
+ "FreeEnvironmentStrings": true,
+ "FreeLibrary": true,
+ "Fsid": true,
+ "Fstat": true,
+ "Fstatat": true,
+ "Fstatfs": true,
+ "Fstore_t": true,
+ "Fsync": true,
+ "Ftruncate": true,
+ "FullPath": true,
+ "Futimes": true,
+ "Futimesat": true,
+ "GENERIC_ALL": true,
+ "GENERIC_EXECUTE": true,
+ "GENERIC_READ": true,
+ "GENERIC_WRITE": true,
+ "GUID": true,
+ "GetAcceptExSockaddrs": true,
+ "GetAdaptersInfo": true,
+ "GetAddrInfoW": true,
+ "GetCommandLine": true,
+ "GetComputerName": true,
+ "GetConsoleMode": true,
+ "GetCurrentDirectory": true,
+ "GetCurrentProcess": true,
+ "GetEnvironmentStrings": true,
+ "GetEnvironmentVariable": true,
+ "GetExitCodeProcess": true,
+ "GetFileAttributes": true,
+ "GetFileAttributesEx": true,
+ "GetFileExInfoStandard": true,
+ "GetFileExMaxInfoLevel": true,
+ "GetFileInformationByHandle": true,
+ "GetFileType": true,
+ "GetFullPathName": true,
+ "GetHostByName": true,
+ "GetIfEntry": true,
+ "GetLastError": true,
+ "GetLengthSid": true,
+ "GetLongPathName": true,
+ "GetProcAddress": true,
+ "GetProcessTimes": true,
+ "GetProtoByName": true,
+ "GetQueuedCompletionStatus": true,
+ "GetServByName": true,
+ "GetShortPathName": true,
+ "GetStartupInfo": true,
+ "GetStdHandle": true,
+ "GetSystemTimeAsFileTime": true,
+ "GetTempPath": true,
+ "GetTimeZoneInformation": true,
+ "GetTokenInformation": true,
+ "GetUserNameEx": true,
+ "GetUserProfileDirectory": true,
+ "GetVersion": true,
+ "Getcwd": true,
+ "Getdents": true,
+ "Getdirentries": true,
+ "Getdtablesize": true,
+ "Getegid": true,
+ "Getenv": true,
+ "Geteuid": true,
+ "Getfsstat": true,
+ "Getgid": true,
+ "Getgroups": true,
+ "Getpagesize": true,
+ "Getpeername": true,
+ "Getpgid": true,
+ "Getpgrp": true,
+ "Getpid": true,
+ "Getppid": true,
+ "Getpriority": true,
+ "Getrlimit": true,
+ "Getrusage": true,
+ "Getsid": true,
+ "Getsockname": true,
+ "Getsockopt": true,
+ "GetsockoptByte": true,
+ "GetsockoptICMPv6Filter": true,
+ "GetsockoptIPMreq": true,
+ "GetsockoptIPMreqn": true,
+ "GetsockoptIPv6MTUInfo": true,
+ "GetsockoptIPv6Mreq": true,
+ "GetsockoptInet4Addr": true,
+ "GetsockoptInt": true,
+ "GetsockoptUcred": true,
+ "Gettid": true,
+ "Gettimeofday": true,
+ "Getuid": true,
+ "Getwd": true,
+ "Getxattr": true,
+ "HANDLE_FLAG_INHERIT": true,
+ "HKEY_CLASSES_ROOT": true,
+ "HKEY_CURRENT_CONFIG": true,
+ "HKEY_CURRENT_USER": true,
+ "HKEY_DYN_DATA": true,
+ "HKEY_LOCAL_MACHINE": true,
+ "HKEY_PERFORMANCE_DATA": true,
+ "HKEY_USERS": true,
+ "HUPCL": true,
+ "Handle": true,
+ "Hostent": true,
+ "ICANON": true,
+ "ICMP6_FILTER": true,
+ "ICMPV6_FILTER": true,
+ "ICMPv6Filter": true,
+ "ICRNL": true,
+ "IEXTEN": true,
+ "IFAN_ARRIVAL": true,
+ "IFAN_DEPARTURE": true,
+ "IFA_ADDRESS": true,
+ "IFA_ANYCAST": true,
+ "IFA_BROADCAST": true,
+ "IFA_CACHEINFO": true,
+ "IFA_F_DADFAILED": true,
+ "IFA_F_DEPRECATED": true,
+ "IFA_F_HOMEADDRESS": true,
+ "IFA_F_NODAD": true,
+ "IFA_F_OPTIMISTIC": true,
+ "IFA_F_PERMANENT": true,
+ "IFA_F_SECONDARY": true,
+ "IFA_F_TEMPORARY": true,
+ "IFA_F_TENTATIVE": true,
+ "IFA_LABEL": true,
+ "IFA_LOCAL": true,
+ "IFA_MAX": true,
+ "IFA_MULTICAST": true,
+ "IFA_ROUTE": true,
+ "IFA_UNSPEC": true,
+ "IFF_ALLMULTI": true,
+ "IFF_ALTPHYS": true,
+ "IFF_AUTOMEDIA": true,
+ "IFF_BROADCAST": true,
+ "IFF_CANTCHANGE": true,
+ "IFF_CANTCONFIG": true,
+ "IFF_DEBUG": true,
+ "IFF_DRV_OACTIVE": true,
+ "IFF_DRV_RUNNING": true,
+ "IFF_DYING": true,
+ "IFF_DYNAMIC": true,
+ "IFF_LINK0": true,
+ "IFF_LINK1": true,
+ "IFF_LINK2": true,
+ "IFF_LOOPBACK": true,
+ "IFF_MASTER": true,
+ "IFF_MONITOR": true,
+ "IFF_MULTICAST": true,
+ "IFF_NOARP": true,
+ "IFF_NOTRAILERS": true,
+ "IFF_NO_PI": true,
+ "IFF_OACTIVE": true,
+ "IFF_ONE_QUEUE": true,
+ "IFF_POINTOPOINT": true,
+ "IFF_POINTTOPOINT": true,
+ "IFF_PORTSEL": true,
+ "IFF_PPROMISC": true,
+ "IFF_PROMISC": true,
+ "IFF_RENAMING": true,
+ "IFF_RUNNING": true,
+ "IFF_SIMPLEX": true,
+ "IFF_SLAVE": true,
+ "IFF_SMART": true,
+ "IFF_STATICARP": true,
+ "IFF_TAP": true,
+ "IFF_TUN": true,
+ "IFF_TUN_EXCL": true,
+ "IFF_UP": true,
+ "IFF_VNET_HDR": true,
+ "IFLA_ADDRESS": true,
+ "IFLA_BROADCAST": true,
+ "IFLA_COST": true,
+ "IFLA_IFALIAS": true,
+ "IFLA_IFNAME": true,
+ "IFLA_LINK": true,
+ "IFLA_LINKINFO": true,
+ "IFLA_LINKMODE": true,
+ "IFLA_MAP": true,
+ "IFLA_MASTER": true,
+ "IFLA_MAX": true,
+ "IFLA_MTU": true,
+ "IFLA_NET_NS_PID": true,
+ "IFLA_OPERSTATE": true,
+ "IFLA_PRIORITY": true,
+ "IFLA_PROTINFO": true,
+ "IFLA_QDISC": true,
+ "IFLA_STATS": true,
+ "IFLA_TXQLEN": true,
+ "IFLA_UNSPEC": true,
+ "IFLA_WEIGHT": true,
+ "IFLA_WIRELESS": true,
+ "IFNAMSIZ": true,
+ "IFT_1822": true,
+ "IFT_A12MPPSWITCH": true,
+ "IFT_AAL2": true,
+ "IFT_AAL5": true,
+ "IFT_ADSL": true,
+ "IFT_AFLANE8023": true,
+ "IFT_AFLANE8025": true,
+ "IFT_ARAP": true,
+ "IFT_ARCNET": true,
+ "IFT_ARCNETPLUS": true,
+ "IFT_ASYNC": true,
+ "IFT_ATM": true,
+ "IFT_ATMDXI": true,
+ "IFT_ATMFUNI": true,
+ "IFT_ATMIMA": true,
+ "IFT_ATMLOGICAL": true,
+ "IFT_ATMRADIO": true,
+ "IFT_ATMSUBINTERFACE": true,
+ "IFT_ATMVCIENDPT": true,
+ "IFT_ATMVIRTUAL": true,
+ "IFT_BGPPOLICYACCOUNTING": true,
+ "IFT_BLUETOOTH": true,
+ "IFT_BRIDGE": true,
+ "IFT_BSC": true,
+ "IFT_CARP": true,
+ "IFT_CCTEMUL": true,
+ "IFT_CELLULAR": true,
+ "IFT_CEPT": true,
+ "IFT_CES": true,
+ "IFT_CHANNEL": true,
+ "IFT_CNR": true,
+ "IFT_COFFEE": true,
+ "IFT_COMPOSITELINK": true,
+ "IFT_DCN": true,
+ "IFT_DIGITALPOWERLINE": true,
+ "IFT_DIGITALWRAPPEROVERHEADCHANNEL": true,
+ "IFT_DLSW": true,
+ "IFT_DOCSCABLEDOWNSTREAM": true,
+ "IFT_DOCSCABLEMACLAYER": true,
+ "IFT_DOCSCABLEUPSTREAM": true,
+ "IFT_DOCSCABLEUPSTREAMCHANNEL": true,
+ "IFT_DS0": true,
+ "IFT_DS0BUNDLE": true,
+ "IFT_DS1FDL": true,
+ "IFT_DS3": true,
+ "IFT_DTM": true,
+ "IFT_DUMMY": true,
+ "IFT_DVBASILN": true,
+ "IFT_DVBASIOUT": true,
+ "IFT_DVBRCCDOWNSTREAM": true,
+ "IFT_DVBRCCMACLAYER": true,
+ "IFT_DVBRCCUPSTREAM": true,
+ "IFT_ECONET": true,
+ "IFT_ENC": true,
+ "IFT_EON": true,
+ "IFT_EPLRS": true,
+ "IFT_ESCON": true,
+ "IFT_ETHER": true,
+ "IFT_FAITH": true,
+ "IFT_FAST": true,
+ "IFT_FASTETHER": true,
+ "IFT_FASTETHERFX": true,
+ "IFT_FDDI": true,
+ "IFT_FIBRECHANNEL": true,
+ "IFT_FRAMERELAYINTERCONNECT": true,
+ "IFT_FRAMERELAYMPI": true,
+ "IFT_FRDLCIENDPT": true,
+ "IFT_FRELAY": true,
+ "IFT_FRELAYDCE": true,
+ "IFT_FRF16MFRBUNDLE": true,
+ "IFT_FRFORWARD": true,
+ "IFT_G703AT2MB": true,
+ "IFT_G703AT64K": true,
+ "IFT_GIF": true,
+ "IFT_GIGABITETHERNET": true,
+ "IFT_GR303IDT": true,
+ "IFT_GR303RDT": true,
+ "IFT_H323GATEKEEPER": true,
+ "IFT_H323PROXY": true,
+ "IFT_HDH1822": true,
+ "IFT_HDLC": true,
+ "IFT_HDSL2": true,
+ "IFT_HIPERLAN2": true,
+ "IFT_HIPPI": true,
+ "IFT_HIPPIINTERFACE": true,
+ "IFT_HOSTPAD": true,
+ "IFT_HSSI": true,
+ "IFT_HY": true,
+ "IFT_IBM370PARCHAN": true,
+ "IFT_IDSL": true,
+ "IFT_IEEE1394": true,
+ "IFT_IEEE80211": true,
+ "IFT_IEEE80212": true,
+ "IFT_IEEE8023ADLAG": true,
+ "IFT_IFGSN": true,
+ "IFT_IMT": true,
+ "IFT_INFINIBAND": true,
+ "IFT_INTERLEAVE": true,
+ "IFT_IP": true,
+ "IFT_IPFORWARD": true,
+ "IFT_IPOVERATM": true,
+ "IFT_IPOVERCDLC": true,
+ "IFT_IPOVERCLAW": true,
+ "IFT_IPSWITCH": true,
+ "IFT_IPXIP": true,
+ "IFT_ISDN": true,
+ "IFT_ISDNBASIC": true,
+ "IFT_ISDNPRIMARY": true,
+ "IFT_ISDNS": true,
+ "IFT_ISDNU": true,
+ "IFT_ISO88022LLC": true,
+ "IFT_ISO88023": true,
+ "IFT_ISO88024": true,
+ "IFT_ISO88025": true,
+ "IFT_ISO88025CRFPINT": true,
+ "IFT_ISO88025DTR": true,
+ "IFT_ISO88025FIBER": true,
+ "IFT_ISO88026": true,
+ "IFT_ISUP": true,
+ "IFT_L2VLAN": true,
+ "IFT_L3IPVLAN": true,
+ "IFT_L3IPXVLAN": true,
+ "IFT_LAPB": true,
+ "IFT_LAPD": true,
+ "IFT_LAPF": true,
+ "IFT_LINEGROUP": true,
+ "IFT_LOCALTALK": true,
+ "IFT_LOOP": true,
+ "IFT_MEDIAMAILOVERIP": true,
+ "IFT_MFSIGLINK": true,
+ "IFT_MIOX25": true,
+ "IFT_MODEM": true,
+ "IFT_MPC": true,
+ "IFT_MPLS": true,
+ "IFT_MPLSTUNNEL": true,
+ "IFT_MSDSL": true,
+ "IFT_MVL": true,
+ "IFT_MYRINET": true,
+ "IFT_NFAS": true,
+ "IFT_NSIP": true,
+ "IFT_OPTICALCHANNEL": true,
+ "IFT_OPTICALTRANSPORT": true,
+ "IFT_OTHER": true,
+ "IFT_P10": true,
+ "IFT_P80": true,
+ "IFT_PARA": true,
+ "IFT_PDP": true,
+ "IFT_PFLOG": true,
+ "IFT_PFLOW": true,
+ "IFT_PFSYNC": true,
+ "IFT_PLC": true,
+ "IFT_PON155": true,
+ "IFT_PON622": true,
+ "IFT_POS": true,
+ "IFT_PPP": true,
+ "IFT_PPPMULTILINKBUNDLE": true,
+ "IFT_PROPATM": true,
+ "IFT_PROPBWAP2MP": true,
+ "IFT_PROPCNLS": true,
+ "IFT_PROPDOCSWIRELESSDOWNSTREAM": true,
+ "IFT_PROPDOCSWIRELESSMACLAYER": true,
+ "IFT_PROPDOCSWIRELESSUPSTREAM": true,
+ "IFT_PROPMUX": true,
+ "IFT_PROPVIRTUAL": true,
+ "IFT_PROPWIRELESSP2P": true,
+ "IFT_PTPSERIAL": true,
+ "IFT_PVC": true,
+ "IFT_Q2931": true,
+ "IFT_QLLC": true,
+ "IFT_RADIOMAC": true,
+ "IFT_RADSL": true,
+ "IFT_REACHDSL": true,
+ "IFT_RFC1483": true,
+ "IFT_RS232": true,
+ "IFT_RSRB": true,
+ "IFT_SDLC": true,
+ "IFT_SDSL": true,
+ "IFT_SHDSL": true,
+ "IFT_SIP": true,
+ "IFT_SIPSIG": true,
+ "IFT_SIPTG": true,
+ "IFT_SLIP": true,
+ "IFT_SMDSDXI": true,
+ "IFT_SMDSICIP": true,
+ "IFT_SONET": true,
+ "IFT_SONETOVERHEADCHANNEL": true,
+ "IFT_SONETPATH": true,
+ "IFT_SONETVT": true,
+ "IFT_SRP": true,
+ "IFT_SS7SIGLINK": true,
+ "IFT_STACKTOSTACK": true,
+ "IFT_STARLAN": true,
+ "IFT_STF": true,
+ "IFT_T1": true,
+ "IFT_TDLC": true,
+ "IFT_TELINK": true,
+ "IFT_TERMPAD": true,
+ "IFT_TR008": true,
+ "IFT_TRANSPHDLC": true,
+ "IFT_TUNNEL": true,
+ "IFT_ULTRA": true,
+ "IFT_USB": true,
+ "IFT_V11": true,
+ "IFT_V35": true,
+ "IFT_V36": true,
+ "IFT_V37": true,
+ "IFT_VDSL": true,
+ "IFT_VIRTUALIPADDRESS": true,
+ "IFT_VIRTUALTG": true,
+ "IFT_VOICEDID": true,
+ "IFT_VOICEEM": true,
+ "IFT_VOICEEMFGD": true,
+ "IFT_VOICEENCAP": true,
+ "IFT_VOICEFGDEANA": true,
+ "IFT_VOICEFXO": true,
+ "IFT_VOICEFXS": true,
+ "IFT_VOICEOVERATM": true,
+ "IFT_VOICEOVERCABLE": true,
+ "IFT_VOICEOVERFRAMERELAY": true,
+ "IFT_VOICEOVERIP": true,
+ "IFT_X213": true,
+ "IFT_X25": true,
+ "IFT_X25DDN": true,
+ "IFT_X25HUNTGROUP": true,
+ "IFT_X25MLP": true,
+ "IFT_X25PLE": true,
+ "IFT_XETHER": true,
+ "IGNBRK": true,
+ "IGNCR": true,
+ "IGNORE": true,
+ "IGNPAR": true,
+ "IMAXBEL": true,
+ "INFINITE": true,
+ "INLCR": true,
+ "INPCK": true,
+ "INVALID_FILE_ATTRIBUTES": true,
+ "IN_ACCESS": true,
+ "IN_ALL_EVENTS": true,
+ "IN_ATTRIB": true,
+ "IN_CLASSA_HOST": true,
+ "IN_CLASSA_MAX": true,
+ "IN_CLASSA_NET": true,
+ "IN_CLASSA_NSHIFT": true,
+ "IN_CLASSB_HOST": true,
+ "IN_CLASSB_MAX": true,
+ "IN_CLASSB_NET": true,
+ "IN_CLASSB_NSHIFT": true,
+ "IN_CLASSC_HOST": true,
+ "IN_CLASSC_NET": true,
+ "IN_CLASSC_NSHIFT": true,
+ "IN_CLASSD_HOST": true,
+ "IN_CLASSD_NET": true,
+ "IN_CLASSD_NSHIFT": true,
+ "IN_CLOEXEC": true,
+ "IN_CLOSE": true,
+ "IN_CLOSE_NOWRITE": true,
+ "IN_CLOSE_WRITE": true,
+ "IN_CREATE": true,
+ "IN_DELETE": true,
+ "IN_DELETE_SELF": true,
+ "IN_DONT_FOLLOW": true,
+ "IN_EXCL_UNLINK": true,
+ "IN_IGNORED": true,
+ "IN_ISDIR": true,
+ "IN_LINKLOCALNETNUM": true,
+ "IN_LOOPBACKNET": true,
+ "IN_MASK_ADD": true,
+ "IN_MODIFY": true,
+ "IN_MOVE": true,
+ "IN_MOVED_FROM": true,
+ "IN_MOVED_TO": true,
+ "IN_MOVE_SELF": true,
+ "IN_NONBLOCK": true,
+ "IN_ONESHOT": true,
+ "IN_ONLYDIR": true,
+ "IN_OPEN": true,
+ "IN_Q_OVERFLOW": true,
+ "IN_RFC3021_HOST": true,
+ "IN_RFC3021_MASK": true,
+ "IN_RFC3021_NET": true,
+ "IN_RFC3021_NSHIFT": true,
+ "IN_UNMOUNT": true,
+ "IOC_IN": true,
+ "IOC_INOUT": true,
+ "IOC_OUT": true,
+ "IOC_VENDOR": true,
+ "IOC_WS2": true,
+ "IO_REPARSE_TAG_SYMLINK": true,
+ "IPMreq": true,
+ "IPMreqn": true,
+ "IPPROTO_3PC": true,
+ "IPPROTO_ADFS": true,
+ "IPPROTO_AH": true,
+ "IPPROTO_AHIP": true,
+ "IPPROTO_APES": true,
+ "IPPROTO_ARGUS": true,
+ "IPPROTO_AX25": true,
+ "IPPROTO_BHA": true,
+ "IPPROTO_BLT": true,
+ "IPPROTO_BRSATMON": true,
+ "IPPROTO_CARP": true,
+ "IPPROTO_CFTP": true,
+ "IPPROTO_CHAOS": true,
+ "IPPROTO_CMTP": true,
+ "IPPROTO_COMP": true,
+ "IPPROTO_CPHB": true,
+ "IPPROTO_CPNX": true,
+ "IPPROTO_DCCP": true,
+ "IPPROTO_DDP": true,
+ "IPPROTO_DGP": true,
+ "IPPROTO_DIVERT": true,
+ "IPPROTO_DIVERT_INIT": true,
+ "IPPROTO_DIVERT_RESP": true,
+ "IPPROTO_DONE": true,
+ "IPPROTO_DSTOPTS": true,
+ "IPPROTO_EGP": true,
+ "IPPROTO_EMCON": true,
+ "IPPROTO_ENCAP": true,
+ "IPPROTO_EON": true,
+ "IPPROTO_ESP": true,
+ "IPPROTO_ETHERIP": true,
+ "IPPROTO_FRAGMENT": true,
+ "IPPROTO_GGP": true,
+ "IPPROTO_GMTP": true,
+ "IPPROTO_GRE": true,
+ "IPPROTO_HELLO": true,
+ "IPPROTO_HMP": true,
+ "IPPROTO_HOPOPTS": true,
+ "IPPROTO_ICMP": true,
+ "IPPROTO_ICMPV6": true,
+ "IPPROTO_IDP": true,
+ "IPPROTO_IDPR": true,
+ "IPPROTO_IDRP": true,
+ "IPPROTO_IGMP": true,
+ "IPPROTO_IGP": true,
+ "IPPROTO_IGRP": true,
+ "IPPROTO_IL": true,
+ "IPPROTO_INLSP": true,
+ "IPPROTO_INP": true,
+ "IPPROTO_IP": true,
+ "IPPROTO_IPCOMP": true,
+ "IPPROTO_IPCV": true,
+ "IPPROTO_IPEIP": true,
+ "IPPROTO_IPIP": true,
+ "IPPROTO_IPPC": true,
+ "IPPROTO_IPV4": true,
+ "IPPROTO_IPV6": true,
+ "IPPROTO_IPV6_ICMP": true,
+ "IPPROTO_IRTP": true,
+ "IPPROTO_KRYPTOLAN": true,
+ "IPPROTO_LARP": true,
+ "IPPROTO_LEAF1": true,
+ "IPPROTO_LEAF2": true,
+ "IPPROTO_MAX": true,
+ "IPPROTO_MAXID": true,
+ "IPPROTO_MEAS": true,
+ "IPPROTO_MH": true,
+ "IPPROTO_MHRP": true,
+ "IPPROTO_MICP": true,
+ "IPPROTO_MOBILE": true,
+ "IPPROTO_MPLS": true,
+ "IPPROTO_MTP": true,
+ "IPPROTO_MUX": true,
+ "IPPROTO_ND": true,
+ "IPPROTO_NHRP": true,
+ "IPPROTO_NONE": true,
+ "IPPROTO_NSP": true,
+ "IPPROTO_NVPII": true,
+ "IPPROTO_OLD_DIVERT": true,
+ "IPPROTO_OSPFIGP": true,
+ "IPPROTO_PFSYNC": true,
+ "IPPROTO_PGM": true,
+ "IPPROTO_PIGP": true,
+ "IPPROTO_PIM": true,
+ "IPPROTO_PRM": true,
+ "IPPROTO_PUP": true,
+ "IPPROTO_PVP": true,
+ "IPPROTO_RAW": true,
+ "IPPROTO_RCCMON": true,
+ "IPPROTO_RDP": true,
+ "IPPROTO_ROUTING": true,
+ "IPPROTO_RSVP": true,
+ "IPPROTO_RVD": true,
+ "IPPROTO_SATEXPAK": true,
+ "IPPROTO_SATMON": true,
+ "IPPROTO_SCCSP": true,
+ "IPPROTO_SCTP": true,
+ "IPPROTO_SDRP": true,
+ "IPPROTO_SEND": true,
+ "IPPROTO_SEP": true,
+ "IPPROTO_SKIP": true,
+ "IPPROTO_SPACER": true,
+ "IPPROTO_SRPC": true,
+ "IPPROTO_ST": true,
+ "IPPROTO_SVMTP": true,
+ "IPPROTO_SWIPE": true,
+ "IPPROTO_TCF": true,
+ "IPPROTO_TCP": true,
+ "IPPROTO_TLSP": true,
+ "IPPROTO_TP": true,
+ "IPPROTO_TPXX": true,
+ "IPPROTO_TRUNK1": true,
+ "IPPROTO_TRUNK2": true,
+ "IPPROTO_TTP": true,
+ "IPPROTO_UDP": true,
+ "IPPROTO_UDPLITE": true,
+ "IPPROTO_VINES": true,
+ "IPPROTO_VISA": true,
+ "IPPROTO_VMTP": true,
+ "IPPROTO_VRRP": true,
+ "IPPROTO_WBEXPAK": true,
+ "IPPROTO_WBMON": true,
+ "IPPROTO_WSN": true,
+ "IPPROTO_XNET": true,
+ "IPPROTO_XTP": true,
+ "IPV6_2292DSTOPTS": true,
+ "IPV6_2292HOPLIMIT": true,
+ "IPV6_2292HOPOPTS": true,
+ "IPV6_2292NEXTHOP": true,
+ "IPV6_2292PKTINFO": true,
+ "IPV6_2292PKTOPTIONS": true,
+ "IPV6_2292RTHDR": true,
+ "IPV6_ADDRFORM": true,
+ "IPV6_ADD_MEMBERSHIP": true,
+ "IPV6_AUTHHDR": true,
+ "IPV6_AUTH_LEVEL": true,
+ "IPV6_AUTOFLOWLABEL": true,
+ "IPV6_BINDANY": true,
+ "IPV6_BINDV6ONLY": true,
+ "IPV6_BOUND_IF": true,
+ "IPV6_CHECKSUM": true,
+ "IPV6_DEFAULT_MULTICAST_HOPS": true,
+ "IPV6_DEFAULT_MULTICAST_LOOP": true,
+ "IPV6_DEFHLIM": true,
+ "IPV6_DONTFRAG": true,
+ "IPV6_DROP_MEMBERSHIP": true,
+ "IPV6_DSTOPTS": true,
+ "IPV6_ESP_NETWORK_LEVEL": true,
+ "IPV6_ESP_TRANS_LEVEL": true,
+ "IPV6_FAITH": true,
+ "IPV6_FLOWINFO_MASK": true,
+ "IPV6_FLOWLABEL_MASK": true,
+ "IPV6_FRAGTTL": true,
+ "IPV6_FW_ADD": true,
+ "IPV6_FW_DEL": true,
+ "IPV6_FW_FLUSH": true,
+ "IPV6_FW_GET": true,
+ "IPV6_FW_ZERO": true,
+ "IPV6_HLIMDEC": true,
+ "IPV6_HOPLIMIT": true,
+ "IPV6_HOPOPTS": true,
+ "IPV6_IPCOMP_LEVEL": true,
+ "IPV6_IPSEC_POLICY": true,
+ "IPV6_JOIN_ANYCAST": true,
+ "IPV6_JOIN_GROUP": true,
+ "IPV6_LEAVE_ANYCAST": true,
+ "IPV6_LEAVE_GROUP": true,
+ "IPV6_MAXHLIM": true,
+ "IPV6_MAXOPTHDR": true,
+ "IPV6_MAXPACKET": true,
+ "IPV6_MAX_GROUP_SRC_FILTER": true,
+ "IPV6_MAX_MEMBERSHIPS": true,
+ "IPV6_MAX_SOCK_SRC_FILTER": true,
+ "IPV6_MIN_MEMBERSHIPS": true,
+ "IPV6_MMTU": true,
+ "IPV6_MSFILTER": true,
+ "IPV6_MTU": true,
+ "IPV6_MTU_DISCOVER": true,
+ "IPV6_MULTICAST_HOPS": true,
+ "IPV6_MULTICAST_IF": true,
+ "IPV6_MULTICAST_LOOP": true,
+ "IPV6_NEXTHOP": true,
+ "IPV6_OPTIONS": true,
+ "IPV6_PATHMTU": true,
+ "IPV6_PIPEX": true,
+ "IPV6_PKTINFO": true,
+ "IPV6_PMTUDISC_DO": true,
+ "IPV6_PMTUDISC_DONT": true,
+ "IPV6_PMTUDISC_PROBE": true,
+ "IPV6_PMTUDISC_WANT": true,
+ "IPV6_PORTRANGE": true,
+ "IPV6_PORTRANGE_DEFAULT": true,
+ "IPV6_PORTRANGE_HIGH": true,
+ "IPV6_PORTRANGE_LOW": true,
+ "IPV6_PREFER_TEMPADDR": true,
+ "IPV6_RECVDSTOPTS": true,
+ "IPV6_RECVDSTPORT": true,
+ "IPV6_RECVERR": true,
+ "IPV6_RECVHOPLIMIT": true,
+ "IPV6_RECVHOPOPTS": true,
+ "IPV6_RECVPATHMTU": true,
+ "IPV6_RECVPKTINFO": true,
+ "IPV6_RECVRTHDR": true,
+ "IPV6_RECVTCLASS": true,
+ "IPV6_ROUTER_ALERT": true,
+ "IPV6_RTABLE": true,
+ "IPV6_RTHDR": true,
+ "IPV6_RTHDRDSTOPTS": true,
+ "IPV6_RTHDR_LOOSE": true,
+ "IPV6_RTHDR_STRICT": true,
+ "IPV6_RTHDR_TYPE_0": true,
+ "IPV6_RXDSTOPTS": true,
+ "IPV6_RXHOPOPTS": true,
+ "IPV6_SOCKOPT_RESERVED1": true,
+ "IPV6_TCLASS": true,
+ "IPV6_UNICAST_HOPS": true,
+ "IPV6_USE_MIN_MTU": true,
+ "IPV6_V6ONLY": true,
+ "IPV6_VERSION": true,
+ "IPV6_VERSION_MASK": true,
+ "IPV6_XFRM_POLICY": true,
+ "IP_ADD_MEMBERSHIP": true,
+ "IP_ADD_SOURCE_MEMBERSHIP": true,
+ "IP_AUTH_LEVEL": true,
+ "IP_BINDANY": true,
+ "IP_BLOCK_SOURCE": true,
+ "IP_BOUND_IF": true,
+ "IP_DEFAULT_MULTICAST_LOOP": true,
+ "IP_DEFAULT_MULTICAST_TTL": true,
+ "IP_DF": true,
+ "IP_DIVERTFL": true,
+ "IP_DONTFRAG": true,
+ "IP_DROP_MEMBERSHIP": true,
+ "IP_DROP_SOURCE_MEMBERSHIP": true,
+ "IP_DUMMYNET3": true,
+ "IP_DUMMYNET_CONFIGURE": true,
+ "IP_DUMMYNET_DEL": true,
+ "IP_DUMMYNET_FLUSH": true,
+ "IP_DUMMYNET_GET": true,
+ "IP_EF": true,
+ "IP_ERRORMTU": true,
+ "IP_ESP_NETWORK_LEVEL": true,
+ "IP_ESP_TRANS_LEVEL": true,
+ "IP_FAITH": true,
+ "IP_FREEBIND": true,
+ "IP_FW3": true,
+ "IP_FW_ADD": true,
+ "IP_FW_DEL": true,
+ "IP_FW_FLUSH": true,
+ "IP_FW_GET": true,
+ "IP_FW_NAT_CFG": true,
+ "IP_FW_NAT_DEL": true,
+ "IP_FW_NAT_GET_CONFIG": true,
+ "IP_FW_NAT_GET_LOG": true,
+ "IP_FW_RESETLOG": true,
+ "IP_FW_TABLE_ADD": true,
+ "IP_FW_TABLE_DEL": true,
+ "IP_FW_TABLE_FLUSH": true,
+ "IP_FW_TABLE_GETSIZE": true,
+ "IP_FW_TABLE_LIST": true,
+ "IP_FW_ZERO": true,
+ "IP_HDRINCL": true,
+ "IP_IPCOMP_LEVEL": true,
+ "IP_IPSECFLOWINFO": true,
+ "IP_IPSEC_LOCAL_AUTH": true,
+ "IP_IPSEC_LOCAL_CRED": true,
+ "IP_IPSEC_LOCAL_ID": true,
+ "IP_IPSEC_POLICY": true,
+ "IP_IPSEC_REMOTE_AUTH": true,
+ "IP_IPSEC_REMOTE_CRED": true,
+ "IP_IPSEC_REMOTE_ID": true,
+ "IP_MAXPACKET": true,
+ "IP_MAX_GROUP_SRC_FILTER": true,
+ "IP_MAX_MEMBERSHIPS": true,
+ "IP_MAX_SOCK_MUTE_FILTER": true,
+ "IP_MAX_SOCK_SRC_FILTER": true,
+ "IP_MAX_SOURCE_FILTER": true,
+ "IP_MF": true,
+ "IP_MINFRAGSIZE": true,
+ "IP_MINTTL": true,
+ "IP_MIN_MEMBERSHIPS": true,
+ "IP_MSFILTER": true,
+ "IP_MSS": true,
+ "IP_MTU": true,
+ "IP_MTU_DISCOVER": true,
+ "IP_MULTICAST_IF": true,
+ "IP_MULTICAST_IFINDEX": true,
+ "IP_MULTICAST_LOOP": true,
+ "IP_MULTICAST_TTL": true,
+ "IP_MULTICAST_VIF": true,
+ "IP_NAT__XXX": true,
+ "IP_OFFMASK": true,
+ "IP_OLD_FW_ADD": true,
+ "IP_OLD_FW_DEL": true,
+ "IP_OLD_FW_FLUSH": true,
+ "IP_OLD_FW_GET": true,
+ "IP_OLD_FW_RESETLOG": true,
+ "IP_OLD_FW_ZERO": true,
+ "IP_ONESBCAST": true,
+ "IP_OPTIONS": true,
+ "IP_ORIGDSTADDR": true,
+ "IP_PASSSEC": true,
+ "IP_PIPEX": true,
+ "IP_PKTINFO": true,
+ "IP_PKTOPTIONS": true,
+ "IP_PMTUDISC": true,
+ "IP_PMTUDISC_DO": true,
+ "IP_PMTUDISC_DONT": true,
+ "IP_PMTUDISC_PROBE": true,
+ "IP_PMTUDISC_WANT": true,
+ "IP_PORTRANGE": true,
+ "IP_PORTRANGE_DEFAULT": true,
+ "IP_PORTRANGE_HIGH": true,
+ "IP_PORTRANGE_LOW": true,
+ "IP_RECVDSTADDR": true,
+ "IP_RECVDSTPORT": true,
+ "IP_RECVERR": true,
+ "IP_RECVIF": true,
+ "IP_RECVOPTS": true,
+ "IP_RECVORIGDSTADDR": true,
+ "IP_RECVPKTINFO": true,
+ "IP_RECVRETOPTS": true,
+ "IP_RECVRTABLE": true,
+ "IP_RECVTOS": true,
+ "IP_RECVTTL": true,
+ "IP_RETOPTS": true,
+ "IP_RF": true,
+ "IP_ROUTER_ALERT": true,
+ "IP_RSVP_OFF": true,
+ "IP_RSVP_ON": true,
+ "IP_RSVP_VIF_OFF": true,
+ "IP_RSVP_VIF_ON": true,
+ "IP_RTABLE": true,
+ "IP_SENDSRCADDR": true,
+ "IP_STRIPHDR": true,
+ "IP_TOS": true,
+ "IP_TRAFFIC_MGT_BACKGROUND": true,
+ "IP_TRANSPARENT": true,
+ "IP_TTL": true,
+ "IP_UNBLOCK_SOURCE": true,
+ "IP_XFRM_POLICY": true,
+ "IPv6MTUInfo": true,
+ "IPv6Mreq": true,
+ "ISIG": true,
+ "ISTRIP": true,
+ "IUCLC": true,
+ "IUTF8": true,
+ "IXANY": true,
+ "IXOFF": true,
+ "IXON": true,
+ "IfAddrmsg": true,
+ "IfAnnounceMsghdr": true,
+ "IfData": true,
+ "IfInfomsg": true,
+ "IfMsghdr": true,
+ "IfaMsghdr": true,
+ "IfmaMsghdr": true,
+ "IfmaMsghdr2": true,
+ "ImplementsGetwd": true,
+ "Inet4Pktinfo": true,
+ "Inet6Pktinfo": true,
+ "InotifyAddWatch": true,
+ "InotifyEvent": true,
+ "InotifyInit": true,
+ "InotifyInit1": true,
+ "InotifyRmWatch": true,
+ "InterfaceAddrMessage": true,
+ "InterfaceAnnounceMessage": true,
+ "InterfaceInfo": true,
+ "InterfaceMessage": true,
+ "InterfaceMulticastAddrMessage": true,
+ "InvalidHandle": true,
+ "Ioperm": true,
+ "Iopl": true,
+ "Iovec": true,
+ "IpAdapterInfo": true,
+ "IpAddrString": true,
+ "IpAddressString": true,
+ "IpMaskString": true,
+ "Issetugid": true,
+ "KEY_ALL_ACCESS": true,
+ "KEY_CREATE_LINK": true,
+ "KEY_CREATE_SUB_KEY": true,
+ "KEY_ENUMERATE_SUB_KEYS": true,
+ "KEY_EXECUTE": true,
+ "KEY_NOTIFY": true,
+ "KEY_QUERY_VALUE": true,
+ "KEY_READ": true,
+ "KEY_SET_VALUE": true,
+ "KEY_WOW64_32KEY": true,
+ "KEY_WOW64_64KEY": true,
+ "KEY_WRITE": true,
+ "Kevent": true,
+ "Kevent_t": true,
+ "Kill": true,
+ "Klogctl": true,
+ "Kqueue": true,
+ "LANG_ENGLISH": true,
+ "LAYERED_PROTOCOL": true,
+ "LCNT_OVERLOAD_FLUSH": true,
+ "LINUX_REBOOT_CMD_CAD_OFF": true,
+ "LINUX_REBOOT_CMD_CAD_ON": true,
+ "LINUX_REBOOT_CMD_HALT": true,
+ "LINUX_REBOOT_CMD_KEXEC": true,
+ "LINUX_REBOOT_CMD_POWER_OFF": true,
+ "LINUX_REBOOT_CMD_RESTART": true,
+ "LINUX_REBOOT_CMD_RESTART2": true,
+ "LINUX_REBOOT_CMD_SW_SUSPEND": true,
+ "LINUX_REBOOT_MAGIC1": true,
+ "LINUX_REBOOT_MAGIC2": true,
+ "LOCK_EX": true,
+ "LOCK_NB": true,
+ "LOCK_SH": true,
+ "LOCK_UN": true,
+ "LazyDLL": true,
+ "LazyProc": true,
+ "Lchown": true,
+ "Linger": true,
+ "Link": true,
+ "Listen": true,
+ "Listxattr": true,
+ "LoadCancelIoEx": true,
+ "LoadConnectEx": true,
+ "LoadCreateSymbolicLink": true,
+ "LoadDLL": true,
+ "LoadGetAddrInfo": true,
+ "LoadLibrary": true,
+ "LoadSetFileCompletionNotificationModes": true,
+ "LocalFree": true,
+ "Log2phys_t": true,
+ "LookupAccountName": true,
+ "LookupAccountSid": true,
+ "LookupSID": true,
+ "LsfJump": true,
+ "LsfSocket": true,
+ "LsfStmt": true,
+ "Lstat": true,
+ "MADV_AUTOSYNC": true,
+ "MADV_CAN_REUSE": true,
+ "MADV_CORE": true,
+ "MADV_DOFORK": true,
+ "MADV_DONTFORK": true,
+ "MADV_DONTNEED": true,
+ "MADV_FREE": true,
+ "MADV_FREE_REUSABLE": true,
+ "MADV_FREE_REUSE": true,
+ "MADV_HUGEPAGE": true,
+ "MADV_HWPOISON": true,
+ "MADV_MERGEABLE": true,
+ "MADV_NOCORE": true,
+ "MADV_NOHUGEPAGE": true,
+ "MADV_NORMAL": true,
+ "MADV_NOSYNC": true,
+ "MADV_PROTECT": true,
+ "MADV_RANDOM": true,
+ "MADV_REMOVE": true,
+ "MADV_SEQUENTIAL": true,
+ "MADV_SPACEAVAIL": true,
+ "MADV_UNMERGEABLE": true,
+ "MADV_WILLNEED": true,
+ "MADV_ZERO_WIRED_PAGES": true,
+ "MAP_32BIT": true,
+ "MAP_ALIGNED_SUPER": true,
+ "MAP_ALIGNMENT_16MB": true,
+ "MAP_ALIGNMENT_1TB": true,
+ "MAP_ALIGNMENT_256TB": true,
+ "MAP_ALIGNMENT_4GB": true,
+ "MAP_ALIGNMENT_64KB": true,
+ "MAP_ALIGNMENT_64PB": true,
+ "MAP_ALIGNMENT_MASK": true,
+ "MAP_ALIGNMENT_SHIFT": true,
+ "MAP_ANON": true,
+ "MAP_ANONYMOUS": true,
+ "MAP_COPY": true,
+ "MAP_DENYWRITE": true,
+ "MAP_EXECUTABLE": true,
+ "MAP_FILE": true,
+ "MAP_FIXED": true,
+ "MAP_FLAGMASK": true,
+ "MAP_GROWSDOWN": true,
+ "MAP_HASSEMAPHORE": true,
+ "MAP_HUGETLB": true,
+ "MAP_INHERIT": true,
+ "MAP_INHERIT_COPY": true,
+ "MAP_INHERIT_DEFAULT": true,
+ "MAP_INHERIT_DONATE_COPY": true,
+ "MAP_INHERIT_NONE": true,
+ "MAP_INHERIT_SHARE": true,
+ "MAP_JIT": true,
+ "MAP_LOCKED": true,
+ "MAP_NOCACHE": true,
+ "MAP_NOCORE": true,
+ "MAP_NOEXTEND": true,
+ "MAP_NONBLOCK": true,
+ "MAP_NORESERVE": true,
+ "MAP_NOSYNC": true,
+ "MAP_POPULATE": true,
+ "MAP_PREFAULT_READ": true,
+ "MAP_PRIVATE": true,
+ "MAP_RENAME": true,
+ "MAP_RESERVED0080": true,
+ "MAP_RESERVED0100": true,
+ "MAP_SHARED": true,
+ "MAP_STACK": true,
+ "MAP_TRYFIXED": true,
+ "MAP_TYPE": true,
+ "MAP_WIRED": true,
+ "MAXIMUM_REPARSE_DATA_BUFFER_SIZE": true,
+ "MAXLEN_IFDESCR": true,
+ "MAXLEN_PHYSADDR": true,
+ "MAX_ADAPTER_ADDRESS_LENGTH": true,
+ "MAX_ADAPTER_DESCRIPTION_LENGTH": true,
+ "MAX_ADAPTER_NAME_LENGTH": true,
+ "MAX_COMPUTERNAME_LENGTH": true,
+ "MAX_INTERFACE_NAME_LEN": true,
+ "MAX_LONG_PATH": true,
+ "MAX_PATH": true,
+ "MAX_PROTOCOL_CHAIN": true,
+ "MCL_CURRENT": true,
+ "MCL_FUTURE": true,
+ "MNT_DETACH": true,
+ "MNT_EXPIRE": true,
+ "MNT_FORCE": true,
+ "MSG_BCAST": true,
+ "MSG_CMSG_CLOEXEC": true,
+ "MSG_COMPAT": true,
+ "MSG_CONFIRM": true,
+ "MSG_CONTROLMBUF": true,
+ "MSG_CTRUNC": true,
+ "MSG_DONTROUTE": true,
+ "MSG_DONTWAIT": true,
+ "MSG_EOF": true,
+ "MSG_EOR": true,
+ "MSG_ERRQUEUE": true,
+ "MSG_FASTOPEN": true,
+ "MSG_FIN": true,
+ "MSG_FLUSH": true,
+ "MSG_HAVEMORE": true,
+ "MSG_HOLD": true,
+ "MSG_IOVUSRSPACE": true,
+ "MSG_LENUSRSPACE": true,
+ "MSG_MCAST": true,
+ "MSG_MORE": true,
+ "MSG_NAMEMBUF": true,
+ "MSG_NBIO": true,
+ "MSG_NEEDSA": true,
+ "MSG_NOSIGNAL": true,
+ "MSG_NOTIFICATION": true,
+ "MSG_OOB": true,
+ "MSG_PEEK": true,
+ "MSG_PROXY": true,
+ "MSG_RCVMORE": true,
+ "MSG_RST": true,
+ "MSG_SEND": true,
+ "MSG_SYN": true,
+ "MSG_TRUNC": true,
+ "MSG_TRYHARD": true,
+ "MSG_USERFLAGS": true,
+ "MSG_WAITALL": true,
+ "MSG_WAITFORONE": true,
+ "MSG_WAITSTREAM": true,
+ "MS_ACTIVE": true,
+ "MS_ASYNC": true,
+ "MS_BIND": true,
+ "MS_DEACTIVATE": true,
+ "MS_DIRSYNC": true,
+ "MS_INVALIDATE": true,
+ "MS_I_VERSION": true,
+ "MS_KERNMOUNT": true,
+ "MS_KILLPAGES": true,
+ "MS_MANDLOCK": true,
+ "MS_MGC_MSK": true,
+ "MS_MGC_VAL": true,
+ "MS_MOVE": true,
+ "MS_NOATIME": true,
+ "MS_NODEV": true,
+ "MS_NODIRATIME": true,
+ "MS_NOEXEC": true,
+ "MS_NOSUID": true,
+ "MS_NOUSER": true,
+ "MS_POSIXACL": true,
+ "MS_PRIVATE": true,
+ "MS_RDONLY": true,
+ "MS_REC": true,
+ "MS_RELATIME": true,
+ "MS_REMOUNT": true,
+ "MS_RMT_MASK": true,
+ "MS_SHARED": true,
+ "MS_SILENT": true,
+ "MS_SLAVE": true,
+ "MS_STRICTATIME": true,
+ "MS_SYNC": true,
+ "MS_SYNCHRONOUS": true,
+ "MS_UNBINDABLE": true,
+ "Madvise": true,
+ "MapViewOfFile": true,
+ "MaxTokenInfoClass": true,
+ "Mclpool": true,
+ "MibIfRow": true,
+ "Mkdir": true,
+ "Mkdirat": true,
+ "Mkfifo": true,
+ "Mknod": true,
+ "Mknodat": true,
+ "Mlock": true,
+ "Mlockall": true,
+ "Mmap": true,
+ "Mount": true,
+ "MoveFile": true,
+ "Mprotect": true,
+ "Msghdr": true,
+ "Munlock": true,
+ "Munlockall": true,
+ "Munmap": true,
+ "MustLoadDLL": true,
+ "NAME_MAX": true,
+ "NETLINK_ADD_MEMBERSHIP": true,
+ "NETLINK_AUDIT": true,
+ "NETLINK_BROADCAST_ERROR": true,
+ "NETLINK_CONNECTOR": true,
+ "NETLINK_DNRTMSG": true,
+ "NETLINK_DROP_MEMBERSHIP": true,
+ "NETLINK_ECRYPTFS": true,
+ "NETLINK_FIB_LOOKUP": true,
+ "NETLINK_FIREWALL": true,
+ "NETLINK_GENERIC": true,
+ "NETLINK_INET_DIAG": true,
+ "NETLINK_IP6_FW": true,
+ "NETLINK_ISCSI": true,
+ "NETLINK_KOBJECT_UEVENT": true,
+ "NETLINK_NETFILTER": true,
+ "NETLINK_NFLOG": true,
+ "NETLINK_NO_ENOBUFS": true,
+ "NETLINK_PKTINFO": true,
+ "NETLINK_RDMA": true,
+ "NETLINK_ROUTE": true,
+ "NETLINK_SCSITRANSPORT": true,
+ "NETLINK_SELINUX": true,
+ "NETLINK_UNUSED": true,
+ "NETLINK_USERSOCK": true,
+ "NETLINK_XFRM": true,
+ "NET_RT_DUMP": true,
+ "NET_RT_DUMP2": true,
+ "NET_RT_FLAGS": true,
+ "NET_RT_IFLIST": true,
+ "NET_RT_IFLIST2": true,
+ "NET_RT_IFLISTL": true,
+ "NET_RT_IFMALIST": true,
+ "NET_RT_MAXID": true,
+ "NET_RT_OIFLIST": true,
+ "NET_RT_OOIFLIST": true,
+ "NET_RT_STAT": true,
+ "NET_RT_STATS": true,
+ "NET_RT_TABLE": true,
+ "NET_RT_TRASH": true,
+ "NLA_ALIGNTO": true,
+ "NLA_F_NESTED": true,
+ "NLA_F_NET_BYTEORDER": true,
+ "NLA_HDRLEN": true,
+ "NLMSG_ALIGNTO": true,
+ "NLMSG_DONE": true,
+ "NLMSG_ERROR": true,
+ "NLMSG_HDRLEN": true,
+ "NLMSG_MIN_TYPE": true,
+ "NLMSG_NOOP": true,
+ "NLMSG_OVERRUN": true,
+ "NLM_F_ACK": true,
+ "NLM_F_APPEND": true,
+ "NLM_F_ATOMIC": true,
+ "NLM_F_CREATE": true,
+ "NLM_F_DUMP": true,
+ "NLM_F_ECHO": true,
+ "NLM_F_EXCL": true,
+ "NLM_F_MATCH": true,
+ "NLM_F_MULTI": true,
+ "NLM_F_REPLACE": true,
+ "NLM_F_REQUEST": true,
+ "NLM_F_ROOT": true,
+ "NOFLSH": true,
+ "NOTE_ABSOLUTE": true,
+ "NOTE_ATTRIB": true,
+ "NOTE_CHILD": true,
+ "NOTE_DELETE": true,
+ "NOTE_EOF": true,
+ "NOTE_EXEC": true,
+ "NOTE_EXIT": true,
+ "NOTE_EXITSTATUS": true,
+ "NOTE_EXTEND": true,
+ "NOTE_FFAND": true,
+ "NOTE_FFCOPY": true,
+ "NOTE_FFCTRLMASK": true,
+ "NOTE_FFLAGSMASK": true,
+ "NOTE_FFNOP": true,
+ "NOTE_FFOR": true,
+ "NOTE_FORK": true,
+ "NOTE_LINK": true,
+ "NOTE_LOWAT": true,
+ "NOTE_NONE": true,
+ "NOTE_NSECONDS": true,
+ "NOTE_PCTRLMASK": true,
+ "NOTE_PDATAMASK": true,
+ "NOTE_REAP": true,
+ "NOTE_RENAME": true,
+ "NOTE_RESOURCEEND": true,
+ "NOTE_REVOKE": true,
+ "NOTE_SECONDS": true,
+ "NOTE_SIGNAL": true,
+ "NOTE_TRACK": true,
+ "NOTE_TRACKERR": true,
+ "NOTE_TRIGGER": true,
+ "NOTE_TRUNCATE": true,
+ "NOTE_USECONDS": true,
+ "NOTE_VM_ERROR": true,
+ "NOTE_VM_PRESSURE": true,
+ "NOTE_VM_PRESSURE_SUDDEN_TERMINATE": true,
+ "NOTE_VM_PRESSURE_TERMINATE": true,
+ "NOTE_WRITE": true,
+ "NameCanonical": true,
+ "NameCanonicalEx": true,
+ "NameDisplay": true,
+ "NameDnsDomain": true,
+ "NameFullyQualifiedDN": true,
+ "NameSamCompatible": true,
+ "NameServicePrincipal": true,
+ "NameUniqueId": true,
+ "NameUnknown": true,
+ "NameUserPrincipal": true,
+ "Nanosleep": true,
+ "NetApiBufferFree": true,
+ "NetGetJoinInformation": true,
+ "NetSetupDomainName": true,
+ "NetSetupUnjoined": true,
+ "NetSetupUnknownStatus": true,
+ "NetSetupWorkgroupName": true,
+ "NetUserGetInfo": true,
+ "NetlinkMessage": true,
+ "NetlinkRIB": true,
+ "NetlinkRouteAttr": true,
+ "NetlinkRouteRequest": true,
+ "NewCallback": true,
+ "NewCallbackCDecl": true,
+ "NewLazyDLL": true,
+ "NlAttr": true,
+ "NlMsgerr": true,
+ "NlMsghdr": true,
+ "NsecToFiletime": true,
+ "NsecToTimespec": true,
+ "NsecToTimeval": true,
+ "Ntohs": true,
+ "OCRNL": true,
+ "OFDEL": true,
+ "OFILL": true,
+ "OFIOGETBMAP": true,
+ "OID_PKIX_KP_SERVER_AUTH": true,
+ "OID_SERVER_GATED_CRYPTO": true,
+ "OID_SGC_NETSCAPE": true,
+ "OLCUC": true,
+ "ONLCR": true,
+ "ONLRET": true,
+ "ONOCR": true,
+ "ONOEOT": true,
+ "OPEN_ALWAYS": true,
+ "OPEN_EXISTING": true,
+ "OPOST": true,
+ "O_ACCMODE": true,
+ "O_ALERT": true,
+ "O_ALT_IO": true,
+ "O_APPEND": true,
+ "O_ASYNC": true,
+ "O_CLOEXEC": true,
+ "O_CREAT": true,
+ "O_DIRECT": true,
+ "O_DIRECTORY": true,
+ "O_DSYNC": true,
+ "O_EVTONLY": true,
+ "O_EXCL": true,
+ "O_EXEC": true,
+ "O_EXLOCK": true,
+ "O_FSYNC": true,
+ "O_LARGEFILE": true,
+ "O_NDELAY": true,
+ "O_NOATIME": true,
+ "O_NOCTTY": true,
+ "O_NOFOLLOW": true,
+ "O_NONBLOCK": true,
+ "O_NOSIGPIPE": true,
+ "O_POPUP": true,
+ "O_RDONLY": true,
+ "O_RDWR": true,
+ "O_RSYNC": true,
+ "O_SHLOCK": true,
+ "O_SYMLINK": true,
+ "O_SYNC": true,
+ "O_TRUNC": true,
+ "O_TTY_INIT": true,
+ "O_WRONLY": true,
+ "Open": true,
+ "OpenCurrentProcessToken": true,
+ "OpenProcess": true,
+ "OpenProcessToken": true,
+ "Openat": true,
+ "Overlapped": true,
+ "PACKET_ADD_MEMBERSHIP": true,
+ "PACKET_BROADCAST": true,
+ "PACKET_DROP_MEMBERSHIP": true,
+ "PACKET_FASTROUTE": true,
+ "PACKET_HOST": true,
+ "PACKET_LOOPBACK": true,
+ "PACKET_MR_ALLMULTI": true,
+ "PACKET_MR_MULTICAST": true,
+ "PACKET_MR_PROMISC": true,
+ "PACKET_MULTICAST": true,
+ "PACKET_OTHERHOST": true,
+ "PACKET_OUTGOING": true,
+ "PACKET_RECV_OUTPUT": true,
+ "PACKET_RX_RING": true,
+ "PACKET_STATISTICS": true,
+ "PAGE_EXECUTE_READ": true,
+ "PAGE_EXECUTE_READWRITE": true,
+ "PAGE_EXECUTE_WRITECOPY": true,
+ "PAGE_READONLY": true,
+ "PAGE_READWRITE": true,
+ "PAGE_WRITECOPY": true,
+ "PARENB": true,
+ "PARMRK": true,
+ "PARODD": true,
+ "PENDIN": true,
+ "PFL_HIDDEN": true,
+ "PFL_MATCHES_PROTOCOL_ZERO": true,
+ "PFL_MULTIPLE_PROTO_ENTRIES": true,
+ "PFL_NETWORKDIRECT_PROVIDER": true,
+ "PFL_RECOMMENDED_PROTO_ENTRY": true,
+ "PF_FLUSH": true,
+ "PKCS_7_ASN_ENCODING": true,
+ "PMC5_PIPELINE_FLUSH": true,
+ "PRIO_PGRP": true,
+ "PRIO_PROCESS": true,
+ "PRIO_USER": true,
+ "PRI_IOFLUSH": true,
+ "PROCESS_QUERY_INFORMATION": true,
+ "PROCESS_TERMINATE": true,
+ "PROT_EXEC": true,
+ "PROT_GROWSDOWN": true,
+ "PROT_GROWSUP": true,
+ "PROT_NONE": true,
+ "PROT_READ": true,
+ "PROT_WRITE": true,
+ "PROV_DH_SCHANNEL": true,
+ "PROV_DSS": true,
+ "PROV_DSS_DH": true,
+ "PROV_EC_ECDSA_FULL": true,
+ "PROV_EC_ECDSA_SIG": true,
+ "PROV_EC_ECNRA_FULL": true,
+ "PROV_EC_ECNRA_SIG": true,
+ "PROV_FORTEZZA": true,
+ "PROV_INTEL_SEC": true,
+ "PROV_MS_EXCHANGE": true,
+ "PROV_REPLACE_OWF": true,
+ "PROV_RNG": true,
+ "PROV_RSA_AES": true,
+ "PROV_RSA_FULL": true,
+ "PROV_RSA_SCHANNEL": true,
+ "PROV_RSA_SIG": true,
+ "PROV_SPYRUS_LYNKS": true,
+ "PROV_SSL": true,
+ "PR_CAPBSET_DROP": true,
+ "PR_CAPBSET_READ": true,
+ "PR_CLEAR_SECCOMP_FILTER": true,
+ "PR_ENDIAN_BIG": true,
+ "PR_ENDIAN_LITTLE": true,
+ "PR_ENDIAN_PPC_LITTLE": true,
+ "PR_FPEMU_NOPRINT": true,
+ "PR_FPEMU_SIGFPE": true,
+ "PR_FP_EXC_ASYNC": true,
+ "PR_FP_EXC_DISABLED": true,
+ "PR_FP_EXC_DIV": true,
+ "PR_FP_EXC_INV": true,
+ "PR_FP_EXC_NONRECOV": true,
+ "PR_FP_EXC_OVF": true,
+ "PR_FP_EXC_PRECISE": true,
+ "PR_FP_EXC_RES": true,
+ "PR_FP_EXC_SW_ENABLE": true,
+ "PR_FP_EXC_UND": true,
+ "PR_GET_DUMPABLE": true,
+ "PR_GET_ENDIAN": true,
+ "PR_GET_FPEMU": true,
+ "PR_GET_FPEXC": true,
+ "PR_GET_KEEPCAPS": true,
+ "PR_GET_NAME": true,
+ "PR_GET_PDEATHSIG": true,
+ "PR_GET_SECCOMP": true,
+ "PR_GET_SECCOMP_FILTER": true,
+ "PR_GET_SECUREBITS": true,
+ "PR_GET_TIMERSLACK": true,
+ "PR_GET_TIMING": true,
+ "PR_GET_TSC": true,
+ "PR_GET_UNALIGN": true,
+ "PR_MCE_KILL": true,
+ "PR_MCE_KILL_CLEAR": true,
+ "PR_MCE_KILL_DEFAULT": true,
+ "PR_MCE_KILL_EARLY": true,
+ "PR_MCE_KILL_GET": true,
+ "PR_MCE_KILL_LATE": true,
+ "PR_MCE_KILL_SET": true,
+ "PR_SECCOMP_FILTER_EVENT": true,
+ "PR_SECCOMP_FILTER_SYSCALL": true,
+ "PR_SET_DUMPABLE": true,
+ "PR_SET_ENDIAN": true,
+ "PR_SET_FPEMU": true,
+ "PR_SET_FPEXC": true,
+ "PR_SET_KEEPCAPS": true,
+ "PR_SET_NAME": true,
+ "PR_SET_PDEATHSIG": true,
+ "PR_SET_PTRACER": true,
+ "PR_SET_SECCOMP": true,
+ "PR_SET_SECCOMP_FILTER": true,
+ "PR_SET_SECUREBITS": true,
+ "PR_SET_TIMERSLACK": true,
+ "PR_SET_TIMING": true,
+ "PR_SET_TSC": true,
+ "PR_SET_UNALIGN": true,
+ "PR_TASK_PERF_EVENTS_DISABLE": true,
+ "PR_TASK_PERF_EVENTS_ENABLE": true,
+ "PR_TIMING_STATISTICAL": true,
+ "PR_TIMING_TIMESTAMP": true,
+ "PR_TSC_ENABLE": true,
+ "PR_TSC_SIGSEGV": true,
+ "PR_UNALIGN_NOPRINT": true,
+ "PR_UNALIGN_SIGBUS": true,
+ "PTRACE_ARCH_PRCTL": true,
+ "PTRACE_ATTACH": true,
+ "PTRACE_CONT": true,
+ "PTRACE_DETACH": true,
+ "PTRACE_EVENT_CLONE": true,
+ "PTRACE_EVENT_EXEC": true,
+ "PTRACE_EVENT_EXIT": true,
+ "PTRACE_EVENT_FORK": true,
+ "PTRACE_EVENT_VFORK": true,
+ "PTRACE_EVENT_VFORK_DONE": true,
+ "PTRACE_GETCRUNCHREGS": true,
+ "PTRACE_GETEVENTMSG": true,
+ "PTRACE_GETFPREGS": true,
+ "PTRACE_GETFPXREGS": true,
+ "PTRACE_GETHBPREGS": true,
+ "PTRACE_GETREGS": true,
+ "PTRACE_GETREGSET": true,
+ "PTRACE_GETSIGINFO": true,
+ "PTRACE_GETVFPREGS": true,
+ "PTRACE_GETWMMXREGS": true,
+ "PTRACE_GET_THREAD_AREA": true,
+ "PTRACE_KILL": true,
+ "PTRACE_OLDSETOPTIONS": true,
+ "PTRACE_O_MASK": true,
+ "PTRACE_O_TRACECLONE": true,
+ "PTRACE_O_TRACEEXEC": true,
+ "PTRACE_O_TRACEEXIT": true,
+ "PTRACE_O_TRACEFORK": true,
+ "PTRACE_O_TRACESYSGOOD": true,
+ "PTRACE_O_TRACEVFORK": true,
+ "PTRACE_O_TRACEVFORKDONE": true,
+ "PTRACE_PEEKDATA": true,
+ "PTRACE_PEEKTEXT": true,
+ "PTRACE_PEEKUSR": true,
+ "PTRACE_POKEDATA": true,
+ "PTRACE_POKETEXT": true,
+ "PTRACE_POKEUSR": true,
+ "PTRACE_SETCRUNCHREGS": true,
+ "PTRACE_SETFPREGS": true,
+ "PTRACE_SETFPXREGS": true,
+ "PTRACE_SETHBPREGS": true,
+ "PTRACE_SETOPTIONS": true,
+ "PTRACE_SETREGS": true,
+ "PTRACE_SETREGSET": true,
+ "PTRACE_SETSIGINFO": true,
+ "PTRACE_SETVFPREGS": true,
+ "PTRACE_SETWMMXREGS": true,
+ "PTRACE_SET_SYSCALL": true,
+ "PTRACE_SET_THREAD_AREA": true,
+ "PTRACE_SINGLEBLOCK": true,
+ "PTRACE_SINGLESTEP": true,
+ "PTRACE_SYSCALL": true,
+ "PTRACE_SYSEMU": true,
+ "PTRACE_SYSEMU_SINGLESTEP": true,
+ "PTRACE_TRACEME": true,
+ "PT_ATTACH": true,
+ "PT_ATTACHEXC": true,
+ "PT_CONTINUE": true,
+ "PT_DATA_ADDR": true,
+ "PT_DENY_ATTACH": true,
+ "PT_DETACH": true,
+ "PT_FIRSTMACH": true,
+ "PT_FORCEQUOTA": true,
+ "PT_KILL": true,
+ "PT_MASK": true,
+ "PT_READ_D": true,
+ "PT_READ_I": true,
+ "PT_READ_U": true,
+ "PT_SIGEXC": true,
+ "PT_STEP": true,
+ "PT_TEXT_ADDR": true,
+ "PT_TEXT_END_ADDR": true,
+ "PT_THUPDATE": true,
+ "PT_TRACE_ME": true,
+ "PT_WRITE_D": true,
+ "PT_WRITE_I": true,
+ "PT_WRITE_U": true,
+ "ParseDirent": true,
+ "ParseNetlinkMessage": true,
+ "ParseNetlinkRouteAttr": true,
+ "ParseRoutingMessage": true,
+ "ParseRoutingSockaddr": true,
+ "ParseSocketControlMessage": true,
+ "ParseUnixCredentials": true,
+ "ParseUnixRights": true,
+ "PathMax": true,
+ "Pathconf": true,
+ "Pause": true,
+ "Pipe": true,
+ "Pipe2": true,
+ "PivotRoot": true,
+ "Pointer": true,
+ "PostQueuedCompletionStatus": true,
+ "Pread": true,
+ "Proc": true,
+ "ProcAttr": true,
+ "Process32First": true,
+ "Process32Next": true,
+ "ProcessEntry32": true,
+ "ProcessInformation": true,
+ "Protoent": true,
+ "PtraceAttach": true,
+ "PtraceCont": true,
+ "PtraceDetach": true,
+ "PtraceGetEventMsg": true,
+ "PtraceGetRegs": true,
+ "PtracePeekData": true,
+ "PtracePeekText": true,
+ "PtracePokeData": true,
+ "PtracePokeText": true,
+ "PtraceRegs": true,
+ "PtraceSetOptions": true,
+ "PtraceSetRegs": true,
+ "PtraceSingleStep": true,
+ "PtraceSyscall": true,
+ "Pwrite": true,
+ "REG_BINARY": true,
+ "REG_DWORD": true,
+ "REG_DWORD_BIG_ENDIAN": true,
+ "REG_DWORD_LITTLE_ENDIAN": true,
+ "REG_EXPAND_SZ": true,
+ "REG_FULL_RESOURCE_DESCRIPTOR": true,
+ "REG_LINK": true,
+ "REG_MULTI_SZ": true,
+ "REG_NONE": true,
+ "REG_QWORD": true,
+ "REG_QWORD_LITTLE_ENDIAN": true,
+ "REG_RESOURCE_LIST": true,
+ "REG_RESOURCE_REQUIREMENTS_LIST": true,
+ "REG_SZ": true,
+ "RLIMIT_AS": true,
+ "RLIMIT_CORE": true,
+ "RLIMIT_CPU": true,
+ "RLIMIT_DATA": true,
+ "RLIMIT_FSIZE": true,
+ "RLIMIT_NOFILE": true,
+ "RLIMIT_STACK": true,
+ "RLIM_INFINITY": true,
+ "RTAX_ADVMSS": true,
+ "RTAX_AUTHOR": true,
+ "RTAX_BRD": true,
+ "RTAX_CWND": true,
+ "RTAX_DST": true,
+ "RTAX_FEATURES": true,
+ "RTAX_FEATURE_ALLFRAG": true,
+ "RTAX_FEATURE_ECN": true,
+ "RTAX_FEATURE_SACK": true,
+ "RTAX_FEATURE_TIMESTAMP": true,
+ "RTAX_GATEWAY": true,
+ "RTAX_GENMASK": true,
+ "RTAX_HOPLIMIT": true,
+ "RTAX_IFA": true,
+ "RTAX_IFP": true,
+ "RTAX_INITCWND": true,
+ "RTAX_INITRWND": true,
+ "RTAX_LABEL": true,
+ "RTAX_LOCK": true,
+ "RTAX_MAX": true,
+ "RTAX_MTU": true,
+ "RTAX_NETMASK": true,
+ "RTAX_REORDERING": true,
+ "RTAX_RTO_MIN": true,
+ "RTAX_RTT": true,
+ "RTAX_RTTVAR": true,
+ "RTAX_SRC": true,
+ "RTAX_SRCMASK": true,
+ "RTAX_SSTHRESH": true,
+ "RTAX_TAG": true,
+ "RTAX_UNSPEC": true,
+ "RTAX_WINDOW": true,
+ "RTA_ALIGNTO": true,
+ "RTA_AUTHOR": true,
+ "RTA_BRD": true,
+ "RTA_CACHEINFO": true,
+ "RTA_DST": true,
+ "RTA_FLOW": true,
+ "RTA_GATEWAY": true,
+ "RTA_GENMASK": true,
+ "RTA_IFA": true,
+ "RTA_IFP": true,
+ "RTA_IIF": true,
+ "RTA_LABEL": true,
+ "RTA_MAX": true,
+ "RTA_METRICS": true,
+ "RTA_MULTIPATH": true,
+ "RTA_NETMASK": true,
+ "RTA_OIF": true,
+ "RTA_PREFSRC": true,
+ "RTA_PRIORITY": true,
+ "RTA_SRC": true,
+ "RTA_SRCMASK": true,
+ "RTA_TABLE": true,
+ "RTA_TAG": true,
+ "RTA_UNSPEC": true,
+ "RTCF_DIRECTSRC": true,
+ "RTCF_DOREDIRECT": true,
+ "RTCF_LOG": true,
+ "RTCF_MASQ": true,
+ "RTCF_NAT": true,
+ "RTCF_VALVE": true,
+ "RTF_ADDRCLASSMASK": true,
+ "RTF_ADDRCONF": true,
+ "RTF_ALLONLINK": true,
+ "RTF_ANNOUNCE": true,
+ "RTF_BLACKHOLE": true,
+ "RTF_BROADCAST": true,
+ "RTF_CACHE": true,
+ "RTF_CLONED": true,
+ "RTF_CLONING": true,
+ "RTF_CONDEMNED": true,
+ "RTF_DEFAULT": true,
+ "RTF_DELCLONE": true,
+ "RTF_DONE": true,
+ "RTF_DYNAMIC": true,
+ "RTF_FLOW": true,
+ "RTF_FMASK": true,
+ "RTF_GATEWAY": true,
+ "RTF_GWFLAG_COMPAT": true,
+ "RTF_HOST": true,
+ "RTF_IFREF": true,
+ "RTF_IFSCOPE": true,
+ "RTF_INTERFACE": true,
+ "RTF_IRTT": true,
+ "RTF_LINKRT": true,
+ "RTF_LLDATA": true,
+ "RTF_LLINFO": true,
+ "RTF_LOCAL": true,
+ "RTF_MASK": true,
+ "RTF_MODIFIED": true,
+ "RTF_MPATH": true,
+ "RTF_MPLS": true,
+ "RTF_MSS": true,
+ "RTF_MTU": true,
+ "RTF_MULTICAST": true,
+ "RTF_NAT": true,
+ "RTF_NOFORWARD": true,
+ "RTF_NONEXTHOP": true,
+ "RTF_NOPMTUDISC": true,
+ "RTF_PERMANENT_ARP": true,
+ "RTF_PINNED": true,
+ "RTF_POLICY": true,
+ "RTF_PRCLONING": true,
+ "RTF_PROTO1": true,
+ "RTF_PROTO2": true,
+ "RTF_PROTO3": true,
+ "RTF_REINSTATE": true,
+ "RTF_REJECT": true,
+ "RTF_RNH_LOCKED": true,
+ "RTF_SOURCE": true,
+ "RTF_SRC": true,
+ "RTF_STATIC": true,
+ "RTF_STICKY": true,
+ "RTF_THROW": true,
+ "RTF_TUNNEL": true,
+ "RTF_UP": true,
+ "RTF_USETRAILERS": true,
+ "RTF_WASCLONED": true,
+ "RTF_WINDOW": true,
+ "RTF_XRESOLVE": true,
+ "RTM_ADD": true,
+ "RTM_BASE": true,
+ "RTM_CHANGE": true,
+ "RTM_CHGADDR": true,
+ "RTM_DELACTION": true,
+ "RTM_DELADDR": true,
+ "RTM_DELADDRLABEL": true,
+ "RTM_DELETE": true,
+ "RTM_DELLINK": true,
+ "RTM_DELMADDR": true,
+ "RTM_DELNEIGH": true,
+ "RTM_DELQDISC": true,
+ "RTM_DELROUTE": true,
+ "RTM_DELRULE": true,
+ "RTM_DELTCLASS": true,
+ "RTM_DELTFILTER": true,
+ "RTM_DESYNC": true,
+ "RTM_F_CLONED": true,
+ "RTM_F_EQUALIZE": true,
+ "RTM_F_NOTIFY": true,
+ "RTM_F_PREFIX": true,
+ "RTM_GET": true,
+ "RTM_GET2": true,
+ "RTM_GETACTION": true,
+ "RTM_GETADDR": true,
+ "RTM_GETADDRLABEL": true,
+ "RTM_GETANYCAST": true,
+ "RTM_GETDCB": true,
+ "RTM_GETLINK": true,
+ "RTM_GETMULTICAST": true,
+ "RTM_GETNEIGH": true,
+ "RTM_GETNEIGHTBL": true,
+ "RTM_GETQDISC": true,
+ "RTM_GETROUTE": true,
+ "RTM_GETRULE": true,
+ "RTM_GETTCLASS": true,
+ "RTM_GETTFILTER": true,
+ "RTM_IEEE80211": true,
+ "RTM_IFANNOUNCE": true,
+ "RTM_IFINFO": true,
+ "RTM_IFINFO2": true,
+ "RTM_LLINFO_UPD": true,
+ "RTM_LOCK": true,
+ "RTM_LOSING": true,
+ "RTM_MAX": true,
+ "RTM_MAXSIZE": true,
+ "RTM_MISS": true,
+ "RTM_NEWACTION": true,
+ "RTM_NEWADDR": true,
+ "RTM_NEWADDRLABEL": true,
+ "RTM_NEWLINK": true,
+ "RTM_NEWMADDR": true,
+ "RTM_NEWMADDR2": true,
+ "RTM_NEWNDUSEROPT": true,
+ "RTM_NEWNEIGH": true,
+ "RTM_NEWNEIGHTBL": true,
+ "RTM_NEWPREFIX": true,
+ "RTM_NEWQDISC": true,
+ "RTM_NEWROUTE": true,
+ "RTM_NEWRULE": true,
+ "RTM_NEWTCLASS": true,
+ "RTM_NEWTFILTER": true,
+ "RTM_NR_FAMILIES": true,
+ "RTM_NR_MSGTYPES": true,
+ "RTM_OIFINFO": true,
+ "RTM_OLDADD": true,
+ "RTM_OLDDEL": true,
+ "RTM_OOIFINFO": true,
+ "RTM_REDIRECT": true,
+ "RTM_RESOLVE": true,
+ "RTM_RTTUNIT": true,
+ "RTM_SETDCB": true,
+ "RTM_SETGATE": true,
+ "RTM_SETLINK": true,
+ "RTM_SETNEIGHTBL": true,
+ "RTM_VERSION": true,
+ "RTNH_ALIGNTO": true,
+ "RTNH_F_DEAD": true,
+ "RTNH_F_ONLINK": true,
+ "RTNH_F_PERVASIVE": true,
+ "RTNLGRP_IPV4_IFADDR": true,
+ "RTNLGRP_IPV4_MROUTE": true,
+ "RTNLGRP_IPV4_ROUTE": true,
+ "RTNLGRP_IPV4_RULE": true,
+ "RTNLGRP_IPV6_IFADDR": true,
+ "RTNLGRP_IPV6_IFINFO": true,
+ "RTNLGRP_IPV6_MROUTE": true,
+ "RTNLGRP_IPV6_PREFIX": true,
+ "RTNLGRP_IPV6_ROUTE": true,
+ "RTNLGRP_IPV6_RULE": true,
+ "RTNLGRP_LINK": true,
+ "RTNLGRP_ND_USEROPT": true,
+ "RTNLGRP_NEIGH": true,
+ "RTNLGRP_NONE": true,
+ "RTNLGRP_NOTIFY": true,
+ "RTNLGRP_TC": true,
+ "RTN_ANYCAST": true,
+ "RTN_BLACKHOLE": true,
+ "RTN_BROADCAST": true,
+ "RTN_LOCAL": true,
+ "RTN_MAX": true,
+ "RTN_MULTICAST": true,
+ "RTN_NAT": true,
+ "RTN_PROHIBIT": true,
+ "RTN_THROW": true,
+ "RTN_UNICAST": true,
+ "RTN_UNREACHABLE": true,
+ "RTN_UNSPEC": true,
+ "RTN_XRESOLVE": true,
+ "RTPROT_BIRD": true,
+ "RTPROT_BOOT": true,
+ "RTPROT_DHCP": true,
+ "RTPROT_DNROUTED": true,
+ "RTPROT_GATED": true,
+ "RTPROT_KERNEL": true,
+ "RTPROT_MRT": true,
+ "RTPROT_NTK": true,
+ "RTPROT_RA": true,
+ "RTPROT_REDIRECT": true,
+ "RTPROT_STATIC": true,
+ "RTPROT_UNSPEC": true,
+ "RTPROT_XORP": true,
+ "RTPROT_ZEBRA": true,
+ "RTV_EXPIRE": true,
+ "RTV_HOPCOUNT": true,
+ "RTV_MTU": true,
+ "RTV_RPIPE": true,
+ "RTV_RTT": true,
+ "RTV_RTTVAR": true,
+ "RTV_SPIPE": true,
+ "RTV_SSTHRESH": true,
+ "RTV_WEIGHT": true,
+ "RT_CACHING_CONTEXT": true,
+ "RT_CLASS_DEFAULT": true,
+ "RT_CLASS_LOCAL": true,
+ "RT_CLASS_MAIN": true,
+ "RT_CLASS_MAX": true,
+ "RT_CLASS_UNSPEC": true,
+ "RT_DEFAULT_FIB": true,
+ "RT_NORTREF": true,
+ "RT_SCOPE_HOST": true,
+ "RT_SCOPE_LINK": true,
+ "RT_SCOPE_NOWHERE": true,
+ "RT_SCOPE_SITE": true,
+ "RT_SCOPE_UNIVERSE": true,
+ "RT_TABLEID_MAX": true,
+ "RT_TABLE_COMPAT": true,
+ "RT_TABLE_DEFAULT": true,
+ "RT_TABLE_LOCAL": true,
+ "RT_TABLE_MAIN": true,
+ "RT_TABLE_MAX": true,
+ "RT_TABLE_UNSPEC": true,
+ "RUSAGE_CHILDREN": true,
+ "RUSAGE_SELF": true,
+ "RUSAGE_THREAD": true,
+ "Radvisory_t": true,
+ "RawConn": true,
+ "RawSockaddr": true,
+ "RawSockaddrAny": true,
+ "RawSockaddrDatalink": true,
+ "RawSockaddrInet4": true,
+ "RawSockaddrInet6": true,
+ "RawSockaddrLinklayer": true,
+ "RawSockaddrNetlink": true,
+ "RawSockaddrUnix": true,
+ "RawSyscall": true,
+ "RawSyscall6": true,
+ "Read": true,
+ "ReadConsole": true,
+ "ReadDirectoryChanges": true,
+ "ReadDirent": true,
+ "ReadFile": true,
+ "Readlink": true,
+ "Reboot": true,
+ "Recvfrom": true,
+ "Recvmsg": true,
+ "RegCloseKey": true,
+ "RegEnumKeyEx": true,
+ "RegOpenKeyEx": true,
+ "RegQueryInfoKey": true,
+ "RegQueryValueEx": true,
+ "RemoveDirectory": true,
+ "Removexattr": true,
+ "Rename": true,
+ "Renameat": true,
+ "Revoke": true,
+ "Rlimit": true,
+ "Rmdir": true,
+ "RouteMessage": true,
+ "RouteRIB": true,
+ "RtAttr": true,
+ "RtGenmsg": true,
+ "RtMetrics": true,
+ "RtMsg": true,
+ "RtMsghdr": true,
+ "RtNexthop": true,
+ "Rusage": true,
+ "SCM_BINTIME": true,
+ "SCM_CREDENTIALS": true,
+ "SCM_CREDS": true,
+ "SCM_RIGHTS": true,
+ "SCM_TIMESTAMP": true,
+ "SCM_TIMESTAMPING": true,
+ "SCM_TIMESTAMPNS": true,
+ "SCM_TIMESTAMP_MONOTONIC": true,
+ "SHUT_RD": true,
+ "SHUT_RDWR": true,
+ "SHUT_WR": true,
+ "SID": true,
+ "SIDAndAttributes": true,
+ "SIGABRT": true,
+ "SIGALRM": true,
+ "SIGBUS": true,
+ "SIGCHLD": true,
+ "SIGCLD": true,
+ "SIGCONT": true,
+ "SIGEMT": true,
+ "SIGFPE": true,
+ "SIGHUP": true,
+ "SIGILL": true,
+ "SIGINFO": true,
+ "SIGINT": true,
+ "SIGIO": true,
+ "SIGIOT": true,
+ "SIGKILL": true,
+ "SIGLIBRT": true,
+ "SIGLWP": true,
+ "SIGPIPE": true,
+ "SIGPOLL": true,
+ "SIGPROF": true,
+ "SIGPWR": true,
+ "SIGQUIT": true,
+ "SIGSEGV": true,
+ "SIGSTKFLT": true,
+ "SIGSTOP": true,
+ "SIGSYS": true,
+ "SIGTERM": true,
+ "SIGTHR": true,
+ "SIGTRAP": true,
+ "SIGTSTP": true,
+ "SIGTTIN": true,
+ "SIGTTOU": true,
+ "SIGUNUSED": true,
+ "SIGURG": true,
+ "SIGUSR1": true,
+ "SIGUSR2": true,
+ "SIGVTALRM": true,
+ "SIGWINCH": true,
+ "SIGXCPU": true,
+ "SIGXFSZ": true,
+ "SIOCADDDLCI": true,
+ "SIOCADDMULTI": true,
+ "SIOCADDRT": true,
+ "SIOCAIFADDR": true,
+ "SIOCAIFGROUP": true,
+ "SIOCALIFADDR": true,
+ "SIOCARPIPLL": true,
+ "SIOCATMARK": true,
+ "SIOCAUTOADDR": true,
+ "SIOCAUTONETMASK": true,
+ "SIOCBRDGADD": true,
+ "SIOCBRDGADDS": true,
+ "SIOCBRDGARL": true,
+ "SIOCBRDGDADDR": true,
+ "SIOCBRDGDEL": true,
+ "SIOCBRDGDELS": true,
+ "SIOCBRDGFLUSH": true,
+ "SIOCBRDGFRL": true,
+ "SIOCBRDGGCACHE": true,
+ "SIOCBRDGGFD": true,
+ "SIOCBRDGGHT": true,
+ "SIOCBRDGGIFFLGS": true,
+ "SIOCBRDGGMA": true,
+ "SIOCBRDGGPARAM": true,
+ "SIOCBRDGGPRI": true,
+ "SIOCBRDGGRL": true,
+ "SIOCBRDGGSIFS": true,
+ "SIOCBRDGGTO": true,
+ "SIOCBRDGIFS": true,
+ "SIOCBRDGRTS": true,
+ "SIOCBRDGSADDR": true,
+ "SIOCBRDGSCACHE": true,
+ "SIOCBRDGSFD": true,
+ "SIOCBRDGSHT": true,
+ "SIOCBRDGSIFCOST": true,
+ "SIOCBRDGSIFFLGS": true,
+ "SIOCBRDGSIFPRIO": true,
+ "SIOCBRDGSMA": true,
+ "SIOCBRDGSPRI": true,
+ "SIOCBRDGSPROTO": true,
+ "SIOCBRDGSTO": true,
+ "SIOCBRDGSTXHC": true,
+ "SIOCDARP": true,
+ "SIOCDELDLCI": true,
+ "SIOCDELMULTI": true,
+ "SIOCDELRT": true,
+ "SIOCDEVPRIVATE": true,
+ "SIOCDIFADDR": true,
+ "SIOCDIFGROUP": true,
+ "SIOCDIFPHYADDR": true,
+ "SIOCDLIFADDR": true,
+ "SIOCDRARP": true,
+ "SIOCGARP": true,
+ "SIOCGDRVSPEC": true,
+ "SIOCGETKALIVE": true,
+ "SIOCGETLABEL": true,
+ "SIOCGETPFLOW": true,
+ "SIOCGETPFSYNC": true,
+ "SIOCGETSGCNT": true,
+ "SIOCGETVIFCNT": true,
+ "SIOCGETVLAN": true,
+ "SIOCGHIWAT": true,
+ "SIOCGIFADDR": true,
+ "SIOCGIFADDRPREF": true,
+ "SIOCGIFALIAS": true,
+ "SIOCGIFALTMTU": true,
+ "SIOCGIFASYNCMAP": true,
+ "SIOCGIFBOND": true,
+ "SIOCGIFBR": true,
+ "SIOCGIFBRDADDR": true,
+ "SIOCGIFCAP": true,
+ "SIOCGIFCONF": true,
+ "SIOCGIFCOUNT": true,
+ "SIOCGIFDATA": true,
+ "SIOCGIFDESCR": true,
+ "SIOCGIFDEVMTU": true,
+ "SIOCGIFDLT": true,
+ "SIOCGIFDSTADDR": true,
+ "SIOCGIFENCAP": true,
+ "SIOCGIFFIB": true,
+ "SIOCGIFFLAGS": true,
+ "SIOCGIFGATTR": true,
+ "SIOCGIFGENERIC": true,
+ "SIOCGIFGMEMB": true,
+ "SIOCGIFGROUP": true,
+ "SIOCGIFHARDMTU": true,
+ "SIOCGIFHWADDR": true,
+ "SIOCGIFINDEX": true,
+ "SIOCGIFKPI": true,
+ "SIOCGIFMAC": true,
+ "SIOCGIFMAP": true,
+ "SIOCGIFMEDIA": true,
+ "SIOCGIFMEM": true,
+ "SIOCGIFMETRIC": true,
+ "SIOCGIFMTU": true,
+ "SIOCGIFNAME": true,
+ "SIOCGIFNETMASK": true,
+ "SIOCGIFPDSTADDR": true,
+ "SIOCGIFPFLAGS": true,
+ "SIOCGIFPHYS": true,
+ "SIOCGIFPRIORITY": true,
+ "SIOCGIFPSRCADDR": true,
+ "SIOCGIFRDOMAIN": true,
+ "SIOCGIFRTLABEL": true,
+ "SIOCGIFSLAVE": true,
+ "SIOCGIFSTATUS": true,
+ "SIOCGIFTIMESLOT": true,
+ "SIOCGIFTXQLEN": true,
+ "SIOCGIFVLAN": true,
+ "SIOCGIFWAKEFLAGS": true,
+ "SIOCGIFXFLAGS": true,
+ "SIOCGLIFADDR": true,
+ "SIOCGLIFPHYADDR": true,
+ "SIOCGLIFPHYRTABLE": true,
+ "SIOCGLIFPHYTTL": true,
+ "SIOCGLINKSTR": true,
+ "SIOCGLOWAT": true,
+ "SIOCGPGRP": true,
+ "SIOCGPRIVATE_0": true,
+ "SIOCGPRIVATE_1": true,
+ "SIOCGRARP": true,
+ "SIOCGSPPPPARAMS": true,
+ "SIOCGSTAMP": true,
+ "SIOCGSTAMPNS": true,
+ "SIOCGVH": true,
+ "SIOCGVNETID": true,
+ "SIOCIFCREATE": true,
+ "SIOCIFCREATE2": true,
+ "SIOCIFDESTROY": true,
+ "SIOCIFGCLONERS": true,
+ "SIOCINITIFADDR": true,
+ "SIOCPROTOPRIVATE": true,
+ "SIOCRSLVMULTI": true,
+ "SIOCRTMSG": true,
+ "SIOCSARP": true,
+ "SIOCSDRVSPEC": true,
+ "SIOCSETKALIVE": true,
+ "SIOCSETLABEL": true,
+ "SIOCSETPFLOW": true,
+ "SIOCSETPFSYNC": true,
+ "SIOCSETVLAN": true,
+ "SIOCSHIWAT": true,
+ "SIOCSIFADDR": true,
+ "SIOCSIFADDRPREF": true,
+ "SIOCSIFALTMTU": true,
+ "SIOCSIFASYNCMAP": true,
+ "SIOCSIFBOND": true,
+ "SIOCSIFBR": true,
+ "SIOCSIFBRDADDR": true,
+ "SIOCSIFCAP": true,
+ "SIOCSIFDESCR": true,
+ "SIOCSIFDSTADDR": true,
+ "SIOCSIFENCAP": true,
+ "SIOCSIFFIB": true,
+ "SIOCSIFFLAGS": true,
+ "SIOCSIFGATTR": true,
+ "SIOCSIFGENERIC": true,
+ "SIOCSIFHWADDR": true,
+ "SIOCSIFHWBROADCAST": true,
+ "SIOCSIFKPI": true,
+ "SIOCSIFLINK": true,
+ "SIOCSIFLLADDR": true,
+ "SIOCSIFMAC": true,
+ "SIOCSIFMAP": true,
+ "SIOCSIFMEDIA": true,
+ "SIOCSIFMEM": true,
+ "SIOCSIFMETRIC": true,
+ "SIOCSIFMTU": true,
+ "SIOCSIFNAME": true,
+ "SIOCSIFNETMASK": true,
+ "SIOCSIFPFLAGS": true,
+ "SIOCSIFPHYADDR": true,
+ "SIOCSIFPHYS": true,
+ "SIOCSIFPRIORITY": true,
+ "SIOCSIFRDOMAIN": true,
+ "SIOCSIFRTLABEL": true,
+ "SIOCSIFRVNET": true,
+ "SIOCSIFSLAVE": true,
+ "SIOCSIFTIMESLOT": true,
+ "SIOCSIFTXQLEN": true,
+ "SIOCSIFVLAN": true,
+ "SIOCSIFVNET": true,
+ "SIOCSIFXFLAGS": true,
+ "SIOCSLIFPHYADDR": true,
+ "SIOCSLIFPHYRTABLE": true,
+ "SIOCSLIFPHYTTL": true,
+ "SIOCSLINKSTR": true,
+ "SIOCSLOWAT": true,
+ "SIOCSPGRP": true,
+ "SIOCSRARP": true,
+ "SIOCSSPPPPARAMS": true,
+ "SIOCSVH": true,
+ "SIOCSVNETID": true,
+ "SIOCZIFDATA": true,
+ "SIO_GET_EXTENSION_FUNCTION_POINTER": true,
+ "SIO_GET_INTERFACE_LIST": true,
+ "SIO_KEEPALIVE_VALS": true,
+ "SIO_UDP_CONNRESET": true,
+ "SOCK_CLOEXEC": true,
+ "SOCK_DCCP": true,
+ "SOCK_DGRAM": true,
+ "SOCK_FLAGS_MASK": true,
+ "SOCK_MAXADDRLEN": true,
+ "SOCK_NONBLOCK": true,
+ "SOCK_NOSIGPIPE": true,
+ "SOCK_PACKET": true,
+ "SOCK_RAW": true,
+ "SOCK_RDM": true,
+ "SOCK_SEQPACKET": true,
+ "SOCK_STREAM": true,
+ "SOL_AAL": true,
+ "SOL_ATM": true,
+ "SOL_DECNET": true,
+ "SOL_ICMPV6": true,
+ "SOL_IP": true,
+ "SOL_IPV6": true,
+ "SOL_IRDA": true,
+ "SOL_PACKET": true,
+ "SOL_RAW": true,
+ "SOL_SOCKET": true,
+ "SOL_TCP": true,
+ "SOL_X25": true,
+ "SOMAXCONN": true,
+ "SO_ACCEPTCONN": true,
+ "SO_ACCEPTFILTER": true,
+ "SO_ATTACH_FILTER": true,
+ "SO_BINDANY": true,
+ "SO_BINDTODEVICE": true,
+ "SO_BINTIME": true,
+ "SO_BROADCAST": true,
+ "SO_BSDCOMPAT": true,
+ "SO_DEBUG": true,
+ "SO_DETACH_FILTER": true,
+ "SO_DOMAIN": true,
+ "SO_DONTROUTE": true,
+ "SO_DONTTRUNC": true,
+ "SO_ERROR": true,
+ "SO_KEEPALIVE": true,
+ "SO_LABEL": true,
+ "SO_LINGER": true,
+ "SO_LINGER_SEC": true,
+ "SO_LISTENINCQLEN": true,
+ "SO_LISTENQLEN": true,
+ "SO_LISTENQLIMIT": true,
+ "SO_MARK": true,
+ "SO_NETPROC": true,
+ "SO_NKE": true,
+ "SO_NOADDRERR": true,
+ "SO_NOHEADER": true,
+ "SO_NOSIGPIPE": true,
+ "SO_NOTIFYCONFLICT": true,
+ "SO_NO_CHECK": true,
+ "SO_NO_DDP": true,
+ "SO_NO_OFFLOAD": true,
+ "SO_NP_EXTENSIONS": true,
+ "SO_NREAD": true,
+ "SO_NWRITE": true,
+ "SO_OOBINLINE": true,
+ "SO_OVERFLOWED": true,
+ "SO_PASSCRED": true,
+ "SO_PASSSEC": true,
+ "SO_PEERCRED": true,
+ "SO_PEERLABEL": true,
+ "SO_PEERNAME": true,
+ "SO_PEERSEC": true,
+ "SO_PRIORITY": true,
+ "SO_PROTOCOL": true,
+ "SO_PROTOTYPE": true,
+ "SO_RANDOMPORT": true,
+ "SO_RCVBUF": true,
+ "SO_RCVBUFFORCE": true,
+ "SO_RCVLOWAT": true,
+ "SO_RCVTIMEO": true,
+ "SO_RESTRICTIONS": true,
+ "SO_RESTRICT_DENYIN": true,
+ "SO_RESTRICT_DENYOUT": true,
+ "SO_RESTRICT_DENYSET": true,
+ "SO_REUSEADDR": true,
+ "SO_REUSEPORT": true,
+ "SO_REUSESHAREUID": true,
+ "SO_RTABLE": true,
+ "SO_RXQ_OVFL": true,
+ "SO_SECURITY_AUTHENTICATION": true,
+ "SO_SECURITY_ENCRYPTION_NETWORK": true,
+ "SO_SECURITY_ENCRYPTION_TRANSPORT": true,
+ "SO_SETFIB": true,
+ "SO_SNDBUF": true,
+ "SO_SNDBUFFORCE": true,
+ "SO_SNDLOWAT": true,
+ "SO_SNDTIMEO": true,
+ "SO_SPLICE": true,
+ "SO_TIMESTAMP": true,
+ "SO_TIMESTAMPING": true,
+ "SO_TIMESTAMPNS": true,
+ "SO_TIMESTAMP_MONOTONIC": true,
+ "SO_TYPE": true,
+ "SO_UPCALLCLOSEWAIT": true,
+ "SO_UPDATE_ACCEPT_CONTEXT": true,
+ "SO_UPDATE_CONNECT_CONTEXT": true,
+ "SO_USELOOPBACK": true,
+ "SO_USER_COOKIE": true,
+ "SO_VENDOR": true,
+ "SO_WANTMORE": true,
+ "SO_WANTOOBFLAG": true,
+ "SSLExtraCertChainPolicyPara": true,
+ "STANDARD_RIGHTS_ALL": true,
+ "STANDARD_RIGHTS_EXECUTE": true,
+ "STANDARD_RIGHTS_READ": true,
+ "STANDARD_RIGHTS_REQUIRED": true,
+ "STANDARD_RIGHTS_WRITE": true,
+ "STARTF_USESHOWWINDOW": true,
+ "STARTF_USESTDHANDLES": true,
+ "STD_ERROR_HANDLE": true,
+ "STD_INPUT_HANDLE": true,
+ "STD_OUTPUT_HANDLE": true,
+ "SUBLANG_ENGLISH_US": true,
+ "SW_FORCEMINIMIZE": true,
+ "SW_HIDE": true,
+ "SW_MAXIMIZE": true,
+ "SW_MINIMIZE": true,
+ "SW_NORMAL": true,
+ "SW_RESTORE": true,
+ "SW_SHOW": true,
+ "SW_SHOWDEFAULT": true,
+ "SW_SHOWMAXIMIZED": true,
+ "SW_SHOWMINIMIZED": true,
+ "SW_SHOWMINNOACTIVE": true,
+ "SW_SHOWNA": true,
+ "SW_SHOWNOACTIVATE": true,
+ "SW_SHOWNORMAL": true,
+ "SYMBOLIC_LINK_FLAG_DIRECTORY": true,
+ "SYNCHRONIZE": true,
+ "SYSCTL_VERSION": true,
+ "SYSCTL_VERS_0": true,
+ "SYSCTL_VERS_1": true,
+ "SYSCTL_VERS_MASK": true,
+ "SYS_ABORT2": true,
+ "SYS_ACCEPT": true,
+ "SYS_ACCEPT4": true,
+ "SYS_ACCEPT_NOCANCEL": true,
+ "SYS_ACCESS": true,
+ "SYS_ACCESS_EXTENDED": true,
+ "SYS_ACCT": true,
+ "SYS_ADD_KEY": true,
+ "SYS_ADD_PROFIL": true,
+ "SYS_ADJFREQ": true,
+ "SYS_ADJTIME": true,
+ "SYS_ADJTIMEX": true,
+ "SYS_AFS_SYSCALL": true,
+ "SYS_AIO_CANCEL": true,
+ "SYS_AIO_ERROR": true,
+ "SYS_AIO_FSYNC": true,
+ "SYS_AIO_READ": true,
+ "SYS_AIO_RETURN": true,
+ "SYS_AIO_SUSPEND": true,
+ "SYS_AIO_SUSPEND_NOCANCEL": true,
+ "SYS_AIO_WRITE": true,
+ "SYS_ALARM": true,
+ "SYS_ARCH_PRCTL": true,
+ "SYS_ARM_FADVISE64_64": true,
+ "SYS_ARM_SYNC_FILE_RANGE": true,
+ "SYS_ATGETMSG": true,
+ "SYS_ATPGETREQ": true,
+ "SYS_ATPGETRSP": true,
+ "SYS_ATPSNDREQ": true,
+ "SYS_ATPSNDRSP": true,
+ "SYS_ATPUTMSG": true,
+ "SYS_ATSOCKET": true,
+ "SYS_AUDIT": true,
+ "SYS_AUDITCTL": true,
+ "SYS_AUDITON": true,
+ "SYS_AUDIT_SESSION_JOIN": true,
+ "SYS_AUDIT_SESSION_PORT": true,
+ "SYS_AUDIT_SESSION_SELF": true,
+ "SYS_BDFLUSH": true,
+ "SYS_BIND": true,
+ "SYS_BINDAT": true,
+ "SYS_BREAK": true,
+ "SYS_BRK": true,
+ "SYS_BSDTHREAD_CREATE": true,
+ "SYS_BSDTHREAD_REGISTER": true,
+ "SYS_BSDTHREAD_TERMINATE": true,
+ "SYS_CAPGET": true,
+ "SYS_CAPSET": true,
+ "SYS_CAP_ENTER": true,
+ "SYS_CAP_FCNTLS_GET": true,
+ "SYS_CAP_FCNTLS_LIMIT": true,
+ "SYS_CAP_GETMODE": true,
+ "SYS_CAP_GETRIGHTS": true,
+ "SYS_CAP_IOCTLS_GET": true,
+ "SYS_CAP_IOCTLS_LIMIT": true,
+ "SYS_CAP_NEW": true,
+ "SYS_CAP_RIGHTS_GET": true,
+ "SYS_CAP_RIGHTS_LIMIT": true,
+ "SYS_CHDIR": true,
+ "SYS_CHFLAGS": true,
+ "SYS_CHFLAGSAT": true,
+ "SYS_CHMOD": true,
+ "SYS_CHMOD_EXTENDED": true,
+ "SYS_CHOWN": true,
+ "SYS_CHOWN32": true,
+ "SYS_CHROOT": true,
+ "SYS_CHUD": true,
+ "SYS_CLOCK_ADJTIME": true,
+ "SYS_CLOCK_GETCPUCLOCKID2": true,
+ "SYS_CLOCK_GETRES": true,
+ "SYS_CLOCK_GETTIME": true,
+ "SYS_CLOCK_NANOSLEEP": true,
+ "SYS_CLOCK_SETTIME": true,
+ "SYS_CLONE": true,
+ "SYS_CLOSE": true,
+ "SYS_CLOSEFROM": true,
+ "SYS_CLOSE_NOCANCEL": true,
+ "SYS_CONNECT": true,
+ "SYS_CONNECTAT": true,
+ "SYS_CONNECT_NOCANCEL": true,
+ "SYS_COPYFILE": true,
+ "SYS_CPUSET": true,
+ "SYS_CPUSET_GETAFFINITY": true,
+ "SYS_CPUSET_GETID": true,
+ "SYS_CPUSET_SETAFFINITY": true,
+ "SYS_CPUSET_SETID": true,
+ "SYS_CREAT": true,
+ "SYS_CREATE_MODULE": true,
+ "SYS_CSOPS": true,
+ "SYS_DELETE": true,
+ "SYS_DELETE_MODULE": true,
+ "SYS_DUP": true,
+ "SYS_DUP2": true,
+ "SYS_DUP3": true,
+ "SYS_EACCESS": true,
+ "SYS_EPOLL_CREATE": true,
+ "SYS_EPOLL_CREATE1": true,
+ "SYS_EPOLL_CTL": true,
+ "SYS_EPOLL_CTL_OLD": true,
+ "SYS_EPOLL_PWAIT": true,
+ "SYS_EPOLL_WAIT": true,
+ "SYS_EPOLL_WAIT_OLD": true,
+ "SYS_EVENTFD": true,
+ "SYS_EVENTFD2": true,
+ "SYS_EXCHANGEDATA": true,
+ "SYS_EXECVE": true,
+ "SYS_EXIT": true,
+ "SYS_EXIT_GROUP": true,
+ "SYS_EXTATTRCTL": true,
+ "SYS_EXTATTR_DELETE_FD": true,
+ "SYS_EXTATTR_DELETE_FILE": true,
+ "SYS_EXTATTR_DELETE_LINK": true,
+ "SYS_EXTATTR_GET_FD": true,
+ "SYS_EXTATTR_GET_FILE": true,
+ "SYS_EXTATTR_GET_LINK": true,
+ "SYS_EXTATTR_LIST_FD": true,
+ "SYS_EXTATTR_LIST_FILE": true,
+ "SYS_EXTATTR_LIST_LINK": true,
+ "SYS_EXTATTR_SET_FD": true,
+ "SYS_EXTATTR_SET_FILE": true,
+ "SYS_EXTATTR_SET_LINK": true,
+ "SYS_FACCESSAT": true,
+ "SYS_FADVISE64": true,
+ "SYS_FADVISE64_64": true,
+ "SYS_FALLOCATE": true,
+ "SYS_FANOTIFY_INIT": true,
+ "SYS_FANOTIFY_MARK": true,
+ "SYS_FCHDIR": true,
+ "SYS_FCHFLAGS": true,
+ "SYS_FCHMOD": true,
+ "SYS_FCHMODAT": true,
+ "SYS_FCHMOD_EXTENDED": true,
+ "SYS_FCHOWN": true,
+ "SYS_FCHOWN32": true,
+ "SYS_FCHOWNAT": true,
+ "SYS_FCHROOT": true,
+ "SYS_FCNTL": true,
+ "SYS_FCNTL64": true,
+ "SYS_FCNTL_NOCANCEL": true,
+ "SYS_FDATASYNC": true,
+ "SYS_FEXECVE": true,
+ "SYS_FFCLOCK_GETCOUNTER": true,
+ "SYS_FFCLOCK_GETESTIMATE": true,
+ "SYS_FFCLOCK_SETESTIMATE": true,
+ "SYS_FFSCTL": true,
+ "SYS_FGETATTRLIST": true,
+ "SYS_FGETXATTR": true,
+ "SYS_FHOPEN": true,
+ "SYS_FHSTAT": true,
+ "SYS_FHSTATFS": true,
+ "SYS_FILEPORT_MAKEFD": true,
+ "SYS_FILEPORT_MAKEPORT": true,
+ "SYS_FKTRACE": true,
+ "SYS_FLISTXATTR": true,
+ "SYS_FLOCK": true,
+ "SYS_FORK": true,
+ "SYS_FPATHCONF": true,
+ "SYS_FREEBSD6_FTRUNCATE": true,
+ "SYS_FREEBSD6_LSEEK": true,
+ "SYS_FREEBSD6_MMAP": true,
+ "SYS_FREEBSD6_PREAD": true,
+ "SYS_FREEBSD6_PWRITE": true,
+ "SYS_FREEBSD6_TRUNCATE": true,
+ "SYS_FREMOVEXATTR": true,
+ "SYS_FSCTL": true,
+ "SYS_FSETATTRLIST": true,
+ "SYS_FSETXATTR": true,
+ "SYS_FSGETPATH": true,
+ "SYS_FSTAT": true,
+ "SYS_FSTAT64": true,
+ "SYS_FSTAT64_EXTENDED": true,
+ "SYS_FSTATAT": true,
+ "SYS_FSTATAT64": true,
+ "SYS_FSTATFS": true,
+ "SYS_FSTATFS64": true,
+ "SYS_FSTATV": true,
+ "SYS_FSTATVFS1": true,
+ "SYS_FSTAT_EXTENDED": true,
+ "SYS_FSYNC": true,
+ "SYS_FSYNC_NOCANCEL": true,
+ "SYS_FSYNC_RANGE": true,
+ "SYS_FTIME": true,
+ "SYS_FTRUNCATE": true,
+ "SYS_FTRUNCATE64": true,
+ "SYS_FUTEX": true,
+ "SYS_FUTIMENS": true,
+ "SYS_FUTIMES": true,
+ "SYS_FUTIMESAT": true,
+ "SYS_GETATTRLIST": true,
+ "SYS_GETAUDIT": true,
+ "SYS_GETAUDIT_ADDR": true,
+ "SYS_GETAUID": true,
+ "SYS_GETCONTEXT": true,
+ "SYS_GETCPU": true,
+ "SYS_GETCWD": true,
+ "SYS_GETDENTS": true,
+ "SYS_GETDENTS64": true,
+ "SYS_GETDIRENTRIES": true,
+ "SYS_GETDIRENTRIES64": true,
+ "SYS_GETDIRENTRIESATTR": true,
+ "SYS_GETDTABLECOUNT": true,
+ "SYS_GETDTABLESIZE": true,
+ "SYS_GETEGID": true,
+ "SYS_GETEGID32": true,
+ "SYS_GETEUID": true,
+ "SYS_GETEUID32": true,
+ "SYS_GETFH": true,
+ "SYS_GETFSSTAT": true,
+ "SYS_GETFSSTAT64": true,
+ "SYS_GETGID": true,
+ "SYS_GETGID32": true,
+ "SYS_GETGROUPS": true,
+ "SYS_GETGROUPS32": true,
+ "SYS_GETHOSTUUID": true,
+ "SYS_GETITIMER": true,
+ "SYS_GETLCID": true,
+ "SYS_GETLOGIN": true,
+ "SYS_GETLOGINCLASS": true,
+ "SYS_GETPEERNAME": true,
+ "SYS_GETPGID": true,
+ "SYS_GETPGRP": true,
+ "SYS_GETPID": true,
+ "SYS_GETPMSG": true,
+ "SYS_GETPPID": true,
+ "SYS_GETPRIORITY": true,
+ "SYS_GETRESGID": true,
+ "SYS_GETRESGID32": true,
+ "SYS_GETRESUID": true,
+ "SYS_GETRESUID32": true,
+ "SYS_GETRLIMIT": true,
+ "SYS_GETRTABLE": true,
+ "SYS_GETRUSAGE": true,
+ "SYS_GETSGROUPS": true,
+ "SYS_GETSID": true,
+ "SYS_GETSOCKNAME": true,
+ "SYS_GETSOCKOPT": true,
+ "SYS_GETTHRID": true,
+ "SYS_GETTID": true,
+ "SYS_GETTIMEOFDAY": true,
+ "SYS_GETUID": true,
+ "SYS_GETUID32": true,
+ "SYS_GETVFSSTAT": true,
+ "SYS_GETWGROUPS": true,
+ "SYS_GETXATTR": true,
+ "SYS_GET_KERNEL_SYMS": true,
+ "SYS_GET_MEMPOLICY": true,
+ "SYS_GET_ROBUST_LIST": true,
+ "SYS_GET_THREAD_AREA": true,
+ "SYS_GTTY": true,
+ "SYS_IDENTITYSVC": true,
+ "SYS_IDLE": true,
+ "SYS_INITGROUPS": true,
+ "SYS_INIT_MODULE": true,
+ "SYS_INOTIFY_ADD_WATCH": true,
+ "SYS_INOTIFY_INIT": true,
+ "SYS_INOTIFY_INIT1": true,
+ "SYS_INOTIFY_RM_WATCH": true,
+ "SYS_IOCTL": true,
+ "SYS_IOPERM": true,
+ "SYS_IOPL": true,
+ "SYS_IOPOLICYSYS": true,
+ "SYS_IOPRIO_GET": true,
+ "SYS_IOPRIO_SET": true,
+ "SYS_IO_CANCEL": true,
+ "SYS_IO_DESTROY": true,
+ "SYS_IO_GETEVENTS": true,
+ "SYS_IO_SETUP": true,
+ "SYS_IO_SUBMIT": true,
+ "SYS_IPC": true,
+ "SYS_ISSETUGID": true,
+ "SYS_JAIL": true,
+ "SYS_JAIL_ATTACH": true,
+ "SYS_JAIL_GET": true,
+ "SYS_JAIL_REMOVE": true,
+ "SYS_JAIL_SET": true,
+ "SYS_KDEBUG_TRACE": true,
+ "SYS_KENV": true,
+ "SYS_KEVENT": true,
+ "SYS_KEVENT64": true,
+ "SYS_KEXEC_LOAD": true,
+ "SYS_KEYCTL": true,
+ "SYS_KILL": true,
+ "SYS_KLDFIND": true,
+ "SYS_KLDFIRSTMOD": true,
+ "SYS_KLDLOAD": true,
+ "SYS_KLDNEXT": true,
+ "SYS_KLDSTAT": true,
+ "SYS_KLDSYM": true,
+ "SYS_KLDUNLOAD": true,
+ "SYS_KLDUNLOADF": true,
+ "SYS_KQUEUE": true,
+ "SYS_KQUEUE1": true,
+ "SYS_KTIMER_CREATE": true,
+ "SYS_KTIMER_DELETE": true,
+ "SYS_KTIMER_GETOVERRUN": true,
+ "SYS_KTIMER_GETTIME": true,
+ "SYS_KTIMER_SETTIME": true,
+ "SYS_KTRACE": true,
+ "SYS_LCHFLAGS": true,
+ "SYS_LCHMOD": true,
+ "SYS_LCHOWN": true,
+ "SYS_LCHOWN32": true,
+ "SYS_LGETFH": true,
+ "SYS_LGETXATTR": true,
+ "SYS_LINK": true,
+ "SYS_LINKAT": true,
+ "SYS_LIO_LISTIO": true,
+ "SYS_LISTEN": true,
+ "SYS_LISTXATTR": true,
+ "SYS_LLISTXATTR": true,
+ "SYS_LOCK": true,
+ "SYS_LOOKUP_DCOOKIE": true,
+ "SYS_LPATHCONF": true,
+ "SYS_LREMOVEXATTR": true,
+ "SYS_LSEEK": true,
+ "SYS_LSETXATTR": true,
+ "SYS_LSTAT": true,
+ "SYS_LSTAT64": true,
+ "SYS_LSTAT64_EXTENDED": true,
+ "SYS_LSTATV": true,
+ "SYS_LSTAT_EXTENDED": true,
+ "SYS_LUTIMES": true,
+ "SYS_MAC_SYSCALL": true,
+ "SYS_MADVISE": true,
+ "SYS_MADVISE1": true,
+ "SYS_MAXSYSCALL": true,
+ "SYS_MBIND": true,
+ "SYS_MIGRATE_PAGES": true,
+ "SYS_MINCORE": true,
+ "SYS_MINHERIT": true,
+ "SYS_MKCOMPLEX": true,
+ "SYS_MKDIR": true,
+ "SYS_MKDIRAT": true,
+ "SYS_MKDIR_EXTENDED": true,
+ "SYS_MKFIFO": true,
+ "SYS_MKFIFOAT": true,
+ "SYS_MKFIFO_EXTENDED": true,
+ "SYS_MKNOD": true,
+ "SYS_MKNODAT": true,
+ "SYS_MLOCK": true,
+ "SYS_MLOCKALL": true,
+ "SYS_MMAP": true,
+ "SYS_MMAP2": true,
+ "SYS_MODCTL": true,
+ "SYS_MODFIND": true,
+ "SYS_MODFNEXT": true,
+ "SYS_MODIFY_LDT": true,
+ "SYS_MODNEXT": true,
+ "SYS_MODSTAT": true,
+ "SYS_MODWATCH": true,
+ "SYS_MOUNT": true,
+ "SYS_MOVE_PAGES": true,
+ "SYS_MPROTECT": true,
+ "SYS_MPX": true,
+ "SYS_MQUERY": true,
+ "SYS_MQ_GETSETATTR": true,
+ "SYS_MQ_NOTIFY": true,
+ "SYS_MQ_OPEN": true,
+ "SYS_MQ_TIMEDRECEIVE": true,
+ "SYS_MQ_TIMEDSEND": true,
+ "SYS_MQ_UNLINK": true,
+ "SYS_MREMAP": true,
+ "SYS_MSGCTL": true,
+ "SYS_MSGGET": true,
+ "SYS_MSGRCV": true,
+ "SYS_MSGRCV_NOCANCEL": true,
+ "SYS_MSGSND": true,
+ "SYS_MSGSND_NOCANCEL": true,
+ "SYS_MSGSYS": true,
+ "SYS_MSYNC": true,
+ "SYS_MSYNC_NOCANCEL": true,
+ "SYS_MUNLOCK": true,
+ "SYS_MUNLOCKALL": true,
+ "SYS_MUNMAP": true,
+ "SYS_NAME_TO_HANDLE_AT": true,
+ "SYS_NANOSLEEP": true,
+ "SYS_NEWFSTATAT": true,
+ "SYS_NFSCLNT": true,
+ "SYS_NFSSERVCTL": true,
+ "SYS_NFSSVC": true,
+ "SYS_NFSTAT": true,
+ "SYS_NICE": true,
+ "SYS_NLSTAT": true,
+ "SYS_NMOUNT": true,
+ "SYS_NSTAT": true,
+ "SYS_NTP_ADJTIME": true,
+ "SYS_NTP_GETTIME": true,
+ "SYS_OABI_SYSCALL_BASE": true,
+ "SYS_OBREAK": true,
+ "SYS_OLDFSTAT": true,
+ "SYS_OLDLSTAT": true,
+ "SYS_OLDOLDUNAME": true,
+ "SYS_OLDSTAT": true,
+ "SYS_OLDUNAME": true,
+ "SYS_OPEN": true,
+ "SYS_OPENAT": true,
+ "SYS_OPENBSD_POLL": true,
+ "SYS_OPEN_BY_HANDLE_AT": true,
+ "SYS_OPEN_EXTENDED": true,
+ "SYS_OPEN_NOCANCEL": true,
+ "SYS_OVADVISE": true,
+ "SYS_PACCEPT": true,
+ "SYS_PATHCONF": true,
+ "SYS_PAUSE": true,
+ "SYS_PCICONFIG_IOBASE": true,
+ "SYS_PCICONFIG_READ": true,
+ "SYS_PCICONFIG_WRITE": true,
+ "SYS_PDFORK": true,
+ "SYS_PDGETPID": true,
+ "SYS_PDKILL": true,
+ "SYS_PERF_EVENT_OPEN": true,
+ "SYS_PERSONALITY": true,
+ "SYS_PID_HIBERNATE": true,
+ "SYS_PID_RESUME": true,
+ "SYS_PID_SHUTDOWN_SOCKETS": true,
+ "SYS_PID_SUSPEND": true,
+ "SYS_PIPE": true,
+ "SYS_PIPE2": true,
+ "SYS_PIVOT_ROOT": true,
+ "SYS_PMC_CONTROL": true,
+ "SYS_PMC_GET_INFO": true,
+ "SYS_POLL": true,
+ "SYS_POLLTS": true,
+ "SYS_POLL_NOCANCEL": true,
+ "SYS_POSIX_FADVISE": true,
+ "SYS_POSIX_FALLOCATE": true,
+ "SYS_POSIX_OPENPT": true,
+ "SYS_POSIX_SPAWN": true,
+ "SYS_PPOLL": true,
+ "SYS_PRCTL": true,
+ "SYS_PREAD": true,
+ "SYS_PREAD64": true,
+ "SYS_PREADV": true,
+ "SYS_PREAD_NOCANCEL": true,
+ "SYS_PRLIMIT64": true,
+ "SYS_PROCCTL": true,
+ "SYS_PROCESS_POLICY": true,
+ "SYS_PROCESS_VM_READV": true,
+ "SYS_PROCESS_VM_WRITEV": true,
+ "SYS_PROC_INFO": true,
+ "SYS_PROF": true,
+ "SYS_PROFIL": true,
+ "SYS_PSELECT": true,
+ "SYS_PSELECT6": true,
+ "SYS_PSET_ASSIGN": true,
+ "SYS_PSET_CREATE": true,
+ "SYS_PSET_DESTROY": true,
+ "SYS_PSYNCH_CVBROAD": true,
+ "SYS_PSYNCH_CVCLRPREPOST": true,
+ "SYS_PSYNCH_CVSIGNAL": true,
+ "SYS_PSYNCH_CVWAIT": true,
+ "SYS_PSYNCH_MUTEXDROP": true,
+ "SYS_PSYNCH_MUTEXWAIT": true,
+ "SYS_PSYNCH_RW_DOWNGRADE": true,
+ "SYS_PSYNCH_RW_LONGRDLOCK": true,
+ "SYS_PSYNCH_RW_RDLOCK": true,
+ "SYS_PSYNCH_RW_UNLOCK": true,
+ "SYS_PSYNCH_RW_UNLOCK2": true,
+ "SYS_PSYNCH_RW_UPGRADE": true,
+ "SYS_PSYNCH_RW_WRLOCK": true,
+ "SYS_PSYNCH_RW_YIELDWRLOCK": true,
+ "SYS_PTRACE": true,
+ "SYS_PUTPMSG": true,
+ "SYS_PWRITE": true,
+ "SYS_PWRITE64": true,
+ "SYS_PWRITEV": true,
+ "SYS_PWRITE_NOCANCEL": true,
+ "SYS_QUERY_MODULE": true,
+ "SYS_QUOTACTL": true,
+ "SYS_RASCTL": true,
+ "SYS_RCTL_ADD_RULE": true,
+ "SYS_RCTL_GET_LIMITS": true,
+ "SYS_RCTL_GET_RACCT": true,
+ "SYS_RCTL_GET_RULES": true,
+ "SYS_RCTL_REMOVE_RULE": true,
+ "SYS_READ": true,
+ "SYS_READAHEAD": true,
+ "SYS_READDIR": true,
+ "SYS_READLINK": true,
+ "SYS_READLINKAT": true,
+ "SYS_READV": true,
+ "SYS_READV_NOCANCEL": true,
+ "SYS_READ_NOCANCEL": true,
+ "SYS_REBOOT": true,
+ "SYS_RECV": true,
+ "SYS_RECVFROM": true,
+ "SYS_RECVFROM_NOCANCEL": true,
+ "SYS_RECVMMSG": true,
+ "SYS_RECVMSG": true,
+ "SYS_RECVMSG_NOCANCEL": true,
+ "SYS_REMAP_FILE_PAGES": true,
+ "SYS_REMOVEXATTR": true,
+ "SYS_RENAME": true,
+ "SYS_RENAMEAT": true,
+ "SYS_REQUEST_KEY": true,
+ "SYS_RESTART_SYSCALL": true,
+ "SYS_REVOKE": true,
+ "SYS_RFORK": true,
+ "SYS_RMDIR": true,
+ "SYS_RTPRIO": true,
+ "SYS_RTPRIO_THREAD": true,
+ "SYS_RT_SIGACTION": true,
+ "SYS_RT_SIGPENDING": true,
+ "SYS_RT_SIGPROCMASK": true,
+ "SYS_RT_SIGQUEUEINFO": true,
+ "SYS_RT_SIGRETURN": true,
+ "SYS_RT_SIGSUSPEND": true,
+ "SYS_RT_SIGTIMEDWAIT": true,
+ "SYS_RT_TGSIGQUEUEINFO": true,
+ "SYS_SBRK": true,
+ "SYS_SCHED_GETAFFINITY": true,
+ "SYS_SCHED_GETPARAM": true,
+ "SYS_SCHED_GETSCHEDULER": true,
+ "SYS_SCHED_GET_PRIORITY_MAX": true,
+ "SYS_SCHED_GET_PRIORITY_MIN": true,
+ "SYS_SCHED_RR_GET_INTERVAL": true,
+ "SYS_SCHED_SETAFFINITY": true,
+ "SYS_SCHED_SETPARAM": true,
+ "SYS_SCHED_SETSCHEDULER": true,
+ "SYS_SCHED_YIELD": true,
+ "SYS_SCTP_GENERIC_RECVMSG": true,
+ "SYS_SCTP_GENERIC_SENDMSG": true,
+ "SYS_SCTP_GENERIC_SENDMSG_IOV": true,
+ "SYS_SCTP_PEELOFF": true,
+ "SYS_SEARCHFS": true,
+ "SYS_SECURITY": true,
+ "SYS_SELECT": true,
+ "SYS_SELECT_NOCANCEL": true,
+ "SYS_SEMCONFIG": true,
+ "SYS_SEMCTL": true,
+ "SYS_SEMGET": true,
+ "SYS_SEMOP": true,
+ "SYS_SEMSYS": true,
+ "SYS_SEMTIMEDOP": true,
+ "SYS_SEM_CLOSE": true,
+ "SYS_SEM_DESTROY": true,
+ "SYS_SEM_GETVALUE": true,
+ "SYS_SEM_INIT": true,
+ "SYS_SEM_OPEN": true,
+ "SYS_SEM_POST": true,
+ "SYS_SEM_TRYWAIT": true,
+ "SYS_SEM_UNLINK": true,
+ "SYS_SEM_WAIT": true,
+ "SYS_SEM_WAIT_NOCANCEL": true,
+ "SYS_SEND": true,
+ "SYS_SENDFILE": true,
+ "SYS_SENDFILE64": true,
+ "SYS_SENDMMSG": true,
+ "SYS_SENDMSG": true,
+ "SYS_SENDMSG_NOCANCEL": true,
+ "SYS_SENDTO": true,
+ "SYS_SENDTO_NOCANCEL": true,
+ "SYS_SETATTRLIST": true,
+ "SYS_SETAUDIT": true,
+ "SYS_SETAUDIT_ADDR": true,
+ "SYS_SETAUID": true,
+ "SYS_SETCONTEXT": true,
+ "SYS_SETDOMAINNAME": true,
+ "SYS_SETEGID": true,
+ "SYS_SETEUID": true,
+ "SYS_SETFIB": true,
+ "SYS_SETFSGID": true,
+ "SYS_SETFSGID32": true,
+ "SYS_SETFSUID": true,
+ "SYS_SETFSUID32": true,
+ "SYS_SETGID": true,
+ "SYS_SETGID32": true,
+ "SYS_SETGROUPS": true,
+ "SYS_SETGROUPS32": true,
+ "SYS_SETHOSTNAME": true,
+ "SYS_SETITIMER": true,
+ "SYS_SETLCID": true,
+ "SYS_SETLOGIN": true,
+ "SYS_SETLOGINCLASS": true,
+ "SYS_SETNS": true,
+ "SYS_SETPGID": true,
+ "SYS_SETPRIORITY": true,
+ "SYS_SETPRIVEXEC": true,
+ "SYS_SETREGID": true,
+ "SYS_SETREGID32": true,
+ "SYS_SETRESGID": true,
+ "SYS_SETRESGID32": true,
+ "SYS_SETRESUID": true,
+ "SYS_SETRESUID32": true,
+ "SYS_SETREUID": true,
+ "SYS_SETREUID32": true,
+ "SYS_SETRLIMIT": true,
+ "SYS_SETRTABLE": true,
+ "SYS_SETSGROUPS": true,
+ "SYS_SETSID": true,
+ "SYS_SETSOCKOPT": true,
+ "SYS_SETTID": true,
+ "SYS_SETTID_WITH_PID": true,
+ "SYS_SETTIMEOFDAY": true,
+ "SYS_SETUID": true,
+ "SYS_SETUID32": true,
+ "SYS_SETWGROUPS": true,
+ "SYS_SETXATTR": true,
+ "SYS_SET_MEMPOLICY": true,
+ "SYS_SET_ROBUST_LIST": true,
+ "SYS_SET_THREAD_AREA": true,
+ "SYS_SET_TID_ADDRESS": true,
+ "SYS_SGETMASK": true,
+ "SYS_SHARED_REGION_CHECK_NP": true,
+ "SYS_SHARED_REGION_MAP_AND_SLIDE_NP": true,
+ "SYS_SHMAT": true,
+ "SYS_SHMCTL": true,
+ "SYS_SHMDT": true,
+ "SYS_SHMGET": true,
+ "SYS_SHMSYS": true,
+ "SYS_SHM_OPEN": true,
+ "SYS_SHM_UNLINK": true,
+ "SYS_SHUTDOWN": true,
+ "SYS_SIGACTION": true,
+ "SYS_SIGALTSTACK": true,
+ "SYS_SIGNAL": true,
+ "SYS_SIGNALFD": true,
+ "SYS_SIGNALFD4": true,
+ "SYS_SIGPENDING": true,
+ "SYS_SIGPROCMASK": true,
+ "SYS_SIGQUEUE": true,
+ "SYS_SIGQUEUEINFO": true,
+ "SYS_SIGRETURN": true,
+ "SYS_SIGSUSPEND": true,
+ "SYS_SIGSUSPEND_NOCANCEL": true,
+ "SYS_SIGTIMEDWAIT": true,
+ "SYS_SIGWAIT": true,
+ "SYS_SIGWAITINFO": true,
+ "SYS_SOCKET": true,
+ "SYS_SOCKETCALL": true,
+ "SYS_SOCKETPAIR": true,
+ "SYS_SPLICE": true,
+ "SYS_SSETMASK": true,
+ "SYS_SSTK": true,
+ "SYS_STACK_SNAPSHOT": true,
+ "SYS_STAT": true,
+ "SYS_STAT64": true,
+ "SYS_STAT64_EXTENDED": true,
+ "SYS_STATFS": true,
+ "SYS_STATFS64": true,
+ "SYS_STATV": true,
+ "SYS_STATVFS1": true,
+ "SYS_STAT_EXTENDED": true,
+ "SYS_STIME": true,
+ "SYS_STTY": true,
+ "SYS_SWAPCONTEXT": true,
+ "SYS_SWAPCTL": true,
+ "SYS_SWAPOFF": true,
+ "SYS_SWAPON": true,
+ "SYS_SYMLINK": true,
+ "SYS_SYMLINKAT": true,
+ "SYS_SYNC": true,
+ "SYS_SYNCFS": true,
+ "SYS_SYNC_FILE_RANGE": true,
+ "SYS_SYSARCH": true,
+ "SYS_SYSCALL": true,
+ "SYS_SYSCALL_BASE": true,
+ "SYS_SYSFS": true,
+ "SYS_SYSINFO": true,
+ "SYS_SYSLOG": true,
+ "SYS_TEE": true,
+ "SYS_TGKILL": true,
+ "SYS_THREAD_SELFID": true,
+ "SYS_THR_CREATE": true,
+ "SYS_THR_EXIT": true,
+ "SYS_THR_KILL": true,
+ "SYS_THR_KILL2": true,
+ "SYS_THR_NEW": true,
+ "SYS_THR_SELF": true,
+ "SYS_THR_SET_NAME": true,
+ "SYS_THR_SUSPEND": true,
+ "SYS_THR_WAKE": true,
+ "SYS_TIME": true,
+ "SYS_TIMERFD_CREATE": true,
+ "SYS_TIMERFD_GETTIME": true,
+ "SYS_TIMERFD_SETTIME": true,
+ "SYS_TIMER_CREATE": true,
+ "SYS_TIMER_DELETE": true,
+ "SYS_TIMER_GETOVERRUN": true,
+ "SYS_TIMER_GETTIME": true,
+ "SYS_TIMER_SETTIME": true,
+ "SYS_TIMES": true,
+ "SYS_TKILL": true,
+ "SYS_TRUNCATE": true,
+ "SYS_TRUNCATE64": true,
+ "SYS_TUXCALL": true,
+ "SYS_UGETRLIMIT": true,
+ "SYS_ULIMIT": true,
+ "SYS_UMASK": true,
+ "SYS_UMASK_EXTENDED": true,
+ "SYS_UMOUNT": true,
+ "SYS_UMOUNT2": true,
+ "SYS_UNAME": true,
+ "SYS_UNDELETE": true,
+ "SYS_UNLINK": true,
+ "SYS_UNLINKAT": true,
+ "SYS_UNMOUNT": true,
+ "SYS_UNSHARE": true,
+ "SYS_USELIB": true,
+ "SYS_USTAT": true,
+ "SYS_UTIME": true,
+ "SYS_UTIMENSAT": true,
+ "SYS_UTIMES": true,
+ "SYS_UTRACE": true,
+ "SYS_UUIDGEN": true,
+ "SYS_VADVISE": true,
+ "SYS_VFORK": true,
+ "SYS_VHANGUP": true,
+ "SYS_VM86": true,
+ "SYS_VM86OLD": true,
+ "SYS_VMSPLICE": true,
+ "SYS_VM_PRESSURE_MONITOR": true,
+ "SYS_VSERVER": true,
+ "SYS_WAIT4": true,
+ "SYS_WAIT4_NOCANCEL": true,
+ "SYS_WAIT6": true,
+ "SYS_WAITEVENT": true,
+ "SYS_WAITID": true,
+ "SYS_WAITID_NOCANCEL": true,
+ "SYS_WAITPID": true,
+ "SYS_WATCHEVENT": true,
+ "SYS_WORKQ_KERNRETURN": true,
+ "SYS_WORKQ_OPEN": true,
+ "SYS_WRITE": true,
+ "SYS_WRITEV": true,
+ "SYS_WRITEV_NOCANCEL": true,
+ "SYS_WRITE_NOCANCEL": true,
+ "SYS_YIELD": true,
+ "SYS__LLSEEK": true,
+ "SYS__LWP_CONTINUE": true,
+ "SYS__LWP_CREATE": true,
+ "SYS__LWP_CTL": true,
+ "SYS__LWP_DETACH": true,
+ "SYS__LWP_EXIT": true,
+ "SYS__LWP_GETNAME": true,
+ "SYS__LWP_GETPRIVATE": true,
+ "SYS__LWP_KILL": true,
+ "SYS__LWP_PARK": true,
+ "SYS__LWP_SELF": true,
+ "SYS__LWP_SETNAME": true,
+ "SYS__LWP_SETPRIVATE": true,
+ "SYS__LWP_SUSPEND": true,
+ "SYS__LWP_UNPARK": true,
+ "SYS__LWP_UNPARK_ALL": true,
+ "SYS__LWP_WAIT": true,
+ "SYS__LWP_WAKEUP": true,
+ "SYS__NEWSELECT": true,
+ "SYS__PSET_BIND": true,
+ "SYS__SCHED_GETAFFINITY": true,
+ "SYS__SCHED_GETPARAM": true,
+ "SYS__SCHED_SETAFFINITY": true,
+ "SYS__SCHED_SETPARAM": true,
+ "SYS__SYSCTL": true,
+ "SYS__UMTX_LOCK": true,
+ "SYS__UMTX_OP": true,
+ "SYS__UMTX_UNLOCK": true,
+ "SYS___ACL_ACLCHECK_FD": true,
+ "SYS___ACL_ACLCHECK_FILE": true,
+ "SYS___ACL_ACLCHECK_LINK": true,
+ "SYS___ACL_DELETE_FD": true,
+ "SYS___ACL_DELETE_FILE": true,
+ "SYS___ACL_DELETE_LINK": true,
+ "SYS___ACL_GET_FD": true,
+ "SYS___ACL_GET_FILE": true,
+ "SYS___ACL_GET_LINK": true,
+ "SYS___ACL_SET_FD": true,
+ "SYS___ACL_SET_FILE": true,
+ "SYS___ACL_SET_LINK": true,
+ "SYS___CLONE": true,
+ "SYS___DISABLE_THREADSIGNAL": true,
+ "SYS___GETCWD": true,
+ "SYS___GETLOGIN": true,
+ "SYS___GET_TCB": true,
+ "SYS___MAC_EXECVE": true,
+ "SYS___MAC_GETFSSTAT": true,
+ "SYS___MAC_GET_FD": true,
+ "SYS___MAC_GET_FILE": true,
+ "SYS___MAC_GET_LCID": true,
+ "SYS___MAC_GET_LCTX": true,
+ "SYS___MAC_GET_LINK": true,
+ "SYS___MAC_GET_MOUNT": true,
+ "SYS___MAC_GET_PID": true,
+ "SYS___MAC_GET_PROC": true,
+ "SYS___MAC_MOUNT": true,
+ "SYS___MAC_SET_FD": true,
+ "SYS___MAC_SET_FILE": true,
+ "SYS___MAC_SET_LCTX": true,
+ "SYS___MAC_SET_LINK": true,
+ "SYS___MAC_SET_PROC": true,
+ "SYS___MAC_SYSCALL": true,
+ "SYS___OLD_SEMWAIT_SIGNAL": true,
+ "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL": true,
+ "SYS___POSIX_CHOWN": true,
+ "SYS___POSIX_FCHOWN": true,
+ "SYS___POSIX_LCHOWN": true,
+ "SYS___POSIX_RENAME": true,
+ "SYS___PTHREAD_CANCELED": true,
+ "SYS___PTHREAD_CHDIR": true,
+ "SYS___PTHREAD_FCHDIR": true,
+ "SYS___PTHREAD_KILL": true,
+ "SYS___PTHREAD_MARKCANCEL": true,
+ "SYS___PTHREAD_SIGMASK": true,
+ "SYS___QUOTACTL": true,
+ "SYS___SEMCTL": true,
+ "SYS___SEMWAIT_SIGNAL": true,
+ "SYS___SEMWAIT_SIGNAL_NOCANCEL": true,
+ "SYS___SETLOGIN": true,
+ "SYS___SETUGID": true,
+ "SYS___SET_TCB": true,
+ "SYS___SIGACTION_SIGTRAMP": true,
+ "SYS___SIGTIMEDWAIT": true,
+ "SYS___SIGWAIT": true,
+ "SYS___SIGWAIT_NOCANCEL": true,
+ "SYS___SYSCTL": true,
+ "SYS___TFORK": true,
+ "SYS___THREXIT": true,
+ "SYS___THRSIGDIVERT": true,
+ "SYS___THRSLEEP": true,
+ "SYS___THRWAKEUP": true,
+ "S_ARCH1": true,
+ "S_ARCH2": true,
+ "S_BLKSIZE": true,
+ "S_IEXEC": true,
+ "S_IFBLK": true,
+ "S_IFCHR": true,
+ "S_IFDIR": true,
+ "S_IFIFO": true,
+ "S_IFLNK": true,
+ "S_IFMT": true,
+ "S_IFREG": true,
+ "S_IFSOCK": true,
+ "S_IFWHT": true,
+ "S_IREAD": true,
+ "S_IRGRP": true,
+ "S_IROTH": true,
+ "S_IRUSR": true,
+ "S_IRWXG": true,
+ "S_IRWXO": true,
+ "S_IRWXU": true,
+ "S_ISGID": true,
+ "S_ISTXT": true,
+ "S_ISUID": true,
+ "S_ISVTX": true,
+ "S_IWGRP": true,
+ "S_IWOTH": true,
+ "S_IWRITE": true,
+ "S_IWUSR": true,
+ "S_IXGRP": true,
+ "S_IXOTH": true,
+ "S_IXUSR": true,
+ "S_LOGIN_SET": true,
+ "SecurityAttributes": true,
+ "Seek": true,
+ "Select": true,
+ "Sendfile": true,
+ "Sendmsg": true,
+ "SendmsgN": true,
+ "Sendto": true,
+ "Servent": true,
+ "SetBpf": true,
+ "SetBpfBuflen": true,
+ "SetBpfDatalink": true,
+ "SetBpfHeadercmpl": true,
+ "SetBpfImmediate": true,
+ "SetBpfInterface": true,
+ "SetBpfPromisc": true,
+ "SetBpfTimeout": true,
+ "SetCurrentDirectory": true,
+ "SetEndOfFile": true,
+ "SetEnvironmentVariable": true,
+ "SetFileAttributes": true,
+ "SetFileCompletionNotificationModes": true,
+ "SetFilePointer": true,
+ "SetFileTime": true,
+ "SetHandleInformation": true,
+ "SetKevent": true,
+ "SetLsfPromisc": true,
+ "SetNonblock": true,
+ "Setdomainname": true,
+ "Setegid": true,
+ "Setenv": true,
+ "Seteuid": true,
+ "Setfsgid": true,
+ "Setfsuid": true,
+ "Setgid": true,
+ "Setgroups": true,
+ "Sethostname": true,
+ "Setlogin": true,
+ "Setpgid": true,
+ "Setpriority": true,
+ "Setprivexec": true,
+ "Setregid": true,
+ "Setresgid": true,
+ "Setresuid": true,
+ "Setreuid": true,
+ "Setrlimit": true,
+ "Setsid": true,
+ "Setsockopt": true,
+ "SetsockoptByte": true,
+ "SetsockoptICMPv6Filter": true,
+ "SetsockoptIPMreq": true,
+ "SetsockoptIPMreqn": true,
+ "SetsockoptIPv6Mreq": true,
+ "SetsockoptInet4Addr": true,
+ "SetsockoptInt": true,
+ "SetsockoptLinger": true,
+ "SetsockoptString": true,
+ "SetsockoptTimeval": true,
+ "Settimeofday": true,
+ "Setuid": true,
+ "Setxattr": true,
+ "Shutdown": true,
+ "SidTypeAlias": true,
+ "SidTypeComputer": true,
+ "SidTypeDeletedAccount": true,
+ "SidTypeDomain": true,
+ "SidTypeGroup": true,
+ "SidTypeInvalid": true,
+ "SidTypeLabel": true,
+ "SidTypeUnknown": true,
+ "SidTypeUser": true,
+ "SidTypeWellKnownGroup": true,
+ "Signal": true,
+ "SizeofBpfHdr": true,
+ "SizeofBpfInsn": true,
+ "SizeofBpfProgram": true,
+ "SizeofBpfStat": true,
+ "SizeofBpfVersion": true,
+ "SizeofBpfZbuf": true,
+ "SizeofBpfZbufHeader": true,
+ "SizeofCmsghdr": true,
+ "SizeofICMPv6Filter": true,
+ "SizeofIPMreq": true,
+ "SizeofIPMreqn": true,
+ "SizeofIPv6MTUInfo": true,
+ "SizeofIPv6Mreq": true,
+ "SizeofIfAddrmsg": true,
+ "SizeofIfAnnounceMsghdr": true,
+ "SizeofIfData": true,
+ "SizeofIfInfomsg": true,
+ "SizeofIfMsghdr": true,
+ "SizeofIfaMsghdr": true,
+ "SizeofIfmaMsghdr": true,
+ "SizeofIfmaMsghdr2": true,
+ "SizeofInet4Pktinfo": true,
+ "SizeofInet6Pktinfo": true,
+ "SizeofInotifyEvent": true,
+ "SizeofLinger": true,
+ "SizeofMsghdr": true,
+ "SizeofNlAttr": true,
+ "SizeofNlMsgerr": true,
+ "SizeofNlMsghdr": true,
+ "SizeofRtAttr": true,
+ "SizeofRtGenmsg": true,
+ "SizeofRtMetrics": true,
+ "SizeofRtMsg": true,
+ "SizeofRtMsghdr": true,
+ "SizeofRtNexthop": true,
+ "SizeofSockFilter": true,
+ "SizeofSockFprog": true,
+ "SizeofSockaddrAny": true,
+ "SizeofSockaddrDatalink": true,
+ "SizeofSockaddrInet4": true,
+ "SizeofSockaddrInet6": true,
+ "SizeofSockaddrLinklayer": true,
+ "SizeofSockaddrNetlink": true,
+ "SizeofSockaddrUnix": true,
+ "SizeofTCPInfo": true,
+ "SizeofUcred": true,
+ "SlicePtrFromStrings": true,
+ "SockFilter": true,
+ "SockFprog": true,
+ "SockaddrDatalink": true,
+ "SockaddrGen": true,
+ "SockaddrInet4": true,
+ "SockaddrInet6": true,
+ "SockaddrLinklayer": true,
+ "SockaddrNetlink": true,
+ "SockaddrUnix": true,
+ "Socket": true,
+ "SocketControlMessage": true,
+ "SocketDisableIPv6": true,
+ "Socketpair": true,
+ "Splice": true,
+ "StartProcess": true,
+ "StartupInfo": true,
+ "Stat": true,
+ "Stat_t": true,
+ "Statfs": true,
+ "Statfs_t": true,
+ "Stderr": true,
+ "Stdin": true,
+ "Stdout": true,
+ "StringBytePtr": true,
+ "StringByteSlice": true,
+ "StringSlicePtr": true,
+ "StringToSid": true,
+ "StringToUTF16": true,
+ "StringToUTF16Ptr": true,
+ "Symlink": true,
+ "Sync": true,
+ "SyncFileRange": true,
+ "SysProcAttr": true,
+ "SysProcIDMap": true,
+ "Syscall": true,
+ "Syscall12": true,
+ "Syscall15": true,
+ "Syscall18": true,
+ "Syscall6": true,
+ "Syscall9": true,
+ "Sysctl": true,
+ "SysctlUint32": true,
+ "Sysctlnode": true,
+ "Sysinfo": true,
+ "Sysinfo_t": true,
+ "Systemtime": true,
+ "TCGETS": true,
+ "TCIFLUSH": true,
+ "TCIOFLUSH": true,
+ "TCOFLUSH": true,
+ "TCPInfo": true,
+ "TCPKeepalive": true,
+ "TCP_CA_NAME_MAX": true,
+ "TCP_CONGCTL": true,
+ "TCP_CONGESTION": true,
+ "TCP_CONNECTIONTIMEOUT": true,
+ "TCP_CORK": true,
+ "TCP_DEFER_ACCEPT": true,
+ "TCP_INFO": true,
+ "TCP_KEEPALIVE": true,
+ "TCP_KEEPCNT": true,
+ "TCP_KEEPIDLE": true,
+ "TCP_KEEPINIT": true,
+ "TCP_KEEPINTVL": true,
+ "TCP_LINGER2": true,
+ "TCP_MAXBURST": true,
+ "TCP_MAXHLEN": true,
+ "TCP_MAXOLEN": true,
+ "TCP_MAXSEG": true,
+ "TCP_MAXWIN": true,
+ "TCP_MAX_SACK": true,
+ "TCP_MAX_WINSHIFT": true,
+ "TCP_MD5SIG": true,
+ "TCP_MD5SIG_MAXKEYLEN": true,
+ "TCP_MINMSS": true,
+ "TCP_MINMSSOVERLOAD": true,
+ "TCP_MSS": true,
+ "TCP_NODELAY": true,
+ "TCP_NOOPT": true,
+ "TCP_NOPUSH": true,
+ "TCP_NSTATES": true,
+ "TCP_QUICKACK": true,
+ "TCP_RXT_CONNDROPTIME": true,
+ "TCP_RXT_FINDROP": true,
+ "TCP_SACK_ENABLE": true,
+ "TCP_SYNCNT": true,
+ "TCP_VENDOR": true,
+ "TCP_WINDOW_CLAMP": true,
+ "TCSAFLUSH": true,
+ "TCSETS": true,
+ "TF_DISCONNECT": true,
+ "TF_REUSE_SOCKET": true,
+ "TF_USE_DEFAULT_WORKER": true,
+ "TF_USE_KERNEL_APC": true,
+ "TF_USE_SYSTEM_THREAD": true,
+ "TF_WRITE_BEHIND": true,
+ "TH32CS_INHERIT": true,
+ "TH32CS_SNAPALL": true,
+ "TH32CS_SNAPHEAPLIST": true,
+ "TH32CS_SNAPMODULE": true,
+ "TH32CS_SNAPMODULE32": true,
+ "TH32CS_SNAPPROCESS": true,
+ "TH32CS_SNAPTHREAD": true,
+ "TIME_ZONE_ID_DAYLIGHT": true,
+ "TIME_ZONE_ID_STANDARD": true,
+ "TIME_ZONE_ID_UNKNOWN": true,
+ "TIOCCBRK": true,
+ "TIOCCDTR": true,
+ "TIOCCONS": true,
+ "TIOCDCDTIMESTAMP": true,
+ "TIOCDRAIN": true,
+ "TIOCDSIMICROCODE": true,
+ "TIOCEXCL": true,
+ "TIOCEXT": true,
+ "TIOCFLAG_CDTRCTS": true,
+ "TIOCFLAG_CLOCAL": true,
+ "TIOCFLAG_CRTSCTS": true,
+ "TIOCFLAG_MDMBUF": true,
+ "TIOCFLAG_PPS": true,
+ "TIOCFLAG_SOFTCAR": true,
+ "TIOCFLUSH": true,
+ "TIOCGDEV": true,
+ "TIOCGDRAINWAIT": true,
+ "TIOCGETA": true,
+ "TIOCGETD": true,
+ "TIOCGFLAGS": true,
+ "TIOCGICOUNT": true,
+ "TIOCGLCKTRMIOS": true,
+ "TIOCGLINED": true,
+ "TIOCGPGRP": true,
+ "TIOCGPTN": true,
+ "TIOCGQSIZE": true,
+ "TIOCGRANTPT": true,
+ "TIOCGRS485": true,
+ "TIOCGSERIAL": true,
+ "TIOCGSID": true,
+ "TIOCGSIZE": true,
+ "TIOCGSOFTCAR": true,
+ "TIOCGTSTAMP": true,
+ "TIOCGWINSZ": true,
+ "TIOCINQ": true,
+ "TIOCIXOFF": true,
+ "TIOCIXON": true,
+ "TIOCLINUX": true,
+ "TIOCMBIC": true,
+ "TIOCMBIS": true,
+ "TIOCMGDTRWAIT": true,
+ "TIOCMGET": true,
+ "TIOCMIWAIT": true,
+ "TIOCMODG": true,
+ "TIOCMODS": true,
+ "TIOCMSDTRWAIT": true,
+ "TIOCMSET": true,
+ "TIOCM_CAR": true,
+ "TIOCM_CD": true,
+ "TIOCM_CTS": true,
+ "TIOCM_DCD": true,
+ "TIOCM_DSR": true,
+ "TIOCM_DTR": true,
+ "TIOCM_LE": true,
+ "TIOCM_RI": true,
+ "TIOCM_RNG": true,
+ "TIOCM_RTS": true,
+ "TIOCM_SR": true,
+ "TIOCM_ST": true,
+ "TIOCNOTTY": true,
+ "TIOCNXCL": true,
+ "TIOCOUTQ": true,
+ "TIOCPKT": true,
+ "TIOCPKT_DATA": true,
+ "TIOCPKT_DOSTOP": true,
+ "TIOCPKT_FLUSHREAD": true,
+ "TIOCPKT_FLUSHWRITE": true,
+ "TIOCPKT_IOCTL": true,
+ "TIOCPKT_NOSTOP": true,
+ "TIOCPKT_START": true,
+ "TIOCPKT_STOP": true,
+ "TIOCPTMASTER": true,
+ "TIOCPTMGET": true,
+ "TIOCPTSNAME": true,
+ "TIOCPTYGNAME": true,
+ "TIOCPTYGRANT": true,
+ "TIOCPTYUNLK": true,
+ "TIOCRCVFRAME": true,
+ "TIOCREMOTE": true,
+ "TIOCSBRK": true,
+ "TIOCSCONS": true,
+ "TIOCSCTTY": true,
+ "TIOCSDRAINWAIT": true,
+ "TIOCSDTR": true,
+ "TIOCSERCONFIG": true,
+ "TIOCSERGETLSR": true,
+ "TIOCSERGETMULTI": true,
+ "TIOCSERGSTRUCT": true,
+ "TIOCSERGWILD": true,
+ "TIOCSERSETMULTI": true,
+ "TIOCSERSWILD": true,
+ "TIOCSER_TEMT": true,
+ "TIOCSETA": true,
+ "TIOCSETAF": true,
+ "TIOCSETAW": true,
+ "TIOCSETD": true,
+ "TIOCSFLAGS": true,
+ "TIOCSIG": true,
+ "TIOCSLCKTRMIOS": true,
+ "TIOCSLINED": true,
+ "TIOCSPGRP": true,
+ "TIOCSPTLCK": true,
+ "TIOCSQSIZE": true,
+ "TIOCSRS485": true,
+ "TIOCSSERIAL": true,
+ "TIOCSSIZE": true,
+ "TIOCSSOFTCAR": true,
+ "TIOCSTART": true,
+ "TIOCSTAT": true,
+ "TIOCSTI": true,
+ "TIOCSTOP": true,
+ "TIOCSTSTAMP": true,
+ "TIOCSWINSZ": true,
+ "TIOCTIMESTAMP": true,
+ "TIOCUCNTL": true,
+ "TIOCVHANGUP": true,
+ "TIOCXMTFRAME": true,
+ "TOKEN_ADJUST_DEFAULT": true,
+ "TOKEN_ADJUST_GROUPS": true,
+ "TOKEN_ADJUST_PRIVILEGES": true,
+ "TOKEN_ADJUST_SESSIONID": true,
+ "TOKEN_ALL_ACCESS": true,
+ "TOKEN_ASSIGN_PRIMARY": true,
+ "TOKEN_DUPLICATE": true,
+ "TOKEN_EXECUTE": true,
+ "TOKEN_IMPERSONATE": true,
+ "TOKEN_QUERY": true,
+ "TOKEN_QUERY_SOURCE": true,
+ "TOKEN_READ": true,
+ "TOKEN_WRITE": true,
+ "TOSTOP": true,
+ "TRUNCATE_EXISTING": true,
+ "TUNATTACHFILTER": true,
+ "TUNDETACHFILTER": true,
+ "TUNGETFEATURES": true,
+ "TUNGETIFF": true,
+ "TUNGETSNDBUF": true,
+ "TUNGETVNETHDRSZ": true,
+ "TUNSETDEBUG": true,
+ "TUNSETGROUP": true,
+ "TUNSETIFF": true,
+ "TUNSETLINK": true,
+ "TUNSETNOCSUM": true,
+ "TUNSETOFFLOAD": true,
+ "TUNSETOWNER": true,
+ "TUNSETPERSIST": true,
+ "TUNSETSNDBUF": true,
+ "TUNSETTXFILTER": true,
+ "TUNSETVNETHDRSZ": true,
+ "Tee": true,
+ "TerminateProcess": true,
+ "Termios": true,
+ "Tgkill": true,
+ "Time": true,
+ "Time_t": true,
+ "Times": true,
+ "Timespec": true,
+ "TimespecToNsec": true,
+ "Timeval": true,
+ "Timeval32": true,
+ "TimevalToNsec": true,
+ "Timex": true,
+ "Timezoneinformation": true,
+ "Tms": true,
+ "Token": true,
+ "TokenAccessInformation": true,
+ "TokenAuditPolicy": true,
+ "TokenDefaultDacl": true,
+ "TokenElevation": true,
+ "TokenElevationType": true,
+ "TokenGroups": true,
+ "TokenGroupsAndPrivileges": true,
+ "TokenHasRestrictions": true,
+ "TokenImpersonationLevel": true,
+ "TokenIntegrityLevel": true,
+ "TokenLinkedToken": true,
+ "TokenLogonSid": true,
+ "TokenMandatoryPolicy": true,
+ "TokenOrigin": true,
+ "TokenOwner": true,
+ "TokenPrimaryGroup": true,
+ "TokenPrivileges": true,
+ "TokenRestrictedSids": true,
+ "TokenSandBoxInert": true,
+ "TokenSessionId": true,
+ "TokenSessionReference": true,
+ "TokenSource": true,
+ "TokenStatistics": true,
+ "TokenType": true,
+ "TokenUIAccess": true,
+ "TokenUser": true,
+ "TokenVirtualizationAllowed": true,
+ "TokenVirtualizationEnabled": true,
+ "Tokenprimarygroup": true,
+ "Tokenuser": true,
+ "TranslateAccountName": true,
+ "TranslateName": true,
+ "TransmitFile": true,
+ "TransmitFileBuffers": true,
+ "Truncate": true,
+ "UNIX_PATH_MAX": true,
+ "USAGE_MATCH_TYPE_AND": true,
+ "USAGE_MATCH_TYPE_OR": true,
+ "UTF16FromString": true,
+ "UTF16PtrFromString": true,
+ "UTF16ToString": true,
+ "Ucred": true,
+ "Umask": true,
+ "Uname": true,
+ "Undelete": true,
+ "UnixCredentials": true,
+ "UnixRights": true,
+ "Unlink": true,
+ "Unlinkat": true,
+ "UnmapViewOfFile": true,
+ "Unmount": true,
+ "Unsetenv": true,
+ "Unshare": true,
+ "UserInfo10": true,
+ "Ustat": true,
+ "Ustat_t": true,
+ "Utimbuf": true,
+ "Utime": true,
+ "Utimes": true,
+ "UtimesNano": true,
+ "Utsname": true,
+ "VDISCARD": true,
+ "VDSUSP": true,
+ "VEOF": true,
+ "VEOL": true,
+ "VEOL2": true,
+ "VERASE": true,
+ "VERASE2": true,
+ "VINTR": true,
+ "VKILL": true,
+ "VLNEXT": true,
+ "VMIN": true,
+ "VQUIT": true,
+ "VREPRINT": true,
+ "VSTART": true,
+ "VSTATUS": true,
+ "VSTOP": true,
+ "VSUSP": true,
+ "VSWTC": true,
+ "VT0": true,
+ "VT1": true,
+ "VTDLY": true,
+ "VTIME": true,
+ "VWERASE": true,
+ "VirtualLock": true,
+ "VirtualUnlock": true,
+ "WAIT_ABANDONED": true,
+ "WAIT_FAILED": true,
+ "WAIT_OBJECT_0": true,
+ "WAIT_TIMEOUT": true,
+ "WALL": true,
+ "WALLSIG": true,
+ "WALTSIG": true,
+ "WCLONE": true,
+ "WCONTINUED": true,
+ "WCOREFLAG": true,
+ "WEXITED": true,
+ "WLINUXCLONE": true,
+ "WNOHANG": true,
+ "WNOTHREAD": true,
+ "WNOWAIT": true,
+ "WNOZOMBIE": true,
+ "WOPTSCHECKED": true,
+ "WORDSIZE": true,
+ "WSABuf": true,
+ "WSACleanup": true,
+ "WSADESCRIPTION_LEN": true,
+ "WSAData": true,
+ "WSAEACCES": true,
+ "WSAECONNABORTED": true,
+ "WSAECONNRESET": true,
+ "WSAEnumProtocols": true,
+ "WSAID_CONNECTEX": true,
+ "WSAIoctl": true,
+ "WSAPROTOCOL_LEN": true,
+ "WSAProtocolChain": true,
+ "WSAProtocolInfo": true,
+ "WSARecv": true,
+ "WSARecvFrom": true,
+ "WSASYS_STATUS_LEN": true,
+ "WSASend": true,
+ "WSASendTo": true,
+ "WSASendto": true,
+ "WSAStartup": true,
+ "WSTOPPED": true,
+ "WTRAPPED": true,
+ "WUNTRACED": true,
+ "Wait4": true,
+ "WaitForSingleObject": true,
+ "WaitStatus": true,
+ "Win32FileAttributeData": true,
+ "Win32finddata": true,
+ "Write": true,
+ "WriteConsole": true,
+ "WriteFile": true,
+ "X509_ASN_ENCODING": true,
+ "XCASE": true,
+ "XP1_CONNECTIONLESS": true,
+ "XP1_CONNECT_DATA": true,
+ "XP1_DISCONNECT_DATA": true,
+ "XP1_EXPEDITED_DATA": true,
+ "XP1_GRACEFUL_CLOSE": true,
+ "XP1_GUARANTEED_DELIVERY": true,
+ "XP1_GUARANTEED_ORDER": true,
+ "XP1_IFS_HANDLES": true,
+ "XP1_MESSAGE_ORIENTED": true,
+ "XP1_MULTIPOINT_CONTROL_PLANE": true,
+ "XP1_MULTIPOINT_DATA_PLANE": true,
+ "XP1_PARTIAL_MESSAGE": true,
+ "XP1_PSEUDO_STREAM": true,
+ "XP1_QOS_SUPPORTED": true,
+ "XP1_SAN_SUPPORT_SDP": true,
+ "XP1_SUPPORT_BROADCAST": true,
+ "XP1_SUPPORT_MULTIPOINT": true,
+ "XP1_UNI_RECV": true,
+ "XP1_UNI_SEND": true,
+ },
+ "testing": map[string]bool{
+ "AllocsPerRun": true,
+ "B": true,
+ "Benchmark": true,
+ "BenchmarkResult": true,
+ "Cover": true,
+ "CoverBlock": true,
+ "CoverMode": true,
+ "Coverage": true,
+ "InternalBenchmark": true,
+ "InternalExample": true,
+ "InternalTest": true,
+ "M": true,
+ "Main": true,
+ "MainStart": true,
+ "PB": true,
+ "RegisterCover": true,
+ "RunBenchmarks": true,
+ "RunExamples": true,
+ "RunTests": true,
+ "Short": true,
+ "T": true,
+ "Verbose": true,
+ },
+ "testing/iotest": map[string]bool{
+ "DataErrReader": true,
+ "ErrTimeout": true,
+ "HalfReader": true,
+ "NewReadLogger": true,
+ "NewWriteLogger": true,
+ "OneByteReader": true,
+ "TimeoutReader": true,
+ "TruncateWriter": true,
+ },
+ "testing/quick": map[string]bool{
+ "Check": true,
+ "CheckEqual": true,
+ "CheckEqualError": true,
+ "CheckError": true,
+ "Config": true,
+ "Generator": true,
+ "SetupError": true,
+ "Value": true,
+ },
+ "text/scanner": map[string]bool{
+ "Char": true,
+ "Comment": true,
+ "EOF": true,
+ "Float": true,
+ "GoTokens": true,
+ "GoWhitespace": true,
+ "Ident": true,
+ "Int": true,
+ "Position": true,
+ "RawString": true,
+ "ScanChars": true,
+ "ScanComments": true,
+ "ScanFloats": true,
+ "ScanIdents": true,
+ "ScanInts": true,
+ "ScanRawStrings": true,
+ "ScanStrings": true,
+ "Scanner": true,
+ "SkipComments": true,
+ "String": true,
+ "TokenString": true,
+ },
+ "text/tabwriter": map[string]bool{
+ "AlignRight": true,
+ "Debug": true,
+ "DiscardEmptyColumns": true,
+ "Escape": true,
+ "FilterHTML": true,
+ "NewWriter": true,
+ "StripEscape": true,
+ "TabIndent": true,
+ "Writer": true,
+ },
+ "text/template": map[string]bool{
+ "ExecError": true,
+ "FuncMap": true,
+ "HTMLEscape": true,
+ "HTMLEscapeString": true,
+ "HTMLEscaper": true,
+ "IsTrue": true,
+ "JSEscape": true,
+ "JSEscapeString": true,
+ "JSEscaper": true,
+ "Must": true,
+ "New": true,
+ "ParseFiles": true,
+ "ParseGlob": true,
+ "Template": true,
+ "URLQueryEscaper": true,
+ },
+ "text/template/parse": map[string]bool{
+ "ActionNode": true,
+ "BoolNode": true,
+ "BranchNode": true,
+ "ChainNode": true,
+ "CommandNode": true,
+ "DotNode": true,
+ "FieldNode": true,
+ "IdentifierNode": true,
+ "IfNode": true,
+ "IsEmptyTree": true,
+ "ListNode": true,
+ "New": true,
+ "NewIdentifier": true,
+ "NilNode": true,
+ "Node": true,
+ "NodeAction": true,
+ "NodeBool": true,
+ "NodeChain": true,
+ "NodeCommand": true,
+ "NodeDot": true,
+ "NodeField": true,
+ "NodeIdentifier": true,
+ "NodeIf": true,
+ "NodeList": true,
+ "NodeNil": true,
+ "NodeNumber": true,
+ "NodePipe": true,
+ "NodeRange": true,
+ "NodeString": true,
+ "NodeTemplate": true,
+ "NodeText": true,
+ "NodeType": true,
+ "NodeVariable": true,
+ "NodeWith": true,
+ "NumberNode": true,
+ "Parse": true,
+ "PipeNode": true,
+ "Pos": true,
+ "RangeNode": true,
+ "StringNode": true,
+ "TemplateNode": true,
+ "TextNode": true,
+ "Tree": true,
+ "VariableNode": true,
+ "WithNode": true,
+ },
+ "time": map[string]bool{
+ "ANSIC": true,
+ "After": true,
+ "AfterFunc": true,
+ "April": true,
+ "August": true,
+ "Date": true,
+ "December": true,
+ "Duration": true,
+ "February": true,
+ "FixedZone": true,
+ "Friday": true,
+ "Hour": true,
+ "January": true,
+ "July": true,
+ "June": true,
+ "Kitchen": true,
+ "LoadLocation": true,
+ "LoadLocationFromTZData": true,
+ "Local": true,
+ "Location": true,
+ "March": true,
+ "May": true,
+ "Microsecond": true,
+ "Millisecond": true,
+ "Minute": true,
+ "Monday": true,
+ "Month": true,
+ "Nanosecond": true,
+ "NewTicker": true,
+ "NewTimer": true,
+ "November": true,
+ "Now": true,
+ "October": true,
+ "Parse": true,
+ "ParseDuration": true,
+ "ParseError": true,
+ "ParseInLocation": true,
+ "RFC1123": true,
+ "RFC1123Z": true,
+ "RFC3339": true,
+ "RFC3339Nano": true,
+ "RFC822": true,
+ "RFC822Z": true,
+ "RFC850": true,
+ "RubyDate": true,
+ "Saturday": true,
+ "Second": true,
+ "September": true,
+ "Since": true,
+ "Sleep": true,
+ "Stamp": true,
+ "StampMicro": true,
+ "StampMilli": true,
+ "StampNano": true,
+ "Sunday": true,
+ "Thursday": true,
+ "Tick": true,
+ "Ticker": true,
+ "Time": true,
+ "Timer": true,
+ "Tuesday": true,
+ "UTC": true,
+ "Unix": true,
+ "UnixDate": true,
+ "Until": true,
+ "Wednesday": true,
+ "Weekday": true,
+ },
+ "unicode": map[string]bool{
+ "ASCII_Hex_Digit": true,
+ "Adlam": true,
+ "Ahom": true,
+ "Anatolian_Hieroglyphs": true,
+ "Arabic": true,
+ "Armenian": true,
+ "Avestan": true,
+ "AzeriCase": true,
+ "Balinese": true,
+ "Bamum": true,
+ "Bassa_Vah": true,
+ "Batak": true,
+ "Bengali": true,
+ "Bhaiksuki": true,
+ "Bidi_Control": true,
+ "Bopomofo": true,
+ "Brahmi": true,
+ "Braille": true,
+ "Buginese": true,
+ "Buhid": true,
+ "C": true,
+ "Canadian_Aboriginal": true,
+ "Carian": true,
+ "CaseRange": true,
+ "CaseRanges": true,
+ "Categories": true,
+ "Caucasian_Albanian": true,
+ "Cc": true,
+ "Cf": true,
+ "Chakma": true,
+ "Cham": true,
+ "Cherokee": true,
+ "Co": true,
+ "Common": true,
+ "Coptic": true,
+ "Cs": true,
+ "Cuneiform": true,
+ "Cypriot": true,
+ "Cyrillic": true,
+ "Dash": true,
+ "Deprecated": true,
+ "Deseret": true,
+ "Devanagari": true,
+ "Diacritic": true,
+ "Digit": true,
+ "Duployan": true,
+ "Egyptian_Hieroglyphs": true,
+ "Elbasan": true,
+ "Ethiopic": true,
+ "Extender": true,
+ "FoldCategory": true,
+ "FoldScript": true,
+ "Georgian": true,
+ "Glagolitic": true,
+ "Gothic": true,
+ "Grantha": true,
+ "GraphicRanges": true,
+ "Greek": true,
+ "Gujarati": true,
+ "Gurmukhi": true,
+ "Han": true,
+ "Hangul": true,
+ "Hanunoo": true,
+ "Hatran": true,
+ "Hebrew": true,
+ "Hex_Digit": true,
+ "Hiragana": true,
+ "Hyphen": true,
+ "IDS_Binary_Operator": true,
+ "IDS_Trinary_Operator": true,
+ "Ideographic": true,
+ "Imperial_Aramaic": true,
+ "In": true,
+ "Inherited": true,
+ "Inscriptional_Pahlavi": true,
+ "Inscriptional_Parthian": true,
+ "Is": true,
+ "IsControl": true,
+ "IsDigit": true,
+ "IsGraphic": true,
+ "IsLetter": true,
+ "IsLower": true,
+ "IsMark": true,
+ "IsNumber": true,
+ "IsOneOf": true,
+ "IsPrint": true,
+ "IsPunct": true,
+ "IsSpace": true,
+ "IsSymbol": true,
+ "IsTitle": true,
+ "IsUpper": true,
+ "Javanese": true,
+ "Join_Control": true,
+ "Kaithi": true,
+ "Kannada": true,
+ "Katakana": true,
+ "Kayah_Li": true,
+ "Kharoshthi": true,
+ "Khmer": true,
+ "Khojki": true,
+ "Khudawadi": true,
+ "L": true,
+ "Lao": true,
+ "Latin": true,
+ "Lepcha": true,
+ "Letter": true,
+ "Limbu": true,
+ "Linear_A": true,
+ "Linear_B": true,
+ "Lisu": true,
+ "Ll": true,
+ "Lm": true,
+ "Lo": true,
+ "Logical_Order_Exception": true,
+ "Lower": true,
+ "LowerCase": true,
+ "Lt": true,
+ "Lu": true,
+ "Lycian": true,
+ "Lydian": true,
+ "M": true,
+ "Mahajani": true,
+ "Malayalam": true,
+ "Mandaic": true,
+ "Manichaean": true,
+ "Marchen": true,
+ "Mark": true,
+ "Masaram_Gondi": true,
+ "MaxASCII": true,
+ "MaxCase": true,
+ "MaxLatin1": true,
+ "MaxRune": true,
+ "Mc": true,
+ "Me": true,
+ "Meetei_Mayek": true,
+ "Mende_Kikakui": true,
+ "Meroitic_Cursive": true,
+ "Meroitic_Hieroglyphs": true,
+ "Miao": true,
+ "Mn": true,
+ "Modi": true,
+ "Mongolian": true,
+ "Mro": true,
+ "Multani": true,
+ "Myanmar": true,
+ "N": true,
+ "Nabataean": true,
+ "Nd": true,
+ "New_Tai_Lue": true,
+ "Newa": true,
+ "Nko": true,
+ "Nl": true,
+ "No": true,
+ "Noncharacter_Code_Point": true,
+ "Number": true,
+ "Nushu": true,
+ "Ogham": true,
+ "Ol_Chiki": true,
+ "Old_Hungarian": true,
+ "Old_Italic": true,
+ "Old_North_Arabian": true,
+ "Old_Permic": true,
+ "Old_Persian": true,
+ "Old_South_Arabian": true,
+ "Old_Turkic": true,
+ "Oriya": true,
+ "Osage": true,
+ "Osmanya": true,
+ "Other": true,
+ "Other_Alphabetic": true,
+ "Other_Default_Ignorable_Code_Point": true,
+ "Other_Grapheme_Extend": true,
+ "Other_ID_Continue": true,
+ "Other_ID_Start": true,
+ "Other_Lowercase": true,
+ "Other_Math": true,
+ "Other_Uppercase": true,
+ "P": true,
+ "Pahawh_Hmong": true,
+ "Palmyrene": true,
+ "Pattern_Syntax": true,
+ "Pattern_White_Space": true,
+ "Pau_Cin_Hau": true,
+ "Pc": true,
+ "Pd": true,
+ "Pe": true,
+ "Pf": true,
+ "Phags_Pa": true,
+ "Phoenician": true,
+ "Pi": true,
+ "Po": true,
+ "Prepended_Concatenation_Mark": true,
+ "PrintRanges": true,
+ "Properties": true,
+ "Ps": true,
+ "Psalter_Pahlavi": true,
+ "Punct": true,
+ "Quotation_Mark": true,
+ "Radical": true,
+ "Range16": true,
+ "Range32": true,
+ "RangeTable": true,
+ "Regional_Indicator": true,
+ "Rejang": true,
+ "ReplacementChar": true,
+ "Runic": true,
+ "S": true,
+ "STerm": true,
+ "Samaritan": true,
+ "Saurashtra": true,
+ "Sc": true,
+ "Scripts": true,
+ "Sentence_Terminal": true,
+ "Sharada": true,
+ "Shavian": true,
+ "Siddham": true,
+ "SignWriting": true,
+ "SimpleFold": true,
+ "Sinhala": true,
+ "Sk": true,
+ "Sm": true,
+ "So": true,
+ "Soft_Dotted": true,
+ "Sora_Sompeng": true,
+ "Soyombo": true,
+ "Space": true,
+ "SpecialCase": true,
+ "Sundanese": true,
+ "Syloti_Nagri": true,
+ "Symbol": true,
+ "Syriac": true,
+ "Tagalog": true,
+ "Tagbanwa": true,
+ "Tai_Le": true,
+ "Tai_Tham": true,
+ "Tai_Viet": true,
+ "Takri": true,
+ "Tamil": true,
+ "Tangut": true,
+ "Telugu": true,
+ "Terminal_Punctuation": true,
+ "Thaana": true,
+ "Thai": true,
+ "Tibetan": true,
+ "Tifinagh": true,
+ "Tirhuta": true,
+ "Title": true,
+ "TitleCase": true,
+ "To": true,
+ "ToLower": true,
+ "ToTitle": true,
+ "ToUpper": true,
+ "TurkishCase": true,
+ "Ugaritic": true,
+ "Unified_Ideograph": true,
+ "Upper": true,
+ "UpperCase": true,
+ "UpperLower": true,
+ "Vai": true,
+ "Variation_Selector": true,
+ "Version": true,
+ "Warang_Citi": true,
+ "White_Space": true,
+ "Yi": true,
+ "Z": true,
+ "Zanabazar_Square": true,
+ "Zl": true,
+ "Zp": true,
+ "Zs": true,
+ },
+ "unicode/utf16": map[string]bool{
+ "Decode": true,
+ "DecodeRune": true,
+ "Encode": true,
+ "EncodeRune": true,
+ "IsSurrogate": true,
+ },
+ "unicode/utf8": map[string]bool{
+ "DecodeLastRune": true,
+ "DecodeLastRuneInString": true,
+ "DecodeRune": true,
+ "DecodeRuneInString": true,
+ "EncodeRune": true,
+ "FullRune": true,
+ "FullRuneInString": true,
+ "MaxRune": true,
+ "RuneCount": true,
+ "RuneCountInString": true,
+ "RuneError": true,
+ "RuneLen": true,
+ "RuneSelf": true,
+ "RuneStart": true,
+ "UTFMax": true,
+ "Valid": true,
+ "ValidRune": true,
+ "ValidString": true,
+ },
+ "unsafe": map[string]bool{
+ "Alignof": true,
+ "ArbitraryType": true,
+ "Offsetof": true,
+ "Pointer": true,
+ "Sizeof": true,
+ },
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
new file mode 100644
index 0000000..7219c8e
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
@@ -0,0 +1,196 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fastwalk provides a faster version of filepath.Walk for file system
+// scanning tools.
+package fastwalk
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sync"
+)
+
+// TraverseLink is used as a return value from WalkFuncs to indicate that the
+// symlink named in the call may be traversed.
+var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
+
+// SkipFiles is used as a return value from WalkFuncs to indicate that the
+// callback should not be called for any other files in the current directory.
+// Child directories will still be traversed.
+var SkipFiles = errors.New("fastwalk: skip remaining files in directory")
+
+// Walk is a faster implementation of filepath.Walk.
+//
+// filepath.Walk's design necessarily calls os.Lstat on each file,
+// even if the caller needs less info.
+// Many tools need only the type of each file.
+// On some platforms, this information is provided directly by the readdir
+// system call, avoiding the need to stat each file individually.
+// fastwalk_unix.go contains a fork of the syscall routines.
+//
+// See golang.org/issue/16399
+//
+// Walk walks the file tree rooted at root, calling walkFn for
+// each file or directory in the tree, including root.
+//
+// If walkFn returns filepath.SkipDir, the directory is skipped.
+//
+// Unlike filepath.Walk:
+// * file stat calls must be done by the user.
+// The only provided metadata is the file type, which does not include
+// any permission bits.
+// * multiple goroutines stat the filesystem concurrently. The provided
+// walkFn must be safe for concurrent use.
+// * fastWalk can follow symlinks if walkFn returns the TraverseLink
+// sentinel error. It is the walkFn's responsibility to prevent
+// fastWalk from going into symlink cycles.
+func Walk(root string, walkFn func(path string, typ os.FileMode) error) error {
+ // TODO(bradfitz): make numWorkers configurable? We used a
+ // minimum of 4 to give the kernel more info about multiple
+ // things we want, in hopes its I/O scheduling can take
+ // advantage of that. Hopefully most are in cache. Maybe 4 is
+ // even too low of a minimum. Profile more.
+ numWorkers := 4
+ if n := runtime.NumCPU(); n > numWorkers {
+ numWorkers = n
+ }
+
+ // Make sure to wait for all workers to finish, otherwise
+ // walkFn could still be called after returning. This Wait call
+ // runs after close(e.donec) below.
+ var wg sync.WaitGroup
+ defer wg.Wait()
+
+ w := &walker{
+ fn: walkFn,
+ enqueuec: make(chan walkItem, numWorkers), // buffered for performance
+ workc: make(chan walkItem, numWorkers), // buffered for performance
+ donec: make(chan struct{}),
+
+ // buffered for correctness & not leaking goroutines:
+ resc: make(chan error, numWorkers),
+ }
+ defer close(w.donec)
+
+ for i := 0; i < numWorkers; i++ {
+ wg.Add(1)
+ go w.doWork(&wg)
+ }
+ todo := []walkItem{{dir: root}}
+ out := 0
+ for {
+ workc := w.workc
+ var workItem walkItem
+ if len(todo) == 0 {
+ workc = nil
+ } else {
+ workItem = todo[len(todo)-1]
+ }
+ select {
+ case workc <- workItem:
+ todo = todo[:len(todo)-1]
+ out++
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ case err := <-w.resc:
+ out--
+ if err != nil {
+ return err
+ }
+ if out == 0 && len(todo) == 0 {
+ // It's safe to quit here, as long as the buffered
+ // enqueue channel isn't also readable, which might
+ // happen if the worker sends both another unit of
+ // work and its result before the other select was
+ // scheduled and both w.resc and w.enqueuec were
+ // readable.
+ select {
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ default:
+ return nil
+ }
+ }
+ }
+ }
+}
+
+// doWork reads directories as instructed (via workc) and runs the
+// user's callback function.
+func (w *walker) doWork(wg *sync.WaitGroup) {
+ defer wg.Done()
+ for {
+ select {
+ case <-w.donec:
+ return
+ case it := <-w.workc:
+ select {
+ case <-w.donec:
+ return
+ case w.resc <- w.walk(it.dir, !it.callbackDone):
+ }
+ }
+ }
+}
+
+type walker struct {
+ fn func(path string, typ os.FileMode) error
+
+ donec chan struct{} // closed on fastWalk's return
+ workc chan walkItem // to workers
+ enqueuec chan walkItem // from workers
+ resc chan error // from workers
+}
+
+type walkItem struct {
+ dir string
+ callbackDone bool // callback already called; don't do it again
+}
+
+func (w *walker) enqueue(it walkItem) {
+ select {
+ case w.enqueuec <- it:
+ case <-w.donec:
+ }
+}
+
+func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
+ joined := dirName + string(os.PathSeparator) + baseName
+ if typ == os.ModeDir {
+ w.enqueue(walkItem{dir: joined})
+ return nil
+ }
+
+ err := w.fn(joined, typ)
+ if typ == os.ModeSymlink {
+ if err == TraverseLink {
+ // Set callbackDone so we don't call it twice for both the
+ // symlink-as-symlink and the symlink-as-directory later:
+ w.enqueue(walkItem{dir: joined, callbackDone: true})
+ return nil
+ }
+ if err == filepath.SkipDir {
+ // Permit SkipDir on symlinks too.
+ return nil
+ }
+ }
+ return err
+}
+
+func (w *walker) walk(root string, runUserCallback bool) error {
+ if runUserCallback {
+ err := w.fn(root, os.ModeDir)
+ if err == filepath.SkipDir {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+ }
+
+ return readDir(root, w.onDirEnt)
+}
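For reference, here is a minimal sketch of how a caller might drive fastwalk.Walk as documented above, collecting .go files while leaving symlinks untraversed. The GOROOT-derived root path and the reporting at the end are illustrative assumptions, and the internal/fastwalk import path is only resolvable from inside the golang.org/x/tools module itself:

package main

import (
	"fmt"
	"log"
	"os"
	"strings"
	"sync"

	"golang.org/x/tools/internal/fastwalk"
)

func main() {
	var (
		mu      sync.Mutex
		goFiles []string
	)
	// The callback runs on multiple goroutines, so shared state needs a lock.
	err := fastwalk.Walk(os.Getenv("GOROOT")+"/src", func(path string, typ os.FileMode) error {
		if typ == os.ModeSymlink {
			// Returning fastwalk.TraverseLink here would descend into the target;
			// a real caller must first guard against symlink cycles.
			return nil
		}
		if typ.IsRegular() && strings.HasSuffix(path, ".go") {
			mu.Lock()
			goFiles = append(goFiles, path)
			mu.Unlock()
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("found %d Go files\n", len(goFiles))
}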
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
new file mode 100644
index 0000000..ccffec5
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
@@ -0,0 +1,13 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Fileno)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
new file mode 100644
index 0000000..ab7fbc0
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
@@ -0,0 +1,14 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux darwin
+// +build !appengine
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Ino)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
new file mode 100644
index 0000000..a3b26a7
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Namlen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
new file mode 100644
index 0000000..e880d35
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
@@ -0,0 +1,29 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux
+// +build !appengine
+
+package fastwalk
+
+import (
+ "bytes"
+ "syscall"
+ "unsafe"
+)
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name))
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ const nameBufLen = uint16(len(nameBuf))
+ limit := dirent.Reclen - fixedHdr
+ if limit > nameBufLen {
+ limit = nameBufLen
+ }
+ nameLen := bytes.IndexByte(nameBuf[:limit], 0)
+ if nameLen < 0 {
+ panic("failed to find terminating 0 byte in dirent")
+ }
+ return uint64(nameLen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
new file mode 100644
index 0000000..a906b87
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
@@ -0,0 +1,37 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd
+
+package fastwalk
+
+import (
+ "io/ioutil"
+ "os"
+)
+
+// readDir calls fn for each directory entry in dirName.
+// It does not descend into directories or follow symlinks.
+// If fn returns a non-nil error, readDir returns with that error
+// immediately.
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fis, err := ioutil.ReadDir(dirName)
+ if err != nil {
+ return err
+ }
+ skipFiles := false
+ for _, fi := range fis {
+ if fi.Mode().IsRegular() && skipFiles {
+ continue
+ }
+ if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
+ if err == SkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+ return nil
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
new file mode 100644
index 0000000..3369b1a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
@@ -0,0 +1,127 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build linux darwin freebsd openbsd netbsd
+// +build !appengine
+
+package fastwalk
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "unsafe"
+)
+
+const blockSize = 8 << 10
+
+// unknownFileMode is a sentinel (and bogus) os.FileMode
+// value used to represent a syscall.DT_UNKNOWN Dirent.Type.
+const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
+
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fd, err := syscall.Open(dirName, 0, 0)
+ if err != nil {
+ return &os.PathError{Op: "open", Path: dirName, Err: err}
+ }
+ defer syscall.Close(fd)
+
+ // The buffer must be at least a block long.
+ buf := make([]byte, blockSize) // stack-allocated; doesn't escape
+ bufp := 0 // starting read position in buf
+ nbuf := 0 // end valid data in buf
+ skipFiles := false
+ for {
+ if bufp >= nbuf {
+ bufp = 0
+ nbuf, err = syscall.ReadDirent(fd, buf)
+ if err != nil {
+ return os.NewSyscallError("readdirent", err)
+ }
+ if nbuf <= 0 {
+ return nil
+ }
+ }
+ consumed, name, typ := parseDirEnt(buf[bufp:nbuf])
+ bufp += consumed
+ if name == "" || name == "." || name == ".." {
+ continue
+ }
+ // Fallback for filesystems (like old XFS) that don't
+ // support Dirent.Type and have DT_UNKNOWN (0) there
+ // instead.
+ if typ == unknownFileMode {
+ fi, err := os.Lstat(dirName + "/" + name)
+ if err != nil {
+ // It got deleted in the meantime.
+ if os.IsNotExist(err) {
+ continue
+ }
+ return err
+ }
+ typ = fi.Mode() & os.ModeType
+ }
+ if skipFiles && typ.IsRegular() {
+ continue
+ }
+ if err := fn(dirName, name, typ); err != nil {
+ if err == SkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+}
+
+func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
+ // golang.org/issue/15653
+ dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0]))
+ if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
+ panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
+ }
+ if len(buf) < int(dirent.Reclen) {
+ panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen))
+ }
+ consumed = int(dirent.Reclen)
+ if direntInode(dirent) == 0 { // File absent in directory.
+ return
+ }
+ switch dirent.Type {
+ case syscall.DT_REG:
+ typ = 0
+ case syscall.DT_DIR:
+ typ = os.ModeDir
+ case syscall.DT_LNK:
+ typ = os.ModeSymlink
+ case syscall.DT_BLK:
+ typ = os.ModeDevice
+ case syscall.DT_FIFO:
+ typ = os.ModeNamedPipe
+ case syscall.DT_SOCK:
+ typ = os.ModeSocket
+ case syscall.DT_UNKNOWN:
+ typ = unknownFileMode
+ default:
+ // Skip weird things.
+ // It's probably a DT_WHT (http://lwn.net/Articles/325369/)
+ // or something. Revisit if/when this package is moved outside
+ // of goimports. goimports only cares about regular files,
+ // symlinks, and directories.
+ return
+ }
+
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ nameLen := direntNamlen(dirent)
+
+ // Special cases for common things:
+ if nameLen == 1 && nameBuf[0] == '.' {
+ name = "."
+ } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
+ name = ".."
+ } else {
+ name = string(nameBuf[:nameLen])
+ }
+ return
+}
diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
new file mode 100644
index 0000000..04bb96a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
@@ -0,0 +1,250 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gopathwalk is like filepath.Walk but specialized for finding Go
+// packages, particularly in $GOPATH and $GOROOT.
+package gopathwalk
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "golang.org/x/tools/internal/fastwalk"
+)
+
+// Options controls the behavior of a Walk call.
+type Options struct {
+ Debug bool // Enable debug logging
+ ModulesEnabled bool // Search module caches. Also disables legacy goimports ignore rules.
+}
+
+// RootType indicates the type of a Root.
+type RootType int
+
+const (
+ RootUnknown RootType = iota
+ RootGOROOT
+ RootGOPATH
+ RootCurrentModule
+ RootModuleCache
+ RootOther
+)
+
+// A Root is a starting point for a Walk.
+type Root struct {
+ Path string
+ Type RootType
+}
+
+// SrcDirsRoots returns the roots from build.Default.SrcDirs(). Not modules-compatible.
+func SrcDirsRoots(ctx *build.Context) []Root {
+ var roots []Root
+ roots = append(roots, Root{filepath.Join(ctx.GOROOT, "src"), RootGOROOT})
+ for _, p := range filepath.SplitList(ctx.GOPATH) {
+ roots = append(roots, Root{filepath.Join(p, "src"), RootGOPATH})
+ }
+ return roots
+}
+
+// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called (concurrently) with the absolute
+// paths of the containing source directory and the package directory.
+// add will be called concurrently.
+func Walk(roots []Root, add func(root Root, dir string), opts Options) {
+ for _, root := range roots {
+ walkDir(root, add, opts)
+ }
+}
+
+func walkDir(root Root, add func(Root, string), opts Options) {
+ if _, err := os.Stat(root.Path); os.IsNotExist(err) {
+ if opts.Debug {
+ log.Printf("skipping nonexistant directory: %v", root.Path)
+ }
+ return
+ }
+ if opts.Debug {
+ log.Printf("scanning %s", root.Path)
+ }
+ w := &walker{
+ root: root,
+ add: add,
+ opts: opts,
+ }
+ w.init()
+ if err := fastwalk.Walk(root.Path, w.walk); err != nil {
+ log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err)
+ }
+
+ if opts.Debug {
+ log.Printf("scanned %s", root.Path)
+ }
+}
+
+// walker is the callback for fastwalk.Walk.
+type walker struct {
+ root Root // The source directory to scan.
+ add func(Root, string) // The callback that will be invoked for every possible Go package dir.
+ opts Options // Options passed to Walk by the user.
+
+ ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
+}
+
+// init initializes the walker based on its Options.
+func (w *walker) init() {
+ var ignoredPaths []string
+ if w.root.Type == RootModuleCache {
+ ignoredPaths = []string{"cache"}
+ }
+ if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH {
+ ignoredPaths = w.getIgnoredDirs(w.root.Path)
+ ignoredPaths = append(ignoredPaths, "v", "mod")
+ }
+
+ for _, p := range ignoredPaths {
+ full := filepath.Join(w.root.Path, p)
+ if fi, err := os.Stat(full); err == nil {
+ w.ignoredDirs = append(w.ignoredDirs, fi)
+ if w.opts.Debug {
+ log.Printf("Directory added to ignore list: %s", full)
+ }
+ } else if w.opts.Debug {
+ log.Printf("Error statting ignored directory: %v", err)
+ }
+ }
+}
+
+// getIgnoredDirs reads an optional config file at <path>/.goimportsignore
+// of relative directories to ignore when scanning for go files.
+// The provided path is one of the $GOPATH entries with "src" appended.
+func (w *walker) getIgnoredDirs(path string) []string {
+ file := filepath.Join(path, ".goimportsignore")
+ slurp, err := ioutil.ReadFile(file)
+ if w.opts.Debug {
+ if err != nil {
+ log.Print(err)
+ } else {
+ log.Printf("Read %s", file)
+ }
+ }
+ if err != nil {
+ return nil
+ }
+
+ var ignoredDirs []string
+ bs := bufio.NewScanner(bytes.NewReader(slurp))
+ for bs.Scan() {
+ line := strings.TrimSpace(bs.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ ignoredDirs = append(ignoredDirs, line)
+ }
+ return ignoredDirs
+}
+
+func (w *walker) shouldSkipDir(fi os.FileInfo) bool {
+ for _, ignoredDir := range w.ignoredDirs {
+ if os.SameFile(fi, ignoredDir) {
+ return true
+ }
+ }
+ return false
+}
+
+func (w *walker) walk(path string, typ os.FileMode) error {
+ dir := filepath.Dir(path)
+ if typ.IsRegular() {
+ if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
+ // Doesn't make sense to have regular files
+ // directly in your $GOPATH/src or $GOROOT/src.
+ return fastwalk.SkipFiles
+ }
+ if !strings.HasSuffix(path, ".go") {
+ return nil
+ }
+
+ w.add(w.root, dir)
+ return fastwalk.SkipFiles
+ }
+ if typ == os.ModeDir {
+ base := filepath.Base(path)
+ if base == "" || base[0] == '.' || base[0] == '_' ||
+ base == "testdata" ||
+ (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") ||
+ (!w.opts.ModulesEnabled && base == "node_modules") {
+ return filepath.SkipDir
+ }
+ fi, err := os.Lstat(path)
+ if err == nil && w.shouldSkipDir(fi) {
+ return filepath.SkipDir
+ }
+ return nil
+ }
+ if typ == os.ModeSymlink {
+ base := filepath.Base(path)
+ if strings.HasPrefix(base, ".#") {
+ // Emacs noise.
+ return nil
+ }
+ fi, err := os.Lstat(path)
+ if err != nil {
+ // Just ignore it.
+ return nil
+ }
+ if w.shouldTraverse(dir, fi) {
+ return fastwalk.TraverseLink
+ }
+ }
+ return nil
+}
+
+// shouldTraverse reports whether the symlink fi, found in dir,
+// should be followed. It makes sure symlinks were never visited
+// before to avoid symlink loops.
+func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
+ path := filepath.Join(dir, fi.Name())
+ target, err := filepath.EvalSymlinks(path)
+ if err != nil {
+ return false
+ }
+ ts, err := os.Stat(target)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ return false
+ }
+ if !ts.IsDir() {
+ return false
+ }
+ if w.shouldSkipDir(ts) {
+ return false
+ }
+ // Check for symlink loops by statting each directory component
+ // and seeing if any are the same file as ts.
+ for {
+ parent := filepath.Dir(path)
+ if parent == path {
+ // Made it to the root without seeing a cycle.
+ // Use this symlink.
+ return true
+ }
+ parentInfo, err := os.Stat(parent)
+ if err != nil {
+ return false
+ }
+ if os.SameFile(ts, parentInfo) {
+ // Cycle. Don't traverse.
+ return false
+ }
+ path = parent
+ }
+
+}
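A hedged usage sketch of the gopathwalk API defined above: it scans the build.Default source roots and records every candidate package directory. The add callback runs concurrently, so the shared slice is guarded with a mutex; the Options values and the final print are assumptions made only for illustration:

package main

import (
	"fmt"
	"go/build"
	"sync"

	"golang.org/x/tools/internal/gopathwalk"
)

func main() {
	var (
		mu   sync.Mutex
		dirs []string
	)
	roots := gopathwalk.SrcDirsRoots(&build.Default)
	gopathwalk.Walk(roots, func(root gopathwalk.Root, dir string) {
		// Record each possible Go package directory reported by the walker.
		mu.Lock()
		dirs = append(dirs, dir)
		mu.Unlock()
	}, gopathwalk.Options{Debug: false, ModulesEnabled: false})
	fmt.Printf("found %d candidate package directories\n", len(dirs))
}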
diff --git a/vendor/golang.org/x/tools/internal/module/module.go b/vendor/golang.org/x/tools/internal/module/module.go
new file mode 100644
index 0000000..9a4edb9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/module/module.go
@@ -0,0 +1,540 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package module defines the module.Version type
+// along with support code.
+package module
+
+// IMPORTANT NOTE
+//
+// This file essentially defines the set of valid import paths for the go command.
+// There are many subtle considerations, including Unicode ambiguity,
+// security, network, and file system representations.
+//
+// This file also defines the set of valid module path and version combinations,
+// another topic with many subtle considerations.
+//
+// Changes to the semantics in this file require approval from rsc.
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/internal/semver"
+)
+
+// A Version is defined by a module path and version pair.
+type Version struct {
+ Path string
+
+ // Version is usually a semantic version in canonical form.
+ // There are two exceptions to this general rule.
+ // First, the top-level target of a build has no specific version
+ // and uses Version = "".
+ // Second, during MVS calculations the version "none" is used
+ // to represent the decision to take no version of a given module.
+ Version string `json:",omitempty"`
+}
+
+// Check checks that a given module path, version pair is valid.
+// In addition to the path being a valid module path
+// and the version being a valid semantic version,
+// the two must correspond.
+// For example, the path "yaml/v2" only corresponds to
+// semantic versions beginning with "v2.".
+func Check(path, version string) error {
+ if err := CheckPath(path); err != nil {
+ return err
+ }
+ if !semver.IsValid(version) {
+ return fmt.Errorf("malformed semantic version %v", version)
+ }
+ _, pathMajor, _ := SplitPathVersion(path)
+ if !MatchPathMajor(version, pathMajor) {
+ if pathMajor == "" {
+ pathMajor = "v0 or v1"
+ }
+ if pathMajor[0] == '.' { // .v1
+ pathMajor = pathMajor[1:]
+ }
+ return fmt.Errorf("mismatched module path %v and version %v (want %v)", path, version, pathMajor)
+ }
+ return nil
+}
+
+// firstPathOK reports whether r can appear in the first element of a module path.
+// The first element of the path must be an LDH domain name, at least for now.
+// To avoid case ambiguity, the domain name must be entirely lower case.
+func firstPathOK(r rune) bool {
+ return r == '-' || r == '.' ||
+ '0' <= r && r <= '9' ||
+ 'a' <= r && r <= 'z'
+}
+
+// pathOK reports whether r can appear in an import path element.
+// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~.
+// This matches what "go get" has historically recognized in import paths.
+// TODO(rsc): We would like to allow Unicode letters, but that requires additional
+// care in the safe encoding (see note below).
+func pathOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ return r == '+' || r == '-' || r == '.' || r == '_' || r == '~' ||
+ '0' <= r && r <= '9' ||
+ 'A' <= r && r <= 'Z' ||
+ 'a' <= r && r <= 'z'
+ }
+ return false
+}
+
+// fileNameOK reports whether r can appear in a file name.
+// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters.
+// If we expand the set of allowed characters here, we have to
+// work harder at detecting potential case-folding and normalization collisions.
+// See note about "safe encoding" below.
+func fileNameOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ // Entire set of ASCII punctuation, from which we remove characters:
+ // ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~
+ // We disallow some shell special characters: " ' * < > ? ` |
+ // (Note that some of those are disallowed by the Windows file system as well.)
+ // We also disallow path separators / : and \ (fileNameOK is only called on path element characters).
+ // We allow spaces (U+0020) in file names.
+ const allowed = "!#$%&()+,-.=@[]^_{}~ "
+ if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' {
+ return true
+ }
+ for i := 0; i < len(allowed); i++ {
+ if rune(allowed[i]) == r {
+ return true
+ }
+ }
+ return false
+ }
+ // It may be OK to add more ASCII punctuation here, but only carefully.
+ // For example Windows disallows < > \, and macOS disallows :, so we must not allow those.
+ return unicode.IsLetter(r)
+}
+
+// CheckPath checks that a module path is valid.
+func CheckPath(path string) error {
+ if err := checkPath(path, false); err != nil {
+ return fmt.Errorf("malformed module path %q: %v", path, err)
+ }
+ i := strings.Index(path, "/")
+ if i < 0 {
+ i = len(path)
+ }
+ if i == 0 {
+ return fmt.Errorf("malformed module path %q: leading slash", path)
+ }
+ if !strings.Contains(path[:i], ".") {
+ return fmt.Errorf("malformed module path %q: missing dot in first path element", path)
+ }
+ if path[0] == '-' {
+ return fmt.Errorf("malformed module path %q: leading dash in first path element", path)
+ }
+ for _, r := range path[:i] {
+ if !firstPathOK(r) {
+ return fmt.Errorf("malformed module path %q: invalid char %q in first path element", path, r)
+ }
+ }
+ if _, _, ok := SplitPathVersion(path); !ok {
+ return fmt.Errorf("malformed module path %q: invalid version", path)
+ }
+ return nil
+}
+
+// CheckImportPath checks that an import path is valid.
+func CheckImportPath(path string) error {
+ if err := checkPath(path, false); err != nil {
+ return fmt.Errorf("malformed import path %q: %v", path, err)
+ }
+ return nil
+}
+
+// checkPath checks that a general path is valid.
+// It returns an error describing why but not mentioning path.
+// Because these checks apply to both module paths and import paths,
+// the caller is expected to add the "malformed ___ path %q: " prefix.
+// fileName indicates whether the final element of the path is a file name
+// (as opposed to a directory name).
+func checkPath(path string, fileName bool) error {
+ if !utf8.ValidString(path) {
+ return fmt.Errorf("invalid UTF-8")
+ }
+ if path == "" {
+ return fmt.Errorf("empty string")
+ }
+ if strings.Contains(path, "..") {
+ return fmt.Errorf("double dot")
+ }
+ if strings.Contains(path, "//") {
+ return fmt.Errorf("double slash")
+ }
+ if path[len(path)-1] == '/' {
+ return fmt.Errorf("trailing slash")
+ }
+ elemStart := 0
+ for i, r := range path {
+ if r == '/' {
+ if err := checkElem(path[elemStart:i], fileName); err != nil {
+ return err
+ }
+ elemStart = i + 1
+ }
+ }
+ if err := checkElem(path[elemStart:], fileName); err != nil {
+ return err
+ }
+ return nil
+}
+
+// checkElem checks whether an individual path element is valid.
+// fileName indicates whether the element is a file name (not a directory name).
+func checkElem(elem string, fileName bool) error {
+ if elem == "" {
+ return fmt.Errorf("empty path element")
+ }
+ if strings.Count(elem, ".") == len(elem) {
+ return fmt.Errorf("invalid path element %q", elem)
+ }
+ if elem[0] == '.' && !fileName {
+ return fmt.Errorf("leading dot in path element")
+ }
+ if elem[len(elem)-1] == '.' {
+ return fmt.Errorf("trailing dot in path element")
+ }
+ charOK := pathOK
+ if fileName {
+ charOK = fileNameOK
+ }
+ for _, r := range elem {
+ if !charOK(r) {
+ return fmt.Errorf("invalid char %q", r)
+ }
+ }
+
+ // Windows disallows a bunch of path elements, sadly.
+ // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+ short := elem
+ if i := strings.Index(short, "."); i >= 0 {
+ short = short[:i]
+ }
+ for _, bad := range badWindowsNames {
+ if strings.EqualFold(bad, short) {
+ return fmt.Errorf("disallowed path element %q", elem)
+ }
+ }
+ return nil
+}
+
+// CheckFilePath checks whether a slash-separated file path is valid.
+func CheckFilePath(path string) error {
+ if err := checkPath(path, true); err != nil {
+ return fmt.Errorf("malformed file path %q: %v", path, err)
+ }
+ return nil
+}
+
+// badWindowsNames are the reserved file path elements on Windows.
+// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+var badWindowsNames = []string{
+ "CON",
+ "PRN",
+ "AUX",
+ "NUL",
+ "COM1",
+ "COM2",
+ "COM3",
+ "COM4",
+ "COM5",
+ "COM6",
+ "COM7",
+ "COM8",
+ "COM9",
+ "LPT1",
+ "LPT2",
+ "LPT3",
+ "LPT4",
+ "LPT5",
+ "LPT6",
+ "LPT7",
+ "LPT8",
+ "LPT9",
+}
+
+// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path
+// and pathMajor is either empty or "/vN" for N >= 2.
+// As a special case, gopkg.in paths are recognized directly;
+// they require ".vN" instead of "/vN", and for all N, not just N >= 2.
+func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) {
+ if strings.HasPrefix(path, "gopkg.in/") {
+ return splitGopkgIn(path)
+ }
+
+ i := len(path)
+ dot := false
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') {
+ if path[i-1] == '.' {
+ dot = true
+ }
+ i--
+ }
+ if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' {
+ return path, "", true
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
+
+// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths.
+func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) {
+ if !strings.HasPrefix(path, "gopkg.in/") {
+ return path, "", false
+ }
+ i := len(path)
+ if strings.HasSuffix(path, "-unstable") {
+ i -= len("-unstable")
+ }
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') {
+ i--
+ }
+ if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' {
+ // All gopkg.in paths must end in vN for some N.
+ return path, "", false
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
+
+// MatchPathMajor reports whether the semantic version v
+// matches the path major version pathMajor.
+func MatchPathMajor(v, pathMajor string) bool {
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" {
+ // Allow old bug in pseudo-versions that generated v0.0.0- pseudoversion for gopkg .v1.
+ // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405.
+ return true
+ }
+ m := semver.Major(v)
+ if pathMajor == "" {
+ return m == "v0" || m == "v1" || semver.Build(v) == "+incompatible"
+ }
+ return (pathMajor[0] == '/' || pathMajor[0] == '.') && m == pathMajor[1:]
+}
+
+// CanonicalVersion returns the canonical form of the version string v.
+// It is the same as semver.Canonical(v) except that it preserves the special build suffix "+incompatible".
+func CanonicalVersion(v string) string {
+ cv := semver.Canonical(v)
+ if semver.Build(v) == "+incompatible" {
+ cv += "+incompatible"
+ }
+ return cv
+}
+
+// Sort sorts the list by Path, breaking ties by comparing Versions.
+func Sort(list []Version) {
+ sort.Slice(list, func(i, j int) bool {
+ mi := list[i]
+ mj := list[j]
+ if mi.Path != mj.Path {
+ return mi.Path < mj.Path
+ }
+ // To help go.sum formatting, allow version/file.
+ // Compare semver prefix by semver rules,
+ // file by string order.
+ vi := mi.Version
+ vj := mj.Version
+ var fi, fj string
+ if k := strings.Index(vi, "/"); k >= 0 {
+ vi, fi = vi[:k], vi[k:]
+ }
+ if k := strings.Index(vj, "/"); k >= 0 {
+ vj, fj = vj[:k], vj[k:]
+ }
+ if vi != vj {
+ return semver.Compare(vi, vj) < 0
+ }
+ return fi < fj
+ })
+}
+
+// Safe encodings
+//
+// Module paths appear as substrings of file system paths
+// (in the download cache) and of web server URLs in the proxy protocol.
+// In general we cannot rely on file systems to be case-sensitive,
+// nor can we rely on web servers, since they read from file systems.
+// That is, we cannot rely on the file system to keep rsc.io/QUOTE
+// and rsc.io/quote separate. Windows and macOS don't.
+// Instead, we must never require two different casings of a file path.
+// Because we want the download cache to match the proxy protocol,
+// and because we want the proxy protocol to be possible to serve
+// from a tree of static files (which might be stored on a case-insensitive
+// file system), the proxy protocol must never require two different casings
+// of a URL path either.
+//
+// One possibility would be to make the safe encoding be the lowercase
+// hexadecimal encoding of the actual path bytes. This would avoid ever
+// needing different casings of a file path, but it would be fairly illegible
+// to most programmers when those paths appeared in the file system
+// (including in file paths in compiler errors and stack traces)
+// in web server logs, and so on. Instead, we want a safe encoding that
+// leaves most paths unaltered.
+//
+// The safe encoding is this:
+// replace every uppercase letter with an exclamation mark
+// followed by the letter's lowercase equivalent.
+//
+// For example,
+// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go.
+// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy
+// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus.
+//
+// Import paths that avoid upper-case letters are left unchanged.
+// Note that because import paths are ASCII-only and avoid various
+// problematic punctuation (like : < and >), the safe encoding is also ASCII-only
+// and avoids the same problematic punctuation.
+//
+// Import paths have never allowed exclamation marks, so there is no
+// need to define how to encode a literal !.
+//
+// Although paths are disallowed from using Unicode (see pathOK above),
+// the eventual plan is to allow Unicode letters as well, to assume that
+// file systems and URLs are Unicode-safe (storing UTF-8), and apply
+// the !-for-uppercase convention. Note however that not all runes that
+// are different but case-fold equivalent are an upper/lower pair.
+// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin)
+// are considered to case-fold to each other. When we do add Unicode
+// letters, we must not assume that upper/lower are the only case-equivalent pairs.
+// Perhaps the Kelvin symbol would be disallowed entirely, for example.
+// Or perhaps it would encode as "!!k", or perhaps as "(212A)".
+//
+// Also, it would be nice to allow Unicode marks as well as letters,
+// but marks include combining marks, and then we must deal not
+// only with case folding but also normalization: both U+00E9 ('é')
+// and U+0065 U+0301 ('e' followed by combining acute accent)
+// look the same on the page and are treated by some file systems
+// as the same path. If we do allow Unicode marks in paths, there
+// must be some kind of normalization to allow only one canonical
+// encoding of any character used in an import path.
+
+// EncodePath returns the safe encoding of the given module path.
+// It fails if the module path is invalid.
+func EncodePath(path string) (encoding string, err error) {
+ if err := CheckPath(path); err != nil {
+ return "", err
+ }
+
+ return encodeString(path)
+}
+
+// EncodeVersion returns the safe encoding of the given module version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func EncodeVersion(v string) (encoding string, err error) {
+ if err := checkElem(v, true); err != nil || strings.Contains(v, "!") {
+ return "", fmt.Errorf("disallowed version string %q", v)
+ }
+ return encodeString(v)
+}
+
+func encodeString(s string) (encoding string, err error) {
+ haveUpper := false
+ for _, r := range s {
+ if r == '!' || r >= utf8.RuneSelf {
+ // This should be disallowed by CheckPath, but diagnose anyway.
+ // The correctness of the encoding loop below depends on it.
+ return "", fmt.Errorf("internal error: inconsistency in EncodePath")
+ }
+ if 'A' <= r && r <= 'Z' {
+ haveUpper = true
+ }
+ }
+
+ if !haveUpper {
+ return s, nil
+ }
+
+ var buf []byte
+ for _, r := range s {
+ if 'A' <= r && r <= 'Z' {
+ buf = append(buf, '!', byte(r+'a'-'A'))
+ } else {
+ buf = append(buf, byte(r))
+ }
+ }
+ return string(buf), nil
+}
+
+// DecodePath returns the module path of the given safe encoding.
+// It fails if the encoding is invalid or encodes an invalid path.
+func DecodePath(encoding string) (path string, err error) {
+ path, ok := decodeString(encoding)
+ if !ok {
+ return "", fmt.Errorf("invalid module path encoding %q", encoding)
+ }
+ if err := CheckPath(path); err != nil {
+ return "", fmt.Errorf("invalid module path encoding %q: %v", encoding, err)
+ }
+ return path, nil
+}
+
+// DecodeVersion returns the version string for the given safe encoding.
+// It fails if the encoding is invalid or encodes an invalid version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func DecodeVersion(encoding string) (v string, err error) {
+ v, ok := decodeString(encoding)
+ if !ok {
+ return "", fmt.Errorf("invalid version encoding %q", encoding)
+ }
+ if err := checkElem(v, true); err != nil {
+ return "", fmt.Errorf("disallowed version string %q", v)
+ }
+ return v, nil
+}
+
+func decodeString(encoding string) (string, bool) {
+ var buf []byte
+
+ bang := false
+ for _, r := range encoding {
+ if r >= utf8.RuneSelf {
+ return "", false
+ }
+ if bang {
+ bang = false
+ if r < 'a' || 'z' < r {
+ return "", false
+ }
+ buf = append(buf, byte(r+'A'-'a'))
+ continue
+ }
+ if r == '!' {
+ bang = true
+ continue
+ }
+ if 'A' <= r && r <= 'Z' {
+ return "", false
+ }
+ buf = append(buf, byte(r))
+ }
+ if bang {
+ return "", false
+ }
+ return string(buf), true
+}
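As a small illustration of the safe encoding described above (an assumed round trip, not part of the vendored file), EncodePath and DecodePath can be exercised like this:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/internal/module"
)

func main() {
	// Uppercase letters become "!" followed by the lowercase letter.
	enc, err := module.EncodePath("github.com/Azure/azure-sdk-for-go")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(enc) // github.com/!azure/azure-sdk-for-go

	// Decoding restores the original mixed-case path.
	dec, err := module.DecodePath(enc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(dec) // github.com/Azure/azure-sdk-for-go
}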
diff --git a/vendor/golang.org/x/tools/internal/semver/semver.go b/vendor/golang.org/x/tools/internal/semver/semver.go
new file mode 100644
index 0000000..4af7118
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/semver/semver.go
@@ -0,0 +1,388 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package semver implements comparison of semantic version strings.
+// In this package, semantic version strings must begin with a leading "v",
+// as in "v1.0.0".
+//
+// The general form of a semantic version string accepted by this package is
+//
+// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]]
+//
+// where square brackets indicate optional parts of the syntax;
+// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros;
+// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers
+// using only alphanumeric characters and hyphens; and
+// all-numeric PRERELEASE identifiers must not have leading zeros.
+//
+// This package follows Semantic Versioning 2.0.0 (see semver.org)
+// with two exceptions. First, it requires the "v" prefix. Second, it recognizes
+// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes)
+// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0.
+package semver
+
+// parsed returns the parsed form of a semantic version string.
+type parsed struct {
+ major string
+ minor string
+ patch string
+ short string
+ prerelease string
+ build string
+ err string
+}
+
+// IsValid reports whether v is a valid semantic version string.
+func IsValid(v string) bool {
+ _, ok := parse(v)
+ return ok
+}
+
+// Canonical returns the canonical formatting of the semantic version v.
+// It fills in any missing .MINOR or .PATCH and discards build metadata.
+// Two semantic versions compare equal only if their canonical formattings
+// are identical strings.
+// The canonical invalid semantic version is the empty string.
+func Canonical(v string) string {
+ p, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ if p.build != "" {
+ return v[:len(v)-len(p.build)]
+ }
+ if p.short != "" {
+ return v + p.short
+ }
+ return v
+}
+
+// Major returns the major version prefix of the semantic version v.
+// For example, Major("v2.1.0") == "v2".
+// If v is an invalid semantic version string, Major returns the empty string.
+func Major(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return v[:1+len(pv.major)]
+}
+
+// MajorMinor returns the major.minor version prefix of the semantic version v.
+// For example, MajorMinor("v2.1.0") == "v2.1".
+// If v is an invalid semantic version string, MajorMinor returns the empty string.
+func MajorMinor(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ i := 1 + len(pv.major)
+ if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor {
+ return v[:j]
+ }
+ return v[:i] + "." + pv.minor
+}
+
+// Prerelease returns the prerelease suffix of the semantic version v.
+// For example, Prerelease("v2.1.0-pre+meta") == "-pre".
+// If v is an invalid semantic version string, Prerelease returns the empty string.
+func Prerelease(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.prerelease
+}
+
+// Build returns the build suffix of the semantic version v.
+// For example, Build("v2.1.0+meta") == "+meta".
+// If v is an invalid semantic version string, Build returns the empty string.
+func Build(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.build
+}
+
+// Compare returns an integer comparing two versions according to
+// semantic version precedence.
+// The result will be 0 if v == w, -1 if v < w, or +1 if v > w.
+//
+// An invalid semantic version string is considered less than a valid one.
+// All invalid semantic version strings compare equal to each other.
+func Compare(v, w string) int {
+ pv, ok1 := parse(v)
+ pw, ok2 := parse(w)
+ if !ok1 && !ok2 {
+ return 0
+ }
+ if !ok1 {
+ return -1
+ }
+ if !ok2 {
+ return +1
+ }
+ if c := compareInt(pv.major, pw.major); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.minor, pw.minor); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.patch, pw.patch); c != 0 {
+ return c
+ }
+ return comparePrerelease(pv.prerelease, pw.prerelease)
+}
+
+// Max canonicalizes its arguments and then returns the version string
+// that compares greater.
+func Max(v, w string) string {
+ v = Canonical(v)
+ w = Canonical(w)
+ if Compare(v, w) > 0 {
+ return v
+ }
+ return w
+}
+
+func parse(v string) (p parsed, ok bool) {
+ if v == "" || v[0] != 'v' {
+ p.err = "missing v prefix"
+ return
+ }
+ p.major, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad major version"
+ return
+ }
+ if v == "" {
+ p.minor = "0"
+ p.patch = "0"
+ p.short = ".0.0"
+ return
+ }
+ if v[0] != '.' {
+ p.err = "bad minor prefix"
+ ok = false
+ return
+ }
+ p.minor, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad minor version"
+ return
+ }
+ if v == "" {
+ p.patch = "0"
+ p.short = ".0"
+ return
+ }
+ if v[0] != '.' {
+ p.err = "bad patch prefix"
+ ok = false
+ return
+ }
+ p.patch, v, ok = parseInt(v[1:])
+ if !ok {
+ p.err = "bad patch version"
+ return
+ }
+ if len(v) > 0 && v[0] == '-' {
+ p.prerelease, v, ok = parsePrerelease(v)
+ if !ok {
+ p.err = "bad prerelease"
+ return
+ }
+ }
+ if len(v) > 0 && v[0] == '+' {
+ p.build, v, ok = parseBuild(v)
+ if !ok {
+ p.err = "bad build"
+ return
+ }
+ }
+ if v != "" {
+ p.err = "junk on end"
+ ok = false
+ return
+ }
+ ok = true
+ return
+}
+
+func parseInt(v string) (t, rest string, ok bool) {
+ if v == "" {
+ return
+ }
+ if v[0] < '0' || '9' < v[0] {
+ return
+ }
+ i := 1
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ if v[0] == '0' && i != 1 {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parsePrerelease(v string) (t, rest string, ok bool) {
+ // "A pre-release version MAY be denoted by appending a hyphen and
+ // a series of dot separated identifiers immediately following the patch version.
+ // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-].
+ // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes."
+ if v == "" || v[0] != '-' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) && v[i] != '+' {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parseBuild(v string) (t, rest string, ok bool) {
+ if v == "" || v[0] != '+' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) {
+ if !isIdentChar(v[i]) {
+ return
+ }
+ if v[i] == '.' {
+ if start == i {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func isIdentChar(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-'
+}
+
+func isBadNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v) && i > 1 && v[0] == '0'
+}
+
+func isNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v)
+}
+
+func compareInt(x, y string) int {
+ if x == y {
+ return 0
+ }
+ if len(x) < len(y) {
+ return -1
+ }
+ if len(x) > len(y) {
+ return +1
+ }
+ if x < y {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func comparePrerelease(x, y string) int {
+ // "When major, minor, and patch are equal, a pre-release version has
+ // lower precedence than a normal version.
+ // Example: 1.0.0-alpha < 1.0.0.
+ // Precedence for two pre-release versions with the same major, minor,
+ // and patch version MUST be determined by comparing each dot separated
+ // identifier from left to right until a difference is found as follows:
+ // identifiers consisting of only digits are compared numerically and
+ // identifiers with letters or hyphens are compared lexically in ASCII
+ // sort order. Numeric identifiers always have lower precedence than
+ // non-numeric identifiers. A larger set of pre-release fields has a
+ // higher precedence than a smaller set, if all of the preceding
+ // identifiers are equal.
+ // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta <
+ // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0."
+ if x == y {
+ return 0
+ }
+ if x == "" {
+ return +1
+ }
+ if y == "" {
+ return -1
+ }
+ for x != "" && y != "" {
+ x = x[1:] // skip - or .
+ y = y[1:] // skip - or .
+ var dx, dy string
+ dx, x = nextIdent(x)
+ dy, y = nextIdent(y)
+ if dx != dy {
+ ix := isNum(dx)
+ iy := isNum(dy)
+ if ix != iy {
+ if ix {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ if ix {
+ if len(dx) < len(dy) {
+ return -1
+ }
+ if len(dx) > len(dy) {
+ return +1
+ }
+ }
+ if dx < dy {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ }
+ if x == "" {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func nextIdent(x string) (dx, rest string) {
+ i := 0
+ for i < len(x) && x[i] != '.' {
+ i++
+ }
+ return x[:i], x[i:]
+}
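A brief, assumed usage sketch of the semver helpers defined above; the expected outputs in the comments follow the parsing and comparison rules documented in the package comment:

package main

import (
	"fmt"

	"golang.org/x/tools/internal/semver"
)

func main() {
	fmt.Println(semver.Canonical("v1.2"))                 // v1.2.0 (missing patch filled in)
	fmt.Println(semver.Major("v2.1.0"))                   // v2
	fmt.Println(semver.Compare("v1.0.0-alpha", "v1.0.0")) // -1 (prerelease sorts before release)
	fmt.Println(semver.Max("v1.2", "v1.11.0"))            // v1.11.0 (numeric, not lexical, comparison)
}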
diff --git a/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE
new file mode 100644
index 0000000..1723a22
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) 2013-2016 Guy Bedford, Luke Hoban, Addy Osmani
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/golang.org/x/tools/third_party/typescript/LICENSE b/vendor/golang.org/x/tools/third_party/typescript/LICENSE
new file mode 100644
index 0000000..e7259f8
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/typescript/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE
new file mode 100644
index 0000000..e648283
--- /dev/null
+++ b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2015 The Polymer Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/vendor/google.golang.org/appengine/LICENSE b/vendor/google.golang.org/appengine/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/vendor/google.golang.org/appengine/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/google.golang.org/appengine/internal/api.go b/vendor/google.golang.org/appengine/internal/api.go
new file mode 100644
index 0000000..bbc1cb9
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/api.go
@@ -0,0 +1,671 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build !appengine
+
+package internal
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "net"
+ "net/http"
+ "net/url"
+ "os"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/golang/protobuf/proto"
+ netcontext "golang.org/x/net/context"
+
+ basepb "google.golang.org/appengine/internal/base"
+ logpb "google.golang.org/appengine/internal/log"
+ remotepb "google.golang.org/appengine/internal/remote_api"
+)
+
+const (
+ apiPath = "/rpc_http"
+ defaultTicketSuffix = "/default.20150612t184001.0"
+)
+
+var (
+ // Incoming headers.
+ ticketHeader = http.CanonicalHeaderKey("X-AppEngine-API-Ticket")
+ dapperHeader = http.CanonicalHeaderKey("X-Google-DapperTraceInfo")
+ traceHeader = http.CanonicalHeaderKey("X-Cloud-Trace-Context")
+ curNamespaceHeader = http.CanonicalHeaderKey("X-AppEngine-Current-Namespace")
+ userIPHeader = http.CanonicalHeaderKey("X-AppEngine-User-IP")
+ remoteAddrHeader = http.CanonicalHeaderKey("X-AppEngine-Remote-Addr")
+
+ // Outgoing headers.
+ apiEndpointHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Endpoint")
+ apiEndpointHeaderValue = []string{"app-engine-apis"}
+ apiMethodHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Method")
+ apiMethodHeaderValue = []string{"/VMRemoteAPI.CallRemoteAPI"}
+ apiDeadlineHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Deadline")
+ apiContentType = http.CanonicalHeaderKey("Content-Type")
+ apiContentTypeValue = []string{"application/octet-stream"}
+ logFlushHeader = http.CanonicalHeaderKey("X-AppEngine-Log-Flush-Count")
+
+ apiHTTPClient = &http.Client{
+ Transport: &http.Transport{
+ Proxy: http.ProxyFromEnvironment,
+ Dial: limitDial,
+ },
+ }
+
+ defaultTicketOnce sync.Once
+ defaultTicket string
+ backgroundContextOnce sync.Once
+ backgroundContext netcontext.Context
+)
+
+func apiURL() *url.URL {
+ host, port := "appengine.googleapis.internal", "10001"
+ if h := os.Getenv("API_HOST"); h != "" {
+ host = h
+ }
+ if p := os.Getenv("API_PORT"); p != "" {
+ port = p
+ }
+ return &url.URL{
+ Scheme: "http",
+ Host: host + ":" + port,
+ Path: apiPath,
+ }
+}
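+
+// Illustrative sketch (example values only, not authoritative): with
+// API_HOST=localhost and API_PORT=8080 in the environment, apiURL returns
+// "http://localhost:8080/rpc_http"; with neither set it falls back to
+// "http://appengine.googleapis.internal:10001/rpc_http".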
+
+func handleHTTP(w http.ResponseWriter, r *http.Request) {
+ c := &context{
+ req: r,
+ outHeader: w.Header(),
+ apiURL: apiURL(),
+ }
+ r = r.WithContext(withContext(r.Context(), c))
+ c.req = r
+
+ stopFlushing := make(chan int)
+
+ // Patch up RemoteAddr so it looks reasonable.
+ if addr := r.Header.Get(userIPHeader); addr != "" {
+ r.RemoteAddr = addr
+ } else if addr = r.Header.Get(remoteAddrHeader); addr != "" {
+ r.RemoteAddr = addr
+ } else {
+ // Should not normally reach here, but pick a sensible default anyway.
+ r.RemoteAddr = "127.0.0.1"
+ }
+ // The address in the headers will most likely be of these forms:
+ // 123.123.123.123
+ // 2001:db8::1
+ // net/http.Request.RemoteAddr is specified to be in "IP:port" form.
+ if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil {
+ // Assume the remote address is only a host; add a default port.
+ r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80")
+ }
+
+ // Start the goroutine responsible for flushing app logs.
+ // It is started before the request is handled and stopped once the
+ // request completes, because flushing logs requires making an API call.
+ go c.logFlusher(stopFlushing)
+
+ executeRequestSafely(c, r)
+ c.outHeader = nil // make sure header changes aren't respected any more
+
+ stopFlushing <- 1 // any logging beyond this point will be dropped
+
+ // Flush any pending logs asynchronously.
+ c.pendingLogs.Lock()
+ flushes := c.pendingLogs.flushes
+ if len(c.pendingLogs.lines) > 0 {
+ flushes++
+ }
+ c.pendingLogs.Unlock()
+ flushed := make(chan struct{})
+ go func() {
+ defer close(flushed)
+ // Force a log flush, because with very short requests we
+ // may not ever flush logs.
+ c.flushLog(true)
+ }()
+ w.Header().Set(logFlushHeader, strconv.Itoa(flushes))
+
+ // Avoid nil Write call if c.Write is never called.
+ if c.outCode != 0 {
+ w.WriteHeader(c.outCode)
+ }
+ if c.outBody != nil {
+ w.Write(c.outBody)
+ }
+ // Wait for the last flush to complete before returning,
+ // otherwise the security ticket will not be valid.
+ <-flushed
+}
+
+func executeRequestSafely(c *context, r *http.Request) {
+ defer func() {
+ if x := recover(); x != nil {
+ logf(c, 4, "%s", renderPanic(x)) // 4 == critical
+ c.outCode = 500
+ }
+ }()
+
+ http.DefaultServeMux.ServeHTTP(c, r)
+}
+
+func renderPanic(x interface{}) string {
+ buf := make([]byte, 16<<10) // 16 KB should be plenty
+ buf = buf[:runtime.Stack(buf, false)]
+
+ // Remove the first few stack frames:
+ // this func
+ // the recover closure in the caller
+ // That will root the stack trace at the site of the panic.
+ const (
+ skipStart = "internal.renderPanic"
+ skipFrames = 2
+ )
+ start := bytes.Index(buf, []byte(skipStart))
+ p := start
+ for i := 0; i < skipFrames*2 && p+1 < len(buf); i++ {
+ p = bytes.IndexByte(buf[p+1:], '\n') + p + 1
+ if p < 0 {
+ break
+ }
+ }
+ if p >= 0 {
+ // buf[start:p+1] is the block to remove.
+ // Copy buf[p+1:] over buf[start:] and shrink buf.
+ copy(buf[start:], buf[p+1:])
+ buf = buf[:len(buf)-(p+1-start)]
+ }
+
+ // Add panic heading.
+ head := fmt.Sprintf("panic: %v\n\n", x)
+ if len(head) > len(buf) {
+ // Extremely unlikely to happen.
+ return head
+ }
+ copy(buf[len(head):], buf)
+ copy(buf, head)
+
+ return string(buf)
+}
+
+// context represents the context of an in-flight HTTP request.
+// It implements the appengine.Context and http.ResponseWriter interfaces.
+type context struct {
+ req *http.Request
+
+ outCode int
+ outHeader http.Header
+ outBody []byte
+
+ pendingLogs struct {
+ sync.Mutex
+ lines []*logpb.UserAppLogLine
+ flushes int
+ }
+
+ apiURL *url.URL
+}
+
+var contextKey = "holds a *context"
+
+// jointContext joins two contexts in a superficial way.
+// It takes values and timeouts from a base context, and only values from another context.
+type jointContext struct {
+ base netcontext.Context
+ valuesOnly netcontext.Context
+}
+
+func (c jointContext) Deadline() (time.Time, bool) {
+ return c.base.Deadline()
+}
+
+func (c jointContext) Done() <-chan struct{} {
+ return c.base.Done()
+}
+
+func (c jointContext) Err() error {
+ return c.base.Err()
+}
+
+func (c jointContext) Value(key interface{}) interface{} {
+ if val := c.base.Value(key); val != nil {
+ return val
+ }
+ return c.valuesOnly.Value(key)
+}
+
+// fromContext returns the App Engine context or nil if ctx is not
+// derived from an App Engine context.
+func fromContext(ctx netcontext.Context) *context {
+ c, _ := ctx.Value(&contextKey).(*context)
+ return c
+}
+
+func withContext(parent netcontext.Context, c *context) netcontext.Context {
+ ctx := netcontext.WithValue(parent, &contextKey, c)
+ if ns := c.req.Header.Get(curNamespaceHeader); ns != "" {
+ ctx = withNamespace(ctx, ns)
+ }
+ return ctx
+}
+
+func toContext(c *context) netcontext.Context {
+ return withContext(netcontext.Background(), c)
+}
+
+func IncomingHeaders(ctx netcontext.Context) http.Header {
+ if c := fromContext(ctx); c != nil {
+ return c.req.Header
+ }
+ return nil
+}
+
+func ReqContext(req *http.Request) netcontext.Context {
+ return req.Context()
+}
+
+func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context {
+ return jointContext{
+ base: parent,
+ valuesOnly: req.Context(),
+ }
+}
+
+// DefaultTicket returns a ticket used for the background context or dev_appserver.
+func DefaultTicket() string {
+ defaultTicketOnce.Do(func() {
+ if IsDevAppServer() {
+ defaultTicket = "testapp" + defaultTicketSuffix
+ return
+ }
+ appID := partitionlessAppID()
+ escAppID := strings.Replace(strings.Replace(appID, ":", "_", -1), ".", "_", -1)
+ majVersion := VersionID(nil)
+ if i := strings.Index(majVersion, "."); i > 0 {
+ majVersion = majVersion[:i]
+ }
+ defaultTicket = fmt.Sprintf("%s/%s.%s.%s", escAppID, ModuleName(nil), majVersion, InstanceID())
+ })
+ return defaultTicket
+}
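+
+// Illustrative sketch only: outside dev_appserver the ticket has the shape
+// "escapedAppID/module.majorVersion.instanceID" (compare defaultTicketSuffix
+// above); under dev_appserver it is simply "testapp" + defaultTicketSuffix.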
+
+func BackgroundContext() netcontext.Context {
+ backgroundContextOnce.Do(func() {
+ // Compute background security ticket.
+ ticket := DefaultTicket()
+
+ c := &context{
+ req: &http.Request{
+ Header: http.Header{
+ ticketHeader: []string{ticket},
+ },
+ },
+ apiURL: apiURL(),
+ }
+ backgroundContext = toContext(c)
+
+ // TODO(dsymonds): Wire up the shutdown handler to do a final flush.
+ go c.logFlusher(make(chan int))
+ })
+
+ return backgroundContext
+}
+
+// RegisterTestRequest registers the HTTP request req for testing, such that
+// any API calls are sent to the provided URL. It returns a closure to delete
+// the registration.
+// It should only be used by the aetest package.
+func RegisterTestRequest(req *http.Request, apiURL *url.URL, decorate func(netcontext.Context) netcontext.Context) (*http.Request, func()) {
+ c := &context{
+ req: req,
+ apiURL: apiURL,
+ }
+ ctx := withContext(decorate(req.Context()), c)
+ req = req.WithContext(ctx)
+ c.req = req
+ return req, func() {}
+}
+
+var errTimeout = &CallError{
+ Detail: "Deadline exceeded",
+ Code: int32(remotepb.RpcError_CANCELLED),
+ Timeout: true,
+}
+
+func (c *context) Header() http.Header { return c.outHeader }
+
+// Copied from $GOROOT/src/pkg/net/http/transfer.go. Some response status
+// codes do not permit a response body (nor response entity headers such as
+// Content-Length, Content-Type, etc).
+func bodyAllowedForStatus(status int) bool {
+ switch {
+ case status >= 100 && status <= 199:
+ return false
+ case status == 204:
+ return false
+ case status == 304:
+ return false
+ }
+ return true
+}
+
+func (c *context) Write(b []byte) (int, error) {
+ if c.outCode == 0 {
+ c.WriteHeader(http.StatusOK)
+ }
+ if len(b) > 0 && !bodyAllowedForStatus(c.outCode) {
+ return 0, http.ErrBodyNotAllowed
+ }
+ c.outBody = append(c.outBody, b...)
+ return len(b), nil
+}
+
+func (c *context) WriteHeader(code int) {
+ if c.outCode != 0 {
+ logf(c, 3, "WriteHeader called multiple times on request.") // error level
+ return
+ }
+ c.outCode = code
+}
+
+func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) {
+ hreq := &http.Request{
+ Method: "POST",
+ URL: c.apiURL,
+ Header: http.Header{
+ apiEndpointHeader: apiEndpointHeaderValue,
+ apiMethodHeader: apiMethodHeaderValue,
+ apiContentType: apiContentTypeValue,
+ apiDeadlineHeader: []string{strconv.FormatFloat(timeout.Seconds(), 'f', -1, 64)},
+ },
+ Body: ioutil.NopCloser(bytes.NewReader(body)),
+ ContentLength: int64(len(body)),
+ Host: c.apiURL.Host,
+ }
+ if info := c.req.Header.Get(dapperHeader); info != "" {
+ hreq.Header.Set(dapperHeader, info)
+ }
+ if info := c.req.Header.Get(traceHeader); info != "" {
+ hreq.Header.Set(traceHeader, info)
+ }
+
+ tr := apiHTTPClient.Transport.(*http.Transport)
+
+ var timedOut int32 // atomic; set to 1 if timed out
+ t := time.AfterFunc(timeout, func() {
+ atomic.StoreInt32(&timedOut, 1)
+ tr.CancelRequest(hreq)
+ })
+ defer t.Stop()
+ defer func() {
+ // Check if timeout was exceeded.
+ if atomic.LoadInt32(&timedOut) != 0 {
+ err = errTimeout
+ }
+ }()
+
+ hresp, err := apiHTTPClient.Do(hreq)
+ if err != nil {
+ return nil, &CallError{
+ Detail: fmt.Sprintf("service bridge HTTP failed: %v", err),
+ Code: int32(remotepb.RpcError_UNKNOWN),
+ }
+ }
+ defer hresp.Body.Close()
+ hrespBody, err := ioutil.ReadAll(hresp.Body)
+ if hresp.StatusCode != 200 {
+ return nil, &CallError{
+ Detail: fmt.Sprintf("service bridge returned HTTP %d (%q)", hresp.StatusCode, hrespBody),
+ Code: int32(remotepb.RpcError_UNKNOWN),
+ }
+ }
+ if err != nil {
+ return nil, &CallError{
+ Detail: fmt.Sprintf("service bridge response bad: %v", err),
+ Code: int32(remotepb.RpcError_UNKNOWN),
+ }
+ }
+ return hrespBody, nil
+}
+
+func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
+ if ns := NamespaceFromContext(ctx); ns != "" {
+ if fn, ok := NamespaceMods[service]; ok {
+ fn(in, ns)
+ }
+ }
+
+ if f, ctx, ok := callOverrideFromContext(ctx); ok {
+ return f(ctx, service, method, in, out)
+ }
+
+ // Handle already-done contexts quickly.
+ select {
+ case <-ctx.Done():
+ return ctx.Err()
+ default:
+ }
+
+ c := fromContext(ctx)
+ if c == nil {
+ // Give a good error message rather than a panic lower down.
+ return errNotAppEngineContext
+ }
+
+ // Apply transaction modifications if we're in a transaction.
+ if t := transactionFromContext(ctx); t != nil {
+ if t.finished {
+ return errors.New("transaction context has expired")
+ }
+ applyTransaction(in, &t.transaction)
+ }
+
+ // Default RPC timeout is 60s.
+ timeout := 60 * time.Second
+ if deadline, ok := ctx.Deadline(); ok {
+ timeout = deadline.Sub(time.Now())
+ }
+
+ data, err := proto.Marshal(in)
+ if err != nil {
+ return err
+ }
+
+ ticket := c.req.Header.Get(ticketHeader)
+ // Use a test ticket in a test environment.
+ if ticket == "" {
+ if appid := ctx.Value(&appIDOverrideKey); appid != nil {
+ ticket = appid.(string) + defaultTicketSuffix
+ }
+ }
+ // Fall back to using the background ticket when the request ticket is not available in Flex or dev_appserver.
+ if ticket == "" {
+ ticket = DefaultTicket()
+ }
+ req := &remotepb.Request{
+ ServiceName: &service,
+ Method: &method,
+ Request: data,
+ RequestId: &ticket,
+ }
+ hreqBody, err := proto.Marshal(req)
+ if err != nil {
+ return err
+ }
+
+ hrespBody, err := c.post(hreqBody, timeout)
+ if err != nil {
+ return err
+ }
+
+ res := &remotepb.Response{}
+ if err := proto.Unmarshal(hrespBody, res); err != nil {
+ return err
+ }
+ if res.RpcError != nil {
+ ce := &CallError{
+ Detail: res.RpcError.GetDetail(),
+ Code: *res.RpcError.Code,
+ }
+ switch remotepb.RpcError_ErrorCode(ce.Code) {
+ case remotepb.RpcError_CANCELLED, remotepb.RpcError_DEADLINE_EXCEEDED:
+ ce.Timeout = true
+ }
+ return ce
+ }
+ if res.ApplicationError != nil {
+ return &APIError{
+ Service: *req.ServiceName,
+ Detail: res.ApplicationError.GetDetail(),
+ Code: *res.ApplicationError.Code,
+ }
+ }
+ if res.Exception != nil || res.JavaException != nil {
+ // This shouldn't happen, but let's be defensive.
+ return &CallError{
+ Detail: "service bridge returned exception",
+ Code: int32(remotepb.RpcError_UNKNOWN),
+ }
+ }
+ return proto.Unmarshal(res.Response, out)
+}
+
+func (c *context) Request() *http.Request {
+ return c.req
+}
+
+func (c *context) addLogLine(ll *logpb.UserAppLogLine) {
+ // Truncate long log lines.
+ // TODO(dsymonds): Check if this is still necessary.
+ const lim = 8 << 10
+ if len(*ll.Message) > lim {
+ suffix := fmt.Sprintf("...(length %d)", len(*ll.Message))
+ ll.Message = proto.String((*ll.Message)[:lim-len(suffix)] + suffix)
+ }
+
+ c.pendingLogs.Lock()
+ c.pendingLogs.lines = append(c.pendingLogs.lines, ll)
+ c.pendingLogs.Unlock()
+}
+
+var logLevelName = map[int64]string{
+ 0: "DEBUG",
+ 1: "INFO",
+ 2: "WARNING",
+ 3: "ERROR",
+ 4: "CRITICAL",
+}
+
+func logf(c *context, level int64, format string, args ...interface{}) {
+ if c == nil {
+ panic("not an App Engine context")
+ }
+ s := fmt.Sprintf(format, args...)
+ s = strings.TrimRight(s, "\n") // Remove any trailing newline characters.
+ c.addLogLine(&logpb.UserAppLogLine{
+ TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3),
+ Level: &level,
+ Message: &s,
+ })
+ // Only duplicate the log to stderr if not running on App Engine second generation.
+ if !IsSecondGen() {
+ log.Print(logLevelName[level] + ": " + s)
+ }
+}
+
+// flushLog attempts to flush any pending logs to the appserver.
+// It should not be called concurrently.
+func (c *context) flushLog(force bool) (flushed bool) {
+ c.pendingLogs.Lock()
+ // Grab up to 30 MB. We can get away with up to 32 MB, but let's be cautious.
+ n, rem := 0, 30<<20
+ for ; n < len(c.pendingLogs.lines); n++ {
+ ll := c.pendingLogs.lines[n]
+ // Each log line will require about 3 bytes of overhead.
+ nb := proto.Size(ll) + 3
+ if nb > rem {
+ break
+ }
+ rem -= nb
+ }
+ lines := c.pendingLogs.lines[:n]
+ c.pendingLogs.lines = c.pendingLogs.lines[n:]
+ c.pendingLogs.Unlock()
+
+ if len(lines) == 0 && !force {
+ // Nothing to flush.
+ return false
+ }
+
+ rescueLogs := false
+ defer func() {
+ if rescueLogs {
+ c.pendingLogs.Lock()
+ c.pendingLogs.lines = append(lines, c.pendingLogs.lines...)
+ c.pendingLogs.Unlock()
+ }
+ }()
+
+ buf, err := proto.Marshal(&logpb.UserAppLogGroup{
+ LogLine: lines,
+ })
+ if err != nil {
+ log.Printf("internal.flushLog: marshaling UserAppLogGroup: %v", err)
+ rescueLogs = true
+ return false
+ }
+
+ req := &logpb.FlushRequest{
+ Logs: buf,
+ }
+ res := &basepb.VoidProto{}
+ c.pendingLogs.Lock()
+ c.pendingLogs.flushes++
+ c.pendingLogs.Unlock()
+ if err := Call(toContext(c), "logservice", "Flush", req, res); err != nil {
+ log.Printf("internal.flushLog: Flush RPC: %v", err)
+ rescueLogs = true
+ return false
+ }
+ return true
+}
+
+const (
+ // Log flushing parameters.
+ flushInterval = 1 * time.Second
+ forceFlushInterval = 60 * time.Second
+)
+
+func (c *context) logFlusher(stop <-chan int) {
+ lastFlush := time.Now()
+ tick := time.NewTicker(flushInterval)
+ for {
+ select {
+ case <-stop:
+ // Request finished.
+ tick.Stop()
+ return
+ case <-tick.C:
+ force := time.Now().Sub(lastFlush) > forceFlushInterval
+ if c.flushLog(force) {
+ lastFlush = time.Now()
+ }
+ }
+ }
+}
+
+func ContextForTesting(req *http.Request) netcontext.Context {
+ return toContext(&context{req: req})
+}
diff --git a/vendor/google.golang.org/appengine/internal/api_classic.go b/vendor/google.golang.org/appengine/internal/api_classic.go
new file mode 100644
index 0000000..f0f40b2
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/api_classic.go
@@ -0,0 +1,169 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package internal
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "time"
+
+ "appengine"
+ "appengine_internal"
+ basepb "appengine_internal/base"
+
+ "github.com/golang/protobuf/proto"
+ netcontext "golang.org/x/net/context"
+)
+
+var contextKey = "holds an appengine.Context"
+
+// fromContext returns the App Engine context or nil if ctx is not
+// derived from an App Engine context.
+func fromContext(ctx netcontext.Context) appengine.Context {
+ c, _ := ctx.Value(&contextKey).(appengine.Context)
+ return c
+}
+
+// This is only for classic App Engine adapters.
+func ClassicContextFromContext(ctx netcontext.Context) (appengine.Context, error) {
+ c := fromContext(ctx)
+ if c == nil {
+ return nil, errNotAppEngineContext
+ }
+ return c, nil
+}
+
+func withContext(parent netcontext.Context, c appengine.Context) netcontext.Context {
+ ctx := netcontext.WithValue(parent, &contextKey, c)
+
+ s := &basepb.StringProto{}
+ c.Call("__go__", "GetNamespace", &basepb.VoidProto{}, s, nil)
+ if ns := s.GetValue(); ns != "" {
+ ctx = NamespacedContext(ctx, ns)
+ }
+
+ return ctx
+}
+
+func IncomingHeaders(ctx netcontext.Context) http.Header {
+ if c := fromContext(ctx); c != nil {
+ if req, ok := c.Request().(*http.Request); ok {
+ return req.Header
+ }
+ }
+ return nil
+}
+
+func ReqContext(req *http.Request) netcontext.Context {
+ return WithContext(netcontext.Background(), req)
+}
+
+func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context {
+ c := appengine.NewContext(req)
+ return withContext(parent, c)
+}
+
+type testingContext struct {
+ appengine.Context
+
+ req *http.Request
+}
+
+func (t *testingContext) FullyQualifiedAppID() string { return "dev~testcontext" }
+func (t *testingContext) Call(service, method string, _, _ appengine_internal.ProtoMessage, _ *appengine_internal.CallOptions) error {
+ if service == "__go__" && method == "GetNamespace" {
+ return nil
+ }
+ return fmt.Errorf("testingContext: unsupported Call")
+}
+func (t *testingContext) Request() interface{} { return t.req }
+
+func ContextForTesting(req *http.Request) netcontext.Context {
+ return withContext(netcontext.Background(), &testingContext{req: req})
+}
+
+func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
+ if ns := NamespaceFromContext(ctx); ns != "" {
+ if fn, ok := NamespaceMods[service]; ok {
+ fn(in, ns)
+ }
+ }
+
+ if f, ctx, ok := callOverrideFromContext(ctx); ok {
+ return f(ctx, service, method, in, out)
+ }
+
+ // Handle already-done contexts quickly.
+ select {
+ case <-ctx.Done():
+ return ctx.Err()
+ default:
+ }
+
+ c := fromContext(ctx)
+ if c == nil {
+ // Give a good error message rather than a panic lower down.
+ return errNotAppEngineContext
+ }
+
+ // Apply transaction modifications if we're in a transaction.
+ if t := transactionFromContext(ctx); t != nil {
+ if t.finished {
+ return errors.New("transaction context has expired")
+ }
+ applyTransaction(in, &t.transaction)
+ }
+
+ var opts *appengine_internal.CallOptions
+ if d, ok := ctx.Deadline(); ok {
+ opts = &appengine_internal.CallOptions{
+ Timeout: d.Sub(time.Now()),
+ }
+ }
+
+ err := c.Call(service, method, in, out, opts)
+ switch v := err.(type) {
+ case *appengine_internal.APIError:
+ return &APIError{
+ Service: v.Service,
+ Detail: v.Detail,
+ Code: v.Code,
+ }
+ case *appengine_internal.CallError:
+ return &CallError{
+ Detail: v.Detail,
+ Code: v.Code,
+ Timeout: v.Timeout,
+ }
+ }
+ return err
+}
+
+func handleHTTP(w http.ResponseWriter, r *http.Request) {
+ panic("handleHTTP called; this should be impossible")
+}
+
+func logf(c appengine.Context, level int64, format string, args ...interface{}) {
+ var fn func(format string, args ...interface{})
+ switch level {
+ case 0:
+ fn = c.Debugf
+ case 1:
+ fn = c.Infof
+ case 2:
+ fn = c.Warningf
+ case 3:
+ fn = c.Errorf
+ case 4:
+ fn = c.Criticalf
+ default:
+ // This shouldn't happen.
+ fn = c.Criticalf
+ }
+ fn(format, args...)
+}
diff --git a/vendor/google.golang.org/appengine/internal/api_common.go b/vendor/google.golang.org/appengine/internal/api_common.go
new file mode 100644
index 0000000..e0c0b21
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/api_common.go
@@ -0,0 +1,123 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "errors"
+ "os"
+
+ "github.com/golang/protobuf/proto"
+ netcontext "golang.org/x/net/context"
+)
+
+var errNotAppEngineContext = errors.New("not an App Engine context")
+
+type CallOverrideFunc func(ctx netcontext.Context, service, method string, in, out proto.Message) error
+
+var callOverrideKey = "holds []CallOverrideFunc"
+
+func WithCallOverride(ctx netcontext.Context, f CallOverrideFunc) netcontext.Context {
+ // We avoid appending to any existing call override
+ // so we don't risk overwriting a popped stack below.
+ var cofs []CallOverrideFunc
+ if uf, ok := ctx.Value(&callOverrideKey).([]CallOverrideFunc); ok {
+ cofs = append(cofs, uf...)
+ }
+ cofs = append(cofs, f)
+ return netcontext.WithValue(ctx, &callOverrideKey, cofs)
+}
+
+func callOverrideFromContext(ctx netcontext.Context) (CallOverrideFunc, netcontext.Context, bool) {
+ cofs, _ := ctx.Value(&callOverrideKey).([]CallOverrideFunc)
+ if len(cofs) == 0 {
+ return nil, nil, false
+ }
+ // We found a list of overrides; grab the last, and reconstitute a
+ // context that will hide it.
+ f := cofs[len(cofs)-1]
+ ctx = netcontext.WithValue(ctx, &callOverrideKey, cofs[:len(cofs)-1])
+ return f, ctx, true
+}
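+
+// Illustrative sketch: overrides behave like a stack. If f1 and then f2 are
+// attached via WithCallOverride, callOverrideFromContext yields f2 together
+// with a context whose next lookup yields f1; a third lookup reports ok=false.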
+
+type logOverrideFunc func(level int64, format string, args ...interface{})
+
+var logOverrideKey = "holds a logOverrideFunc"
+
+func WithLogOverride(ctx netcontext.Context, f logOverrideFunc) netcontext.Context {
+ return netcontext.WithValue(ctx, &logOverrideKey, f)
+}
+
+var appIDOverrideKey = "holds a string, being the full app ID"
+
+func WithAppIDOverride(ctx netcontext.Context, appID string) netcontext.Context {
+ return netcontext.WithValue(ctx, &appIDOverrideKey, appID)
+}
+
+var namespaceKey = "holds the namespace string"
+
+func withNamespace(ctx netcontext.Context, ns string) netcontext.Context {
+ return netcontext.WithValue(ctx, &namespaceKey, ns)
+}
+
+func NamespaceFromContext(ctx netcontext.Context) string {
+ // If there's no namespace, return the empty string.
+ ns, _ := ctx.Value(&namespaceKey).(string)
+ return ns
+}
+
+// FullyQualifiedAppID returns the fully-qualified application ID.
+// This may contain a partition prefix (e.g. "s~" for High Replication apps),
+// or a domain prefix (e.g. "example.com:").
+func FullyQualifiedAppID(ctx netcontext.Context) string {
+ if id, ok := ctx.Value(&appIDOverrideKey).(string); ok {
+ return id
+ }
+ return fullyQualifiedAppID(ctx)
+}
+
+func Logf(ctx netcontext.Context, level int64, format string, args ...interface{}) {
+ if f, ok := ctx.Value(&logOverrideKey).(logOverrideFunc); ok {
+ f(level, format, args...)
+ return
+ }
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ logf(c, level, format, args...)
+}
+
+// NamespacedContext wraps a Context to support namespaces.
+func NamespacedContext(ctx netcontext.Context, namespace string) netcontext.Context {
+ return withNamespace(ctx, namespace)
+}
+
+// SetTestEnv sets the env variables for testing the background ticket in Flex.
+func SetTestEnv() func() {
+ var environ = []struct {
+ key, value string
+ }{
+ {"GAE_LONG_APP_ID", "my-app-id"},
+ {"GAE_MINOR_VERSION", "067924799508853122"},
+ {"GAE_MODULE_INSTANCE", "0"},
+ {"GAE_MODULE_NAME", "default"},
+ {"GAE_MODULE_VERSION", "20150612t184001"},
+ }
+
+ for _, v := range environ {
+ old := os.Getenv(v.key)
+ os.Setenv(v.key, v.value)
+ v.value = old
+ }
+ return func() { // Restore old environment after the test completes.
+ for _, v := range environ {
+ if v.value == "" {
+ os.Unsetenv(v.key)
+ continue
+ }
+ os.Setenv(v.key, v.value)
+ }
+ }
+}
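+
+// Illustrative usage sketch:
+//
+//	restore := SetTestEnv()
+//	defer restore()
+//
+// This sets the GAE_* variables above and restores their previous values
+// when the test completes.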
diff --git a/vendor/google.golang.org/appengine/internal/app_id.go b/vendor/google.golang.org/appengine/internal/app_id.go
new file mode 100644
index 0000000..11df8c0
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/app_id.go
@@ -0,0 +1,28 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "strings"
+)
+
+func parseFullAppID(appid string) (partition, domain, displayID string) {
+ if i := strings.Index(appid, "~"); i != -1 {
+ partition, appid = appid[:i], appid[i+1:]
+ }
+ if i := strings.Index(appid, ":"); i != -1 {
+ domain, appid = appid[:i], appid[i+1:]
+ }
+ return partition, domain, appid
+}
+
+// appID returns "appid" or "domain.com:appid".
+func appID(fullAppID string) string {
+ _, dom, dis := parseFullAppID(fullAppID)
+ if dom != "" {
+ return dom + ":" + dis
+ }
+ return dis
+}
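+
+// Illustrative examples (sketch; input values are made up):
+//
+//	parseFullAppID("s~example.com:myapp") // partition "s", domain "example.com", displayID "myapp"
+//	appID("s~example.com:myapp")          // "example.com:myapp"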
diff --git a/vendor/google.golang.org/appengine/internal/base/api_base.pb.go b/vendor/google.golang.org/appengine/internal/base/api_base.pb.go
new file mode 100644
index 0000000..db4777e
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/base/api_base.pb.go
@@ -0,0 +1,308 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/base/api_base.proto
+
+package base
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type StringProto struct {
+ Value *string `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *StringProto) Reset() { *m = StringProto{} }
+func (m *StringProto) String() string { return proto.CompactTextString(m) }
+func (*StringProto) ProtoMessage() {}
+func (*StringProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{0}
+}
+func (m *StringProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_StringProto.Unmarshal(m, b)
+}
+func (m *StringProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_StringProto.Marshal(b, m, deterministic)
+}
+func (dst *StringProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_StringProto.Merge(dst, src)
+}
+func (m *StringProto) XXX_Size() int {
+ return xxx_messageInfo_StringProto.Size(m)
+}
+func (m *StringProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_StringProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StringProto proto.InternalMessageInfo
+
+func (m *StringProto) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+type Integer32Proto struct {
+ Value *int32 `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Integer32Proto) Reset() { *m = Integer32Proto{} }
+func (m *Integer32Proto) String() string { return proto.CompactTextString(m) }
+func (*Integer32Proto) ProtoMessage() {}
+func (*Integer32Proto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{1}
+}
+func (m *Integer32Proto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Integer32Proto.Unmarshal(m, b)
+}
+func (m *Integer32Proto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Integer32Proto.Marshal(b, m, deterministic)
+}
+func (dst *Integer32Proto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Integer32Proto.Merge(dst, src)
+}
+func (m *Integer32Proto) XXX_Size() int {
+ return xxx_messageInfo_Integer32Proto.Size(m)
+}
+func (m *Integer32Proto) XXX_DiscardUnknown() {
+ xxx_messageInfo_Integer32Proto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Integer32Proto proto.InternalMessageInfo
+
+func (m *Integer32Proto) GetValue() int32 {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return 0
+}
+
+type Integer64Proto struct {
+ Value *int64 `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Integer64Proto) Reset() { *m = Integer64Proto{} }
+func (m *Integer64Proto) String() string { return proto.CompactTextString(m) }
+func (*Integer64Proto) ProtoMessage() {}
+func (*Integer64Proto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{2}
+}
+func (m *Integer64Proto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Integer64Proto.Unmarshal(m, b)
+}
+func (m *Integer64Proto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Integer64Proto.Marshal(b, m, deterministic)
+}
+func (dst *Integer64Proto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Integer64Proto.Merge(dst, src)
+}
+func (m *Integer64Proto) XXX_Size() int {
+ return xxx_messageInfo_Integer64Proto.Size(m)
+}
+func (m *Integer64Proto) XXX_DiscardUnknown() {
+ xxx_messageInfo_Integer64Proto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Integer64Proto proto.InternalMessageInfo
+
+func (m *Integer64Proto) GetValue() int64 {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return 0
+}
+
+type BoolProto struct {
+ Value *bool `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *BoolProto) Reset() { *m = BoolProto{} }
+func (m *BoolProto) String() string { return proto.CompactTextString(m) }
+func (*BoolProto) ProtoMessage() {}
+func (*BoolProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{3}
+}
+func (m *BoolProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_BoolProto.Unmarshal(m, b)
+}
+func (m *BoolProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_BoolProto.Marshal(b, m, deterministic)
+}
+func (dst *BoolProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_BoolProto.Merge(dst, src)
+}
+func (m *BoolProto) XXX_Size() int {
+ return xxx_messageInfo_BoolProto.Size(m)
+}
+func (m *BoolProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_BoolProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BoolProto proto.InternalMessageInfo
+
+func (m *BoolProto) GetValue() bool {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return false
+}
+
+type DoubleProto struct {
+ Value *float64 `protobuf:"fixed64,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *DoubleProto) Reset() { *m = DoubleProto{} }
+func (m *DoubleProto) String() string { return proto.CompactTextString(m) }
+func (*DoubleProto) ProtoMessage() {}
+func (*DoubleProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{4}
+}
+func (m *DoubleProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_DoubleProto.Unmarshal(m, b)
+}
+func (m *DoubleProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_DoubleProto.Marshal(b, m, deterministic)
+}
+func (dst *DoubleProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_DoubleProto.Merge(dst, src)
+}
+func (m *DoubleProto) XXX_Size() int {
+ return xxx_messageInfo_DoubleProto.Size(m)
+}
+func (m *DoubleProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_DoubleProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DoubleProto proto.InternalMessageInfo
+
+func (m *DoubleProto) GetValue() float64 {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return 0
+}
+
+type BytesProto struct {
+ Value []byte `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *BytesProto) Reset() { *m = BytesProto{} }
+func (m *BytesProto) String() string { return proto.CompactTextString(m) }
+func (*BytesProto) ProtoMessage() {}
+func (*BytesProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{5}
+}
+func (m *BytesProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_BytesProto.Unmarshal(m, b)
+}
+func (m *BytesProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_BytesProto.Marshal(b, m, deterministic)
+}
+func (dst *BytesProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_BytesProto.Merge(dst, src)
+}
+func (m *BytesProto) XXX_Size() int {
+ return xxx_messageInfo_BytesProto.Size(m)
+}
+func (m *BytesProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_BytesProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BytesProto proto.InternalMessageInfo
+
+func (m *BytesProto) GetValue() []byte {
+ if m != nil {
+ return m.Value
+ }
+ return nil
+}
+
+type VoidProto struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *VoidProto) Reset() { *m = VoidProto{} }
+func (m *VoidProto) String() string { return proto.CompactTextString(m) }
+func (*VoidProto) ProtoMessage() {}
+func (*VoidProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_api_base_9d49f8792e0c1140, []int{6}
+}
+func (m *VoidProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_VoidProto.Unmarshal(m, b)
+}
+func (m *VoidProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_VoidProto.Marshal(b, m, deterministic)
+}
+func (dst *VoidProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_VoidProto.Merge(dst, src)
+}
+func (m *VoidProto) XXX_Size() int {
+ return xxx_messageInfo_VoidProto.Size(m)
+}
+func (m *VoidProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_VoidProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_VoidProto proto.InternalMessageInfo
+
+func init() {
+ proto.RegisterType((*StringProto)(nil), "appengine.base.StringProto")
+ proto.RegisterType((*Integer32Proto)(nil), "appengine.base.Integer32Proto")
+ proto.RegisterType((*Integer64Proto)(nil), "appengine.base.Integer64Proto")
+ proto.RegisterType((*BoolProto)(nil), "appengine.base.BoolProto")
+ proto.RegisterType((*DoubleProto)(nil), "appengine.base.DoubleProto")
+ proto.RegisterType((*BytesProto)(nil), "appengine.base.BytesProto")
+ proto.RegisterType((*VoidProto)(nil), "appengine.base.VoidProto")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/base/api_base.proto", fileDescriptor_api_base_9d49f8792e0c1140)
+}
+
+var fileDescriptor_api_base_9d49f8792e0c1140 = []byte{
+ // 199 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0xcf, 0x3f, 0x4b, 0xc6, 0x30,
+ 0x10, 0x06, 0x70, 0x5a, 0xad, 0xb4, 0x57, 0xe9, 0x20, 0x0e, 0x1d, 0xb5, 0x05, 0x71, 0x4a, 0x40,
+ 0x45, 0x9c, 0x83, 0x8b, 0x9b, 0x28, 0x38, 0xb8, 0x48, 0x8a, 0xc7, 0x11, 0x08, 0xb9, 0x90, 0xa6,
+ 0x82, 0xdf, 0x5e, 0xda, 0xd2, 0xfa, 0xc2, 0x9b, 0xed, 0xfe, 0xfc, 0xe0, 0xe1, 0x81, 0x27, 0x62,
+ 0x26, 0x8b, 0x82, 0xd8, 0x6a, 0x47, 0x82, 0x03, 0x49, 0xed, 0x3d, 0x3a, 0x32, 0x0e, 0xa5, 0x71,
+ 0x11, 0x83, 0xd3, 0x56, 0x0e, 0x7a, 0x44, 0xa9, 0xbd, 0xf9, 0x9a, 0x07, 0xe1, 0x03, 0x47, 0xbe,
+ 0x68, 0x76, 0x27, 0xe6, 0x6b, 0xd7, 0x43, 0xfd, 0x1e, 0x83, 0x71, 0xf4, 0xba, 0xbc, 0x2f, 0xa1,
+ 0xf8, 0xd1, 0x76, 0xc2, 0x36, 0xbb, 0xca, 0x6f, 0xab, 0xb7, 0x75, 0xe9, 0x6e, 0xa0, 0x79, 0x71,
+ 0x11, 0x09, 0xc3, 0xfd, 0x5d, 0xc2, 0x15, 0xc7, 0xee, 0xf1, 0x21, 0xe1, 0x4e, 0x36, 0x77, 0x0d,
+ 0x95, 0x62, 0xb6, 0x09, 0x52, 0x6e, 0xa4, 0x87, 0xfa, 0x99, 0xa7, 0xc1, 0x62, 0x02, 0x65, 0xff,
+ 0x79, 0xa0, 0x7e, 0x23, 0x8e, 0xab, 0x69, 0x0f, 0xcd, 0xb9, 0xca, 0xcb, 0xdd, 0xd5, 0x50, 0x7d,
+ 0xb0, 0xf9, 0x5e, 0x98, 0x3a, 0xfb, 0x3c, 0x9d, 0x9b, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xba,
+ 0x37, 0x25, 0xea, 0x44, 0x01, 0x00, 0x00,
+}
diff --git a/vendor/google.golang.org/appengine/internal/base/api_base.proto b/vendor/google.golang.org/appengine/internal/base/api_base.proto
new file mode 100644
index 0000000..56cd7a3
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/base/api_base.proto
@@ -0,0 +1,33 @@
+// Built-in base types for API calls. Primarily useful as return types.
+
+syntax = "proto2";
+option go_package = "base";
+
+package appengine.base;
+
+message StringProto {
+ required string value = 1;
+}
+
+message Integer32Proto {
+ required int32 value = 1;
+}
+
+message Integer64Proto {
+ required int64 value = 1;
+}
+
+message BoolProto {
+ required bool value = 1;
+}
+
+message DoubleProto {
+ required double value = 1;
+}
+
+message BytesProto {
+ required bytes value = 1 [ctype=CORD];
+}
+
+message VoidProto {
+}
diff --git a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go
new file mode 100644
index 0000000..2fb7482
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go
@@ -0,0 +1,4367 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/datastore/datastore_v3.proto
+
+package datastore
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type Property_Meaning int32
+
+const (
+ Property_NO_MEANING Property_Meaning = 0
+ Property_BLOB Property_Meaning = 14
+ Property_TEXT Property_Meaning = 15
+ Property_BYTESTRING Property_Meaning = 16
+ Property_ATOM_CATEGORY Property_Meaning = 1
+ Property_ATOM_LINK Property_Meaning = 2
+ Property_ATOM_TITLE Property_Meaning = 3
+ Property_ATOM_CONTENT Property_Meaning = 4
+ Property_ATOM_SUMMARY Property_Meaning = 5
+ Property_ATOM_AUTHOR Property_Meaning = 6
+ Property_GD_WHEN Property_Meaning = 7
+ Property_GD_EMAIL Property_Meaning = 8
+ Property_GEORSS_POINT Property_Meaning = 9
+ Property_GD_IM Property_Meaning = 10
+ Property_GD_PHONENUMBER Property_Meaning = 11
+ Property_GD_POSTALADDRESS Property_Meaning = 12
+ Property_GD_RATING Property_Meaning = 13
+ Property_BLOBKEY Property_Meaning = 17
+ Property_ENTITY_PROTO Property_Meaning = 19
+ Property_INDEX_VALUE Property_Meaning = 18
+)
+
+var Property_Meaning_name = map[int32]string{
+ 0: "NO_MEANING",
+ 14: "BLOB",
+ 15: "TEXT",
+ 16: "BYTESTRING",
+ 1: "ATOM_CATEGORY",
+ 2: "ATOM_LINK",
+ 3: "ATOM_TITLE",
+ 4: "ATOM_CONTENT",
+ 5: "ATOM_SUMMARY",
+ 6: "ATOM_AUTHOR",
+ 7: "GD_WHEN",
+ 8: "GD_EMAIL",
+ 9: "GEORSS_POINT",
+ 10: "GD_IM",
+ 11: "GD_PHONENUMBER",
+ 12: "GD_POSTALADDRESS",
+ 13: "GD_RATING",
+ 17: "BLOBKEY",
+ 19: "ENTITY_PROTO",
+ 18: "INDEX_VALUE",
+}
+var Property_Meaning_value = map[string]int32{
+ "NO_MEANING": 0,
+ "BLOB": 14,
+ "TEXT": 15,
+ "BYTESTRING": 16,
+ "ATOM_CATEGORY": 1,
+ "ATOM_LINK": 2,
+ "ATOM_TITLE": 3,
+ "ATOM_CONTENT": 4,
+ "ATOM_SUMMARY": 5,
+ "ATOM_AUTHOR": 6,
+ "GD_WHEN": 7,
+ "GD_EMAIL": 8,
+ "GEORSS_POINT": 9,
+ "GD_IM": 10,
+ "GD_PHONENUMBER": 11,
+ "GD_POSTALADDRESS": 12,
+ "GD_RATING": 13,
+ "BLOBKEY": 17,
+ "ENTITY_PROTO": 19,
+ "INDEX_VALUE": 18,
+}
+
+func (x Property_Meaning) Enum() *Property_Meaning {
+ p := new(Property_Meaning)
+ *p = x
+ return p
+}
+func (x Property_Meaning) String() string {
+ return proto.EnumName(Property_Meaning_name, int32(x))
+}
+func (x *Property_Meaning) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Property_Meaning_value, data, "Property_Meaning")
+ if err != nil {
+ return err
+ }
+ *x = Property_Meaning(value)
+ return nil
+}
+func (Property_Meaning) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2, 0}
+}
+
+type Property_FtsTokenizationOption int32
+
+const (
+ Property_HTML Property_FtsTokenizationOption = 1
+ Property_ATOM Property_FtsTokenizationOption = 2
+)
+
+var Property_FtsTokenizationOption_name = map[int32]string{
+ 1: "HTML",
+ 2: "ATOM",
+}
+var Property_FtsTokenizationOption_value = map[string]int32{
+ "HTML": 1,
+ "ATOM": 2,
+}
+
+func (x Property_FtsTokenizationOption) Enum() *Property_FtsTokenizationOption {
+ p := new(Property_FtsTokenizationOption)
+ *p = x
+ return p
+}
+func (x Property_FtsTokenizationOption) String() string {
+ return proto.EnumName(Property_FtsTokenizationOption_name, int32(x))
+}
+func (x *Property_FtsTokenizationOption) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Property_FtsTokenizationOption_value, data, "Property_FtsTokenizationOption")
+ if err != nil {
+ return err
+ }
+ *x = Property_FtsTokenizationOption(value)
+ return nil
+}
+func (Property_FtsTokenizationOption) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2, 1}
+}
+
+type EntityProto_Kind int32
+
+const (
+ EntityProto_GD_CONTACT EntityProto_Kind = 1
+ EntityProto_GD_EVENT EntityProto_Kind = 2
+ EntityProto_GD_MESSAGE EntityProto_Kind = 3
+)
+
+var EntityProto_Kind_name = map[int32]string{
+ 1: "GD_CONTACT",
+ 2: "GD_EVENT",
+ 3: "GD_MESSAGE",
+}
+var EntityProto_Kind_value = map[string]int32{
+ "GD_CONTACT": 1,
+ "GD_EVENT": 2,
+ "GD_MESSAGE": 3,
+}
+
+func (x EntityProto_Kind) Enum() *EntityProto_Kind {
+ p := new(EntityProto_Kind)
+ *p = x
+ return p
+}
+func (x EntityProto_Kind) String() string {
+ return proto.EnumName(EntityProto_Kind_name, int32(x))
+}
+func (x *EntityProto_Kind) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(EntityProto_Kind_value, data, "EntityProto_Kind")
+ if err != nil {
+ return err
+ }
+ *x = EntityProto_Kind(value)
+ return nil
+}
+func (EntityProto_Kind) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{6, 0}
+}
+
+type Index_Property_Direction int32
+
+const (
+ Index_Property_ASCENDING Index_Property_Direction = 1
+ Index_Property_DESCENDING Index_Property_Direction = 2
+)
+
+var Index_Property_Direction_name = map[int32]string{
+ 1: "ASCENDING",
+ 2: "DESCENDING",
+}
+var Index_Property_Direction_value = map[string]int32{
+ "ASCENDING": 1,
+ "DESCENDING": 2,
+}
+
+func (x Index_Property_Direction) Enum() *Index_Property_Direction {
+ p := new(Index_Property_Direction)
+ *p = x
+ return p
+}
+func (x Index_Property_Direction) String() string {
+ return proto.EnumName(Index_Property_Direction_name, int32(x))
+}
+func (x *Index_Property_Direction) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Index_Property_Direction_value, data, "Index_Property_Direction")
+ if err != nil {
+ return err
+ }
+ *x = Index_Property_Direction(value)
+ return nil
+}
+func (Index_Property_Direction) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8, 0, 0}
+}
+
+type CompositeIndex_State int32
+
+const (
+ CompositeIndex_WRITE_ONLY CompositeIndex_State = 1
+ CompositeIndex_READ_WRITE CompositeIndex_State = 2
+ CompositeIndex_DELETED CompositeIndex_State = 3
+ CompositeIndex_ERROR CompositeIndex_State = 4
+)
+
+var CompositeIndex_State_name = map[int32]string{
+ 1: "WRITE_ONLY",
+ 2: "READ_WRITE",
+ 3: "DELETED",
+ 4: "ERROR",
+}
+var CompositeIndex_State_value = map[string]int32{
+ "WRITE_ONLY": 1,
+ "READ_WRITE": 2,
+ "DELETED": 3,
+ "ERROR": 4,
+}
+
+func (x CompositeIndex_State) Enum() *CompositeIndex_State {
+ p := new(CompositeIndex_State)
+ *p = x
+ return p
+}
+func (x CompositeIndex_State) String() string {
+ return proto.EnumName(CompositeIndex_State_name, int32(x))
+}
+func (x *CompositeIndex_State) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(CompositeIndex_State_value, data, "CompositeIndex_State")
+ if err != nil {
+ return err
+ }
+ *x = CompositeIndex_State(value)
+ return nil
+}
+func (CompositeIndex_State) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{9, 0}
+}
+
+type Snapshot_Status int32
+
+const (
+ Snapshot_INACTIVE Snapshot_Status = 0
+ Snapshot_ACTIVE Snapshot_Status = 1
+)
+
+var Snapshot_Status_name = map[int32]string{
+ 0: "INACTIVE",
+ 1: "ACTIVE",
+}
+var Snapshot_Status_value = map[string]int32{
+ "INACTIVE": 0,
+ "ACTIVE": 1,
+}
+
+func (x Snapshot_Status) Enum() *Snapshot_Status {
+ p := new(Snapshot_Status)
+ *p = x
+ return p
+}
+func (x Snapshot_Status) String() string {
+ return proto.EnumName(Snapshot_Status_name, int32(x))
+}
+func (x *Snapshot_Status) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Snapshot_Status_value, data, "Snapshot_Status")
+ if err != nil {
+ return err
+ }
+ *x = Snapshot_Status(value)
+ return nil
+}
+func (Snapshot_Status) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{12, 0}
+}
+
+type Query_Hint int32
+
+const (
+ Query_ORDER_FIRST Query_Hint = 1
+ Query_ANCESTOR_FIRST Query_Hint = 2
+ Query_FILTER_FIRST Query_Hint = 3
+)
+
+var Query_Hint_name = map[int32]string{
+ 1: "ORDER_FIRST",
+ 2: "ANCESTOR_FIRST",
+ 3: "FILTER_FIRST",
+}
+var Query_Hint_value = map[string]int32{
+ "ORDER_FIRST": 1,
+ "ANCESTOR_FIRST": 2,
+ "FILTER_FIRST": 3,
+}
+
+func (x Query_Hint) Enum() *Query_Hint {
+ p := new(Query_Hint)
+ *p = x
+ return p
+}
+func (x Query_Hint) String() string {
+ return proto.EnumName(Query_Hint_name, int32(x))
+}
+func (x *Query_Hint) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Query_Hint_value, data, "Query_Hint")
+ if err != nil {
+ return err
+ }
+ *x = Query_Hint(value)
+ return nil
+}
+func (Query_Hint) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0}
+}
+
+type Query_Filter_Operator int32
+
+const (
+ Query_Filter_LESS_THAN Query_Filter_Operator = 1
+ Query_Filter_LESS_THAN_OR_EQUAL Query_Filter_Operator = 2
+ Query_Filter_GREATER_THAN Query_Filter_Operator = 3
+ Query_Filter_GREATER_THAN_OR_EQUAL Query_Filter_Operator = 4
+ Query_Filter_EQUAL Query_Filter_Operator = 5
+ Query_Filter_IN Query_Filter_Operator = 6
+ Query_Filter_EXISTS Query_Filter_Operator = 7
+)
+
+var Query_Filter_Operator_name = map[int32]string{
+ 1: "LESS_THAN",
+ 2: "LESS_THAN_OR_EQUAL",
+ 3: "GREATER_THAN",
+ 4: "GREATER_THAN_OR_EQUAL",
+ 5: "EQUAL",
+ 6: "IN",
+ 7: "EXISTS",
+}
+var Query_Filter_Operator_value = map[string]int32{
+ "LESS_THAN": 1,
+ "LESS_THAN_OR_EQUAL": 2,
+ "GREATER_THAN": 3,
+ "GREATER_THAN_OR_EQUAL": 4,
+ "EQUAL": 5,
+ "IN": 6,
+ "EXISTS": 7,
+}
+
+func (x Query_Filter_Operator) Enum() *Query_Filter_Operator {
+ p := new(Query_Filter_Operator)
+ *p = x
+ return p
+}
+func (x Query_Filter_Operator) String() string {
+ return proto.EnumName(Query_Filter_Operator_name, int32(x))
+}
+func (x *Query_Filter_Operator) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Query_Filter_Operator_value, data, "Query_Filter_Operator")
+ if err != nil {
+ return err
+ }
+ *x = Query_Filter_Operator(value)
+ return nil
+}
+func (Query_Filter_Operator) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0, 0}
+}
+
+type Query_Order_Direction int32
+
+const (
+ Query_Order_ASCENDING Query_Order_Direction = 1
+ Query_Order_DESCENDING Query_Order_Direction = 2
+)
+
+var Query_Order_Direction_name = map[int32]string{
+ 1: "ASCENDING",
+ 2: "DESCENDING",
+}
+var Query_Order_Direction_value = map[string]int32{
+ "ASCENDING": 1,
+ "DESCENDING": 2,
+}
+
+func (x Query_Order_Direction) Enum() *Query_Order_Direction {
+ p := new(Query_Order_Direction)
+ *p = x
+ return p
+}
+func (x Query_Order_Direction) String() string {
+ return proto.EnumName(Query_Order_Direction_name, int32(x))
+}
+func (x *Query_Order_Direction) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Query_Order_Direction_value, data, "Query_Order_Direction")
+ if err != nil {
+ return err
+ }
+ *x = Query_Order_Direction(value)
+ return nil
+}
+func (Query_Order_Direction) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 1, 0}
+}
+
+type Error_ErrorCode int32
+
+const (
+ Error_BAD_REQUEST Error_ErrorCode = 1
+ Error_CONCURRENT_TRANSACTION Error_ErrorCode = 2
+ Error_INTERNAL_ERROR Error_ErrorCode = 3
+ Error_NEED_INDEX Error_ErrorCode = 4
+ Error_TIMEOUT Error_ErrorCode = 5
+ Error_PERMISSION_DENIED Error_ErrorCode = 6
+ Error_BIGTABLE_ERROR Error_ErrorCode = 7
+ Error_COMMITTED_BUT_STILL_APPLYING Error_ErrorCode = 8
+ Error_CAPABILITY_DISABLED Error_ErrorCode = 9
+ Error_TRY_ALTERNATE_BACKEND Error_ErrorCode = 10
+ Error_SAFE_TIME_TOO_OLD Error_ErrorCode = 11
+)
+
+var Error_ErrorCode_name = map[int32]string{
+ 1: "BAD_REQUEST",
+ 2: "CONCURRENT_TRANSACTION",
+ 3: "INTERNAL_ERROR",
+ 4: "NEED_INDEX",
+ 5: "TIMEOUT",
+ 6: "PERMISSION_DENIED",
+ 7: "BIGTABLE_ERROR",
+ 8: "COMMITTED_BUT_STILL_APPLYING",
+ 9: "CAPABILITY_DISABLED",
+ 10: "TRY_ALTERNATE_BACKEND",
+ 11: "SAFE_TIME_TOO_OLD",
+}
+var Error_ErrorCode_value = map[string]int32{
+ "BAD_REQUEST": 1,
+ "CONCURRENT_TRANSACTION": 2,
+ "INTERNAL_ERROR": 3,
+ "NEED_INDEX": 4,
+ "TIMEOUT": 5,
+ "PERMISSION_DENIED": 6,
+ "BIGTABLE_ERROR": 7,
+ "COMMITTED_BUT_STILL_APPLYING": 8,
+ "CAPABILITY_DISABLED": 9,
+ "TRY_ALTERNATE_BACKEND": 10,
+ "SAFE_TIME_TOO_OLD": 11,
+}
+
+func (x Error_ErrorCode) Enum() *Error_ErrorCode {
+ p := new(Error_ErrorCode)
+ *p = x
+ return p
+}
+func (x Error_ErrorCode) String() string {
+ return proto.EnumName(Error_ErrorCode_name, int32(x))
+}
+func (x *Error_ErrorCode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(Error_ErrorCode_value, data, "Error_ErrorCode")
+ if err != nil {
+ return err
+ }
+ *x = Error_ErrorCode(value)
+ return nil
+}
+func (Error_ErrorCode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{19, 0}
+}
+
+type PutRequest_AutoIdPolicy int32
+
+const (
+ PutRequest_CURRENT PutRequest_AutoIdPolicy = 0
+ PutRequest_SEQUENTIAL PutRequest_AutoIdPolicy = 1
+)
+
+var PutRequest_AutoIdPolicy_name = map[int32]string{
+ 0: "CURRENT",
+ 1: "SEQUENTIAL",
+}
+var PutRequest_AutoIdPolicy_value = map[string]int32{
+ "CURRENT": 0,
+ "SEQUENTIAL": 1,
+}
+
+func (x PutRequest_AutoIdPolicy) Enum() *PutRequest_AutoIdPolicy {
+ p := new(PutRequest_AutoIdPolicy)
+ *p = x
+ return p
+}
+func (x PutRequest_AutoIdPolicy) String() string {
+ return proto.EnumName(PutRequest_AutoIdPolicy_name, int32(x))
+}
+func (x *PutRequest_AutoIdPolicy) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(PutRequest_AutoIdPolicy_value, data, "PutRequest_AutoIdPolicy")
+ if err != nil {
+ return err
+ }
+ *x = PutRequest_AutoIdPolicy(value)
+ return nil
+}
+func (PutRequest_AutoIdPolicy) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{23, 0}
+}
+
+type BeginTransactionRequest_TransactionMode int32
+
+const (
+ BeginTransactionRequest_UNKNOWN BeginTransactionRequest_TransactionMode = 0
+ BeginTransactionRequest_READ_ONLY BeginTransactionRequest_TransactionMode = 1
+ BeginTransactionRequest_READ_WRITE BeginTransactionRequest_TransactionMode = 2
+)
+
+var BeginTransactionRequest_TransactionMode_name = map[int32]string{
+ 0: "UNKNOWN",
+ 1: "READ_ONLY",
+ 2: "READ_WRITE",
+}
+var BeginTransactionRequest_TransactionMode_value = map[string]int32{
+ "UNKNOWN": 0,
+ "READ_ONLY": 1,
+ "READ_WRITE": 2,
+}
+
+func (x BeginTransactionRequest_TransactionMode) Enum() *BeginTransactionRequest_TransactionMode {
+ p := new(BeginTransactionRequest_TransactionMode)
+ *p = x
+ return p
+}
+func (x BeginTransactionRequest_TransactionMode) String() string {
+ return proto.EnumName(BeginTransactionRequest_TransactionMode_name, int32(x))
+}
+func (x *BeginTransactionRequest_TransactionMode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(BeginTransactionRequest_TransactionMode_value, data, "BeginTransactionRequest_TransactionMode")
+ if err != nil {
+ return err
+ }
+ *x = BeginTransactionRequest_TransactionMode(value)
+ return nil
+}
+func (BeginTransactionRequest_TransactionMode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{36, 0}
+}
+
+type Action struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Action) Reset() { *m = Action{} }
+func (m *Action) String() string { return proto.CompactTextString(m) }
+func (*Action) ProtoMessage() {}
+func (*Action) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{0}
+}
+func (m *Action) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Action.Unmarshal(m, b)
+}
+func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Action.Marshal(b, m, deterministic)
+}
+func (dst *Action) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Action.Merge(dst, src)
+}
+func (m *Action) XXX_Size() int {
+ return xxx_messageInfo_Action.Size(m)
+}
+func (m *Action) XXX_DiscardUnknown() {
+ xxx_messageInfo_Action.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Action proto.InternalMessageInfo
+
+type PropertyValue struct {
+ Int64Value *int64 `protobuf:"varint,1,opt,name=int64Value" json:"int64Value,omitempty"`
+ BooleanValue *bool `protobuf:"varint,2,opt,name=booleanValue" json:"booleanValue,omitempty"`
+ StringValue *string `protobuf:"bytes,3,opt,name=stringValue" json:"stringValue,omitempty"`
+ DoubleValue *float64 `protobuf:"fixed64,4,opt,name=doubleValue" json:"doubleValue,omitempty"`
+ Pointvalue *PropertyValue_PointValue `protobuf:"group,5,opt,name=PointValue,json=pointvalue" json:"pointvalue,omitempty"`
+ Uservalue *PropertyValue_UserValue `protobuf:"group,8,opt,name=UserValue,json=uservalue" json:"uservalue,omitempty"`
+ Referencevalue *PropertyValue_ReferenceValue `protobuf:"group,12,opt,name=ReferenceValue,json=referencevalue" json:"referencevalue,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PropertyValue) Reset() { *m = PropertyValue{} }
+func (m *PropertyValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue) ProtoMessage() {}
+func (*PropertyValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1}
+}
+func (m *PropertyValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PropertyValue.Unmarshal(m, b)
+}
+func (m *PropertyValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PropertyValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PropertyValue.Merge(dst, src)
+}
+func (m *PropertyValue) XXX_Size() int {
+ return xxx_messageInfo_PropertyValue.Size(m)
+}
+func (m *PropertyValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_PropertyValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue proto.InternalMessageInfo
+
+func (m *PropertyValue) GetInt64Value() int64 {
+ if m != nil && m.Int64Value != nil {
+ return *m.Int64Value
+ }
+ return 0
+}
+
+func (m *PropertyValue) GetBooleanValue() bool {
+ if m != nil && m.BooleanValue != nil {
+ return *m.BooleanValue
+ }
+ return false
+}
+
+func (m *PropertyValue) GetStringValue() string {
+ if m != nil && m.StringValue != nil {
+ return *m.StringValue
+ }
+ return ""
+}
+
+func (m *PropertyValue) GetDoubleValue() float64 {
+ if m != nil && m.DoubleValue != nil {
+ return *m.DoubleValue
+ }
+ return 0
+}
+
+func (m *PropertyValue) GetPointvalue() *PropertyValue_PointValue {
+ if m != nil {
+ return m.Pointvalue
+ }
+ return nil
+}
+
+func (m *PropertyValue) GetUservalue() *PropertyValue_UserValue {
+ if m != nil {
+ return m.Uservalue
+ }
+ return nil
+}
+
+func (m *PropertyValue) GetReferencevalue() *PropertyValue_ReferenceValue {
+ if m != nil {
+ return m.Referencevalue
+ }
+ return nil
+}
+
+type PropertyValue_PointValue struct {
+ X *float64 `protobuf:"fixed64,6,req,name=x" json:"x,omitempty"`
+ Y *float64 `protobuf:"fixed64,7,req,name=y" json:"y,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PropertyValue_PointValue) Reset() { *m = PropertyValue_PointValue{} }
+func (m *PropertyValue_PointValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_PointValue) ProtoMessage() {}
+func (*PropertyValue_PointValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 0}
+}
+func (m *PropertyValue_PointValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PropertyValue_PointValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_PointValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PropertyValue_PointValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_PointValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PropertyValue_PointValue.Merge(dst, src)
+}
+func (m *PropertyValue_PointValue) XXX_Size() int {
+ return xxx_messageInfo_PropertyValue_PointValue.Size(m)
+}
+func (m *PropertyValue_PointValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_PropertyValue_PointValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_PointValue proto.InternalMessageInfo
+
+func (m *PropertyValue_PointValue) GetX() float64 {
+ if m != nil && m.X != nil {
+ return *m.X
+ }
+ return 0
+}
+
+func (m *PropertyValue_PointValue) GetY() float64 {
+ if m != nil && m.Y != nil {
+ return *m.Y
+ }
+ return 0
+}
+
+type PropertyValue_UserValue struct {
+ Email *string `protobuf:"bytes,9,req,name=email" json:"email,omitempty"`
+ AuthDomain *string `protobuf:"bytes,10,req,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+ Nickname *string `protobuf:"bytes,11,opt,name=nickname" json:"nickname,omitempty"`
+ FederatedIdentity *string `protobuf:"bytes,21,opt,name=federated_identity,json=federatedIdentity" json:"federated_identity,omitempty"`
+ FederatedProvider *string `protobuf:"bytes,22,opt,name=federated_provider,json=federatedProvider" json:"federated_provider,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PropertyValue_UserValue) Reset() { *m = PropertyValue_UserValue{} }
+func (m *PropertyValue_UserValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_UserValue) ProtoMessage() {}
+func (*PropertyValue_UserValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 1}
+}
+func (m *PropertyValue_UserValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PropertyValue_UserValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_UserValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PropertyValue_UserValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_UserValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PropertyValue_UserValue.Merge(dst, src)
+}
+func (m *PropertyValue_UserValue) XXX_Size() int {
+ return xxx_messageInfo_PropertyValue_UserValue.Size(m)
+}
+func (m *PropertyValue_UserValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_PropertyValue_UserValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_UserValue proto.InternalMessageInfo
+
+func (m *PropertyValue_UserValue) GetEmail() string {
+ if m != nil && m.Email != nil {
+ return *m.Email
+ }
+ return ""
+}
+
+func (m *PropertyValue_UserValue) GetAuthDomain() string {
+ if m != nil && m.AuthDomain != nil {
+ return *m.AuthDomain
+ }
+ return ""
+}
+
+func (m *PropertyValue_UserValue) GetNickname() string {
+ if m != nil && m.Nickname != nil {
+ return *m.Nickname
+ }
+ return ""
+}
+
+func (m *PropertyValue_UserValue) GetFederatedIdentity() string {
+ if m != nil && m.FederatedIdentity != nil {
+ return *m.FederatedIdentity
+ }
+ return ""
+}
+
+func (m *PropertyValue_UserValue) GetFederatedProvider() string {
+ if m != nil && m.FederatedProvider != nil {
+ return *m.FederatedProvider
+ }
+ return ""
+}
+
+type PropertyValue_ReferenceValue struct {
+ App *string `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
+ NameSpace *string `protobuf:"bytes,20,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+ Pathelement []*PropertyValue_ReferenceValue_PathElement `protobuf:"group,14,rep,name=PathElement,json=pathelement" json:"pathelement,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PropertyValue_ReferenceValue) Reset() { *m = PropertyValue_ReferenceValue{} }
+func (m *PropertyValue_ReferenceValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_ReferenceValue) ProtoMessage() {}
+func (*PropertyValue_ReferenceValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 2}
+}
+func (m *PropertyValue_ReferenceValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PropertyValue_ReferenceValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_ReferenceValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PropertyValue_ReferenceValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_ReferenceValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PropertyValue_ReferenceValue.Merge(dst, src)
+}
+func (m *PropertyValue_ReferenceValue) XXX_Size() int {
+ return xxx_messageInfo_PropertyValue_ReferenceValue.Size(m)
+}
+func (m *PropertyValue_ReferenceValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_PropertyValue_ReferenceValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_ReferenceValue proto.InternalMessageInfo
+
+func (m *PropertyValue_ReferenceValue) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+func (m *PropertyValue_ReferenceValue) GetNameSpace() string {
+ if m != nil && m.NameSpace != nil {
+ return *m.NameSpace
+ }
+ return ""
+}
+
+func (m *PropertyValue_ReferenceValue) GetPathelement() []*PropertyValue_ReferenceValue_PathElement {
+ if m != nil {
+ return m.Pathelement
+ }
+ return nil
+}
+
+type PropertyValue_ReferenceValue_PathElement struct {
+ Type *string `protobuf:"bytes,15,req,name=type" json:"type,omitempty"`
+ Id *int64 `protobuf:"varint,16,opt,name=id" json:"id,omitempty"`
+ Name *string `protobuf:"bytes,17,opt,name=name" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) Reset() {
+ *m = PropertyValue_ReferenceValue_PathElement{}
+}
+func (m *PropertyValue_ReferenceValue_PathElement) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_ReferenceValue_PathElement) ProtoMessage() {}
+func (*PropertyValue_ReferenceValue_PathElement) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 2, 0}
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Unmarshal(m, b)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_ReferenceValue_PathElement) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Merge(dst, src)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Size() int {
+ return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Size(m)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_DiscardUnknown() {
+ xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_ReferenceValue_PathElement proto.InternalMessageInfo
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetType() string {
+ if m != nil && m.Type != nil {
+ return *m.Type
+ }
+ return ""
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetId() int64 {
+ if m != nil && m.Id != nil {
+ return *m.Id
+ }
+ return 0
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetName() string {
+ if m != nil && m.Name != nil {
+ return *m.Name
+ }
+ return ""
+}
+
+type Property struct {
+ Meaning *Property_Meaning `protobuf:"varint,1,opt,name=meaning,enum=appengine.Property_Meaning,def=0" json:"meaning,omitempty"`
+ MeaningUri *string `protobuf:"bytes,2,opt,name=meaning_uri,json=meaningUri" json:"meaning_uri,omitempty"`
+ Name *string `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
+ Value *PropertyValue `protobuf:"bytes,5,req,name=value" json:"value,omitempty"`
+ Multiple *bool `protobuf:"varint,4,req,name=multiple" json:"multiple,omitempty"`
+ Searchable *bool `protobuf:"varint,6,opt,name=searchable,def=0" json:"searchable,omitempty"`
+ FtsTokenizationOption *Property_FtsTokenizationOption `protobuf:"varint,8,opt,name=fts_tokenization_option,json=ftsTokenizationOption,enum=appengine.Property_FtsTokenizationOption" json:"fts_tokenization_option,omitempty"`
+ Locale *string `protobuf:"bytes,9,opt,name=locale,def=en" json:"locale,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Property) Reset() { *m = Property{} }
+func (m *Property) String() string { return proto.CompactTextString(m) }
+func (*Property) ProtoMessage() {}
+func (*Property) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2}
+}
+func (m *Property) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Property.Unmarshal(m, b)
+}
+func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Property.Marshal(b, m, deterministic)
+}
+func (dst *Property) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Property.Merge(dst, src)
+}
+func (m *Property) XXX_Size() int {
+ return xxx_messageInfo_Property.Size(m)
+}
+func (m *Property) XXX_DiscardUnknown() {
+ xxx_messageInfo_Property.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Property proto.InternalMessageInfo
+
+const Default_Property_Meaning Property_Meaning = Property_NO_MEANING
+const Default_Property_Searchable bool = false
+const Default_Property_Locale string = "en"
+
+func (m *Property) GetMeaning() Property_Meaning {
+ if m != nil && m.Meaning != nil {
+ return *m.Meaning
+ }
+ return Default_Property_Meaning
+}
+
+func (m *Property) GetMeaningUri() string {
+ if m != nil && m.MeaningUri != nil {
+ return *m.MeaningUri
+ }
+ return ""
+}
+
+func (m *Property) GetName() string {
+ if m != nil && m.Name != nil {
+ return *m.Name
+ }
+ return ""
+}
+
+func (m *Property) GetValue() *PropertyValue {
+ if m != nil {
+ return m.Value
+ }
+ return nil
+}
+
+func (m *Property) GetMultiple() bool {
+ if m != nil && m.Multiple != nil {
+ return *m.Multiple
+ }
+ return false
+}
+
+func (m *Property) GetSearchable() bool {
+ if m != nil && m.Searchable != nil {
+ return *m.Searchable
+ }
+ return Default_Property_Searchable
+}
+
+func (m *Property) GetFtsTokenizationOption() Property_FtsTokenizationOption {
+ if m != nil && m.FtsTokenizationOption != nil {
+ return *m.FtsTokenizationOption
+ }
+ return Property_HTML
+}
+
+func (m *Property) GetLocale() string {
+ if m != nil && m.Locale != nil {
+ return *m.Locale
+ }
+ return Default_Property_Locale
+}
+
+type Path struct {
+ Element []*Path_Element `protobuf:"group,1,rep,name=Element,json=element" json:"element,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Path) Reset() { *m = Path{} }
+func (m *Path) String() string { return proto.CompactTextString(m) }
+func (*Path) ProtoMessage() {}
+func (*Path) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{3}
+}
+func (m *Path) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Path.Unmarshal(m, b)
+}
+func (m *Path) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Path.Marshal(b, m, deterministic)
+}
+func (dst *Path) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Path.Merge(dst, src)
+}
+func (m *Path) XXX_Size() int {
+ return xxx_messageInfo_Path.Size(m)
+}
+func (m *Path) XXX_DiscardUnknown() {
+ xxx_messageInfo_Path.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Path proto.InternalMessageInfo
+
+func (m *Path) GetElement() []*Path_Element {
+ if m != nil {
+ return m.Element
+ }
+ return nil
+}
+
+type Path_Element struct {
+ Type *string `protobuf:"bytes,2,req,name=type" json:"type,omitempty"`
+ Id *int64 `protobuf:"varint,3,opt,name=id" json:"id,omitempty"`
+ Name *string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Path_Element) Reset() { *m = Path_Element{} }
+func (m *Path_Element) String() string { return proto.CompactTextString(m) }
+func (*Path_Element) ProtoMessage() {}
+func (*Path_Element) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{3, 0}
+}
+func (m *Path_Element) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Path_Element.Unmarshal(m, b)
+}
+func (m *Path_Element) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Path_Element.Marshal(b, m, deterministic)
+}
+func (dst *Path_Element) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Path_Element.Merge(dst, src)
+}
+func (m *Path_Element) XXX_Size() int {
+ return xxx_messageInfo_Path_Element.Size(m)
+}
+func (m *Path_Element) XXX_DiscardUnknown() {
+ xxx_messageInfo_Path_Element.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Path_Element proto.InternalMessageInfo
+
+func (m *Path_Element) GetType() string {
+ if m != nil && m.Type != nil {
+ return *m.Type
+ }
+ return ""
+}
+
+func (m *Path_Element) GetId() int64 {
+ if m != nil && m.Id != nil {
+ return *m.Id
+ }
+ return 0
+}
+
+func (m *Path_Element) GetName() string {
+ if m != nil && m.Name != nil {
+ return *m.Name
+ }
+ return ""
+}
+
+type Reference struct {
+ App *string `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
+ NameSpace *string `protobuf:"bytes,20,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+ Path *Path `protobuf:"bytes,14,req,name=path" json:"path,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Reference) Reset() { *m = Reference{} }
+func (m *Reference) String() string { return proto.CompactTextString(m) }
+func (*Reference) ProtoMessage() {}
+func (*Reference) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{4}
+}
+func (m *Reference) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Reference.Unmarshal(m, b)
+}
+func (m *Reference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Reference.Marshal(b, m, deterministic)
+}
+func (dst *Reference) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Reference.Merge(dst, src)
+}
+func (m *Reference) XXX_Size() int {
+ return xxx_messageInfo_Reference.Size(m)
+}
+func (m *Reference) XXX_DiscardUnknown() {
+ xxx_messageInfo_Reference.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Reference proto.InternalMessageInfo
+
+func (m *Reference) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+func (m *Reference) GetNameSpace() string {
+ if m != nil && m.NameSpace != nil {
+ return *m.NameSpace
+ }
+ return ""
+}
+
+func (m *Reference) GetPath() *Path {
+ if m != nil {
+ return m.Path
+ }
+ return nil
+}
+
+type User struct {
+ Email *string `protobuf:"bytes,1,req,name=email" json:"email,omitempty"`
+ AuthDomain *string `protobuf:"bytes,2,req,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+ Nickname *string `protobuf:"bytes,3,opt,name=nickname" json:"nickname,omitempty"`
+ FederatedIdentity *string `protobuf:"bytes,6,opt,name=federated_identity,json=federatedIdentity" json:"federated_identity,omitempty"`
+ FederatedProvider *string `protobuf:"bytes,7,opt,name=federated_provider,json=federatedProvider" json:"federated_provider,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *User) Reset() { *m = User{} }
+func (m *User) String() string { return proto.CompactTextString(m) }
+func (*User) ProtoMessage() {}
+func (*User) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{5}
+}
+func (m *User) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_User.Unmarshal(m, b)
+}
+func (m *User) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_User.Marshal(b, m, deterministic)
+}
+func (dst *User) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_User.Merge(dst, src)
+}
+func (m *User) XXX_Size() int {
+ return xxx_messageInfo_User.Size(m)
+}
+func (m *User) XXX_DiscardUnknown() {
+ xxx_messageInfo_User.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_User proto.InternalMessageInfo
+
+func (m *User) GetEmail() string {
+ if m != nil && m.Email != nil {
+ return *m.Email
+ }
+ return ""
+}
+
+func (m *User) GetAuthDomain() string {
+ if m != nil && m.AuthDomain != nil {
+ return *m.AuthDomain
+ }
+ return ""
+}
+
+func (m *User) GetNickname() string {
+ if m != nil && m.Nickname != nil {
+ return *m.Nickname
+ }
+ return ""
+}
+
+func (m *User) GetFederatedIdentity() string {
+ if m != nil && m.FederatedIdentity != nil {
+ return *m.FederatedIdentity
+ }
+ return ""
+}
+
+func (m *User) GetFederatedProvider() string {
+ if m != nil && m.FederatedProvider != nil {
+ return *m.FederatedProvider
+ }
+ return ""
+}
+
+type EntityProto struct {
+ Key *Reference `protobuf:"bytes,13,req,name=key" json:"key,omitempty"`
+ EntityGroup *Path `protobuf:"bytes,16,req,name=entity_group,json=entityGroup" json:"entity_group,omitempty"`
+ Owner *User `protobuf:"bytes,17,opt,name=owner" json:"owner,omitempty"`
+ Kind *EntityProto_Kind `protobuf:"varint,4,opt,name=kind,enum=appengine.EntityProto_Kind" json:"kind,omitempty"`
+ KindUri *string `protobuf:"bytes,5,opt,name=kind_uri,json=kindUri" json:"kind_uri,omitempty"`
+ Property []*Property `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
+ RawProperty []*Property `protobuf:"bytes,15,rep,name=raw_property,json=rawProperty" json:"raw_property,omitempty"`
+ Rank *int32 `protobuf:"varint,18,opt,name=rank" json:"rank,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *EntityProto) Reset() { *m = EntityProto{} }
+func (m *EntityProto) String() string { return proto.CompactTextString(m) }
+func (*EntityProto) ProtoMessage() {}
+func (*EntityProto) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{6}
+}
+func (m *EntityProto) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_EntityProto.Unmarshal(m, b)
+}
+func (m *EntityProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_EntityProto.Marshal(b, m, deterministic)
+}
+func (dst *EntityProto) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_EntityProto.Merge(dst, src)
+}
+func (m *EntityProto) XXX_Size() int {
+ return xxx_messageInfo_EntityProto.Size(m)
+}
+func (m *EntityProto) XXX_DiscardUnknown() {
+ xxx_messageInfo_EntityProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_EntityProto proto.InternalMessageInfo
+
+func (m *EntityProto) GetKey() *Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *EntityProto) GetEntityGroup() *Path {
+ if m != nil {
+ return m.EntityGroup
+ }
+ return nil
+}
+
+func (m *EntityProto) GetOwner() *User {
+ if m != nil {
+ return m.Owner
+ }
+ return nil
+}
+
+func (m *EntityProto) GetKind() EntityProto_Kind {
+ if m != nil && m.Kind != nil {
+ return *m.Kind
+ }
+ return EntityProto_GD_CONTACT
+}
+
+func (m *EntityProto) GetKindUri() string {
+ if m != nil && m.KindUri != nil {
+ return *m.KindUri
+ }
+ return ""
+}
+
+func (m *EntityProto) GetProperty() []*Property {
+ if m != nil {
+ return m.Property
+ }
+ return nil
+}
+
+func (m *EntityProto) GetRawProperty() []*Property {
+ if m != nil {
+ return m.RawProperty
+ }
+ return nil
+}
+
+func (m *EntityProto) GetRank() int32 {
+ if m != nil && m.Rank != nil {
+ return *m.Rank
+ }
+ return 0
+}
+
+type CompositeProperty struct {
+ IndexId *int64 `protobuf:"varint,1,req,name=index_id,json=indexId" json:"index_id,omitempty"`
+ Value []string `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompositeProperty) Reset() { *m = CompositeProperty{} }
+func (m *CompositeProperty) String() string { return proto.CompactTextString(m) }
+func (*CompositeProperty) ProtoMessage() {}
+func (*CompositeProperty) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{7}
+}
+func (m *CompositeProperty) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompositeProperty.Unmarshal(m, b)
+}
+func (m *CompositeProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompositeProperty.Marshal(b, m, deterministic)
+}
+func (dst *CompositeProperty) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompositeProperty.Merge(dst, src)
+}
+func (m *CompositeProperty) XXX_Size() int {
+ return xxx_messageInfo_CompositeProperty.Size(m)
+}
+func (m *CompositeProperty) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompositeProperty.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeProperty proto.InternalMessageInfo
+
+func (m *CompositeProperty) GetIndexId() int64 {
+ if m != nil && m.IndexId != nil {
+ return *m.IndexId
+ }
+ return 0
+}
+
+func (m *CompositeProperty) GetValue() []string {
+ if m != nil {
+ return m.Value
+ }
+ return nil
+}
+
+type Index struct {
+ EntityType *string `protobuf:"bytes,1,req,name=entity_type,json=entityType" json:"entity_type,omitempty"`
+ Ancestor *bool `protobuf:"varint,5,req,name=ancestor" json:"ancestor,omitempty"`
+ Property []*Index_Property `protobuf:"group,2,rep,name=Property,json=property" json:"property,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Index) Reset() { *m = Index{} }
+func (m *Index) String() string { return proto.CompactTextString(m) }
+func (*Index) ProtoMessage() {}
+func (*Index) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8}
+}
+func (m *Index) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Index.Unmarshal(m, b)
+}
+func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Index.Marshal(b, m, deterministic)
+}
+func (dst *Index) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Index.Merge(dst, src)
+}
+func (m *Index) XXX_Size() int {
+ return xxx_messageInfo_Index.Size(m)
+}
+func (m *Index) XXX_DiscardUnknown() {
+ xxx_messageInfo_Index.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Index proto.InternalMessageInfo
+
+func (m *Index) GetEntityType() string {
+ if m != nil && m.EntityType != nil {
+ return *m.EntityType
+ }
+ return ""
+}
+
+func (m *Index) GetAncestor() bool {
+ if m != nil && m.Ancestor != nil {
+ return *m.Ancestor
+ }
+ return false
+}
+
+func (m *Index) GetProperty() []*Index_Property {
+ if m != nil {
+ return m.Property
+ }
+ return nil
+}
+
+type Index_Property struct {
+ Name *string `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
+ Direction *Index_Property_Direction `protobuf:"varint,4,opt,name=direction,enum=appengine.Index_Property_Direction,def=1" json:"direction,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Index_Property) Reset() { *m = Index_Property{} }
+func (m *Index_Property) String() string { return proto.CompactTextString(m) }
+func (*Index_Property) ProtoMessage() {}
+func (*Index_Property) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8, 0}
+}
+func (m *Index_Property) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Index_Property.Unmarshal(m, b)
+}
+func (m *Index_Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Index_Property.Marshal(b, m, deterministic)
+}
+func (dst *Index_Property) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Index_Property.Merge(dst, src)
+}
+func (m *Index_Property) XXX_Size() int {
+ return xxx_messageInfo_Index_Property.Size(m)
+}
+func (m *Index_Property) XXX_DiscardUnknown() {
+ xxx_messageInfo_Index_Property.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Index_Property proto.InternalMessageInfo
+
+const Default_Index_Property_Direction Index_Property_Direction = Index_Property_ASCENDING
+
+func (m *Index_Property) GetName() string {
+ if m != nil && m.Name != nil {
+ return *m.Name
+ }
+ return ""
+}
+
+func (m *Index_Property) GetDirection() Index_Property_Direction {
+ if m != nil && m.Direction != nil {
+ return *m.Direction
+ }
+ return Default_Index_Property_Direction
+}
+
+type CompositeIndex struct {
+ AppId *string `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+ Id *int64 `protobuf:"varint,2,req,name=id" json:"id,omitempty"`
+ Definition *Index `protobuf:"bytes,3,req,name=definition" json:"definition,omitempty"`
+ State *CompositeIndex_State `protobuf:"varint,4,req,name=state,enum=appengine.CompositeIndex_State" json:"state,omitempty"`
+ OnlyUseIfRequired *bool `protobuf:"varint,6,opt,name=only_use_if_required,json=onlyUseIfRequired,def=0" json:"only_use_if_required,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompositeIndex) Reset() { *m = CompositeIndex{} }
+func (m *CompositeIndex) String() string { return proto.CompactTextString(m) }
+func (*CompositeIndex) ProtoMessage() {}
+func (*CompositeIndex) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{9}
+}
+func (m *CompositeIndex) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompositeIndex.Unmarshal(m, b)
+}
+func (m *CompositeIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompositeIndex.Marshal(b, m, deterministic)
+}
+func (dst *CompositeIndex) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompositeIndex.Merge(dst, src)
+}
+func (m *CompositeIndex) XXX_Size() int {
+ return xxx_messageInfo_CompositeIndex.Size(m)
+}
+func (m *CompositeIndex) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompositeIndex.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeIndex proto.InternalMessageInfo
+
+const Default_CompositeIndex_OnlyUseIfRequired bool = false
+
+func (m *CompositeIndex) GetAppId() string {
+ if m != nil && m.AppId != nil {
+ return *m.AppId
+ }
+ return ""
+}
+
+func (m *CompositeIndex) GetId() int64 {
+ if m != nil && m.Id != nil {
+ return *m.Id
+ }
+ return 0
+}
+
+func (m *CompositeIndex) GetDefinition() *Index {
+ if m != nil {
+ return m.Definition
+ }
+ return nil
+}
+
+func (m *CompositeIndex) GetState() CompositeIndex_State {
+ if m != nil && m.State != nil {
+ return *m.State
+ }
+ return CompositeIndex_WRITE_ONLY
+}
+
+func (m *CompositeIndex) GetOnlyUseIfRequired() bool {
+ if m != nil && m.OnlyUseIfRequired != nil {
+ return *m.OnlyUseIfRequired
+ }
+ return Default_CompositeIndex_OnlyUseIfRequired
+}
+
+type IndexPostfix struct {
+ IndexValue []*IndexPostfix_IndexValue `protobuf:"bytes,1,rep,name=index_value,json=indexValue" json:"index_value,omitempty"`
+ Key *Reference `protobuf:"bytes,2,opt,name=key" json:"key,omitempty"`
+ Before *bool `protobuf:"varint,3,opt,name=before,def=1" json:"before,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *IndexPostfix) Reset() { *m = IndexPostfix{} }
+func (m *IndexPostfix) String() string { return proto.CompactTextString(m) }
+func (*IndexPostfix) ProtoMessage() {}
+func (*IndexPostfix) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{10}
+}
+func (m *IndexPostfix) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_IndexPostfix.Unmarshal(m, b)
+}
+func (m *IndexPostfix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_IndexPostfix.Marshal(b, m, deterministic)
+}
+func (dst *IndexPostfix) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_IndexPostfix.Merge(dst, src)
+}
+func (m *IndexPostfix) XXX_Size() int {
+ return xxx_messageInfo_IndexPostfix.Size(m)
+}
+func (m *IndexPostfix) XXX_DiscardUnknown() {
+ xxx_messageInfo_IndexPostfix.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPostfix proto.InternalMessageInfo
+
+const Default_IndexPostfix_Before bool = true
+
+func (m *IndexPostfix) GetIndexValue() []*IndexPostfix_IndexValue {
+ if m != nil {
+ return m.IndexValue
+ }
+ return nil
+}
+
+func (m *IndexPostfix) GetKey() *Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *IndexPostfix) GetBefore() bool {
+ if m != nil && m.Before != nil {
+ return *m.Before
+ }
+ return Default_IndexPostfix_Before
+}
+
+type IndexPostfix_IndexValue struct {
+ PropertyName *string `protobuf:"bytes,1,req,name=property_name,json=propertyName" json:"property_name,omitempty"`
+ Value *PropertyValue `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *IndexPostfix_IndexValue) Reset() { *m = IndexPostfix_IndexValue{} }
+func (m *IndexPostfix_IndexValue) String() string { return proto.CompactTextString(m) }
+func (*IndexPostfix_IndexValue) ProtoMessage() {}
+func (*IndexPostfix_IndexValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{10, 0}
+}
+func (m *IndexPostfix_IndexValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_IndexPostfix_IndexValue.Unmarshal(m, b)
+}
+func (m *IndexPostfix_IndexValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_IndexPostfix_IndexValue.Marshal(b, m, deterministic)
+}
+func (dst *IndexPostfix_IndexValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_IndexPostfix_IndexValue.Merge(dst, src)
+}
+func (m *IndexPostfix_IndexValue) XXX_Size() int {
+ return xxx_messageInfo_IndexPostfix_IndexValue.Size(m)
+}
+func (m *IndexPostfix_IndexValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_IndexPostfix_IndexValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPostfix_IndexValue proto.InternalMessageInfo
+
+func (m *IndexPostfix_IndexValue) GetPropertyName() string {
+ if m != nil && m.PropertyName != nil {
+ return *m.PropertyName
+ }
+ return ""
+}
+
+func (m *IndexPostfix_IndexValue) GetValue() *PropertyValue {
+ if m != nil {
+ return m.Value
+ }
+ return nil
+}
+
+type IndexPosition struct {
+ Key *string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"`
+ Before *bool `protobuf:"varint,2,opt,name=before,def=1" json:"before,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *IndexPosition) Reset() { *m = IndexPosition{} }
+func (m *IndexPosition) String() string { return proto.CompactTextString(m) }
+func (*IndexPosition) ProtoMessage() {}
+func (*IndexPosition) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{11}
+}
+func (m *IndexPosition) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_IndexPosition.Unmarshal(m, b)
+}
+func (m *IndexPosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_IndexPosition.Marshal(b, m, deterministic)
+}
+func (dst *IndexPosition) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_IndexPosition.Merge(dst, src)
+}
+func (m *IndexPosition) XXX_Size() int {
+ return xxx_messageInfo_IndexPosition.Size(m)
+}
+func (m *IndexPosition) XXX_DiscardUnknown() {
+ xxx_messageInfo_IndexPosition.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPosition proto.InternalMessageInfo
+
+const Default_IndexPosition_Before bool = true
+
+func (m *IndexPosition) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+func (m *IndexPosition) GetBefore() bool {
+ if m != nil && m.Before != nil {
+ return *m.Before
+ }
+ return Default_IndexPosition_Before
+}
+
+type Snapshot struct {
+ Ts *int64 `protobuf:"varint,1,req,name=ts" json:"ts,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Snapshot) Reset() { *m = Snapshot{} }
+func (m *Snapshot) String() string { return proto.CompactTextString(m) }
+func (*Snapshot) ProtoMessage() {}
+func (*Snapshot) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{12}
+}
+func (m *Snapshot) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Snapshot.Unmarshal(m, b)
+}
+func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic)
+}
+func (dst *Snapshot) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Snapshot.Merge(dst, src)
+}
+func (m *Snapshot) XXX_Size() int {
+ return xxx_messageInfo_Snapshot.Size(m)
+}
+func (m *Snapshot) XXX_DiscardUnknown() {
+ xxx_messageInfo_Snapshot.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Snapshot proto.InternalMessageInfo
+
+func (m *Snapshot) GetTs() int64 {
+ if m != nil && m.Ts != nil {
+ return *m.Ts
+ }
+ return 0
+}
+
+type InternalHeader struct {
+ Qos *string `protobuf:"bytes,1,opt,name=qos" json:"qos,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *InternalHeader) Reset() { *m = InternalHeader{} }
+func (m *InternalHeader) String() string { return proto.CompactTextString(m) }
+func (*InternalHeader) ProtoMessage() {}
+func (*InternalHeader) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{13}
+}
+func (m *InternalHeader) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_InternalHeader.Unmarshal(m, b)
+}
+func (m *InternalHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_InternalHeader.Marshal(b, m, deterministic)
+}
+func (dst *InternalHeader) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_InternalHeader.Merge(dst, src)
+}
+func (m *InternalHeader) XXX_Size() int {
+ return xxx_messageInfo_InternalHeader.Size(m)
+}
+func (m *InternalHeader) XXX_DiscardUnknown() {
+ xxx_messageInfo_InternalHeader.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_InternalHeader proto.InternalMessageInfo
+
+func (m *InternalHeader) GetQos() string {
+ if m != nil && m.Qos != nil {
+ return *m.Qos
+ }
+ return ""
+}
+
+type Transaction struct {
+ Header *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
+ Handle *uint64 `protobuf:"fixed64,1,req,name=handle" json:"handle,omitempty"`
+ App *string `protobuf:"bytes,2,req,name=app" json:"app,omitempty"`
+ MarkChanges *bool `protobuf:"varint,3,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Transaction) Reset() { *m = Transaction{} }
+func (m *Transaction) String() string { return proto.CompactTextString(m) }
+func (*Transaction) ProtoMessage() {}
+func (*Transaction) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{14}
+}
+func (m *Transaction) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Transaction.Unmarshal(m, b)
+}
+func (m *Transaction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Transaction.Marshal(b, m, deterministic)
+}
+func (dst *Transaction) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Transaction.Merge(dst, src)
+}
+func (m *Transaction) XXX_Size() int {
+ return xxx_messageInfo_Transaction.Size(m)
+}
+func (m *Transaction) XXX_DiscardUnknown() {
+ xxx_messageInfo_Transaction.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Transaction proto.InternalMessageInfo
+
+const Default_Transaction_MarkChanges bool = false
+
+func (m *Transaction) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *Transaction) GetHandle() uint64 {
+ if m != nil && m.Handle != nil {
+ return *m.Handle
+ }
+ return 0
+}
+
+func (m *Transaction) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+func (m *Transaction) GetMarkChanges() bool {
+ if m != nil && m.MarkChanges != nil {
+ return *m.MarkChanges
+ }
+ return Default_Transaction_MarkChanges
+}
+
+type Query struct {
+ Header *InternalHeader `protobuf:"bytes,39,opt,name=header" json:"header,omitempty"`
+ App *string `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
+ NameSpace *string `protobuf:"bytes,29,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+ Kind *string `protobuf:"bytes,3,opt,name=kind" json:"kind,omitempty"`
+ Ancestor *Reference `protobuf:"bytes,17,opt,name=ancestor" json:"ancestor,omitempty"`
+ Filter []*Query_Filter `protobuf:"group,4,rep,name=Filter,json=filter" json:"filter,omitempty"`
+ SearchQuery *string `protobuf:"bytes,8,opt,name=search_query,json=searchQuery" json:"search_query,omitempty"`
+ Order []*Query_Order `protobuf:"group,9,rep,name=Order,json=order" json:"order,omitempty"`
+ Hint *Query_Hint `protobuf:"varint,18,opt,name=hint,enum=appengine.Query_Hint" json:"hint,omitempty"`
+ Count *int32 `protobuf:"varint,23,opt,name=count" json:"count,omitempty"`
+ Offset *int32 `protobuf:"varint,12,opt,name=offset,def=0" json:"offset,omitempty"`
+ Limit *int32 `protobuf:"varint,16,opt,name=limit" json:"limit,omitempty"`
+ CompiledCursor *CompiledCursor `protobuf:"bytes,30,opt,name=compiled_cursor,json=compiledCursor" json:"compiled_cursor,omitempty"`
+ EndCompiledCursor *CompiledCursor `protobuf:"bytes,31,opt,name=end_compiled_cursor,json=endCompiledCursor" json:"end_compiled_cursor,omitempty"`
+ CompositeIndex []*CompositeIndex `protobuf:"bytes,19,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+ RequirePerfectPlan *bool `protobuf:"varint,20,opt,name=require_perfect_plan,json=requirePerfectPlan,def=0" json:"require_perfect_plan,omitempty"`
+ KeysOnly *bool `protobuf:"varint,21,opt,name=keys_only,json=keysOnly,def=0" json:"keys_only,omitempty"`
+ Transaction *Transaction `protobuf:"bytes,22,opt,name=transaction" json:"transaction,omitempty"`
+ Compile *bool `protobuf:"varint,25,opt,name=compile,def=0" json:"compile,omitempty"`
+ FailoverMs *int64 `protobuf:"varint,26,opt,name=failover_ms,json=failoverMs" json:"failover_ms,omitempty"`
+ Strong *bool `protobuf:"varint,32,opt,name=strong" json:"strong,omitempty"`
+ PropertyName []string `protobuf:"bytes,33,rep,name=property_name,json=propertyName" json:"property_name,omitempty"`
+ GroupByPropertyName []string `protobuf:"bytes,34,rep,name=group_by_property_name,json=groupByPropertyName" json:"group_by_property_name,omitempty"`
+ Distinct *bool `protobuf:"varint,24,opt,name=distinct" json:"distinct,omitempty"`
+ MinSafeTimeSeconds *int64 `protobuf:"varint,35,opt,name=min_safe_time_seconds,json=minSafeTimeSeconds" json:"min_safe_time_seconds,omitempty"`
+ SafeReplicaName []string `protobuf:"bytes,36,rep,name=safe_replica_name,json=safeReplicaName" json:"safe_replica_name,omitempty"`
+ PersistOffset *bool `protobuf:"varint,37,opt,name=persist_offset,json=persistOffset,def=0" json:"persist_offset,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Query) Reset() { *m = Query{} }
+func (m *Query) String() string { return proto.CompactTextString(m) }
+func (*Query) ProtoMessage() {}
+func (*Query) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15}
+}
+func (m *Query) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Query.Unmarshal(m, b)
+}
+func (m *Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Query.Marshal(b, m, deterministic)
+}
+func (dst *Query) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Query.Merge(dst, src)
+}
+func (m *Query) XXX_Size() int {
+ return xxx_messageInfo_Query.Size(m)
+}
+func (m *Query) XXX_DiscardUnknown() {
+ xxx_messageInfo_Query.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query proto.InternalMessageInfo
+
+const Default_Query_Offset int32 = 0
+const Default_Query_RequirePerfectPlan bool = false
+const Default_Query_KeysOnly bool = false
+const Default_Query_Compile bool = false
+const Default_Query_PersistOffset bool = false
+
+func (m *Query) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *Query) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+func (m *Query) GetNameSpace() string {
+ if m != nil && m.NameSpace != nil {
+ return *m.NameSpace
+ }
+ return ""
+}
+
+func (m *Query) GetKind() string {
+ if m != nil && m.Kind != nil {
+ return *m.Kind
+ }
+ return ""
+}
+
+func (m *Query) GetAncestor() *Reference {
+ if m != nil {
+ return m.Ancestor
+ }
+ return nil
+}
+
+func (m *Query) GetFilter() []*Query_Filter {
+ if m != nil {
+ return m.Filter
+ }
+ return nil
+}
+
+func (m *Query) GetSearchQuery() string {
+ if m != nil && m.SearchQuery != nil {
+ return *m.SearchQuery
+ }
+ return ""
+}
+
+func (m *Query) GetOrder() []*Query_Order {
+ if m != nil {
+ return m.Order
+ }
+ return nil
+}
+
+func (m *Query) GetHint() Query_Hint {
+ if m != nil && m.Hint != nil {
+ return *m.Hint
+ }
+ return Query_ORDER_FIRST
+}
+
+func (m *Query) GetCount() int32 {
+ if m != nil && m.Count != nil {
+ return *m.Count
+ }
+ return 0
+}
+
+func (m *Query) GetOffset() int32 {
+ if m != nil && m.Offset != nil {
+ return *m.Offset
+ }
+ return Default_Query_Offset
+}
+
+func (m *Query) GetLimit() int32 {
+ if m != nil && m.Limit != nil {
+ return *m.Limit
+ }
+ return 0
+}
+
+func (m *Query) GetCompiledCursor() *CompiledCursor {
+ if m != nil {
+ return m.CompiledCursor
+ }
+ return nil
+}
+
+func (m *Query) GetEndCompiledCursor() *CompiledCursor {
+ if m != nil {
+ return m.EndCompiledCursor
+ }
+ return nil
+}
+
+func (m *Query) GetCompositeIndex() []*CompositeIndex {
+ if m != nil {
+ return m.CompositeIndex
+ }
+ return nil
+}
+
+func (m *Query) GetRequirePerfectPlan() bool {
+ if m != nil && m.RequirePerfectPlan != nil {
+ return *m.RequirePerfectPlan
+ }
+ return Default_Query_RequirePerfectPlan
+}
+
+func (m *Query) GetKeysOnly() bool {
+ if m != nil && m.KeysOnly != nil {
+ return *m.KeysOnly
+ }
+ return Default_Query_KeysOnly
+}
+
+func (m *Query) GetTransaction() *Transaction {
+ if m != nil {
+ return m.Transaction
+ }
+ return nil
+}
+
+func (m *Query) GetCompile() bool {
+ if m != nil && m.Compile != nil {
+ return *m.Compile
+ }
+ return Default_Query_Compile
+}
+
+func (m *Query) GetFailoverMs() int64 {
+ if m != nil && m.FailoverMs != nil {
+ return *m.FailoverMs
+ }
+ return 0
+}
+
+func (m *Query) GetStrong() bool {
+ if m != nil && m.Strong != nil {
+ return *m.Strong
+ }
+ return false
+}
+
+func (m *Query) GetPropertyName() []string {
+ if m != nil {
+ return m.PropertyName
+ }
+ return nil
+}
+
+func (m *Query) GetGroupByPropertyName() []string {
+ if m != nil {
+ return m.GroupByPropertyName
+ }
+ return nil
+}
+
+func (m *Query) GetDistinct() bool {
+ if m != nil && m.Distinct != nil {
+ return *m.Distinct
+ }
+ return false
+}
+
+func (m *Query) GetMinSafeTimeSeconds() int64 {
+ if m != nil && m.MinSafeTimeSeconds != nil {
+ return *m.MinSafeTimeSeconds
+ }
+ return 0
+}
+
+func (m *Query) GetSafeReplicaName() []string {
+ if m != nil {
+ return m.SafeReplicaName
+ }
+ return nil
+}
+
+func (m *Query) GetPersistOffset() bool {
+ if m != nil && m.PersistOffset != nil {
+ return *m.PersistOffset
+ }
+ return Default_Query_PersistOffset
+}
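+
+// Illustrative sketch, not emitted by protoc-gen-go: the getters above are
+// nil-safe and fall back to the Default_Query_* constants for proto2 fields
+// declared with an explicit default. The helper below is hypothetical and
+// exists only to demonstrate that behaviour; it is not referenced elsewhere
+// in this package.
+func exampleQueryDefaults() {
+ q := &Query{App: proto.String("myapp"), Kind: proto.String("Book")}
+ _ = q.GetOffset()        // 0, from Default_Query_Offset
+ _ = q.GetPersistOffset() // false, from Default_Query_PersistOffset
+ var nilQuery *Query
+ _ = nilQuery.GetApp() // returns "" even on a nil receiver
+}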
+
+type Query_Filter struct {
+ Op *Query_Filter_Operator `protobuf:"varint,6,req,name=op,enum=appengine.Query_Filter_Operator" json:"op,omitempty"`
+ Property []*Property `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Query_Filter) Reset() { *m = Query_Filter{} }
+func (m *Query_Filter) String() string { return proto.CompactTextString(m) }
+func (*Query_Filter) ProtoMessage() {}
+func (*Query_Filter) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0}
+}
+func (m *Query_Filter) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Query_Filter.Unmarshal(m, b)
+}
+func (m *Query_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Query_Filter.Marshal(b, m, deterministic)
+}
+func (dst *Query_Filter) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Query_Filter.Merge(dst, src)
+}
+func (m *Query_Filter) XXX_Size() int {
+ return xxx_messageInfo_Query_Filter.Size(m)
+}
+func (m *Query_Filter) XXX_DiscardUnknown() {
+ xxx_messageInfo_Query_Filter.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query_Filter proto.InternalMessageInfo
+
+func (m *Query_Filter) GetOp() Query_Filter_Operator {
+ if m != nil && m.Op != nil {
+ return *m.Op
+ }
+ return Query_Filter_LESS_THAN
+}
+
+func (m *Query_Filter) GetProperty() []*Property {
+ if m != nil {
+ return m.Property
+ }
+ return nil
+}
+
+type Query_Order struct {
+ Property *string `protobuf:"bytes,10,req,name=property" json:"property,omitempty"`
+ Direction *Query_Order_Direction `protobuf:"varint,11,opt,name=direction,enum=appengine.Query_Order_Direction,def=1" json:"direction,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Query_Order) Reset() { *m = Query_Order{} }
+func (m *Query_Order) String() string { return proto.CompactTextString(m) }
+func (*Query_Order) ProtoMessage() {}
+func (*Query_Order) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 1}
+}
+func (m *Query_Order) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Query_Order.Unmarshal(m, b)
+}
+func (m *Query_Order) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Query_Order.Marshal(b, m, deterministic)
+}
+func (dst *Query_Order) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Query_Order.Merge(dst, src)
+}
+func (m *Query_Order) XXX_Size() int {
+ return xxx_messageInfo_Query_Order.Size(m)
+}
+func (m *Query_Order) XXX_DiscardUnknown() {
+ xxx_messageInfo_Query_Order.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query_Order proto.InternalMessageInfo
+
+const Default_Query_Order_Direction Query_Order_Direction = Query_Order_ASCENDING
+
+func (m *Query_Order) GetProperty() string {
+ if m != nil && m.Property != nil {
+ return *m.Property
+ }
+ return ""
+}
+
+func (m *Query_Order) GetDirection() Query_Order_Direction {
+ if m != nil && m.Direction != nil {
+ return *m.Direction
+ }
+ return Default_Query_Order_Direction
+}
+
+type CompiledQuery struct {
+ Primaryscan *CompiledQuery_PrimaryScan `protobuf:"group,1,req,name=PrimaryScan,json=primaryscan" json:"primaryscan,omitempty"`
+ Mergejoinscan []*CompiledQuery_MergeJoinScan `protobuf:"group,7,rep,name=MergeJoinScan,json=mergejoinscan" json:"mergejoinscan,omitempty"`
+ IndexDef *Index `protobuf:"bytes,21,opt,name=index_def,json=indexDef" json:"index_def,omitempty"`
+ Offset *int32 `protobuf:"varint,10,opt,name=offset,def=0" json:"offset,omitempty"`
+ Limit *int32 `protobuf:"varint,11,opt,name=limit" json:"limit,omitempty"`
+ KeysOnly *bool `protobuf:"varint,12,req,name=keys_only,json=keysOnly" json:"keys_only,omitempty"`
+ PropertyName []string `protobuf:"bytes,24,rep,name=property_name,json=propertyName" json:"property_name,omitempty"`
+ DistinctInfixSize *int32 `protobuf:"varint,25,opt,name=distinct_infix_size,json=distinctInfixSize" json:"distinct_infix_size,omitempty"`
+ Entityfilter *CompiledQuery_EntityFilter `protobuf:"group,13,opt,name=EntityFilter,json=entityfilter" json:"entityfilter,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledQuery) Reset() { *m = CompiledQuery{} }
+func (m *CompiledQuery) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery) ProtoMessage() {}
+func (*CompiledQuery) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16}
+}
+func (m *CompiledQuery) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledQuery.Unmarshal(m, b)
+}
+func (m *CompiledQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledQuery.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledQuery.Merge(dst, src)
+}
+func (m *CompiledQuery) XXX_Size() int {
+ return xxx_messageInfo_CompiledQuery.Size(m)
+}
+func (m *CompiledQuery) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledQuery.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery proto.InternalMessageInfo
+
+const Default_CompiledQuery_Offset int32 = 0
+
+func (m *CompiledQuery) GetPrimaryscan() *CompiledQuery_PrimaryScan {
+ if m != nil {
+ return m.Primaryscan
+ }
+ return nil
+}
+
+func (m *CompiledQuery) GetMergejoinscan() []*CompiledQuery_MergeJoinScan {
+ if m != nil {
+ return m.Mergejoinscan
+ }
+ return nil
+}
+
+func (m *CompiledQuery) GetIndexDef() *Index {
+ if m != nil {
+ return m.IndexDef
+ }
+ return nil
+}
+
+func (m *CompiledQuery) GetOffset() int32 {
+ if m != nil && m.Offset != nil {
+ return *m.Offset
+ }
+ return Default_CompiledQuery_Offset
+}
+
+func (m *CompiledQuery) GetLimit() int32 {
+ if m != nil && m.Limit != nil {
+ return *m.Limit
+ }
+ return 0
+}
+
+func (m *CompiledQuery) GetKeysOnly() bool {
+ if m != nil && m.KeysOnly != nil {
+ return *m.KeysOnly
+ }
+ return false
+}
+
+func (m *CompiledQuery) GetPropertyName() []string {
+ if m != nil {
+ return m.PropertyName
+ }
+ return nil
+}
+
+func (m *CompiledQuery) GetDistinctInfixSize() int32 {
+ if m != nil && m.DistinctInfixSize != nil {
+ return *m.DistinctInfixSize
+ }
+ return 0
+}
+
+func (m *CompiledQuery) GetEntityfilter() *CompiledQuery_EntityFilter {
+ if m != nil {
+ return m.Entityfilter
+ }
+ return nil
+}
+
+type CompiledQuery_PrimaryScan struct {
+ IndexName *string `protobuf:"bytes,2,opt,name=index_name,json=indexName" json:"index_name,omitempty"`
+ StartKey *string `protobuf:"bytes,3,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
+ StartInclusive *bool `protobuf:"varint,4,opt,name=start_inclusive,json=startInclusive" json:"start_inclusive,omitempty"`
+ EndKey *string `protobuf:"bytes,5,opt,name=end_key,json=endKey" json:"end_key,omitempty"`
+ EndInclusive *bool `protobuf:"varint,6,opt,name=end_inclusive,json=endInclusive" json:"end_inclusive,omitempty"`
+ StartPostfixValue []string `protobuf:"bytes,22,rep,name=start_postfix_value,json=startPostfixValue" json:"start_postfix_value,omitempty"`
+ EndPostfixValue []string `protobuf:"bytes,23,rep,name=end_postfix_value,json=endPostfixValue" json:"end_postfix_value,omitempty"`
+ EndUnappliedLogTimestampUs *int64 `protobuf:"varint,19,opt,name=end_unapplied_log_timestamp_us,json=endUnappliedLogTimestampUs" json:"end_unapplied_log_timestamp_us,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledQuery_PrimaryScan) Reset() { *m = CompiledQuery_PrimaryScan{} }
+func (m *CompiledQuery_PrimaryScan) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_PrimaryScan) ProtoMessage() {}
+func (*CompiledQuery_PrimaryScan) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 0}
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledQuery_PrimaryScan.Unmarshal(m, b)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledQuery_PrimaryScan.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_PrimaryScan) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledQuery_PrimaryScan.Merge(dst, src)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Size() int {
+ return xxx_messageInfo_CompiledQuery_PrimaryScan.Size(m)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledQuery_PrimaryScan.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_PrimaryScan proto.InternalMessageInfo
+
+func (m *CompiledQuery_PrimaryScan) GetIndexName() string {
+ if m != nil && m.IndexName != nil {
+ return *m.IndexName
+ }
+ return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartKey() string {
+ if m != nil && m.StartKey != nil {
+ return *m.StartKey
+ }
+ return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartInclusive() bool {
+ if m != nil && m.StartInclusive != nil {
+ return *m.StartInclusive
+ }
+ return false
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndKey() string {
+ if m != nil && m.EndKey != nil {
+ return *m.EndKey
+ }
+ return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndInclusive() bool {
+ if m != nil && m.EndInclusive != nil {
+ return *m.EndInclusive
+ }
+ return false
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartPostfixValue() []string {
+ if m != nil {
+ return m.StartPostfixValue
+ }
+ return nil
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndPostfixValue() []string {
+ if m != nil {
+ return m.EndPostfixValue
+ }
+ return nil
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndUnappliedLogTimestampUs() int64 {
+ if m != nil && m.EndUnappliedLogTimestampUs != nil {
+ return *m.EndUnappliedLogTimestampUs
+ }
+ return 0
+}
+
+type CompiledQuery_MergeJoinScan struct {
+ IndexName *string `protobuf:"bytes,8,req,name=index_name,json=indexName" json:"index_name,omitempty"`
+ PrefixValue []string `protobuf:"bytes,9,rep,name=prefix_value,json=prefixValue" json:"prefix_value,omitempty"`
+ ValuePrefix *bool `protobuf:"varint,20,opt,name=value_prefix,json=valuePrefix,def=0" json:"value_prefix,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledQuery_MergeJoinScan) Reset() { *m = CompiledQuery_MergeJoinScan{} }
+func (m *CompiledQuery_MergeJoinScan) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_MergeJoinScan) ProtoMessage() {}
+func (*CompiledQuery_MergeJoinScan) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 1}
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledQuery_MergeJoinScan.Unmarshal(m, b)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledQuery_MergeJoinScan.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_MergeJoinScan) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledQuery_MergeJoinScan.Merge(dst, src)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Size() int {
+ return xxx_messageInfo_CompiledQuery_MergeJoinScan.Size(m)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledQuery_MergeJoinScan.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_MergeJoinScan proto.InternalMessageInfo
+
+const Default_CompiledQuery_MergeJoinScan_ValuePrefix bool = false
+
+func (m *CompiledQuery_MergeJoinScan) GetIndexName() string {
+ if m != nil && m.IndexName != nil {
+ return *m.IndexName
+ }
+ return ""
+}
+
+func (m *CompiledQuery_MergeJoinScan) GetPrefixValue() []string {
+ if m != nil {
+ return m.PrefixValue
+ }
+ return nil
+}
+
+func (m *CompiledQuery_MergeJoinScan) GetValuePrefix() bool {
+ if m != nil && m.ValuePrefix != nil {
+ return *m.ValuePrefix
+ }
+ return Default_CompiledQuery_MergeJoinScan_ValuePrefix
+}
+
+type CompiledQuery_EntityFilter struct {
+ Distinct *bool `protobuf:"varint,14,opt,name=distinct,def=0" json:"distinct,omitempty"`
+ Kind *string `protobuf:"bytes,17,opt,name=kind" json:"kind,omitempty"`
+ Ancestor *Reference `protobuf:"bytes,18,opt,name=ancestor" json:"ancestor,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledQuery_EntityFilter) Reset() { *m = CompiledQuery_EntityFilter{} }
+func (m *CompiledQuery_EntityFilter) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_EntityFilter) ProtoMessage() {}
+func (*CompiledQuery_EntityFilter) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 2}
+}
+func (m *CompiledQuery_EntityFilter) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledQuery_EntityFilter.Unmarshal(m, b)
+}
+func (m *CompiledQuery_EntityFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledQuery_EntityFilter.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_EntityFilter) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledQuery_EntityFilter.Merge(dst, src)
+}
+func (m *CompiledQuery_EntityFilter) XXX_Size() int {
+ return xxx_messageInfo_CompiledQuery_EntityFilter.Size(m)
+}
+func (m *CompiledQuery_EntityFilter) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledQuery_EntityFilter.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_EntityFilter proto.InternalMessageInfo
+
+const Default_CompiledQuery_EntityFilter_Distinct bool = false
+
+func (m *CompiledQuery_EntityFilter) GetDistinct() bool {
+ if m != nil && m.Distinct != nil {
+ return *m.Distinct
+ }
+ return Default_CompiledQuery_EntityFilter_Distinct
+}
+
+func (m *CompiledQuery_EntityFilter) GetKind() string {
+ if m != nil && m.Kind != nil {
+ return *m.Kind
+ }
+ return ""
+}
+
+func (m *CompiledQuery_EntityFilter) GetAncestor() *Reference {
+ if m != nil {
+ return m.Ancestor
+ }
+ return nil
+}
+
+type CompiledCursor struct {
+ Position *CompiledCursor_Position `protobuf:"group,2,opt,name=Position,json=position" json:"position,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledCursor) Reset() { *m = CompiledCursor{} }
+func (m *CompiledCursor) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor) ProtoMessage() {}
+func (*CompiledCursor) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17}
+}
+func (m *CompiledCursor) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledCursor.Unmarshal(m, b)
+}
+func (m *CompiledCursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledCursor.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledCursor.Merge(dst, src)
+}
+func (m *CompiledCursor) XXX_Size() int {
+ return xxx_messageInfo_CompiledCursor.Size(m)
+}
+func (m *CompiledCursor) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledCursor.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor proto.InternalMessageInfo
+
+func (m *CompiledCursor) GetPosition() *CompiledCursor_Position {
+ if m != nil {
+ return m.Position
+ }
+ return nil
+}
+
+type CompiledCursor_Position struct {
+ StartKey *string `protobuf:"bytes,27,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
+ Indexvalue []*CompiledCursor_Position_IndexValue `protobuf:"group,29,rep,name=IndexValue,json=indexvalue" json:"indexvalue,omitempty"`
+ Key *Reference `protobuf:"bytes,32,opt,name=key" json:"key,omitempty"`
+ StartInclusive *bool `protobuf:"varint,28,opt,name=start_inclusive,json=startInclusive,def=1" json:"start_inclusive,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledCursor_Position) Reset() { *m = CompiledCursor_Position{} }
+func (m *CompiledCursor_Position) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor_Position) ProtoMessage() {}
+func (*CompiledCursor_Position) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17, 0}
+}
+func (m *CompiledCursor_Position) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledCursor_Position.Unmarshal(m, b)
+}
+func (m *CompiledCursor_Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledCursor_Position.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor_Position) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledCursor_Position.Merge(dst, src)
+}
+func (m *CompiledCursor_Position) XXX_Size() int {
+ return xxx_messageInfo_CompiledCursor_Position.Size(m)
+}
+func (m *CompiledCursor_Position) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledCursor_Position.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor_Position proto.InternalMessageInfo
+
+const Default_CompiledCursor_Position_StartInclusive bool = true
+
+func (m *CompiledCursor_Position) GetStartKey() string {
+ if m != nil && m.StartKey != nil {
+ return *m.StartKey
+ }
+ return ""
+}
+
+func (m *CompiledCursor_Position) GetIndexvalue() []*CompiledCursor_Position_IndexValue {
+ if m != nil {
+ return m.Indexvalue
+ }
+ return nil
+}
+
+func (m *CompiledCursor_Position) GetKey() *Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *CompiledCursor_Position) GetStartInclusive() bool {
+ if m != nil && m.StartInclusive != nil {
+ return *m.StartInclusive
+ }
+ return Default_CompiledCursor_Position_StartInclusive
+}
+
+type CompiledCursor_Position_IndexValue struct {
+ Property *string `protobuf:"bytes,30,opt,name=property" json:"property,omitempty"`
+ Value *PropertyValue `protobuf:"bytes,31,req,name=value" json:"value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompiledCursor_Position_IndexValue) Reset() { *m = CompiledCursor_Position_IndexValue{} }
+func (m *CompiledCursor_Position_IndexValue) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor_Position_IndexValue) ProtoMessage() {}
+func (*CompiledCursor_Position_IndexValue) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17, 0, 0}
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompiledCursor_Position_IndexValue.Unmarshal(m, b)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompiledCursor_Position_IndexValue.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor_Position_IndexValue) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompiledCursor_Position_IndexValue.Merge(dst, src)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Size() int {
+ return xxx_messageInfo_CompiledCursor_Position_IndexValue.Size(m)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompiledCursor_Position_IndexValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor_Position_IndexValue proto.InternalMessageInfo
+
+func (m *CompiledCursor_Position_IndexValue) GetProperty() string {
+ if m != nil && m.Property != nil {
+ return *m.Property
+ }
+ return ""
+}
+
+func (m *CompiledCursor_Position_IndexValue) GetValue() *PropertyValue {
+ if m != nil {
+ return m.Value
+ }
+ return nil
+}
+
+type Cursor struct {
+ Cursor *uint64 `protobuf:"fixed64,1,req,name=cursor" json:"cursor,omitempty"`
+ App *string `protobuf:"bytes,2,opt,name=app" json:"app,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Cursor) Reset() { *m = Cursor{} }
+func (m *Cursor) String() string { return proto.CompactTextString(m) }
+func (*Cursor) ProtoMessage() {}
+func (*Cursor) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{18}
+}
+func (m *Cursor) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Cursor.Unmarshal(m, b)
+}
+func (m *Cursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Cursor.Marshal(b, m, deterministic)
+}
+func (dst *Cursor) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Cursor.Merge(dst, src)
+}
+func (m *Cursor) XXX_Size() int {
+ return xxx_messageInfo_Cursor.Size(m)
+}
+func (m *Cursor) XXX_DiscardUnknown() {
+ xxx_messageInfo_Cursor.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cursor proto.InternalMessageInfo
+
+func (m *Cursor) GetCursor() uint64 {
+ if m != nil && m.Cursor != nil {
+ return *m.Cursor
+ }
+ return 0
+}
+
+func (m *Cursor) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+type Error struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Error) Reset() { *m = Error{} }
+func (m *Error) String() string { return proto.CompactTextString(m) }
+func (*Error) ProtoMessage() {}
+func (*Error) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{19}
+}
+func (m *Error) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Error.Unmarshal(m, b)
+}
+func (m *Error) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Error.Marshal(b, m, deterministic)
+}
+func (dst *Error) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Error.Merge(dst, src)
+}
+func (m *Error) XXX_Size() int {
+ return xxx_messageInfo_Error.Size(m)
+}
+func (m *Error) XXX_DiscardUnknown() {
+ xxx_messageInfo_Error.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Error proto.InternalMessageInfo
+
+type Cost struct {
+ IndexWrites *int32 `protobuf:"varint,1,opt,name=index_writes,json=indexWrites" json:"index_writes,omitempty"`
+ IndexWriteBytes *int32 `protobuf:"varint,2,opt,name=index_write_bytes,json=indexWriteBytes" json:"index_write_bytes,omitempty"`
+ EntityWrites *int32 `protobuf:"varint,3,opt,name=entity_writes,json=entityWrites" json:"entity_writes,omitempty"`
+ EntityWriteBytes *int32 `protobuf:"varint,4,opt,name=entity_write_bytes,json=entityWriteBytes" json:"entity_write_bytes,omitempty"`
+ Commitcost *Cost_CommitCost `protobuf:"group,5,opt,name=CommitCost,json=commitcost" json:"commitcost,omitempty"`
+ ApproximateStorageDelta *int32 `protobuf:"varint,8,opt,name=approximate_storage_delta,json=approximateStorageDelta" json:"approximate_storage_delta,omitempty"`
+ IdSequenceUpdates *int32 `protobuf:"varint,9,opt,name=id_sequence_updates,json=idSequenceUpdates" json:"id_sequence_updates,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Cost) Reset() { *m = Cost{} }
+func (m *Cost) String() string { return proto.CompactTextString(m) }
+func (*Cost) ProtoMessage() {}
+func (*Cost) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{20}
+}
+func (m *Cost) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Cost.Unmarshal(m, b)
+}
+func (m *Cost) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Cost.Marshal(b, m, deterministic)
+}
+func (dst *Cost) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Cost.Merge(dst, src)
+}
+func (m *Cost) XXX_Size() int {
+ return xxx_messageInfo_Cost.Size(m)
+}
+func (m *Cost) XXX_DiscardUnknown() {
+ xxx_messageInfo_Cost.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cost proto.InternalMessageInfo
+
+func (m *Cost) GetIndexWrites() int32 {
+ if m != nil && m.IndexWrites != nil {
+ return *m.IndexWrites
+ }
+ return 0
+}
+
+func (m *Cost) GetIndexWriteBytes() int32 {
+ if m != nil && m.IndexWriteBytes != nil {
+ return *m.IndexWriteBytes
+ }
+ return 0
+}
+
+func (m *Cost) GetEntityWrites() int32 {
+ if m != nil && m.EntityWrites != nil {
+ return *m.EntityWrites
+ }
+ return 0
+}
+
+func (m *Cost) GetEntityWriteBytes() int32 {
+ if m != nil && m.EntityWriteBytes != nil {
+ return *m.EntityWriteBytes
+ }
+ return 0
+}
+
+func (m *Cost) GetCommitcost() *Cost_CommitCost {
+ if m != nil {
+ return m.Commitcost
+ }
+ return nil
+}
+
+func (m *Cost) GetApproximateStorageDelta() int32 {
+ if m != nil && m.ApproximateStorageDelta != nil {
+ return *m.ApproximateStorageDelta
+ }
+ return 0
+}
+
+func (m *Cost) GetIdSequenceUpdates() int32 {
+ if m != nil && m.IdSequenceUpdates != nil {
+ return *m.IdSequenceUpdates
+ }
+ return 0
+}
+
+type Cost_CommitCost struct {
+ RequestedEntityPuts *int32 `protobuf:"varint,6,opt,name=requested_entity_puts,json=requestedEntityPuts" json:"requested_entity_puts,omitempty"`
+ RequestedEntityDeletes *int32 `protobuf:"varint,7,opt,name=requested_entity_deletes,json=requestedEntityDeletes" json:"requested_entity_deletes,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Cost_CommitCost) Reset() { *m = Cost_CommitCost{} }
+func (m *Cost_CommitCost) String() string { return proto.CompactTextString(m) }
+func (*Cost_CommitCost) ProtoMessage() {}
+func (*Cost_CommitCost) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{20, 0}
+}
+func (m *Cost_CommitCost) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Cost_CommitCost.Unmarshal(m, b)
+}
+func (m *Cost_CommitCost) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Cost_CommitCost.Marshal(b, m, deterministic)
+}
+func (dst *Cost_CommitCost) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Cost_CommitCost.Merge(dst, src)
+}
+func (m *Cost_CommitCost) XXX_Size() int {
+ return xxx_messageInfo_Cost_CommitCost.Size(m)
+}
+func (m *Cost_CommitCost) XXX_DiscardUnknown() {
+ xxx_messageInfo_Cost_CommitCost.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cost_CommitCost proto.InternalMessageInfo
+
+func (m *Cost_CommitCost) GetRequestedEntityPuts() int32 {
+ if m != nil && m.RequestedEntityPuts != nil {
+ return *m.RequestedEntityPuts
+ }
+ return 0
+}
+
+func (m *Cost_CommitCost) GetRequestedEntityDeletes() int32 {
+ if m != nil && m.RequestedEntityDeletes != nil {
+ return *m.RequestedEntityDeletes
+ }
+ return 0
+}
+
+type GetRequest struct {
+ Header *InternalHeader `protobuf:"bytes,6,opt,name=header" json:"header,omitempty"`
+ Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+ Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
+ FailoverMs *int64 `protobuf:"varint,3,opt,name=failover_ms,json=failoverMs" json:"failover_ms,omitempty"`
+ Strong *bool `protobuf:"varint,4,opt,name=strong" json:"strong,omitempty"`
+ AllowDeferred *bool `protobuf:"varint,5,opt,name=allow_deferred,json=allowDeferred,def=0" json:"allow_deferred,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetRequest) Reset() { *m = GetRequest{} }
+func (m *GetRequest) String() string { return proto.CompactTextString(m) }
+func (*GetRequest) ProtoMessage() {}
+func (*GetRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{21}
+}
+func (m *GetRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetRequest.Unmarshal(m, b)
+}
+func (m *GetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetRequest.Merge(dst, src)
+}
+func (m *GetRequest) XXX_Size() int {
+ return xxx_messageInfo_GetRequest.Size(m)
+}
+func (m *GetRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetRequest proto.InternalMessageInfo
+
+const Default_GetRequest_AllowDeferred bool = false
+
+func (m *GetRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *GetRequest) GetKey() []*Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *GetRequest) GetTransaction() *Transaction {
+ if m != nil {
+ return m.Transaction
+ }
+ return nil
+}
+
+func (m *GetRequest) GetFailoverMs() int64 {
+ if m != nil && m.FailoverMs != nil {
+ return *m.FailoverMs
+ }
+ return 0
+}
+
+func (m *GetRequest) GetStrong() bool {
+ if m != nil && m.Strong != nil {
+ return *m.Strong
+ }
+ return false
+}
+
+func (m *GetRequest) GetAllowDeferred() bool {
+ if m != nil && m.AllowDeferred != nil {
+ return *m.AllowDeferred
+ }
+ return Default_GetRequest_AllowDeferred
+}
+
+type GetResponse struct {
+ Entity []*GetResponse_Entity `protobuf:"group,1,rep,name=Entity,json=entity" json:"entity,omitempty"`
+ Deferred []*Reference `protobuf:"bytes,5,rep,name=deferred" json:"deferred,omitempty"`
+ InOrder *bool `protobuf:"varint,6,opt,name=in_order,json=inOrder,def=1" json:"in_order,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetResponse) Reset() { *m = GetResponse{} }
+func (m *GetResponse) String() string { return proto.CompactTextString(m) }
+func (*GetResponse) ProtoMessage() {}
+func (*GetResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{22}
+}
+func (m *GetResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetResponse.Unmarshal(m, b)
+}
+func (m *GetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetResponse.Merge(dst, src)
+}
+func (m *GetResponse) XXX_Size() int {
+ return xxx_messageInfo_GetResponse.Size(m)
+}
+func (m *GetResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetResponse proto.InternalMessageInfo
+
+const Default_GetResponse_InOrder bool = true
+
+func (m *GetResponse) GetEntity() []*GetResponse_Entity {
+ if m != nil {
+ return m.Entity
+ }
+ return nil
+}
+
+func (m *GetResponse) GetDeferred() []*Reference {
+ if m != nil {
+ return m.Deferred
+ }
+ return nil
+}
+
+func (m *GetResponse) GetInOrder() bool {
+ if m != nil && m.InOrder != nil {
+ return *m.InOrder
+ }
+ return Default_GetResponse_InOrder
+}
+
+type GetResponse_Entity struct {
+ Entity *EntityProto `protobuf:"bytes,2,opt,name=entity" json:"entity,omitempty"`
+ Key *Reference `protobuf:"bytes,4,opt,name=key" json:"key,omitempty"`
+ Version *int64 `protobuf:"varint,3,opt,name=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetResponse_Entity) Reset() { *m = GetResponse_Entity{} }
+func (m *GetResponse_Entity) String() string { return proto.CompactTextString(m) }
+func (*GetResponse_Entity) ProtoMessage() {}
+func (*GetResponse_Entity) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{22, 0}
+}
+func (m *GetResponse_Entity) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetResponse_Entity.Unmarshal(m, b)
+}
+func (m *GetResponse_Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetResponse_Entity.Marshal(b, m, deterministic)
+}
+func (dst *GetResponse_Entity) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetResponse_Entity.Merge(dst, src)
+}
+func (m *GetResponse_Entity) XXX_Size() int {
+ return xxx_messageInfo_GetResponse_Entity.Size(m)
+}
+func (m *GetResponse_Entity) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetResponse_Entity.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetResponse_Entity proto.InternalMessageInfo
+
+func (m *GetResponse_Entity) GetEntity() *EntityProto {
+ if m != nil {
+ return m.Entity
+ }
+ return nil
+}
+
+func (m *GetResponse_Entity) GetKey() *Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *GetResponse_Entity) GetVersion() int64 {
+ if m != nil && m.Version != nil {
+ return *m.Version
+ }
+ return 0
+}
+
+type PutRequest struct {
+ Header *InternalHeader `protobuf:"bytes,11,opt,name=header" json:"header,omitempty"`
+ Entity []*EntityProto `protobuf:"bytes,1,rep,name=entity" json:"entity,omitempty"`
+ Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
+ CompositeIndex []*CompositeIndex `protobuf:"bytes,3,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+ Trusted *bool `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
+ Force *bool `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
+ MarkChanges *bool `protobuf:"varint,8,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+ Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+ AutoIdPolicy *PutRequest_AutoIdPolicy `protobuf:"varint,10,opt,name=auto_id_policy,json=autoIdPolicy,enum=appengine.PutRequest_AutoIdPolicy,def=0" json:"auto_id_policy,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PutRequest) Reset() { *m = PutRequest{} }
+func (m *PutRequest) String() string { return proto.CompactTextString(m) }
+func (*PutRequest) ProtoMessage() {}
+func (*PutRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{23}
+}
+func (m *PutRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PutRequest.Unmarshal(m, b)
+}
+func (m *PutRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PutRequest.Marshal(b, m, deterministic)
+}
+func (dst *PutRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PutRequest.Merge(dst, src)
+}
+func (m *PutRequest) XXX_Size() int {
+ return xxx_messageInfo_PutRequest.Size(m)
+}
+func (m *PutRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_PutRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PutRequest proto.InternalMessageInfo
+
+const Default_PutRequest_Trusted bool = false
+const Default_PutRequest_Force bool = false
+const Default_PutRequest_MarkChanges bool = false
+const Default_PutRequest_AutoIdPolicy PutRequest_AutoIdPolicy = PutRequest_CURRENT
+
+func (m *PutRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *PutRequest) GetEntity() []*EntityProto {
+ if m != nil {
+ return m.Entity
+ }
+ return nil
+}
+
+func (m *PutRequest) GetTransaction() *Transaction {
+ if m != nil {
+ return m.Transaction
+ }
+ return nil
+}
+
+func (m *PutRequest) GetCompositeIndex() []*CompositeIndex {
+ if m != nil {
+ return m.CompositeIndex
+ }
+ return nil
+}
+
+func (m *PutRequest) GetTrusted() bool {
+ if m != nil && m.Trusted != nil {
+ return *m.Trusted
+ }
+ return Default_PutRequest_Trusted
+}
+
+func (m *PutRequest) GetForce() bool {
+ if m != nil && m.Force != nil {
+ return *m.Force
+ }
+ return Default_PutRequest_Force
+}
+
+func (m *PutRequest) GetMarkChanges() bool {
+ if m != nil && m.MarkChanges != nil {
+ return *m.MarkChanges
+ }
+ return Default_PutRequest_MarkChanges
+}
+
+func (m *PutRequest) GetSnapshot() []*Snapshot {
+ if m != nil {
+ return m.Snapshot
+ }
+ return nil
+}
+
+func (m *PutRequest) GetAutoIdPolicy() PutRequest_AutoIdPolicy {
+ if m != nil && m.AutoIdPolicy != nil {
+ return *m.AutoIdPolicy
+ }
+ return Default_PutRequest_AutoIdPolicy
+}
+
+type PutResponse struct {
+ Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+ Cost *Cost `protobuf:"bytes,2,opt,name=cost" json:"cost,omitempty"`
+ Version []int64 `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *PutResponse) Reset() { *m = PutResponse{} }
+func (m *PutResponse) String() string { return proto.CompactTextString(m) }
+func (*PutResponse) ProtoMessage() {}
+func (*PutResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{24}
+}
+func (m *PutResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_PutResponse.Unmarshal(m, b)
+}
+func (m *PutResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_PutResponse.Marshal(b, m, deterministic)
+}
+func (dst *PutResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PutResponse.Merge(dst, src)
+}
+func (m *PutResponse) XXX_Size() int {
+ return xxx_messageInfo_PutResponse.Size(m)
+}
+func (m *PutResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_PutResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PutResponse proto.InternalMessageInfo
+
+func (m *PutResponse) GetKey() []*Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *PutResponse) GetCost() *Cost {
+ if m != nil {
+ return m.Cost
+ }
+ return nil
+}
+
+func (m *PutResponse) GetVersion() []int64 {
+ if m != nil {
+ return m.Version
+ }
+ return nil
+}
+
+type TouchRequest struct {
+ Header *InternalHeader `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
+ Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+ CompositeIndex []*CompositeIndex `protobuf:"bytes,2,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+ Force *bool `protobuf:"varint,3,opt,name=force,def=0" json:"force,omitempty"`
+ Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *TouchRequest) Reset() { *m = TouchRequest{} }
+func (m *TouchRequest) String() string { return proto.CompactTextString(m) }
+func (*TouchRequest) ProtoMessage() {}
+func (*TouchRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{25}
+}
+func (m *TouchRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_TouchRequest.Unmarshal(m, b)
+}
+func (m *TouchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_TouchRequest.Marshal(b, m, deterministic)
+}
+func (dst *TouchRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_TouchRequest.Merge(dst, src)
+}
+func (m *TouchRequest) XXX_Size() int {
+ return xxx_messageInfo_TouchRequest.Size(m)
+}
+func (m *TouchRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_TouchRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TouchRequest proto.InternalMessageInfo
+
+const Default_TouchRequest_Force bool = false
+
+func (m *TouchRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *TouchRequest) GetKey() []*Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *TouchRequest) GetCompositeIndex() []*CompositeIndex {
+ if m != nil {
+ return m.CompositeIndex
+ }
+ return nil
+}
+
+func (m *TouchRequest) GetForce() bool {
+ if m != nil && m.Force != nil {
+ return *m.Force
+ }
+ return Default_TouchRequest_Force
+}
+
+func (m *TouchRequest) GetSnapshot() []*Snapshot {
+ if m != nil {
+ return m.Snapshot
+ }
+ return nil
+}
+
+type TouchResponse struct {
+ Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *TouchResponse) Reset() { *m = TouchResponse{} }
+func (m *TouchResponse) String() string { return proto.CompactTextString(m) }
+func (*TouchResponse) ProtoMessage() {}
+func (*TouchResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{26}
+}
+func (m *TouchResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_TouchResponse.Unmarshal(m, b)
+}
+func (m *TouchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_TouchResponse.Marshal(b, m, deterministic)
+}
+func (dst *TouchResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_TouchResponse.Merge(dst, src)
+}
+func (m *TouchResponse) XXX_Size() int {
+ return xxx_messageInfo_TouchResponse.Size(m)
+}
+func (m *TouchResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_TouchResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TouchResponse proto.InternalMessageInfo
+
+func (m *TouchResponse) GetCost() *Cost {
+ if m != nil {
+ return m.Cost
+ }
+ return nil
+}
+
+type DeleteRequest struct {
+ Header *InternalHeader `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
+ Key []*Reference `protobuf:"bytes,6,rep,name=key" json:"key,omitempty"`
+ Transaction *Transaction `protobuf:"bytes,5,opt,name=transaction" json:"transaction,omitempty"`
+ Trusted *bool `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
+ Force *bool `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
+ MarkChanges *bool `protobuf:"varint,8,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+ Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *DeleteRequest) Reset() { *m = DeleteRequest{} }
+func (m *DeleteRequest) String() string { return proto.CompactTextString(m) }
+func (*DeleteRequest) ProtoMessage() {}
+func (*DeleteRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{27}
+}
+func (m *DeleteRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_DeleteRequest.Unmarshal(m, b)
+}
+func (m *DeleteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_DeleteRequest.Marshal(b, m, deterministic)
+}
+func (dst *DeleteRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_DeleteRequest.Merge(dst, src)
+}
+func (m *DeleteRequest) XXX_Size() int {
+ return xxx_messageInfo_DeleteRequest.Size(m)
+}
+func (m *DeleteRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_DeleteRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DeleteRequest proto.InternalMessageInfo
+
+const Default_DeleteRequest_Trusted bool = false
+const Default_DeleteRequest_Force bool = false
+const Default_DeleteRequest_MarkChanges bool = false
+
+func (m *DeleteRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *DeleteRequest) GetKey() []*Reference {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func (m *DeleteRequest) GetTransaction() *Transaction {
+ if m != nil {
+ return m.Transaction
+ }
+ return nil
+}
+
+func (m *DeleteRequest) GetTrusted() bool {
+ if m != nil && m.Trusted != nil {
+ return *m.Trusted
+ }
+ return Default_DeleteRequest_Trusted
+}
+
+func (m *DeleteRequest) GetForce() bool {
+ if m != nil && m.Force != nil {
+ return *m.Force
+ }
+ return Default_DeleteRequest_Force
+}
+
+func (m *DeleteRequest) GetMarkChanges() bool {
+ if m != nil && m.MarkChanges != nil {
+ return *m.MarkChanges
+ }
+ return Default_DeleteRequest_MarkChanges
+}
+
+func (m *DeleteRequest) GetSnapshot() []*Snapshot {
+ if m != nil {
+ return m.Snapshot
+ }
+ return nil
+}
+
+type DeleteResponse struct {
+ Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+ Version []int64 `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *DeleteResponse) Reset() { *m = DeleteResponse{} }
+func (m *DeleteResponse) String() string { return proto.CompactTextString(m) }
+func (*DeleteResponse) ProtoMessage() {}
+func (*DeleteResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{28}
+}
+func (m *DeleteResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_DeleteResponse.Unmarshal(m, b)
+}
+func (m *DeleteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_DeleteResponse.Marshal(b, m, deterministic)
+}
+func (dst *DeleteResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_DeleteResponse.Merge(dst, src)
+}
+func (m *DeleteResponse) XXX_Size() int {
+ return xxx_messageInfo_DeleteResponse.Size(m)
+}
+func (m *DeleteResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_DeleteResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DeleteResponse proto.InternalMessageInfo
+
+func (m *DeleteResponse) GetCost() *Cost {
+ if m != nil {
+ return m.Cost
+ }
+ return nil
+}
+
+func (m *DeleteResponse) GetVersion() []int64 {
+ if m != nil {
+ return m.Version
+ }
+ return nil
+}
+
+type NextRequest struct {
+ Header *InternalHeader `protobuf:"bytes,5,opt,name=header" json:"header,omitempty"`
+ Cursor *Cursor `protobuf:"bytes,1,req,name=cursor" json:"cursor,omitempty"`
+ Count *int32 `protobuf:"varint,2,opt,name=count" json:"count,omitempty"`
+ Offset *int32 `protobuf:"varint,4,opt,name=offset,def=0" json:"offset,omitempty"`
+ Compile *bool `protobuf:"varint,3,opt,name=compile,def=0" json:"compile,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *NextRequest) Reset() { *m = NextRequest{} }
+func (m *NextRequest) String() string { return proto.CompactTextString(m) }
+func (*NextRequest) ProtoMessage() {}
+func (*NextRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{29}
+}
+func (m *NextRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_NextRequest.Unmarshal(m, b)
+}
+func (m *NextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_NextRequest.Marshal(b, m, deterministic)
+}
+func (dst *NextRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_NextRequest.Merge(dst, src)
+}
+func (m *NextRequest) XXX_Size() int {
+ return xxx_messageInfo_NextRequest.Size(m)
+}
+func (m *NextRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_NextRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_NextRequest proto.InternalMessageInfo
+
+const Default_NextRequest_Offset int32 = 0
+const Default_NextRequest_Compile bool = false
+
+func (m *NextRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *NextRequest) GetCursor() *Cursor {
+ if m != nil {
+ return m.Cursor
+ }
+ return nil
+}
+
+func (m *NextRequest) GetCount() int32 {
+ if m != nil && m.Count != nil {
+ return *m.Count
+ }
+ return 0
+}
+
+func (m *NextRequest) GetOffset() int32 {
+ if m != nil && m.Offset != nil {
+ return *m.Offset
+ }
+ return Default_NextRequest_Offset
+}
+
+func (m *NextRequest) GetCompile() bool {
+ if m != nil && m.Compile != nil {
+ return *m.Compile
+ }
+ return Default_NextRequest_Compile
+}
+
+type QueryResult struct {
+ Cursor *Cursor `protobuf:"bytes,1,opt,name=cursor" json:"cursor,omitempty"`
+ Result []*EntityProto `protobuf:"bytes,2,rep,name=result" json:"result,omitempty"`
+ SkippedResults *int32 `protobuf:"varint,7,opt,name=skipped_results,json=skippedResults" json:"skipped_results,omitempty"`
+ MoreResults *bool `protobuf:"varint,3,req,name=more_results,json=moreResults" json:"more_results,omitempty"`
+ KeysOnly *bool `protobuf:"varint,4,opt,name=keys_only,json=keysOnly" json:"keys_only,omitempty"`
+ IndexOnly *bool `protobuf:"varint,9,opt,name=index_only,json=indexOnly" json:"index_only,omitempty"`
+ SmallOps *bool `protobuf:"varint,10,opt,name=small_ops,json=smallOps" json:"small_ops,omitempty"`
+ CompiledQuery *CompiledQuery `protobuf:"bytes,5,opt,name=compiled_query,json=compiledQuery" json:"compiled_query,omitempty"`
+ CompiledCursor *CompiledCursor `protobuf:"bytes,6,opt,name=compiled_cursor,json=compiledCursor" json:"compiled_cursor,omitempty"`
+ Index []*CompositeIndex `protobuf:"bytes,8,rep,name=index" json:"index,omitempty"`
+ Version []int64 `protobuf:"varint,11,rep,name=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *QueryResult) Reset() { *m = QueryResult{} }
+func (m *QueryResult) String() string { return proto.CompactTextString(m) }
+func (*QueryResult) ProtoMessage() {}
+func (*QueryResult) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{30}
+}
+func (m *QueryResult) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_QueryResult.Unmarshal(m, b)
+}
+func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_QueryResult.Marshal(b, m, deterministic)
+}
+func (dst *QueryResult) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_QueryResult.Merge(dst, src)
+}
+func (m *QueryResult) XXX_Size() int {
+ return xxx_messageInfo_QueryResult.Size(m)
+}
+func (m *QueryResult) XXX_DiscardUnknown() {
+ xxx_messageInfo_QueryResult.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_QueryResult proto.InternalMessageInfo
+
+func (m *QueryResult) GetCursor() *Cursor {
+ if m != nil {
+ return m.Cursor
+ }
+ return nil
+}
+
+func (m *QueryResult) GetResult() []*EntityProto {
+ if m != nil {
+ return m.Result
+ }
+ return nil
+}
+
+func (m *QueryResult) GetSkippedResults() int32 {
+ if m != nil && m.SkippedResults != nil {
+ return *m.SkippedResults
+ }
+ return 0
+}
+
+func (m *QueryResult) GetMoreResults() bool {
+ if m != nil && m.MoreResults != nil {
+ return *m.MoreResults
+ }
+ return false
+}
+
+func (m *QueryResult) GetKeysOnly() bool {
+ if m != nil && m.KeysOnly != nil {
+ return *m.KeysOnly
+ }
+ return false
+}
+
+func (m *QueryResult) GetIndexOnly() bool {
+ if m != nil && m.IndexOnly != nil {
+ return *m.IndexOnly
+ }
+ return false
+}
+
+func (m *QueryResult) GetSmallOps() bool {
+ if m != nil && m.SmallOps != nil {
+ return *m.SmallOps
+ }
+ return false
+}
+
+func (m *QueryResult) GetCompiledQuery() *CompiledQuery {
+ if m != nil {
+ return m.CompiledQuery
+ }
+ return nil
+}
+
+func (m *QueryResult) GetCompiledCursor() *CompiledCursor {
+ if m != nil {
+ return m.CompiledCursor
+ }
+ return nil
+}
+
+func (m *QueryResult) GetIndex() []*CompositeIndex {
+ if m != nil {
+ return m.Index
+ }
+ return nil
+}
+
+func (m *QueryResult) GetVersion() []int64 {
+ if m != nil {
+ return m.Version
+ }
+ return nil
+}
+
+type AllocateIdsRequest struct {
+ Header *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
+ ModelKey *Reference `protobuf:"bytes,1,opt,name=model_key,json=modelKey" json:"model_key,omitempty"`
+ Size *int64 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"`
+ Max *int64 `protobuf:"varint,3,opt,name=max" json:"max,omitempty"`
+ Reserve []*Reference `protobuf:"bytes,5,rep,name=reserve" json:"reserve,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AllocateIdsRequest) Reset() { *m = AllocateIdsRequest{} }
+func (m *AllocateIdsRequest) String() string { return proto.CompactTextString(m) }
+func (*AllocateIdsRequest) ProtoMessage() {}
+func (*AllocateIdsRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{31}
+}
+func (m *AllocateIdsRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AllocateIdsRequest.Unmarshal(m, b)
+}
+func (m *AllocateIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AllocateIdsRequest.Marshal(b, m, deterministic)
+}
+func (dst *AllocateIdsRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AllocateIdsRequest.Merge(dst, src)
+}
+func (m *AllocateIdsRequest) XXX_Size() int {
+ return xxx_messageInfo_AllocateIdsRequest.Size(m)
+}
+func (m *AllocateIdsRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_AllocateIdsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AllocateIdsRequest proto.InternalMessageInfo
+
+func (m *AllocateIdsRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *AllocateIdsRequest) GetModelKey() *Reference {
+ if m != nil {
+ return m.ModelKey
+ }
+ return nil
+}
+
+func (m *AllocateIdsRequest) GetSize() int64 {
+ if m != nil && m.Size != nil {
+ return *m.Size
+ }
+ return 0
+}
+
+func (m *AllocateIdsRequest) GetMax() int64 {
+ if m != nil && m.Max != nil {
+ return *m.Max
+ }
+ return 0
+}
+
+func (m *AllocateIdsRequest) GetReserve() []*Reference {
+ if m != nil {
+ return m.Reserve
+ }
+ return nil
+}
+
+type AllocateIdsResponse struct {
+ Start *int64 `protobuf:"varint,1,req,name=start" json:"start,omitempty"`
+ End *int64 `protobuf:"varint,2,req,name=end" json:"end,omitempty"`
+ Cost *Cost `protobuf:"bytes,3,opt,name=cost" json:"cost,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AllocateIdsResponse) Reset() { *m = AllocateIdsResponse{} }
+func (m *AllocateIdsResponse) String() string { return proto.CompactTextString(m) }
+func (*AllocateIdsResponse) ProtoMessage() {}
+func (*AllocateIdsResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{32}
+}
+func (m *AllocateIdsResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AllocateIdsResponse.Unmarshal(m, b)
+}
+func (m *AllocateIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AllocateIdsResponse.Marshal(b, m, deterministic)
+}
+func (dst *AllocateIdsResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AllocateIdsResponse.Merge(dst, src)
+}
+func (m *AllocateIdsResponse) XXX_Size() int {
+ return xxx_messageInfo_AllocateIdsResponse.Size(m)
+}
+func (m *AllocateIdsResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_AllocateIdsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AllocateIdsResponse proto.InternalMessageInfo
+
+func (m *AllocateIdsResponse) GetStart() int64 {
+ if m != nil && m.Start != nil {
+ return *m.Start
+ }
+ return 0
+}
+
+func (m *AllocateIdsResponse) GetEnd() int64 {
+ if m != nil && m.End != nil {
+ return *m.End
+ }
+ return 0
+}
+
+func (m *AllocateIdsResponse) GetCost() *Cost {
+ if m != nil {
+ return m.Cost
+ }
+ return nil
+}
+
+type CompositeIndices struct {
+ Index []*CompositeIndex `protobuf:"bytes,1,rep,name=index" json:"index,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CompositeIndices) Reset() { *m = CompositeIndices{} }
+func (m *CompositeIndices) String() string { return proto.CompactTextString(m) }
+func (*CompositeIndices) ProtoMessage() {}
+func (*CompositeIndices) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{33}
+}
+func (m *CompositeIndices) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CompositeIndices.Unmarshal(m, b)
+}
+func (m *CompositeIndices) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CompositeIndices.Marshal(b, m, deterministic)
+}
+func (dst *CompositeIndices) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CompositeIndices.Merge(dst, src)
+}
+func (m *CompositeIndices) XXX_Size() int {
+ return xxx_messageInfo_CompositeIndices.Size(m)
+}
+func (m *CompositeIndices) XXX_DiscardUnknown() {
+ xxx_messageInfo_CompositeIndices.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeIndices proto.InternalMessageInfo
+
+func (m *CompositeIndices) GetIndex() []*CompositeIndex {
+ if m != nil {
+ return m.Index
+ }
+ return nil
+}
+
+type AddActionsRequest struct {
+ Header *InternalHeader `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
+ Transaction *Transaction `protobuf:"bytes,1,req,name=transaction" json:"transaction,omitempty"`
+ Action []*Action `protobuf:"bytes,2,rep,name=action" json:"action,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AddActionsRequest) Reset() { *m = AddActionsRequest{} }
+func (m *AddActionsRequest) String() string { return proto.CompactTextString(m) }
+func (*AddActionsRequest) ProtoMessage() {}
+func (*AddActionsRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{34}
+}
+func (m *AddActionsRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AddActionsRequest.Unmarshal(m, b)
+}
+func (m *AddActionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AddActionsRequest.Marshal(b, m, deterministic)
+}
+func (dst *AddActionsRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AddActionsRequest.Merge(dst, src)
+}
+func (m *AddActionsRequest) XXX_Size() int {
+ return xxx_messageInfo_AddActionsRequest.Size(m)
+}
+func (m *AddActionsRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_AddActionsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AddActionsRequest proto.InternalMessageInfo
+
+func (m *AddActionsRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *AddActionsRequest) GetTransaction() *Transaction {
+ if m != nil {
+ return m.Transaction
+ }
+ return nil
+}
+
+func (m *AddActionsRequest) GetAction() []*Action {
+ if m != nil {
+ return m.Action
+ }
+ return nil
+}
+
+type AddActionsResponse struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AddActionsResponse) Reset() { *m = AddActionsResponse{} }
+func (m *AddActionsResponse) String() string { return proto.CompactTextString(m) }
+func (*AddActionsResponse) ProtoMessage() {}
+func (*AddActionsResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{35}
+}
+func (m *AddActionsResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AddActionsResponse.Unmarshal(m, b)
+}
+func (m *AddActionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AddActionsResponse.Marshal(b, m, deterministic)
+}
+func (dst *AddActionsResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AddActionsResponse.Merge(dst, src)
+}
+func (m *AddActionsResponse) XXX_Size() int {
+ return xxx_messageInfo_AddActionsResponse.Size(m)
+}
+func (m *AddActionsResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_AddActionsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AddActionsResponse proto.InternalMessageInfo
+
+type BeginTransactionRequest struct {
+ Header *InternalHeader `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
+ App *string `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
+ AllowMultipleEg *bool `protobuf:"varint,2,opt,name=allow_multiple_eg,json=allowMultipleEg,def=0" json:"allow_multiple_eg,omitempty"`
+ DatabaseId *string `protobuf:"bytes,4,opt,name=database_id,json=databaseId" json:"database_id,omitempty"`
+ Mode *BeginTransactionRequest_TransactionMode `protobuf:"varint,5,opt,name=mode,enum=appengine.BeginTransactionRequest_TransactionMode,def=0" json:"mode,omitempty"`
+ PreviousTransaction *Transaction `protobuf:"bytes,7,opt,name=previous_transaction,json=previousTransaction" json:"previous_transaction,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} }
+func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) }
+func (*BeginTransactionRequest) ProtoMessage() {}
+func (*BeginTransactionRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{36}
+}
+func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b)
+}
+func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic)
+}
+func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_BeginTransactionRequest.Merge(dst, src)
+}
+func (m *BeginTransactionRequest) XXX_Size() int {
+ return xxx_messageInfo_BeginTransactionRequest.Size(m)
+}
+func (m *BeginTransactionRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo
+
+const Default_BeginTransactionRequest_AllowMultipleEg bool = false
+const Default_BeginTransactionRequest_Mode BeginTransactionRequest_TransactionMode = BeginTransactionRequest_UNKNOWN
+
+func (m *BeginTransactionRequest) GetHeader() *InternalHeader {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *BeginTransactionRequest) GetApp() string {
+ if m != nil && m.App != nil {
+ return *m.App
+ }
+ return ""
+}
+
+func (m *BeginTransactionRequest) GetAllowMultipleEg() bool {
+ if m != nil && m.AllowMultipleEg != nil {
+ return *m.AllowMultipleEg
+ }
+ return Default_BeginTransactionRequest_AllowMultipleEg
+}
+
+func (m *BeginTransactionRequest) GetDatabaseId() string {
+ if m != nil && m.DatabaseId != nil {
+ return *m.DatabaseId
+ }
+ return ""
+}
+
+func (m *BeginTransactionRequest) GetMode() BeginTransactionRequest_TransactionMode {
+ if m != nil && m.Mode != nil {
+ return *m.Mode
+ }
+ return Default_BeginTransactionRequest_Mode
+}
+
+func (m *BeginTransactionRequest) GetPreviousTransaction() *Transaction {
+ if m != nil {
+ return m.PreviousTransaction
+ }
+ return nil
+}
+
+type CommitResponse struct {
+ Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+ Version []*CommitResponse_Version `protobuf:"group,3,rep,name=Version,json=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CommitResponse) Reset() { *m = CommitResponse{} }
+func (m *CommitResponse) String() string { return proto.CompactTextString(m) }
+func (*CommitResponse) ProtoMessage() {}
+func (*CommitResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{37}
+}
+func (m *CommitResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CommitResponse.Unmarshal(m, b)
+}
+func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic)
+}
+func (dst *CommitResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CommitResponse.Merge(dst, src)
+}
+func (m *CommitResponse) XXX_Size() int {
+ return xxx_messageInfo_CommitResponse.Size(m)
+}
+func (m *CommitResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_CommitResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CommitResponse proto.InternalMessageInfo
+
+func (m *CommitResponse) GetCost() *Cost {
+ if m != nil {
+ return m.Cost
+ }
+ return nil
+}
+
+func (m *CommitResponse) GetVersion() []*CommitResponse_Version {
+ if m != nil {
+ return m.Version
+ }
+ return nil
+}
+
+type CommitResponse_Version struct {
+ RootEntityKey *Reference `protobuf:"bytes,4,req,name=root_entity_key,json=rootEntityKey" json:"root_entity_key,omitempty"`
+ Version *int64 `protobuf:"varint,5,req,name=version" json:"version,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CommitResponse_Version) Reset() { *m = CommitResponse_Version{} }
+func (m *CommitResponse_Version) String() string { return proto.CompactTextString(m) }
+func (*CommitResponse_Version) ProtoMessage() {}
+func (*CommitResponse_Version) Descriptor() ([]byte, []int) {
+ return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{37, 0}
+}
+func (m *CommitResponse_Version) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CommitResponse_Version.Unmarshal(m, b)
+}
+func (m *CommitResponse_Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CommitResponse_Version.Marshal(b, m, deterministic)
+}
+func (dst *CommitResponse_Version) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CommitResponse_Version.Merge(dst, src)
+}
+func (m *CommitResponse_Version) XXX_Size() int {
+ return xxx_messageInfo_CommitResponse_Version.Size(m)
+}
+func (m *CommitResponse_Version) XXX_DiscardUnknown() {
+ xxx_messageInfo_CommitResponse_Version.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CommitResponse_Version proto.InternalMessageInfo
+
+func (m *CommitResponse_Version) GetRootEntityKey() *Reference {
+ if m != nil {
+ return m.RootEntityKey
+ }
+ return nil
+}
+
+func (m *CommitResponse_Version) GetVersion() int64 {
+ if m != nil && m.Version != nil {
+ return *m.Version
+ }
+ return 0
+}
+
+func init() {
+ proto.RegisterType((*Action)(nil), "appengine.Action")
+ proto.RegisterType((*PropertyValue)(nil), "appengine.PropertyValue")
+ proto.RegisterType((*PropertyValue_PointValue)(nil), "appengine.PropertyValue.PointValue")
+ proto.RegisterType((*PropertyValue_UserValue)(nil), "appengine.PropertyValue.UserValue")
+ proto.RegisterType((*PropertyValue_ReferenceValue)(nil), "appengine.PropertyValue.ReferenceValue")
+ proto.RegisterType((*PropertyValue_ReferenceValue_PathElement)(nil), "appengine.PropertyValue.ReferenceValue.PathElement")
+ proto.RegisterType((*Property)(nil), "appengine.Property")
+ proto.RegisterType((*Path)(nil), "appengine.Path")
+ proto.RegisterType((*Path_Element)(nil), "appengine.Path.Element")
+ proto.RegisterType((*Reference)(nil), "appengine.Reference")
+ proto.RegisterType((*User)(nil), "appengine.User")
+ proto.RegisterType((*EntityProto)(nil), "appengine.EntityProto")
+ proto.RegisterType((*CompositeProperty)(nil), "appengine.CompositeProperty")
+ proto.RegisterType((*Index)(nil), "appengine.Index")
+ proto.RegisterType((*Index_Property)(nil), "appengine.Index.Property")
+ proto.RegisterType((*CompositeIndex)(nil), "appengine.CompositeIndex")
+ proto.RegisterType((*IndexPostfix)(nil), "appengine.IndexPostfix")
+ proto.RegisterType((*IndexPostfix_IndexValue)(nil), "appengine.IndexPostfix.IndexValue")
+ proto.RegisterType((*IndexPosition)(nil), "appengine.IndexPosition")
+ proto.RegisterType((*Snapshot)(nil), "appengine.Snapshot")
+ proto.RegisterType((*InternalHeader)(nil), "appengine.InternalHeader")
+ proto.RegisterType((*Transaction)(nil), "appengine.Transaction")
+ proto.RegisterType((*Query)(nil), "appengine.Query")
+ proto.RegisterType((*Query_Filter)(nil), "appengine.Query.Filter")
+ proto.RegisterType((*Query_Order)(nil), "appengine.Query.Order")
+ proto.RegisterType((*CompiledQuery)(nil), "appengine.CompiledQuery")
+ proto.RegisterType((*CompiledQuery_PrimaryScan)(nil), "appengine.CompiledQuery.PrimaryScan")
+ proto.RegisterType((*CompiledQuery_MergeJoinScan)(nil), "appengine.CompiledQuery.MergeJoinScan")
+ proto.RegisterType((*CompiledQuery_EntityFilter)(nil), "appengine.CompiledQuery.EntityFilter")
+ proto.RegisterType((*CompiledCursor)(nil), "appengine.CompiledCursor")
+ proto.RegisterType((*CompiledCursor_Position)(nil), "appengine.CompiledCursor.Position")
+ proto.RegisterType((*CompiledCursor_Position_IndexValue)(nil), "appengine.CompiledCursor.Position.IndexValue")
+ proto.RegisterType((*Cursor)(nil), "appengine.Cursor")
+ proto.RegisterType((*Error)(nil), "appengine.Error")
+ proto.RegisterType((*Cost)(nil), "appengine.Cost")
+ proto.RegisterType((*Cost_CommitCost)(nil), "appengine.Cost.CommitCost")
+ proto.RegisterType((*GetRequest)(nil), "appengine.GetRequest")
+ proto.RegisterType((*GetResponse)(nil), "appengine.GetResponse")
+ proto.RegisterType((*GetResponse_Entity)(nil), "appengine.GetResponse.Entity")
+ proto.RegisterType((*PutRequest)(nil), "appengine.PutRequest")
+ proto.RegisterType((*PutResponse)(nil), "appengine.PutResponse")
+ proto.RegisterType((*TouchRequest)(nil), "appengine.TouchRequest")
+ proto.RegisterType((*TouchResponse)(nil), "appengine.TouchResponse")
+ proto.RegisterType((*DeleteRequest)(nil), "appengine.DeleteRequest")
+ proto.RegisterType((*DeleteResponse)(nil), "appengine.DeleteResponse")
+ proto.RegisterType((*NextRequest)(nil), "appengine.NextRequest")
+ proto.RegisterType((*QueryResult)(nil), "appengine.QueryResult")
+ proto.RegisterType((*AllocateIdsRequest)(nil), "appengine.AllocateIdsRequest")
+ proto.RegisterType((*AllocateIdsResponse)(nil), "appengine.AllocateIdsResponse")
+ proto.RegisterType((*CompositeIndices)(nil), "appengine.CompositeIndices")
+ proto.RegisterType((*AddActionsRequest)(nil), "appengine.AddActionsRequest")
+ proto.RegisterType((*AddActionsResponse)(nil), "appengine.AddActionsResponse")
+ proto.RegisterType((*BeginTransactionRequest)(nil), "appengine.BeginTransactionRequest")
+ proto.RegisterType((*CommitResponse)(nil), "appengine.CommitResponse")
+ proto.RegisterType((*CommitResponse_Version)(nil), "appengine.CommitResponse.Version")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/datastore/datastore_v3.proto", fileDescriptor_datastore_v3_83b17b80c34f6179)
+}
+
+var fileDescriptor_datastore_v3_83b17b80c34f6179 = []byte{
+ // 4156 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0xcd, 0x73, 0xe3, 0x46,
+ 0x76, 0x37, 0xc1, 0xef, 0x47, 0x89, 0x82, 0x5a, 0xf3, 0xc1, 0xa1, 0x3f, 0x46, 0xc6, 0xac, 0x6d,
+ 0xd9, 0x6b, 0x73, 0x6c, 0xf9, 0x23, 0x5b, 0x4a, 0x76, 0x1d, 0x4a, 0xc4, 0x68, 0x90, 0xa1, 0x48,
+ 0xb9, 0x09, 0xd9, 0x9e, 0x5c, 0x50, 0x18, 0xa2, 0x29, 0x21, 0x43, 0x02, 0x30, 0x00, 0x6a, 0x46,
+ 0x93, 0xe4, 0x90, 0x4b, 0x2a, 0x55, 0x5b, 0xa9, 0x1c, 0x92, 0x4a, 0x25, 0xf9, 0x07, 0x72, 0xc8,
+ 0x39, 0x95, 0xaa, 0x54, 0xf6, 0x98, 0x5b, 0x0e, 0x7b, 0xc9, 0x31, 0x95, 0x73, 0xf2, 0x27, 0x24,
+ 0x39, 0xa4, 0xfa, 0x75, 0x03, 0x02, 0x28, 0x4a, 0x23, 0x6d, 0xf6, 0x90, 0x13, 0xd1, 0xef, 0xfd,
+ 0xba, 0xf1, 0xfa, 0xf5, 0xfb, 0x6c, 0x10, 0xba, 0xc7, 0xbe, 0x7f, 0x3c, 0x65, 0x9d, 0x63, 0x7f,
+ 0x6a, 0x7b, 0xc7, 0x1d, 0x3f, 0x3c, 0x7e, 0x68, 0x07, 0x01, 0xf3, 0x8e, 0x5d, 0x8f, 0x3d, 0x74,
+ 0xbd, 0x98, 0x85, 0x9e, 0x3d, 0x7d, 0xe8, 0xd8, 0xb1, 0x1d, 0xc5, 0x7e, 0xc8, 0xce, 0x9f, 0xac,
+ 0xd3, 0xcf, 0x3b, 0x41, 0xe8, 0xc7, 0x3e, 0xa9, 0xa7, 0x13, 0xb4, 0x1a, 0x54, 0xba, 0xe3, 0xd8,
+ 0xf5, 0x3d, 0xed, 0x1f, 0x2b, 0xb0, 0x7a, 0x18, 0xfa, 0x01, 0x0b, 0xe3, 0xb3, 0x6f, 0xed, 0xe9,
+ 0x9c, 0x91, 0x77, 0x00, 0x5c, 0x2f, 0xfe, 0xea, 0x0b, 0x1c, 0xb5, 0x0a, 0x9b, 0x85, 0xad, 0x22,
+ 0xcd, 0x50, 0x88, 0x06, 0x2b, 0xcf, 0x7c, 0x7f, 0xca, 0x6c, 0x4f, 0x20, 0x94, 0xcd, 0xc2, 0x56,
+ 0x8d, 0xe6, 0x68, 0x64, 0x13, 0x1a, 0x51, 0x1c, 0xba, 0xde, 0xb1, 0x80, 0x14, 0x37, 0x0b, 0x5b,
+ 0x75, 0x9a, 0x25, 0x71, 0x84, 0xe3, 0xcf, 0x9f, 0x4d, 0x99, 0x40, 0x94, 0x36, 0x0b, 0x5b, 0x05,
+ 0x9a, 0x25, 0x91, 0x3d, 0x80, 0xc0, 0x77, 0xbd, 0xf8, 0x14, 0x01, 0xe5, 0xcd, 0xc2, 0x16, 0x6c,
+ 0x3f, 0xe8, 0xa4, 0x7b, 0xe8, 0xe4, 0xa4, 0xee, 0x1c, 0x72, 0x28, 0x3e, 0xd2, 0xcc, 0x34, 0xf2,
+ 0xdb, 0x50, 0x9f, 0x47, 0x2c, 0x14, 0x6b, 0xd4, 0x70, 0x0d, 0xed, 0xd2, 0x35, 0x8e, 0x22, 0x16,
+ 0x8a, 0x25, 0xce, 0x27, 0x91, 0x21, 0x34, 0x43, 0x36, 0x61, 0x21, 0xf3, 0xc6, 0x4c, 0x2c, 0xb3,
+ 0x82, 0xcb, 0x7c, 0x70, 0xe9, 0x32, 0x34, 0x81, 0x8b, 0xb5, 0x16, 0xa6, 0xb7, 0xb7, 0x00, 0xce,
+ 0x85, 0x25, 0x2b, 0x50, 0x78, 0xd9, 0xaa, 0x6c, 0x2a, 0x5b, 0x05, 0x5a, 0x78, 0xc9, 0x47, 0x67,
+ 0xad, 0xaa, 0x18, 0x9d, 0xb5, 0xff, 0xa9, 0x00, 0xf5, 0x54, 0x26, 0x72, 0x0b, 0xca, 0x6c, 0x66,
+ 0xbb, 0xd3, 0x56, 0x7d, 0x53, 0xd9, 0xaa, 0x53, 0x31, 0x20, 0xf7, 0xa1, 0x61, 0xcf, 0xe3, 0x13,
+ 0xcb, 0xf1, 0x67, 0xb6, 0xeb, 0xb5, 0x00, 0x79, 0xc0, 0x49, 0x3d, 0xa4, 0x90, 0x36, 0xd4, 0x3c,
+ 0x77, 0xfc, 0xdc, 0xb3, 0x67, 0xac, 0xd5, 0xc0, 0x73, 0x48, 0xc7, 0xe4, 0x13, 0x20, 0x13, 0xe6,
+ 0xb0, 0xd0, 0x8e, 0x99, 0x63, 0xb9, 0x0e, 0xf3, 0x62, 0x37, 0x3e, 0x6b, 0xdd, 0x46, 0xd4, 0x7a,
+ 0xca, 0x31, 0x24, 0x23, 0x0f, 0x0f, 0x42, 0xff, 0xd4, 0x75, 0x58, 0xd8, 0xba, 0xb3, 0x00, 0x3f,
+ 0x94, 0x8c, 0xf6, 0xbf, 0x17, 0xa0, 0x99, 0xd7, 0x05, 0x51, 0xa1, 0x68, 0x07, 0x41, 0x6b, 0x15,
+ 0xa5, 0xe4, 0x8f, 0xe4, 0x6d, 0x00, 0x2e, 0x8a, 0x15, 0x05, 0xf6, 0x98, 0xb5, 0x6e, 0xe1, 0x5a,
+ 0x75, 0x4e, 0x19, 0x71, 0x02, 0x39, 0x82, 0x46, 0x60, 0xc7, 0x27, 0x6c, 0xca, 0x66, 0xcc, 0x8b,
+ 0x5b, 0xcd, 0xcd, 0xe2, 0x16, 0x6c, 0x7f, 0x7e, 0x4d, 0xd5, 0x77, 0x0e, 0xed, 0xf8, 0x44, 0x17,
+ 0x53, 0x69, 0x76, 0x9d, 0xb6, 0x0e, 0x8d, 0x0c, 0x8f, 0x10, 0x28, 0xc5, 0x67, 0x01, 0x6b, 0xad,
+ 0xa1, 0x5c, 0xf8, 0x4c, 0x9a, 0xa0, 0xb8, 0x4e, 0x4b, 0x45, 0xf3, 0x57, 0x5c, 0x87, 0x63, 0x50,
+ 0x87, 0xeb, 0x28, 0x22, 0x3e, 0x6b, 0xff, 0x51, 0x86, 0x5a, 0x22, 0x00, 0xe9, 0x42, 0x75, 0xc6,
+ 0x6c, 0xcf, 0xf5, 0x8e, 0xd1, 0x69, 0x9a, 0xdb, 0x6f, 0x2e, 0x11, 0xb3, 0x73, 0x20, 0x20, 0x3b,
+ 0x30, 0x18, 0x5a, 0x07, 0x7a, 0x77, 0x60, 0x0c, 0xf6, 0x69, 0x32, 0x8f, 0x1f, 0xa6, 0x7c, 0xb4,
+ 0xe6, 0xa1, 0x8b, 0x9e, 0x55, 0xa7, 0x20, 0x49, 0x47, 0xa1, 0x9b, 0x0a, 0x51, 0x14, 0x82, 0xe2,
+ 0x21, 0x76, 0xa0, 0x9c, 0xb8, 0x88, 0xb2, 0xd5, 0xd8, 0x6e, 0x5d, 0xa6, 0x1c, 0x2a, 0x60, 0xdc,
+ 0x20, 0x66, 0xf3, 0x69, 0xec, 0x06, 0x53, 0xee, 0x76, 0xca, 0x56, 0x8d, 0xa6, 0x63, 0xf2, 0x1e,
+ 0x40, 0xc4, 0xec, 0x70, 0x7c, 0x62, 0x3f, 0x9b, 0xb2, 0x56, 0x85, 0x7b, 0xf6, 0x4e, 0x79, 0x62,
+ 0x4f, 0x23, 0x46, 0x33, 0x0c, 0x62, 0xc3, 0xdd, 0x49, 0x1c, 0x59, 0xb1, 0xff, 0x9c, 0x79, 0xee,
+ 0x2b, 0x9b, 0x07, 0x12, 0xcb, 0x0f, 0xf8, 0x0f, 0xfa, 0x58, 0x73, 0xfb, 0xc3, 0x65, 0x5b, 0x7f,
+ 0x14, 0x47, 0x66, 0x66, 0xc6, 0x10, 0x27, 0xd0, 0xdb, 0x93, 0x65, 0x64, 0xd2, 0x86, 0xca, 0xd4,
+ 0x1f, 0xdb, 0x53, 0xd6, 0xaa, 0x73, 0x2d, 0xec, 0x28, 0xcc, 0xa3, 0x92, 0xa2, 0xfd, 0xb3, 0x02,
+ 0x55, 0xa9, 0x47, 0xd2, 0x84, 0x8c, 0x26, 0xd5, 0x37, 0x48, 0x0d, 0x4a, 0xbb, 0xfd, 0xe1, 0xae,
+ 0xda, 0xe4, 0x4f, 0xa6, 0xfe, 0xbd, 0xa9, 0xae, 0x71, 0xcc, 0xee, 0x53, 0x53, 0x1f, 0x99, 0x94,
+ 0x63, 0x54, 0xb2, 0x0e, 0xab, 0x5d, 0x73, 0x78, 0x60, 0xed, 0x75, 0x4d, 0x7d, 0x7f, 0x48, 0x9f,
+ 0xaa, 0x05, 0xb2, 0x0a, 0x75, 0x24, 0xf5, 0x8d, 0xc1, 0x13, 0x55, 0xe1, 0x33, 0x70, 0x68, 0x1a,
+ 0x66, 0x5f, 0x57, 0x8b, 0x44, 0x85, 0x15, 0x31, 0x63, 0x38, 0x30, 0xf5, 0x81, 0xa9, 0x96, 0x52,
+ 0xca, 0xe8, 0xe8, 0xe0, 0xa0, 0x4b, 0x9f, 0xaa, 0x65, 0xb2, 0x06, 0x0d, 0xa4, 0x74, 0x8f, 0xcc,
+ 0xc7, 0x43, 0xaa, 0x56, 0x48, 0x03, 0xaa, 0xfb, 0x3d, 0xeb, 0xbb, 0xc7, 0xfa, 0x40, 0xad, 0x92,
+ 0x15, 0xa8, 0xed, 0xf7, 0x2c, 0xfd, 0xa0, 0x6b, 0xf4, 0xd5, 0x1a, 0x9f, 0xbd, 0xaf, 0x0f, 0xe9,
+ 0x68, 0x64, 0x1d, 0x0e, 0x8d, 0x81, 0xa9, 0xd6, 0x49, 0x1d, 0xca, 0xfb, 0x3d, 0xcb, 0x38, 0x50,
+ 0x81, 0x10, 0x68, 0xee, 0xf7, 0xac, 0xc3, 0xc7, 0xc3, 0x81, 0x3e, 0x38, 0x3a, 0xd8, 0xd5, 0xa9,
+ 0xda, 0x20, 0xb7, 0x40, 0xe5, 0xb4, 0xe1, 0xc8, 0xec, 0xf6, 0xbb, 0xbd, 0x1e, 0xd5, 0x47, 0x23,
+ 0x75, 0x85, 0x4b, 0xbd, 0xdf, 0xb3, 0x68, 0xd7, 0xe4, 0xfb, 0x5a, 0xe5, 0x2f, 0xe4, 0x7b, 0x7f,
+ 0xa2, 0x3f, 0x55, 0xd7, 0xf9, 0x2b, 0xf4, 0x81, 0x69, 0x98, 0x4f, 0xad, 0x43, 0x3a, 0x34, 0x87,
+ 0xea, 0x06, 0x17, 0xd0, 0x18, 0xf4, 0xf4, 0xef, 0xad, 0x6f, 0xbb, 0xfd, 0x23, 0x5d, 0x25, 0xda,
+ 0x8f, 0xe1, 0xf6, 0xd2, 0x33, 0xe1, 0xaa, 0x7b, 0x6c, 0x1e, 0xf4, 0xd5, 0x02, 0x7f, 0xe2, 0x9b,
+ 0x52, 0x15, 0xed, 0x0f, 0xa0, 0xc4, 0x5d, 0x86, 0x7c, 0x06, 0xd5, 0xc4, 0x1b, 0x0b, 0xe8, 0x8d,
+ 0x77, 0xb3, 0x67, 0x6d, 0xc7, 0x27, 0x9d, 0xc4, 0xe3, 0x12, 0x5c, 0xbb, 0x0b, 0xd5, 0x45, 0x4f,
+ 0x53, 0x2e, 0x78, 0x5a, 0xf1, 0x82, 0xa7, 0x95, 0x32, 0x9e, 0x66, 0x43, 0x3d, 0xf5, 0xed, 0x9b,
+ 0x47, 0x91, 0x07, 0x50, 0xe2, 0xde, 0xdf, 0x6a, 0xa2, 0x87, 0xac, 0x2d, 0x08, 0x4c, 0x91, 0xa9,
+ 0xfd, 0x43, 0x01, 0x4a, 0x3c, 0xda, 0x9e, 0x07, 0xda, 0xc2, 0x15, 0x81, 0x56, 0xb9, 0x32, 0xd0,
+ 0x16, 0xaf, 0x15, 0x68, 0x2b, 0x37, 0x0b, 0xb4, 0xd5, 0x4b, 0x02, 0xad, 0xf6, 0x67, 0x45, 0x68,
+ 0xe8, 0x38, 0xf3, 0x10, 0x13, 0xfd, 0xfb, 0x50, 0x7c, 0xce, 0xce, 0x50, 0x3f, 0x8d, 0xed, 0x5b,
+ 0x99, 0xdd, 0xa6, 0x2a, 0xa4, 0x1c, 0x40, 0xb6, 0x61, 0x45, 0xbc, 0xd0, 0x3a, 0x0e, 0xfd, 0x79,
+ 0xd0, 0x52, 0x97, 0xab, 0xa7, 0x21, 0x40, 0xfb, 0x1c, 0x43, 0xde, 0x83, 0xb2, 0xff, 0xc2, 0x63,
+ 0x21, 0xc6, 0xc1, 0x3c, 0x98, 0x2b, 0x8f, 0x0a, 0x2e, 0x79, 0x08, 0xa5, 0xe7, 0xae, 0xe7, 0xe0,
+ 0x19, 0xe6, 0x23, 0x61, 0x46, 0xd0, 0xce, 0x13, 0xd7, 0x73, 0x28, 0x02, 0xc9, 0x3d, 0xa8, 0xf1,
+ 0x5f, 0x8c, 0x7b, 0x65, 0xdc, 0x68, 0x95, 0x8f, 0x79, 0xd0, 0x7b, 0x08, 0xb5, 0x40, 0xc6, 0x10,
+ 0x4c, 0x00, 0x8d, 0xed, 0x8d, 0x25, 0xe1, 0x85, 0xa6, 0x20, 0xf2, 0x15, 0xac, 0x84, 0xf6, 0x0b,
+ 0x2b, 0x9d, 0xb4, 0x76, 0xf9, 0xa4, 0x46, 0x68, 0xbf, 0x48, 0x23, 0x38, 0x81, 0x52, 0x68, 0x7b,
+ 0xcf, 0x5b, 0x64, 0xb3, 0xb0, 0x55, 0xa6, 0xf8, 0xac, 0x7d, 0x01, 0x25, 0x2e, 0x25, 0x8f, 0x08,
+ 0xfb, 0x3d, 0xf4, 0xff, 0xee, 0x9e, 0xa9, 0x16, 0x12, 0x7f, 0xfe, 0x96, 0x47, 0x03, 0x45, 0x72,
+ 0x0f, 0xf4, 0xd1, 0xa8, 0xbb, 0xaf, 0xab, 0x45, 0xad, 0x07, 0xeb, 0x7b, 0xfe, 0x2c, 0xf0, 0x23,
+ 0x37, 0x66, 0xe9, 0xf2, 0xf7, 0xa0, 0xe6, 0x7a, 0x0e, 0x7b, 0x69, 0xb9, 0x0e, 0x9a, 0x56, 0x91,
+ 0x56, 0x71, 0x6c, 0x38, 0xdc, 0xe4, 0x4e, 0x65, 0x31, 0x55, 0xe4, 0x26, 0x87, 0x03, 0xed, 0x2f,
+ 0x15, 0x28, 0x1b, 0x1c, 0xc1, 0x8d, 0x4f, 0x9e, 0x14, 0x7a, 0x8f, 0x30, 0x4c, 0x10, 0x24, 0x93,
+ 0xfb, 0x50, 0x1b, 0x6a, 0xb6, 0x37, 0x66, 0xbc, 0xe2, 0xc3, 0x3c, 0x50, 0xa3, 0xe9, 0x98, 0x7c,
+ 0x99, 0xd1, 0x9f, 0x82, 0x2e, 0x7b, 0x2f, 0xa3, 0x0a, 0x7c, 0xc1, 0x12, 0x2d, 0xb6, 0xff, 0xaa,
+ 0x90, 0x49, 0x6e, 0xcb, 0x12, 0x4f, 0x1f, 0xea, 0x8e, 0x1b, 0x32, 0xac, 0x23, 0xe5, 0x41, 0x3f,
+ 0xb8, 0x74, 0xe1, 0x4e, 0x2f, 0x81, 0xee, 0xd4, 0xbb, 0xa3, 0x3d, 0x7d, 0xd0, 0xe3, 0x99, 0xef,
+ 0x7c, 0x01, 0xed, 0x23, 0xa8, 0xa7, 0x10, 0x0c, 0xc7, 0x09, 0x48, 0x2d, 0x70, 0xf5, 0xf6, 0xf4,
+ 0x74, 0xac, 0x68, 0x7f, 0xad, 0x40, 0x33, 0xd5, 0xaf, 0xd0, 0xd0, 0x6d, 0xa8, 0xd8, 0x41, 0x90,
+ 0xa8, 0xb6, 0x4e, 0xcb, 0x76, 0x10, 0x18, 0x8e, 0x8c, 0x2d, 0x0a, 0x6a, 0x9b, 0xc7, 0x96, 0x4f,
+ 0x01, 0x1c, 0x36, 0x71, 0x3d, 0x17, 0x85, 0x2e, 0xa2, 0xc1, 0xab, 0x8b, 0x42, 0xd3, 0x0c, 0x86,
+ 0x7c, 0x09, 0xe5, 0x28, 0xb6, 0x63, 0x91, 0x2b, 0x9b, 0xdb, 0xf7, 0x33, 0xe0, 0xbc, 0x08, 0x9d,
+ 0x11, 0x87, 0x51, 0x81, 0x26, 0x5f, 0xc1, 0x2d, 0xdf, 0x9b, 0x9e, 0x59, 0xf3, 0x88, 0x59, 0xee,
+ 0xc4, 0x0a, 0xd9, 0x0f, 0x73, 0x37, 0x64, 0x4e, 0x3e, 0xa7, 0xae, 0x73, 0xc8, 0x51, 0xc4, 0x8c,
+ 0x09, 0x95, 0x7c, 0xed, 0x6b, 0x28, 0xe3, 0x3a, 0x7c, 0xcf, 0xdf, 0x51, 0xc3, 0xd4, 0xad, 0xe1,
+ 0xa0, 0xff, 0x54, 0xe8, 0x80, 0xea, 0xdd, 0x9e, 0x85, 0x44, 0x55, 0xe1, 0xc1, 0xbe, 0xa7, 0xf7,
+ 0x75, 0x53, 0xef, 0xa9, 0x45, 0x9e, 0x3d, 0x74, 0x4a, 0x87, 0x54, 0x2d, 0x69, 0xff, 0x53, 0x80,
+ 0x15, 0x94, 0xe7, 0xd0, 0x8f, 0xe2, 0x89, 0xfb, 0x92, 0xec, 0x41, 0x43, 0x98, 0xdd, 0xa9, 0x2c,
+ 0xe8, 0xb9, 0x33, 0x68, 0x8b, 0x7b, 0x96, 0x68, 0x31, 0x90, 0x75, 0xb4, 0x9b, 0x3e, 0x27, 0x21,
+ 0x45, 0x41, 0xa7, 0xbf, 0x22, 0xa4, 0xbc, 0x05, 0x95, 0x67, 0x6c, 0xe2, 0x87, 0x22, 0x04, 0xd6,
+ 0x76, 0x4a, 0x71, 0x38, 0x67, 0x54, 0xd2, 0xda, 0x36, 0xc0, 0xf9, 0xfa, 0xe4, 0x01, 0xac, 0x26,
+ 0xc6, 0x66, 0xa1, 0x71, 0x89, 0x93, 0x5b, 0x49, 0x88, 0x83, 0x5c, 0x75, 0xa3, 0x5c, 0xab, 0xba,
+ 0xd1, 0xbe, 0x86, 0xd5, 0x64, 0x3f, 0xe2, 0xfc, 0x54, 0x21, 0x79, 0x01, 0x63, 0xca, 0x82, 0x8c,
+ 0xca, 0x45, 0x19, 0xb5, 0x9f, 0x41, 0x6d, 0xe4, 0xd9, 0x41, 0x74, 0xe2, 0xc7, 0xdc, 0x7a, 0xe2,
+ 0x48, 0xfa, 0xaa, 0x12, 0x47, 0x9a, 0x06, 0x15, 0x7e, 0x38, 0xf3, 0x88, 0xbb, 0xbf, 0x31, 0xe8,
+ 0xee, 0x99, 0xc6, 0xb7, 0xba, 0xfa, 0x06, 0x01, 0xa8, 0xc8, 0xe7, 0x82, 0xa6, 0x41, 0xd3, 0x90,
+ 0xed, 0xd8, 0x63, 0x66, 0x3b, 0x2c, 0xe4, 0x12, 0xfc, 0xe0, 0x47, 0x89, 0x04, 0x3f, 0xf8, 0x91,
+ 0xf6, 0x17, 0x05, 0x68, 0x98, 0xa1, 0xed, 0x45, 0xb6, 0x30, 0xf7, 0xcf, 0xa0, 0x72, 0x82, 0x58,
+ 0x74, 0xa3, 0xc6, 0x82, 0x7f, 0x66, 0x17, 0xa3, 0x12, 0x48, 0xee, 0x40, 0xe5, 0xc4, 0xf6, 0x9c,
+ 0xa9, 0xd0, 0x5a, 0x85, 0xca, 0x51, 0x92, 0x1b, 0x95, 0xf3, 0xdc, 0xb8, 0x05, 0x2b, 0x33, 0x3b,
+ 0x7c, 0x6e, 0x8d, 0x4f, 0x6c, 0xef, 0x98, 0x45, 0xf2, 0x60, 0xa4, 0x05, 0x36, 0x38, 0x6b, 0x4f,
+ 0x70, 0xb4, 0xbf, 0x5f, 0x81, 0xf2, 0x37, 0x73, 0x16, 0x9e, 0x65, 0x04, 0xfa, 0xe0, 0xba, 0x02,
+ 0xc9, 0x17, 0x17, 0x2e, 0x4b, 0xca, 0x6f, 0x2f, 0x26, 0x65, 0x22, 0x53, 0x84, 0xc8, 0x95, 0x22,
+ 0x0b, 0x7c, 0x9a, 0x09, 0x63, 0xeb, 0x57, 0xd8, 0xda, 0x79, 0x70, 0x7b, 0x08, 0x95, 0x89, 0x3b,
+ 0x8d, 0x51, 0x75, 0x8b, 0xd5, 0x08, 0xee, 0xa5, 0xf3, 0x08, 0xd9, 0x54, 0xc2, 0xc8, 0xbb, 0xb0,
+ 0x22, 0x2a, 0x59, 0xeb, 0x07, 0xce, 0xc6, 0x82, 0x95, 0xf7, 0xa6, 0x48, 0x13, 0xbb, 0xff, 0x18,
+ 0xca, 0x7e, 0xc8, 0x37, 0x5f, 0xc7, 0x25, 0xef, 0x5c, 0x58, 0x72, 0xc8, 0xb9, 0x54, 0x80, 0xc8,
+ 0x87, 0x50, 0x3a, 0x71, 0xbd, 0x18, 0xb3, 0x46, 0x73, 0xfb, 0xf6, 0x05, 0xf0, 0x63, 0xd7, 0x8b,
+ 0x29, 0x42, 0x78, 0x98, 0x1f, 0xfb, 0x73, 0x2f, 0x6e, 0xdd, 0xc5, 0x0c, 0x23, 0x06, 0xe4, 0x1e,
+ 0x54, 0xfc, 0xc9, 0x24, 0x62, 0x31, 0x76, 0x96, 0xe5, 0x9d, 0xc2, 0xa7, 0x54, 0x12, 0xf8, 0x84,
+ 0xa9, 0x3b, 0x73, 0x63, 0xec, 0x43, 0xca, 0x54, 0x0c, 0xc8, 0x2e, 0xac, 0x8d, 0xfd, 0x59, 0xe0,
+ 0x4e, 0x99, 0x63, 0x8d, 0xe7, 0x61, 0xe4, 0x87, 0xad, 0x77, 0x2e, 0x1c, 0xd3, 0x9e, 0x44, 0xec,
+ 0x21, 0x80, 0x36, 0xc7, 0xb9, 0x31, 0x31, 0x60, 0x83, 0x79, 0x8e, 0xb5, 0xb8, 0xce, 0xfd, 0xd7,
+ 0xad, 0xb3, 0xce, 0x3c, 0x27, 0x4f, 0x4a, 0xc4, 0xc1, 0x48, 0x68, 0x61, 0xcc, 0x68, 0x6d, 0x60,
+ 0x90, 0xb9, 0x77, 0x69, 0xac, 0x14, 0xe2, 0x64, 0xc2, 0xf7, 0x6f, 0xc0, 0x2d, 0x19, 0x22, 0xad,
+ 0x80, 0x85, 0x13, 0x36, 0x8e, 0xad, 0x60, 0x6a, 0x7b, 0x58, 0xca, 0xa5, 0xc6, 0x4a, 0x24, 0xe4,
+ 0x50, 0x20, 0x0e, 0xa7, 0xb6, 0x47, 0x34, 0xa8, 0x3f, 0x67, 0x67, 0x91, 0xc5, 0x23, 0x29, 0x76,
+ 0xae, 0x29, 0xba, 0xc6, 0xe9, 0x43, 0x6f, 0x7a, 0x46, 0x7e, 0x02, 0x8d, 0xf8, 0xdc, 0xdb, 0xb0,
+ 0x61, 0x6d, 0xe4, 0x4e, 0x35, 0xe3, 0x8b, 0x34, 0x0b, 0x25, 0xf7, 0xa1, 0x2a, 0x35, 0xd4, 0xba,
+ 0x97, 0x5d, 0x3b, 0xa1, 0xf2, 0xc4, 0x3c, 0xb1, 0xdd, 0xa9, 0x7f, 0xca, 0x42, 0x6b, 0x16, 0xb5,
+ 0xda, 0xe2, 0xb6, 0x24, 0x21, 0x1d, 0x44, 0xdc, 0x4f, 0xa3, 0x38, 0xf4, 0xbd, 0xe3, 0xd6, 0x26,
+ 0xde, 0x93, 0xc8, 0xd1, 0xc5, 0xe0, 0xf7, 0x2e, 0x66, 0xfe, 0x7c, 0xf0, 0xfb, 0x1c, 0xee, 0x60,
+ 0x65, 0x66, 0x3d, 0x3b, 0xb3, 0xf2, 0x68, 0x0d, 0xd1, 0x1b, 0xc8, 0xdd, 0x3d, 0x3b, 0xcc, 0x4e,
+ 0x6a, 0x43, 0xcd, 0x71, 0xa3, 0xd8, 0xf5, 0xc6, 0x71, 0xab, 0x85, 0xef, 0x4c, 0xc7, 0xe4, 0x33,
+ 0xb8, 0x3d, 0x73, 0x3d, 0x2b, 0xb2, 0x27, 0xcc, 0x8a, 0x5d, 0xee, 0x9b, 0x6c, 0xec, 0x7b, 0x4e,
+ 0xd4, 0x7a, 0x80, 0x82, 0x93, 0x99, 0xeb, 0x8d, 0xec, 0x09, 0x33, 0xdd, 0x19, 0x1b, 0x09, 0x0e,
+ 0xf9, 0x08, 0xd6, 0x11, 0x1e, 0xb2, 0x60, 0xea, 0x8e, 0x6d, 0xf1, 0xfa, 0x1f, 0xe1, 0xeb, 0xd7,
+ 0x38, 0x83, 0x0a, 0x3a, 0xbe, 0xfa, 0x63, 0x68, 0x06, 0x2c, 0x8c, 0xdc, 0x28, 0xb6, 0xa4, 0x45,
+ 0xbf, 0x97, 0xd5, 0xda, 0xaa, 0x64, 0x0e, 0x91, 0xd7, 0xfe, 0xcf, 0x02, 0x54, 0x84, 0x73, 0x92,
+ 0x4f, 0x41, 0xf1, 0x03, 0xbc, 0x06, 0x69, 0x6e, 0x6f, 0x5e, 0xe2, 0xc1, 0x9d, 0x61, 0xc0, 0xeb,
+ 0x5e, 0x3f, 0xa4, 0x8a, 0x1f, 0xdc, 0xb8, 0x28, 0xd4, 0xfe, 0x10, 0x6a, 0xc9, 0x02, 0xbc, 0xbc,
+ 0xe8, 0xeb, 0xa3, 0x91, 0x65, 0x3e, 0xee, 0x0e, 0xd4, 0x02, 0xb9, 0x03, 0x24, 0x1d, 0x5a, 0x43,
+ 0x6a, 0xe9, 0xdf, 0x1c, 0x75, 0xfb, 0xaa, 0x82, 0x5d, 0x1a, 0xd5, 0xbb, 0xa6, 0x4e, 0x05, 0xb2,
+ 0x48, 0xee, 0xc1, 0xed, 0x2c, 0xe5, 0x1c, 0x5c, 0xc2, 0x14, 0x8c, 0x8f, 0x65, 0x52, 0x01, 0xc5,
+ 0x18, 0xa8, 0x15, 0x9e, 0x16, 0xf4, 0xef, 0x8d, 0x91, 0x39, 0x52, 0xab, 0xed, 0xbf, 0x29, 0x40,
+ 0x19, 0xc3, 0x06, 0x3f, 0x9f, 0x54, 0x72, 0x71, 0x5d, 0x73, 0x5e, 0xb9, 0x1a, 0xd9, 0x92, 0xaa,
+ 0x81, 0x01, 0x65, 0x73, 0x79, 0xf4, 0xf9, 0xb5, 0xd6, 0x53, 0x3f, 0x85, 0x12, 0x8f, 0x52, 0xbc,
+ 0x43, 0x1c, 0xd2, 0x9e, 0x4e, 0xad, 0x47, 0x06, 0x1d, 0xf1, 0x2a, 0x97, 0x40, 0xb3, 0x3b, 0xd8,
+ 0xd3, 0x47, 0xe6, 0x30, 0xa1, 0xa1, 0x56, 0x1e, 0x19, 0x7d, 0x33, 0x45, 0x15, 0xb5, 0x9f, 0xd7,
+ 0x60, 0x35, 0x89, 0x09, 0x22, 0x82, 0x3e, 0x82, 0x46, 0x10, 0xba, 0x33, 0x3b, 0x3c, 0x8b, 0xc6,
+ 0xb6, 0x87, 0x49, 0x01, 0xb6, 0x7f, 0xb4, 0x24, 0xaa, 0x88, 0x1d, 0x1d, 0x0a, 0xec, 0x68, 0x6c,
+ 0x7b, 0x34, 0x3b, 0x91, 0xf4, 0x61, 0x75, 0xc6, 0xc2, 0x63, 0xf6, 0x7b, 0xbe, 0xeb, 0xe1, 0x4a,
+ 0x55, 0x8c, 0xc8, 0xef, 0x5f, 0xba, 0xd2, 0x01, 0x47, 0xff, 0x8e, 0xef, 0x7a, 0xb8, 0x56, 0x7e,
+ 0x32, 0xf9, 0x04, 0xea, 0xa2, 0x12, 0x72, 0xd8, 0x04, 0x63, 0xc5, 0xb2, 0xda, 0x4f, 0xd4, 0xe8,
+ 0x3d, 0x36, 0xc9, 0xc4, 0x65, 0xb8, 0x34, 0x2e, 0x37, 0xb2, 0x71, 0xf9, 0xcd, 0x6c, 0x2c, 0x5a,
+ 0x11, 0x55, 0x78, 0x1a, 0x84, 0x2e, 0x38, 0x7c, 0x6b, 0x89, 0xc3, 0x77, 0x60, 0x23, 0xf1, 0x55,
+ 0xcb, 0xf5, 0x26, 0xee, 0x4b, 0x2b, 0x72, 0x5f, 0x89, 0xd8, 0x53, 0xa6, 0xeb, 0x09, 0xcb, 0xe0,
+ 0x9c, 0x91, 0xfb, 0x8a, 0x11, 0x23, 0xe9, 0xe0, 0x64, 0x0e, 0x5c, 0xc5, 0xab, 0xc9, 0xf7, 0x2e,
+ 0x55, 0x8f, 0x68, 0xbe, 0x64, 0x46, 0xcc, 0x4d, 0x6d, 0xff, 0x52, 0x81, 0x46, 0xe6, 0x1c, 0x78,
+ 0xf6, 0x16, 0xca, 0x42, 0x61, 0xc5, 0x55, 0x94, 0x50, 0x1f, 0x4a, 0xfa, 0x26, 0xd4, 0xa3, 0xd8,
+ 0x0e, 0x63, 0x8b, 0x17, 0x57, 0xb2, 0xdd, 0x45, 0xc2, 0x13, 0x76, 0x46, 0x3e, 0x80, 0x35, 0xc1,
+ 0x74, 0xbd, 0xf1, 0x74, 0x1e, 0xb9, 0xa7, 0xa2, 0x99, 0xaf, 0xd1, 0x26, 0x92, 0x8d, 0x84, 0x4a,
+ 0xee, 0x42, 0x95, 0x67, 0x21, 0xbe, 0x86, 0x68, 0xfa, 0x2a, 0xcc, 0x73, 0xf8, 0x0a, 0x0f, 0x60,
+ 0x95, 0x33, 0xce, 0xe7, 0x57, 0xc4, 0x2d, 0x33, 0xf3, 0x9c, 0xf3, 0xd9, 0x1d, 0xd8, 0x10, 0xaf,
+ 0x09, 0x44, 0xf1, 0x2a, 0x2b, 0xdc, 0x3b, 0xa8, 0xd8, 0x75, 0x64, 0xc9, 0xb2, 0x56, 0x14, 0x9c,
+ 0x1f, 0x01, 0xcf, 0x5e, 0x0b, 0xe8, 0xbb, 0x22, 0x94, 0x31, 0xcf, 0xc9, 0x61, 0x77, 0xe1, 0x1d,
+ 0x8e, 0x9d, 0x7b, 0x76, 0x10, 0x4c, 0x5d, 0xe6, 0x58, 0x53, 0xff, 0x18, 0x43, 0x66, 0x14, 0xdb,
+ 0xb3, 0xc0, 0x9a, 0x47, 0xad, 0x0d, 0x0c, 0x99, 0x6d, 0xe6, 0x39, 0x47, 0x09, 0xa8, 0xef, 0x1f,
+ 0x9b, 0x09, 0xe4, 0x28, 0x6a, 0xff, 0x3e, 0xac, 0xe6, 0xec, 0x71, 0x41, 0xa7, 0x35, 0x74, 0xfe,
+ 0x8c, 0x4e, 0xdf, 0x85, 0x95, 0x20, 0x64, 0xe7, 0xa2, 0xd5, 0x51, 0xb4, 0x86, 0xa0, 0x09, 0xb1,
+ 0xb6, 0x60, 0x05, 0x79, 0x96, 0x20, 0xe6, 0xf3, 0x63, 0x03, 0x59, 0x87, 0xc8, 0x69, 0xbf, 0x80,
+ 0x95, 0xec, 0x69, 0x93, 0x77, 0x33, 0x69, 0xa1, 0x99, 0xcb, 0x93, 0x69, 0x76, 0x48, 0x2a, 0xb2,
+ 0xf5, 0x4b, 0x2a, 0x32, 0x72, 0x9d, 0x8a, 0x4c, 0xfb, 0x2f, 0xd9, 0x9c, 0x65, 0x2a, 0x84, 0x9f,
+ 0x41, 0x2d, 0x90, 0xf5, 0x38, 0x5a, 0x52, 0xfe, 0x12, 0x3e, 0x0f, 0xee, 0x24, 0x95, 0x3b, 0x4d,
+ 0xe7, 0xb4, 0xff, 0x56, 0x81, 0x5a, 0x5a, 0xd0, 0xe7, 0x2c, 0xef, 0xcd, 0x05, 0xcb, 0x3b, 0x90,
+ 0x1a, 0x16, 0x0a, 0x7c, 0x1b, 0xa3, 0xc5, 0x27, 0xaf, 0x7f, 0xd7, 0xc5, 0xb6, 0xe7, 0x34, 0xdb,
+ 0xf6, 0x6c, 0xbe, 0xae, 0xed, 0xf9, 0xe4, 0xa2, 0xc1, 0xbf, 0x95, 0xe9, 0x2d, 0x16, 0xcc, 0xbe,
+ 0xfd, 0x7d, 0xae, 0x0f, 0xca, 0x26, 0x84, 0x77, 0xc4, 0x7e, 0xd2, 0x84, 0x90, 0xb6, 0x3f, 0xf7,
+ 0xaf, 0xd7, 0xfe, 0x6c, 0x43, 0x45, 0xea, 0xfc, 0x0e, 0x54, 0x64, 0x4d, 0x27, 0x1b, 0x04, 0x31,
+ 0x3a, 0x6f, 0x10, 0x0a, 0xb2, 0x4e, 0xd7, 0x7e, 0xae, 0x40, 0x59, 0x0f, 0x43, 0x3f, 0xd4, 0xfe,
+ 0x48, 0x81, 0x3a, 0x3e, 0xed, 0xf9, 0x0e, 0xe3, 0xd9, 0x60, 0xb7, 0xdb, 0xb3, 0xa8, 0xfe, 0xcd,
+ 0x91, 0x8e, 0xd9, 0xa0, 0x0d, 0x77, 0xf6, 0x86, 0x83, 0xbd, 0x23, 0x4a, 0xf5, 0x81, 0x69, 0x99,
+ 0xb4, 0x3b, 0x18, 0xf1, 0xb6, 0x67, 0x38, 0x50, 0x15, 0x9e, 0x29, 0x8c, 0x81, 0xa9, 0xd3, 0x41,
+ 0xb7, 0x6f, 0x89, 0x56, 0xb4, 0x88, 0x77, 0xb3, 0xba, 0xde, 0xb3, 0xf0, 0xd6, 0x51, 0x2d, 0xf1,
+ 0x96, 0xd5, 0x34, 0x0e, 0xf4, 0xe1, 0x91, 0xa9, 0x96, 0xc9, 0x6d, 0x58, 0x3f, 0xd4, 0xe9, 0x81,
+ 0x31, 0x1a, 0x19, 0xc3, 0x81, 0xd5, 0xd3, 0x07, 0x86, 0xde, 0x53, 0x2b, 0x7c, 0x9d, 0x5d, 0x63,
+ 0xdf, 0xec, 0xee, 0xf6, 0x75, 0xb9, 0x4e, 0x95, 0x6c, 0xc2, 0x5b, 0x7b, 0xc3, 0x83, 0x03, 0xc3,
+ 0x34, 0xf5, 0x9e, 0xb5, 0x7b, 0x64, 0x5a, 0x23, 0xd3, 0xe8, 0xf7, 0xad, 0xee, 0xe1, 0x61, 0xff,
+ 0x29, 0x4f, 0x60, 0x35, 0x72, 0x17, 0x36, 0xf6, 0xba, 0x87, 0xdd, 0x5d, 0xa3, 0x6f, 0x98, 0x4f,
+ 0xad, 0x9e, 0x31, 0xe2, 0xf3, 0x7b, 0x6a, 0x9d, 0x27, 0x6c, 0x93, 0x3e, 0xb5, 0xba, 0x7d, 0x14,
+ 0xcd, 0xd4, 0xad, 0xdd, 0xee, 0xde, 0x13, 0x7d, 0xd0, 0x53, 0x81, 0x0b, 0x30, 0xea, 0x3e, 0xd2,
+ 0x2d, 0x2e, 0x92, 0x65, 0x0e, 0x87, 0xd6, 0xb0, 0xdf, 0x53, 0x1b, 0xda, 0xbf, 0x14, 0xa1, 0xb4,
+ 0xe7, 0x47, 0x31, 0xf7, 0x46, 0xe1, 0xac, 0x2f, 0x42, 0x37, 0x66, 0xa2, 0x7f, 0x2b, 0x53, 0xd1,
+ 0x4b, 0x7f, 0x87, 0x24, 0x1e, 0x50, 0x32, 0x10, 0xeb, 0xd9, 0x19, 0xc7, 0x29, 0x88, 0x5b, 0x3b,
+ 0xc7, 0xed, 0x72, 0xb2, 0x88, 0x68, 0x78, 0x85, 0x23, 0xd7, 0x2b, 0x22, 0x4e, 0x06, 0x61, 0xb9,
+ 0xe0, 0xc7, 0x40, 0xb2, 0x20, 0xb9, 0x62, 0x09, 0x91, 0x6a, 0x06, 0x29, 0x96, 0xdc, 0x01, 0x18,
+ 0xfb, 0xb3, 0x99, 0x1b, 0x8f, 0xfd, 0x28, 0x96, 0x5f, 0xc8, 0xda, 0x39, 0x63, 0x8f, 0x62, 0x6e,
+ 0xf1, 0x33, 0x37, 0xe6, 0x8f, 0x34, 0x83, 0x26, 0x3b, 0x70, 0xcf, 0x0e, 0x82, 0xd0, 0x7f, 0xe9,
+ 0xce, 0xec, 0x98, 0x59, 0xdc, 0x73, 0xed, 0x63, 0x66, 0x39, 0x6c, 0x1a, 0xdb, 0xd8, 0x13, 0x95,
+ 0xe9, 0xdd, 0x0c, 0x60, 0x24, 0xf8, 0x3d, 0xce, 0xe6, 0x71, 0xd7, 0x75, 0xac, 0x88, 0xfd, 0x30,
+ 0xe7, 0x1e, 0x60, 0xcd, 0x03, 0xc7, 0xe6, 0x62, 0xd6, 0x45, 0x96, 0x72, 0x9d, 0x91, 0xe4, 0x1c,
+ 0x09, 0x46, 0xfb, 0x15, 0xc0, 0xb9, 0x14, 0x64, 0x1b, 0x6e, 0xf3, 0x3a, 0x9e, 0x45, 0x31, 0x73,
+ 0x2c, 0xb9, 0xdb, 0x60, 0x1e, 0x47, 0x18, 0xe2, 0xcb, 0x74, 0x23, 0x65, 0xca, 0x9b, 0xc2, 0x79,
+ 0x1c, 0x91, 0x9f, 0x40, 0xeb, 0xc2, 0x1c, 0x87, 0x4d, 0x19, 0x7f, 0x6d, 0x15, 0xa7, 0xdd, 0x59,
+ 0x98, 0xd6, 0x13, 0x5c, 0xed, 0x4f, 0x14, 0x80, 0x7d, 0x16, 0x53, 0xc1, 0xcd, 0x34, 0xb6, 0x95,
+ 0xeb, 0x36, 0xb6, 0xef, 0x27, 0x17, 0x08, 0xc5, 0xab, 0x63, 0xc0, 0x42, 0x97, 0xa1, 0xdc, 0xa4,
+ 0xcb, 0xc8, 0x35, 0x11, 0xc5, 0x2b, 0x9a, 0x88, 0x52, 0xae, 0x89, 0xf8, 0x18, 0x9a, 0xf6, 0x74,
+ 0xea, 0xbf, 0xe0, 0x05, 0x0d, 0x0b, 0x43, 0xe6, 0xa0, 0x11, 0x9c, 0xd7, 0xdb, 0xc8, 0xec, 0x49,
+ 0x9e, 0xf6, 0xe7, 0x0a, 0x34, 0x50, 0x15, 0x51, 0xe0, 0x7b, 0x11, 0x23, 0x5f, 0x42, 0x45, 0x5e,
+ 0x44, 0x8b, 0x8b, 0xfc, 0xb7, 0x33, 0xb2, 0x66, 0x70, 0xb2, 0x68, 0xa0, 0x12, 0xcc, 0x33, 0x42,
+ 0xe6, 0x75, 0x97, 0x2b, 0x25, 0x45, 0x91, 0xfb, 0x50, 0x73, 0x3d, 0x4b, 0xb4, 0xd4, 0x95, 0x4c,
+ 0x58, 0xac, 0xba, 0x1e, 0xd6, 0xb2, 0xed, 0x57, 0x50, 0x11, 0x2f, 0x21, 0x9d, 0x54, 0xa6, 0x8b,
+ 0xfa, 0xcb, 0xdc, 0x1c, 0xa7, 0xc2, 0xc8, 0xc3, 0x29, 0xbd, 0x2e, 0x40, 0xb7, 0xa0, 0x7a, 0xca,
+ 0x9b, 0x0f, 0xbc, 0xf4, 0xe3, 0xea, 0x4d, 0x86, 0xda, 0x1f, 0x97, 0x00, 0x0e, 0xe7, 0x4b, 0x0c,
+ 0xa4, 0x71, 0x5d, 0x03, 0xe9, 0xe4, 0xf4, 0xf8, 0x7a, 0x99, 0x7f, 0x75, 0x43, 0x59, 0xd2, 0x69,
+ 0x17, 0x6f, 0xda, 0x69, 0xdf, 0x87, 0x6a, 0x1c, 0xce, 0xb9, 0xa3, 0x08, 0x63, 0x4a, 0x5b, 0x5a,
+ 0x49, 0x25, 0x6f, 0x42, 0x79, 0xe2, 0x87, 0x63, 0x86, 0x8e, 0x95, 0xb2, 0x05, 0xed, 0xc2, 0x65,
+ 0x52, 0xed, 0xb2, 0xcb, 0x24, 0xde, 0xa0, 0x45, 0xf2, 0x1e, 0x0d, 0x0b, 0x99, 0x7c, 0x83, 0x96,
+ 0x5c, 0xb1, 0xd1, 0x14, 0x44, 0xbe, 0x81, 0xa6, 0x3d, 0x8f, 0x7d, 0xcb, 0xe5, 0x15, 0xda, 0xd4,
+ 0x1d, 0x9f, 0x61, 0xd9, 0xdd, 0xcc, 0x7f, 0xaf, 0x4f, 0x0f, 0xaa, 0xd3, 0x9d, 0xc7, 0xbe, 0xe1,
+ 0x1c, 0x22, 0x72, 0xa7, 0x2a, 0x93, 0x12, 0x5d, 0xb1, 0x33, 0x64, 0xed, 0xc7, 0xb0, 0x92, 0x85,
+ 0xf1, 0x04, 0x24, 0x81, 0xea, 0x1b, 0x3c, 0x3b, 0x8d, 0x78, 0x6a, 0x1b, 0x98, 0x46, 0xb7, 0xaf,
+ 0x16, 0xb4, 0x18, 0x1a, 0xb8, 0xbc, 0xf4, 0x8e, 0xeb, 0xba, 0xfd, 0x03, 0x28, 0x61, 0xf8, 0x55,
+ 0x2e, 0x7c, 0x0f, 0xc1, 0x98, 0x8b, 0xcc, 0xbc, 0xf9, 0x15, 0xb3, 0xe6, 0xf7, 0xdf, 0x05, 0x58,
+ 0x31, 0xfd, 0xf9, 0xf8, 0xe4, 0xa2, 0x01, 0xc2, 0xaf, 0x3b, 0x42, 0x2d, 0x31, 0x1f, 0xe5, 0xa6,
+ 0xe6, 0x93, 0x5a, 0x47, 0x71, 0x89, 0x75, 0xdc, 0xf4, 0xcc, 0xb5, 0x2f, 0x60, 0x55, 0x6e, 0x5e,
+ 0x6a, 0x3d, 0xd1, 0x66, 0xe1, 0x0a, 0x6d, 0x6a, 0xbf, 0x50, 0x60, 0x55, 0xc4, 0xf7, 0xff, 0xbb,
+ 0xd2, 0x2a, 0x37, 0x0c, 0xeb, 0xe5, 0x1b, 0x5d, 0x1e, 0xfd, 0xbf, 0xf4, 0x34, 0x6d, 0x08, 0xcd,
+ 0x44, 0x7d, 0x37, 0x50, 0xfb, 0x15, 0x46, 0xfc, 0x8b, 0x02, 0x34, 0x06, 0xec, 0xe5, 0x92, 0x20,
+ 0x5a, 0xbe, 0xee, 0x71, 0x7c, 0x98, 0x2b, 0x57, 0x1b, 0xdb, 0xeb, 0x59, 0x19, 0xc4, 0xd5, 0x63,
+ 0x52, 0xc1, 0xa6, 0xb7, 0xa8, 0xca, 0xf2, 0x5b, 0xd4, 0xd2, 0x62, 0xb7, 0x9e, 0xb9, 0xc5, 0x2b,
+ 0x2e, 0xbb, 0xc5, 0xd3, 0xfe, 0xad, 0x08, 0x0d, 0x6c, 0x90, 0x29, 0x8b, 0xe6, 0xd3, 0x38, 0x27,
+ 0x4c, 0xe1, 0x6a, 0x61, 0x3a, 0x50, 0x09, 0x71, 0x92, 0x74, 0xa5, 0x4b, 0x83, 0xbf, 0x40, 0x61,
+ 0x6b, 0xfc, 0xdc, 0x0d, 0x02, 0xe6, 0x58, 0x82, 0x92, 0x14, 0x30, 0x4d, 0x49, 0x16, 0x22, 0x44,
+ 0xbc, 0xfc, 0x9c, 0xf9, 0x21, 0x4b, 0x51, 0x45, 0xbc, 0x4f, 0x68, 0x70, 0x5a, 0x02, 0xc9, 0xdd,
+ 0x37, 0x88, 0xca, 0xe0, 0xfc, 0xbe, 0x21, 0xed, 0x35, 0x91, 0x5b, 0x47, 0xae, 0xe8, 0x35, 0x91,
+ 0xcd, 0xbb, 0xa8, 0x99, 0x3d, 0x9d, 0x5a, 0x7e, 0x10, 0xa1, 0xd3, 0xd4, 0x68, 0x0d, 0x09, 0xc3,
+ 0x20, 0x22, 0x5f, 0x43, 0x7a, 0x5d, 0x2c, 0x6f, 0xc9, 0xc5, 0x39, 0xb6, 0x2e, 0xbb, 0x58, 0xa0,
+ 0xab, 0xe3, 0xdc, 0xfd, 0xcf, 0x92, 0x1b, 0xea, 0xca, 0x4d, 0x6f, 0xa8, 0x1f, 0x42, 0x59, 0xc4,
+ 0xa8, 0xda, 0xeb, 0x62, 0x94, 0xc0, 0x65, 0xed, 0xb3, 0x91, 0xb7, 0xcf, 0x5f, 0x16, 0x80, 0x74,
+ 0xa7, 0x53, 0x7f, 0x6c, 0xc7, 0xcc, 0x70, 0xa2, 0x8b, 0x66, 0x7a, 0xed, 0xcf, 0x2e, 0x9f, 0x41,
+ 0x7d, 0xe6, 0x3b, 0x6c, 0x6a, 0x25, 0xdf, 0x94, 0x2e, 0xad, 0x7e, 0x10, 0xc6, 0x5b, 0x52, 0x02,
+ 0x25, 0xbc, 0xc4, 0x51, 0xb0, 0xee, 0xc0, 0x67, 0xde, 0x84, 0xcd, 0xec, 0x97, 0xb2, 0x14, 0xe1,
+ 0x8f, 0xa4, 0x03, 0xd5, 0x90, 0x45, 0x2c, 0x3c, 0x65, 0x57, 0x16, 0x55, 0x09, 0x48, 0x7b, 0x06,
+ 0x1b, 0xb9, 0x1d, 0x49, 0x47, 0xbe, 0x85, 0x5f, 0x2b, 0xc3, 0x58, 0x7e, 0xb4, 0x12, 0x03, 0xfe,
+ 0x3a, 0xe6, 0x25, 0x9f, 0x41, 0xf9, 0x63, 0xea, 0xf0, 0xc5, 0xab, 0xe2, 0xec, 0x1e, 0xa8, 0x59,
+ 0x4d, 0xbb, 0x63, 0x0c, 0x36, 0xf2, 0x54, 0x0a, 0xd7, 0x3b, 0x15, 0xed, 0xef, 0x0a, 0xb0, 0xde,
+ 0x75, 0x1c, 0xf1, 0x77, 0xc3, 0x25, 0xaa, 0x2f, 0x5e, 0x57, 0xf5, 0x0b, 0x81, 0x58, 0x84, 0x89,
+ 0x6b, 0x05, 0xe2, 0x0f, 0xa1, 0x92, 0xd6, 0x5a, 0xc5, 0x05, 0x77, 0x16, 0x72, 0x51, 0x09, 0xd0,
+ 0x6e, 0x01, 0xc9, 0x0a, 0x2b, 0xb4, 0xaa, 0xfd, 0x69, 0x11, 0xee, 0xee, 0xb2, 0x63, 0xd7, 0xcb,
+ 0xbe, 0xe2, 0x57, 0xdf, 0xc9, 0xc5, 0x4f, 0x65, 0x9f, 0xc1, 0xba, 0x28, 0xe4, 0x93, 0x7f, 0x62,
+ 0x59, 0xec, 0x58, 0x7e, 0x9d, 0x94, 0xb1, 0x6a, 0x0d, 0xf9, 0x07, 0x92, 0xad, 0xe3, 0x7f, 0xc5,
+ 0x1c, 0x3b, 0xb6, 0x9f, 0xd9, 0x11, 0xb3, 0x5c, 0x47, 0xfe, 0x59, 0x06, 0x12, 0x92, 0xe1, 0x90,
+ 0x21, 0x94, 0xb8, 0x0d, 0xa2, 0xeb, 0x36, 0xb7, 0xb7, 0x33, 0x62, 0x5d, 0xb2, 0x95, 0xac, 0x02,
+ 0x0f, 0x7c, 0x87, 0xed, 0x54, 0x8f, 0x06, 0x4f, 0x06, 0xc3, 0xef, 0x06, 0x14, 0x17, 0x22, 0x06,
+ 0xdc, 0x0a, 0x42, 0x76, 0xea, 0xfa, 0xf3, 0xc8, 0xca, 0x9e, 0x44, 0xf5, 0xca, 0x94, 0xb8, 0x91,
+ 0xcc, 0xc9, 0x10, 0xb5, 0x9f, 0xc2, 0xda, 0xc2, 0xcb, 0x78, 0x6d, 0x26, 0x5f, 0xa7, 0xbe, 0x41,
+ 0x56, 0xa1, 0x8e, 0x1f, 0xbb, 0x97, 0x7f, 0xfb, 0xd6, 0xfe, 0xb5, 0x80, 0x57, 0x4c, 0x33, 0x37,
+ 0xbe, 0x59, 0x06, 0xfb, 0xcd, 0x7c, 0x06, 0x83, 0xed, 0x77, 0xf3, 0xe6, 0x9b, 0x59, 0xb0, 0xf3,
+ 0xad, 0x00, 0xa6, 0x41, 0xa4, 0x6d, 0x43, 0x55, 0xd2, 0xc8, 0x6f, 0xc1, 0x5a, 0xe8, 0xfb, 0x71,
+ 0xd2, 0x89, 0x8a, 0x0e, 0xe4, 0xf2, 0x3f, 0xdb, 0xac, 0x72, 0xb0, 0x48, 0x06, 0x4f, 0xf2, 0xbd,
+ 0x48, 0x59, 0xfc, 0x0d, 0x44, 0x0e, 0x77, 0x1b, 0xbf, 0x5b, 0x4f, 0xff, 0xb7, 0xfb, 0xbf, 0x01,
+ 0x00, 0x00, 0xff, 0xff, 0x35, 0x9f, 0x30, 0x98, 0xf2, 0x2b, 0x00, 0x00,
+}
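For orientation, a minimal usage sketch of the generated accessors above: getters are nil-safe and fall back to the proto2 defaults declared in datastore_v3.proto (added below). This is illustrative only and assumes it compiles inside the google.golang.org/appengine module, since the datastore package is internal; the field values are made up.

package internal_example // hypothetical location inside google.golang.org/appengine

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/internal/datastore"
)

func exampleAccessors() {
	// Getters are nil-safe: a nil message yields the zero value.
	var nilReq *pb.AllocateIdsRequest
	fmt.Println(nilReq.GetSize(), nilReq.GetMax()) // 0 0

	// Optional scalar fields are pointers, set via the proto helpers.
	req := &pb.AllocateIdsRequest{Size: proto.Int64(10)}
	fmt.Println(req.GetSize()) // 10

	// Fields declared with [default = ...] fall back to the generated Default_* constants.
	tx := &pb.BeginTransactionRequest{App: proto.String("example-app")}
	fmt.Println(tx.GetAllowMultipleEg()) // false
	fmt.Println(tx.GetMode())            // UNKNOWN
}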
diff --git a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
new file mode 100755
index 0000000..497b4d9
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
@@ -0,0 +1,551 @@
+syntax = "proto2";
+option go_package = "datastore";
+
+package appengine;
+
+message Action{}
+
+message PropertyValue {
+ optional int64 int64Value = 1;
+ optional bool booleanValue = 2;
+ optional string stringValue = 3;
+ optional double doubleValue = 4;
+
+ optional group PointValue = 5 {
+ required double x = 6;
+ required double y = 7;
+ }
+
+ optional group UserValue = 8 {
+ required string email = 9;
+ required string auth_domain = 10;
+ optional string nickname = 11;
+ optional string federated_identity = 21;
+ optional string federated_provider = 22;
+ }
+
+ optional group ReferenceValue = 12 {
+ required string app = 13;
+ optional string name_space = 20;
+ repeated group PathElement = 14 {
+ required string type = 15;
+ optional int64 id = 16;
+ optional string name = 17;
+ }
+ }
+}
+
+message Property {
+ enum Meaning {
+ NO_MEANING = 0;
+ BLOB = 14;
+ TEXT = 15;
+ BYTESTRING = 16;
+
+ ATOM_CATEGORY = 1;
+ ATOM_LINK = 2;
+ ATOM_TITLE = 3;
+ ATOM_CONTENT = 4;
+ ATOM_SUMMARY = 5;
+ ATOM_AUTHOR = 6;
+
+ GD_WHEN = 7;
+ GD_EMAIL = 8;
+ GEORSS_POINT = 9;
+ GD_IM = 10;
+
+ GD_PHONENUMBER = 11;
+ GD_POSTALADDRESS = 12;
+
+ GD_RATING = 13;
+
+ BLOBKEY = 17;
+ ENTITY_PROTO = 19;
+
+ INDEX_VALUE = 18;
+ };
+
+ optional Meaning meaning = 1 [default = NO_MEANING];
+ optional string meaning_uri = 2;
+
+ required string name = 3;
+
+ required PropertyValue value = 5;
+
+ required bool multiple = 4;
+
+ optional bool searchable = 6 [default=false];
+
+ enum FtsTokenizationOption {
+ HTML = 1;
+ ATOM = 2;
+ }
+
+ optional FtsTokenizationOption fts_tokenization_option = 8;
+
+ optional string locale = 9 [default = "en"];
+}
+
+message Path {
+ repeated group Element = 1 {
+ required string type = 2;
+ optional int64 id = 3;
+ optional string name = 4;
+ }
+}
+
+message Reference {
+ required string app = 13;
+ optional string name_space = 20;
+ required Path path = 14;
+}
+
+message User {
+ required string email = 1;
+ required string auth_domain = 2;
+ optional string nickname = 3;
+ optional string federated_identity = 6;
+ optional string federated_provider = 7;
+}
+
+message EntityProto {
+ required Reference key = 13;
+ required Path entity_group = 16;
+ optional User owner = 17;
+
+ enum Kind {
+ GD_CONTACT = 1;
+ GD_EVENT = 2;
+ GD_MESSAGE = 3;
+ }
+ optional Kind kind = 4;
+ optional string kind_uri = 5;
+
+ repeated Property property = 14;
+ repeated Property raw_property = 15;
+
+ optional int32 rank = 18;
+}
+
+message CompositeProperty {
+ required int64 index_id = 1;
+ repeated string value = 2;
+}
+
+message Index {
+ required string entity_type = 1;
+ required bool ancestor = 5;
+ repeated group Property = 2 {
+ required string name = 3;
+ enum Direction {
+ ASCENDING = 1;
+ DESCENDING = 2;
+ }
+ optional Direction direction = 4 [default = ASCENDING];
+ }
+}
+
+message CompositeIndex {
+ required string app_id = 1;
+ required int64 id = 2;
+ required Index definition = 3;
+
+ enum State {
+ WRITE_ONLY = 1;
+ READ_WRITE = 2;
+ DELETED = 3;
+ ERROR = 4;
+ }
+ required State state = 4;
+
+ optional bool only_use_if_required = 6 [default = false];
+}
+
+message IndexPostfix {
+ message IndexValue {
+ required string property_name = 1;
+ required PropertyValue value = 2;
+ }
+
+ repeated IndexValue index_value = 1;
+
+ optional Reference key = 2;
+
+ optional bool before = 3 [default=true];
+}
+
+message IndexPosition {
+ optional string key = 1;
+
+ optional bool before = 2 [default=true];
+}
+
+message Snapshot {
+ enum Status {
+ INACTIVE = 0;
+ ACTIVE = 1;
+ }
+
+ required int64 ts = 1;
+}
+
+message InternalHeader {
+ optional string qos = 1;
+}
+
+message Transaction {
+ optional InternalHeader header = 4;
+ required fixed64 handle = 1;
+ required string app = 2;
+ optional bool mark_changes = 3 [default = false];
+}
+
+message Query {
+ optional InternalHeader header = 39;
+
+ required string app = 1;
+ optional string name_space = 29;
+
+ optional string kind = 3;
+ optional Reference ancestor = 17;
+
+ repeated group Filter = 4 {
+ enum Operator {
+ LESS_THAN = 1;
+ LESS_THAN_OR_EQUAL = 2;
+ GREATER_THAN = 3;
+ GREATER_THAN_OR_EQUAL = 4;
+ EQUAL = 5;
+ IN = 6;
+ EXISTS = 7;
+ }
+
+ required Operator op = 6;
+ repeated Property property = 14;
+ }
+
+ optional string search_query = 8;
+
+ repeated group Order = 9 {
+ enum Direction {
+ ASCENDING = 1;
+ DESCENDING = 2;
+ }
+
+ required string property = 10;
+ optional Direction direction = 11 [default = ASCENDING];
+ }
+
+ enum Hint {
+ ORDER_FIRST = 1;
+ ANCESTOR_FIRST = 2;
+ FILTER_FIRST = 3;
+ }
+ optional Hint hint = 18;
+
+ optional int32 count = 23;
+
+ optional int32 offset = 12 [default = 0];
+
+ optional int32 limit = 16;
+
+ optional CompiledCursor compiled_cursor = 30;
+ optional CompiledCursor end_compiled_cursor = 31;
+
+ repeated CompositeIndex composite_index = 19;
+
+ optional bool require_perfect_plan = 20 [default = false];
+
+ optional bool keys_only = 21 [default = false];
+
+ optional Transaction transaction = 22;
+
+ optional bool compile = 25 [default = false];
+
+ optional int64 failover_ms = 26;
+
+ optional bool strong = 32;
+
+ repeated string property_name = 33;
+
+ repeated string group_by_property_name = 34;
+
+ optional bool distinct = 24;
+
+ optional int64 min_safe_time_seconds = 35;
+
+ repeated string safe_replica_name = 36;
+
+ optional bool persist_offset = 37 [default=false];
+}
+
+message CompiledQuery {
+ required group PrimaryScan = 1 {
+ optional string index_name = 2;
+
+ optional string start_key = 3;
+ optional bool start_inclusive = 4;
+ optional string end_key = 5;
+ optional bool end_inclusive = 6;
+
+ repeated string start_postfix_value = 22;
+ repeated string end_postfix_value = 23;
+
+ optional int64 end_unapplied_log_timestamp_us = 19;
+ }
+
+ repeated group MergeJoinScan = 7 {
+ required string index_name = 8;
+
+ repeated string prefix_value = 9;
+
+ optional bool value_prefix = 20 [default=false];
+ }
+
+ optional Index index_def = 21;
+
+ optional int32 offset = 10 [default = 0];
+
+ optional int32 limit = 11;
+
+ required bool keys_only = 12;
+
+ repeated string property_name = 24;
+
+ optional int32 distinct_infix_size = 25;
+
+ optional group EntityFilter = 13 {
+ optional bool distinct = 14 [default=false];
+
+ optional string kind = 17;
+ optional Reference ancestor = 18;
+ }
+}
+
+message CompiledCursor {
+ optional group Position = 2 {
+ optional string start_key = 27;
+
+ repeated group IndexValue = 29 {
+ optional string property = 30;
+ required PropertyValue value = 31;
+ }
+
+ optional Reference key = 32;
+
+ optional bool start_inclusive = 28 [default=true];
+ }
+}
+
+message Cursor {
+ required fixed64 cursor = 1;
+
+ optional string app = 2;
+}
+
+message Error {
+ enum ErrorCode {
+ BAD_REQUEST = 1;
+ CONCURRENT_TRANSACTION = 2;
+ INTERNAL_ERROR = 3;
+ NEED_INDEX = 4;
+ TIMEOUT = 5;
+ PERMISSION_DENIED = 6;
+ BIGTABLE_ERROR = 7;
+ COMMITTED_BUT_STILL_APPLYING = 8;
+ CAPABILITY_DISABLED = 9;
+ TRY_ALTERNATE_BACKEND = 10;
+ SAFE_TIME_TOO_OLD = 11;
+ }
+}
+
+message Cost {
+ optional int32 index_writes = 1;
+ optional int32 index_write_bytes = 2;
+ optional int32 entity_writes = 3;
+ optional int32 entity_write_bytes = 4;
+ optional group CommitCost = 5 {
+ optional int32 requested_entity_puts = 6;
+ optional int32 requested_entity_deletes = 7;
+ };
+ optional int32 approximate_storage_delta = 8;
+ optional int32 id_sequence_updates = 9;
+}
+
+message GetRequest {
+ optional InternalHeader header = 6;
+
+ repeated Reference key = 1;
+ optional Transaction transaction = 2;
+
+ optional int64 failover_ms = 3;
+
+ optional bool strong = 4;
+
+ optional bool allow_deferred = 5 [default=false];
+}
+
+message GetResponse {
+ repeated group Entity = 1 {
+ optional EntityProto entity = 2;
+ optional Reference key = 4;
+
+ optional int64 version = 3;
+ }
+
+ repeated Reference deferred = 5;
+
+ optional bool in_order = 6 [default=true];
+}
+
+message PutRequest {
+ optional InternalHeader header = 11;
+
+ repeated EntityProto entity = 1;
+ optional Transaction transaction = 2;
+ repeated CompositeIndex composite_index = 3;
+
+ optional bool trusted = 4 [default = false];
+
+ optional bool force = 7 [default = false];
+
+ optional bool mark_changes = 8 [default = false];
+ repeated Snapshot snapshot = 9;
+
+ enum AutoIdPolicy {
+ CURRENT = 0;
+ SEQUENTIAL = 1;
+ }
+ optional AutoIdPolicy auto_id_policy = 10 [default = CURRENT];
+}
+
+message PutResponse {
+ repeated Reference key = 1;
+ optional Cost cost = 2;
+ repeated int64 version = 3;
+}
+
+message TouchRequest {
+ optional InternalHeader header = 10;
+
+ repeated Reference key = 1;
+ repeated CompositeIndex composite_index = 2;
+ optional bool force = 3 [default = false];
+ repeated Snapshot snapshot = 9;
+}
+
+message TouchResponse {
+ optional Cost cost = 1;
+}
+
+message DeleteRequest {
+ optional InternalHeader header = 10;
+
+ repeated Reference key = 6;
+ optional Transaction transaction = 5;
+
+ optional bool trusted = 4 [default = false];
+
+ optional bool force = 7 [default = false];
+
+ optional bool mark_changes = 8 [default = false];
+ repeated Snapshot snapshot = 9;
+}
+
+message DeleteResponse {
+ optional Cost cost = 1;
+ repeated int64 version = 3;
+}
+
+message NextRequest {
+ optional InternalHeader header = 5;
+
+ required Cursor cursor = 1;
+ optional int32 count = 2;
+
+ optional int32 offset = 4 [default = 0];
+
+ optional bool compile = 3 [default = false];
+}
+
+message QueryResult {
+ optional Cursor cursor = 1;
+
+ repeated EntityProto result = 2;
+
+ optional int32 skipped_results = 7;
+
+ required bool more_results = 3;
+
+ optional bool keys_only = 4;
+
+ optional bool index_only = 9;
+
+ optional bool small_ops = 10;
+
+ optional CompiledQuery compiled_query = 5;
+
+ optional CompiledCursor compiled_cursor = 6;
+
+ repeated CompositeIndex index = 8;
+
+ repeated int64 version = 11;
+}
+
+message AllocateIdsRequest {
+ optional InternalHeader header = 4;
+
+ optional Reference model_key = 1;
+
+ optional int64 size = 2;
+
+ optional int64 max = 3;
+
+ repeated Reference reserve = 5;
+}
+
+message AllocateIdsResponse {
+ required int64 start = 1;
+ required int64 end = 2;
+ optional Cost cost = 3;
+}
+
+message CompositeIndices {
+ repeated CompositeIndex index = 1;
+}
+
+message AddActionsRequest {
+ optional InternalHeader header = 3;
+
+ required Transaction transaction = 1;
+ repeated Action action = 2;
+}
+
+message AddActionsResponse {
+}
+
+message BeginTransactionRequest {
+ optional InternalHeader header = 3;
+
+ required string app = 1;
+ optional bool allow_multiple_eg = 2 [default = false];
+ optional string database_id = 4;
+
+ enum TransactionMode {
+ UNKNOWN = 0;
+ READ_ONLY = 1;
+ READ_WRITE = 2;
+ }
+ optional TransactionMode mode = 5 [default = UNKNOWN];
+
+ optional Transaction previous_transaction = 7;
+}
+
+message CommitResponse {
+ optional Cost cost = 1;
+
+ repeated group Version = 3 {
+ required Reference root_entity_key = 4;
+ required int64 version = 5;
+ }
+}
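The proto2 group fields above (for example CommitResponse.Version) surface as nested Go types in the generated code earlier in this patch (CommitResponse_Version). A hedged round-trip sketch, again assuming it lives inside the appengine module; the app name, kind, and version numbers are illustrative only.

package internal_example // hypothetical location inside google.golang.org/appengine

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/internal/datastore"
)

func exampleRoundTrip() {
	// Build a CommitResponse with one Version group entry; required fields
	// (app, path, root_entity_key, version) must all be set before marshaling.
	resp := &pb.CommitResponse{
		Version: []*pb.CommitResponse_Version{{
			RootEntityKey: &pb.Reference{
				App: proto.String("example-app"),
				Path: &pb.Path{Element: []*pb.Path_Element{{
					Type: proto.String("Kind"),
					Id:   proto.Int64(1),
				}}},
			},
			Version: proto.Int64(7),
		}},
	}
	b, err := proto.Marshal(resp)
	if err != nil {
		log.Fatal(err)
	}
	var out pb.CommitResponse
	if err := proto.Unmarshal(b, &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.GetVersion()[0].GetVersion()) // 7
}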
diff --git a/vendor/google.golang.org/appengine/internal/identity.go b/vendor/google.golang.org/appengine/internal/identity.go
new file mode 100644
index 0000000..9b4134e
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/identity.go
@@ -0,0 +1,55 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+ "os"
+
+ netcontext "golang.org/x/net/context"
+)
+
+var (
+ // This is set to true in identity_classic.go, which is behind the appengine build tag.
+ // The appengine build tag is set for the first generation runtimes (<= Go 1.9) but not
+ // the second generation runtimes (>= Go 1.11), so this indicates whether we're on a
+ // first-gen runtime. See IsSecondGen below for the second-gen check.
+ appengineStandard bool
+
+ // This is set to true in identity_flex.go, which is behind the appenginevm build tag.
+ appengineFlex bool
+)
+
+// AppID is the implementation of the wrapper function of the same name in
+// ../identity.go. See that file for commentary.
+func AppID(c netcontext.Context) string {
+ return appID(FullyQualifiedAppID(c))
+}
+
+// IsStandard is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsStandard() bool {
+ // appengineStandard will be true for first-gen runtimes (<= Go 1.9) but not
+ // second-gen (>= Go 1.11).
+ return appengineStandard || IsSecondGen()
+}
+
+// IsSecondGen is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsSecondGen() bool {
+ // Second-gen runtimes set $GAE_ENV so we use that to check if we're on a second-gen runtime.
+ return os.Getenv("GAE_ENV") == "standard"
+}
+
+// IsFlex is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsFlex() bool {
+ return appengineFlex
+}
+
+// IsAppEngine is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsAppEngine() bool {
+ return IsStandard() || IsFlex()
+}
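The two build-tag flags and the $GAE_ENV probe above back the public wrappers of the same names in ../appengine.go. A small sketch of how calling code might branch on them, assuming those public wrappers are available:

package main

import (
	"fmt"

	"google.golang.org/appengine"
)

func main() {
	// The public wrappers delegate to the internal functions above
	// (build-tag flags plus the $GAE_ENV check).
	switch {
	case appengine.IsSecondGen():
		fmt.Println("second-gen standard runtime (Go 1.11+, GAE_ENV=standard)")
	case appengine.IsStandard():
		fmt.Println("first-gen standard runtime (appengine build tag)")
	case appengine.IsFlex():
		fmt.Println("flexible environment (appenginevm build tag)")
	default:
		fmt.Println("not running on App Engine")
	}
}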
diff --git a/vendor/google.golang.org/appengine/internal/identity_classic.go b/vendor/google.golang.org/appengine/internal/identity_classic.go
new file mode 100644
index 0000000..4e979f4
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/identity_classic.go
@@ -0,0 +1,61 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package internal
+
+import (
+ "appengine"
+
+ netcontext "golang.org/x/net/context"
+)
+
+func init() {
+ appengineStandard = true
+}
+
+func DefaultVersionHostname(ctx netcontext.Context) string {
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ return appengine.DefaultVersionHostname(c)
+}
+
+func Datacenter(_ netcontext.Context) string { return appengine.Datacenter() }
+func ServerSoftware() string { return appengine.ServerSoftware() }
+func InstanceID() string { return appengine.InstanceID() }
+func IsDevAppServer() bool { return appengine.IsDevAppServer() }
+
+func RequestID(ctx netcontext.Context) string {
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ return appengine.RequestID(c)
+}
+
+func ModuleName(ctx netcontext.Context) string {
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ return appengine.ModuleName(c)
+}
+func VersionID(ctx netcontext.Context) string {
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ return appengine.VersionID(c)
+}
+
+func fullyQualifiedAppID(ctx netcontext.Context) string {
+ c := fromContext(ctx)
+ if c == nil {
+ panic(errNotAppEngineContext)
+ }
+ return c.FullyQualifiedAppID()
+}
diff --git a/vendor/google.golang.org/appengine/internal/identity_flex.go b/vendor/google.golang.org/appengine/internal/identity_flex.go
new file mode 100644
index 0000000..d5e2e7b
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/identity_flex.go
@@ -0,0 +1,11 @@
+// Copyright 2018 Google LLC. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build appenginevm
+
+package internal
+
+func init() {
+ appengineFlex = true
+}
diff --git a/vendor/google.golang.org/appengine/internal/identity_vm.go b/vendor/google.golang.org/appengine/internal/identity_vm.go
new file mode 100644
index 0000000..5d80672
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/identity_vm.go
@@ -0,0 +1,134 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build !appengine
+
+package internal
+
+import (
+ "log"
+ "net/http"
+ "os"
+ "strings"
+
+ netcontext "golang.org/x/net/context"
+)
+
+// These functions are implementations of the wrapper functions
+// in ../appengine/identity.go. See that file for commentary.
+
+const (
+ hDefaultVersionHostname = "X-AppEngine-Default-Version-Hostname"
+ hRequestLogId = "X-AppEngine-Request-Log-Id"
+ hDatacenter = "X-AppEngine-Datacenter"
+)
+
+func ctxHeaders(ctx netcontext.Context) http.Header {
+ c := fromContext(ctx)
+ if c == nil {
+ return nil
+ }
+ return c.Request().Header
+}
+
+func DefaultVersionHostname(ctx netcontext.Context) string {
+ return ctxHeaders(ctx).Get(hDefaultVersionHostname)
+}
+
+func RequestID(ctx netcontext.Context) string {
+ return ctxHeaders(ctx).Get(hRequestLogId)
+}
+
+func Datacenter(ctx netcontext.Context) string {
+ if dc := ctxHeaders(ctx).Get(hDatacenter); dc != "" {
+ return dc
+ }
+ // If the header isn't set, read zone from the metadata service.
+ // It has the format projects/[NUMERIC_PROJECT_ID]/zones/[ZONE]
+ zone, err := getMetadata("instance/zone")
+ if err != nil {
+ log.Printf("Datacenter: %v", err)
+ return ""
+ }
+ parts := strings.Split(string(zone), "/")
+ if len(parts) == 0 {
+ return ""
+ }
+ return parts[len(parts)-1]
+}
+
+func ServerSoftware() string {
+ // TODO(dsymonds): Remove fallback when we've verified this.
+ if s := os.Getenv("SERVER_SOFTWARE"); s != "" {
+ return s
+ }
+ if s := os.Getenv("GAE_ENV"); s != "" {
+ return s
+ }
+ return "Google App Engine/1.x.x"
+}
+
+// TODO(dsymonds): Remove the metadata fetches.
+
+func ModuleName(_ netcontext.Context) string {
+ if s := os.Getenv("GAE_MODULE_NAME"); s != "" {
+ return s
+ }
+ if s := os.Getenv("GAE_SERVICE"); s != "" {
+ return s
+ }
+ return string(mustGetMetadata("instance/attributes/gae_backend_name"))
+}
+
+func VersionID(_ netcontext.Context) string {
+ if s1, s2 := os.Getenv("GAE_MODULE_VERSION"), os.Getenv("GAE_MINOR_VERSION"); s1 != "" && s2 != "" {
+ return s1 + "." + s2
+ }
+ if s1, s2 := os.Getenv("GAE_VERSION"), os.Getenv("GAE_DEPLOYMENT_ID"); s1 != "" && s2 != "" {
+ return s1 + "." + s2
+ }
+ return string(mustGetMetadata("instance/attributes/gae_backend_version")) + "." + string(mustGetMetadata("instance/attributes/gae_backend_minor_version"))
+}
+
+func InstanceID() string {
+ if s := os.Getenv("GAE_MODULE_INSTANCE"); s != "" {
+ return s
+ }
+ if s := os.Getenv("GAE_INSTANCE"); s != "" {
+ return s
+ }
+ return string(mustGetMetadata("instance/attributes/gae_backend_instance"))
+}
+
+func partitionlessAppID() string {
+ // gae_project has everything except the partition prefix.
+ if appID := os.Getenv("GAE_LONG_APP_ID"); appID != "" {
+ return appID
+ }
+ if project := os.Getenv("GOOGLE_CLOUD_PROJECT"); project != "" {
+ return project
+ }
+ return string(mustGetMetadata("instance/attributes/gae_project"))
+}
+
+func fullyQualifiedAppID(_ netcontext.Context) string {
+ if s := os.Getenv("GAE_APPLICATION"); s != "" {
+ return s
+ }
+ appID := partitionlessAppID()
+
+ part := os.Getenv("GAE_PARTITION")
+ if part == "" {
+ part = string(mustGetMetadata("instance/attributes/gae_partition"))
+ }
+
+ if part != "" {
+ appID = part + "~" + appID
+ }
+ return appID
+}
+
+func IsDevAppServer() bool {
+ return os.Getenv("RUN_WITH_DEVAPPSERVER") != ""
+}
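
In the "!appengine" build above, identity is resolved from request headers first, then environment variables, and finally the metadata server, and fullyQualifiedAppID composes the result as "partition~appID". The standalone sketch below mirrors that fallback shape using only the environment variables shown above; the GAE_APPLICATION shortcut and the metadata-server fallback are deliberately omitted, and the printed value is hypothetical.

package main

import (
    "fmt"
    "os"
)

// firstNonEmpty returns the first non-empty string among its arguments.
func firstNonEmpty(values ...string) string {
    for _, v := range values {
        if v != "" {
            return v
        }
    }
    return ""
}

func main() {
    // Environment-first resolution, as in partitionlessAppID above; the real
    // code falls back to the metadata server when both variables are empty.
    appID := firstNonEmpty(os.Getenv("GAE_LONG_APP_ID"), os.Getenv("GOOGLE_CLOUD_PROJECT"))

    // Partition prefix, as in fullyQualifiedAppID above (metadata fallback omitted).
    if part := os.Getenv("GAE_PARTITION"); part != "" {
        appID = part + "~" + appID
    }
    fmt.Println("fully qualified app ID:", appID) // e.g. "s~my-project" (hypothetical)
}
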
diff --git a/vendor/google.golang.org/appengine/internal/internal.go b/vendor/google.golang.org/appengine/internal/internal.go
new file mode 100644
index 0000000..051ea39
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/internal.go
@@ -0,0 +1,110 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package internal provides support for package appengine.
+//
+// Programs should not use this package directly. Its API is not stable.
+// Use packages appengine and appengine/* instead.
+package internal
+
+import (
+ "fmt"
+
+ "github.com/golang/protobuf/proto"
+
+ remotepb "google.golang.org/appengine/internal/remote_api"
+)
+
+// errorCodeMaps is a map of service name to the error code map for the service.
+var errorCodeMaps = make(map[string]map[int32]string)
+
+// RegisterErrorCodeMap is called from API implementations to register their
+// error code map. This should only be called from init functions.
+func RegisterErrorCodeMap(service string, m map[int32]string) {
+ errorCodeMaps[service] = m
+}
+
+type timeoutCodeKey struct {
+ service string
+ code int32
+}
+
+// timeoutCodes is the set of service+code pairs that represent timeouts.
+var timeoutCodes = make(map[timeoutCodeKey]bool)
+
+func RegisterTimeoutErrorCode(service string, code int32) {
+ timeoutCodes[timeoutCodeKey{service, code}] = true
+}
+
+// APIError is the type returned by appengine.Context's Call method
+// when an API call fails in an API-specific way. This may be, for instance,
+// a taskqueue API call failing with TaskQueueServiceError::UNKNOWN_QUEUE.
+type APIError struct {
+ Service string
+ Detail string
+ Code int32 // API-specific error code
+}
+
+func (e *APIError) Error() string {
+ if e.Code == 0 {
+ if e.Detail == "" {
+ return "APIError "
+ }
+ return e.Detail
+ }
+ s := fmt.Sprintf("API error %d", e.Code)
+ if m, ok := errorCodeMaps[e.Service]; ok {
+ s += " (" + e.Service + ": " + m[e.Code] + ")"
+ } else {
+ // Shouldn't happen, but provide a bit more detail if it does.
+ s = e.Service + " " + s
+ }
+ if e.Detail != "" {
+ s += ": " + e.Detail
+ }
+ return s
+}
+
+func (e *APIError) IsTimeout() bool {
+ return timeoutCodes[timeoutCodeKey{e.Service, e.Code}]
+}
+
+// CallError is the type returned by appengine.Context's Call method when an
+// API call fails in a generic way, such as RpcError::CAPABILITY_DISABLED.
+type CallError struct {
+ Detail string
+ Code int32
+ // TODO: Remove this if we get a distinguishable error code.
+ Timeout bool
+}
+
+func (e *CallError) Error() string {
+ var msg string
+ switch remotepb.RpcError_ErrorCode(e.Code) {
+ case remotepb.RpcError_UNKNOWN:
+ return e.Detail
+ case remotepb.RpcError_OVER_QUOTA:
+ msg = "Over quota"
+ case remotepb.RpcError_CAPABILITY_DISABLED:
+ msg = "Capability disabled"
+ case remotepb.RpcError_CANCELLED:
+ msg = "Canceled"
+ default:
+ msg = fmt.Sprintf("Call error %d", e.Code)
+ }
+ s := msg + ": " + e.Detail
+ if e.Timeout {
+ s += " (timeout)"
+ }
+ return s
+}
+
+func (e *CallError) IsTimeout() bool {
+ return e.Timeout
+}
+
+// NamespaceMods is a map from API service to a function that will mutate an RPC request to attach a namespace.
+// The function should be prepared to be called on the same message more than once; it should only modify the
+// RPC request the first time.
+var NamespaceMods = make(map[string]func(m proto.Message, namespace string))
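
The two error types above differ in scope: APIError carries a service-specific code (optionally mapped to a readable name via RegisterErrorCodeMap), while CallError reports generic RPC-level failures. Package internal is not meant to be imported by applications, but a minimal sketch of telling the two apart is shown below; the service name, code, and detail are hypothetical placeholders, not values taken from this patch.

package main

import (
    "errors"
    "fmt"

    "google.golang.org/appengine/internal"
)

// describe reports which of the two error kinds an App Engine call returned.
func describe(err error) string {
    var apiErr *internal.APIError
    if errors.As(err, &apiErr) {
        return fmt.Sprintf("service error (timeout=%v): %v", apiErr.IsTimeout(), apiErr)
    }
    var callErr *internal.CallError
    if errors.As(err, &callErr) {
        return fmt.Sprintf("RPC error (timeout=%v): %v", callErr.IsTimeout(), callErr)
    }
    return err.Error()
}

func main() {
    // Hypothetical values; real codes come from the failing API's error enum.
    err := &internal.APIError{Service: "logservice", Code: 2, Detail: "storage error"}
    fmt.Println(describe(err))
}
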
diff --git a/vendor/google.golang.org/appengine/internal/log/log_service.pb.go b/vendor/google.golang.org/appengine/internal/log/log_service.pb.go
new file mode 100644
index 0000000..8545ac4
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/log/log_service.pb.go
@@ -0,0 +1,1313 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/log/log_service.proto
+
+package log
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type LogServiceError_ErrorCode int32
+
+const (
+ LogServiceError_OK LogServiceError_ErrorCode = 0
+ LogServiceError_INVALID_REQUEST LogServiceError_ErrorCode = 1
+ LogServiceError_STORAGE_ERROR LogServiceError_ErrorCode = 2
+)
+
+var LogServiceError_ErrorCode_name = map[int32]string{
+ 0: "OK",
+ 1: "INVALID_REQUEST",
+ 2: "STORAGE_ERROR",
+}
+var LogServiceError_ErrorCode_value = map[string]int32{
+ "OK": 0,
+ "INVALID_REQUEST": 1,
+ "STORAGE_ERROR": 2,
+}
+
+func (x LogServiceError_ErrorCode) Enum() *LogServiceError_ErrorCode {
+ p := new(LogServiceError_ErrorCode)
+ *p = x
+ return p
+}
+func (x LogServiceError_ErrorCode) String() string {
+ return proto.EnumName(LogServiceError_ErrorCode_name, int32(x))
+}
+func (x *LogServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(LogServiceError_ErrorCode_value, data, "LogServiceError_ErrorCode")
+ if err != nil {
+ return err
+ }
+ *x = LogServiceError_ErrorCode(value)
+ return nil
+}
+func (LogServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{0, 0}
+}
+
+type LogServiceError struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogServiceError) Reset() { *m = LogServiceError{} }
+func (m *LogServiceError) String() string { return proto.CompactTextString(m) }
+func (*LogServiceError) ProtoMessage() {}
+func (*LogServiceError) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{0}
+}
+func (m *LogServiceError) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogServiceError.Unmarshal(m, b)
+}
+func (m *LogServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogServiceError.Marshal(b, m, deterministic)
+}
+func (dst *LogServiceError) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogServiceError.Merge(dst, src)
+}
+func (m *LogServiceError) XXX_Size() int {
+ return xxx_messageInfo_LogServiceError.Size(m)
+}
+func (m *LogServiceError) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogServiceError proto.InternalMessageInfo
+
+type UserAppLogLine struct {
+ TimestampUsec *int64 `protobuf:"varint,1,req,name=timestamp_usec,json=timestampUsec" json:"timestamp_usec,omitempty"`
+ Level *int64 `protobuf:"varint,2,req,name=level" json:"level,omitempty"`
+ Message *string `protobuf:"bytes,3,req,name=message" json:"message,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *UserAppLogLine) Reset() { *m = UserAppLogLine{} }
+func (m *UserAppLogLine) String() string { return proto.CompactTextString(m) }
+func (*UserAppLogLine) ProtoMessage() {}
+func (*UserAppLogLine) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{1}
+}
+func (m *UserAppLogLine) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_UserAppLogLine.Unmarshal(m, b)
+}
+func (m *UserAppLogLine) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_UserAppLogLine.Marshal(b, m, deterministic)
+}
+func (dst *UserAppLogLine) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_UserAppLogLine.Merge(dst, src)
+}
+func (m *UserAppLogLine) XXX_Size() int {
+ return xxx_messageInfo_UserAppLogLine.Size(m)
+}
+func (m *UserAppLogLine) XXX_DiscardUnknown() {
+ xxx_messageInfo_UserAppLogLine.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_UserAppLogLine proto.InternalMessageInfo
+
+func (m *UserAppLogLine) GetTimestampUsec() int64 {
+ if m != nil && m.TimestampUsec != nil {
+ return *m.TimestampUsec
+ }
+ return 0
+}
+
+func (m *UserAppLogLine) GetLevel() int64 {
+ if m != nil && m.Level != nil {
+ return *m.Level
+ }
+ return 0
+}
+
+func (m *UserAppLogLine) GetMessage() string {
+ if m != nil && m.Message != nil {
+ return *m.Message
+ }
+ return ""
+}
+
+type UserAppLogGroup struct {
+ LogLine []*UserAppLogLine `protobuf:"bytes,2,rep,name=log_line,json=logLine" json:"log_line,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *UserAppLogGroup) Reset() { *m = UserAppLogGroup{} }
+func (m *UserAppLogGroup) String() string { return proto.CompactTextString(m) }
+func (*UserAppLogGroup) ProtoMessage() {}
+func (*UserAppLogGroup) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{2}
+}
+func (m *UserAppLogGroup) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_UserAppLogGroup.Unmarshal(m, b)
+}
+func (m *UserAppLogGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_UserAppLogGroup.Marshal(b, m, deterministic)
+}
+func (dst *UserAppLogGroup) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_UserAppLogGroup.Merge(dst, src)
+}
+func (m *UserAppLogGroup) XXX_Size() int {
+ return xxx_messageInfo_UserAppLogGroup.Size(m)
+}
+func (m *UserAppLogGroup) XXX_DiscardUnknown() {
+ xxx_messageInfo_UserAppLogGroup.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_UserAppLogGroup proto.InternalMessageInfo
+
+func (m *UserAppLogGroup) GetLogLine() []*UserAppLogLine {
+ if m != nil {
+ return m.LogLine
+ }
+ return nil
+}
+
+type FlushRequest struct {
+ Logs []byte `protobuf:"bytes,1,opt,name=logs" json:"logs,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *FlushRequest) Reset() { *m = FlushRequest{} }
+func (m *FlushRequest) String() string { return proto.CompactTextString(m) }
+func (*FlushRequest) ProtoMessage() {}
+func (*FlushRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{3}
+}
+func (m *FlushRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_FlushRequest.Unmarshal(m, b)
+}
+func (m *FlushRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_FlushRequest.Marshal(b, m, deterministic)
+}
+func (dst *FlushRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_FlushRequest.Merge(dst, src)
+}
+func (m *FlushRequest) XXX_Size() int {
+ return xxx_messageInfo_FlushRequest.Size(m)
+}
+func (m *FlushRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_FlushRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_FlushRequest proto.InternalMessageInfo
+
+func (m *FlushRequest) GetLogs() []byte {
+ if m != nil {
+ return m.Logs
+ }
+ return nil
+}
+
+type SetStatusRequest struct {
+ Status *string `protobuf:"bytes,1,req,name=status" json:"status,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *SetStatusRequest) Reset() { *m = SetStatusRequest{} }
+func (m *SetStatusRequest) String() string { return proto.CompactTextString(m) }
+func (*SetStatusRequest) ProtoMessage() {}
+func (*SetStatusRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{4}
+}
+func (m *SetStatusRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_SetStatusRequest.Unmarshal(m, b)
+}
+func (m *SetStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_SetStatusRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetStatusRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_SetStatusRequest.Merge(dst, src)
+}
+func (m *SetStatusRequest) XXX_Size() int {
+ return xxx_messageInfo_SetStatusRequest.Size(m)
+}
+func (m *SetStatusRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_SetStatusRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetStatusRequest proto.InternalMessageInfo
+
+func (m *SetStatusRequest) GetStatus() string {
+ if m != nil && m.Status != nil {
+ return *m.Status
+ }
+ return ""
+}
+
+type LogOffset struct {
+ RequestId []byte `protobuf:"bytes,1,opt,name=request_id,json=requestId" json:"request_id,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogOffset) Reset() { *m = LogOffset{} }
+func (m *LogOffset) String() string { return proto.CompactTextString(m) }
+func (*LogOffset) ProtoMessage() {}
+func (*LogOffset) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{5}
+}
+func (m *LogOffset) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogOffset.Unmarshal(m, b)
+}
+func (m *LogOffset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogOffset.Marshal(b, m, deterministic)
+}
+func (dst *LogOffset) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogOffset.Merge(dst, src)
+}
+func (m *LogOffset) XXX_Size() int {
+ return xxx_messageInfo_LogOffset.Size(m)
+}
+func (m *LogOffset) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogOffset.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogOffset proto.InternalMessageInfo
+
+func (m *LogOffset) GetRequestId() []byte {
+ if m != nil {
+ return m.RequestId
+ }
+ return nil
+}
+
+type LogLine struct {
+ Time *int64 `protobuf:"varint,1,req,name=time" json:"time,omitempty"`
+ Level *int32 `protobuf:"varint,2,req,name=level" json:"level,omitempty"`
+ LogMessage *string `protobuf:"bytes,3,req,name=log_message,json=logMessage" json:"log_message,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogLine) Reset() { *m = LogLine{} }
+func (m *LogLine) String() string { return proto.CompactTextString(m) }
+func (*LogLine) ProtoMessage() {}
+func (*LogLine) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{6}
+}
+func (m *LogLine) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogLine.Unmarshal(m, b)
+}
+func (m *LogLine) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogLine.Marshal(b, m, deterministic)
+}
+func (dst *LogLine) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogLine.Merge(dst, src)
+}
+func (m *LogLine) XXX_Size() int {
+ return xxx_messageInfo_LogLine.Size(m)
+}
+func (m *LogLine) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogLine.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogLine proto.InternalMessageInfo
+
+func (m *LogLine) GetTime() int64 {
+ if m != nil && m.Time != nil {
+ return *m.Time
+ }
+ return 0
+}
+
+func (m *LogLine) GetLevel() int32 {
+ if m != nil && m.Level != nil {
+ return *m.Level
+ }
+ return 0
+}
+
+func (m *LogLine) GetLogMessage() string {
+ if m != nil && m.LogMessage != nil {
+ return *m.LogMessage
+ }
+ return ""
+}
+
+type RequestLog struct {
+ AppId *string `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+ ModuleId *string `protobuf:"bytes,37,opt,name=module_id,json=moduleId,def=default" json:"module_id,omitempty"`
+ VersionId *string `protobuf:"bytes,2,req,name=version_id,json=versionId" json:"version_id,omitempty"`
+ RequestId []byte `protobuf:"bytes,3,req,name=request_id,json=requestId" json:"request_id,omitempty"`
+ Offset *LogOffset `protobuf:"bytes,35,opt,name=offset" json:"offset,omitempty"`
+ Ip *string `protobuf:"bytes,4,req,name=ip" json:"ip,omitempty"`
+ Nickname *string `protobuf:"bytes,5,opt,name=nickname" json:"nickname,omitempty"`
+ StartTime *int64 `protobuf:"varint,6,req,name=start_time,json=startTime" json:"start_time,omitempty"`
+ EndTime *int64 `protobuf:"varint,7,req,name=end_time,json=endTime" json:"end_time,omitempty"`
+ Latency *int64 `protobuf:"varint,8,req,name=latency" json:"latency,omitempty"`
+ Mcycles *int64 `protobuf:"varint,9,req,name=mcycles" json:"mcycles,omitempty"`
+ Method *string `protobuf:"bytes,10,req,name=method" json:"method,omitempty"`
+ Resource *string `protobuf:"bytes,11,req,name=resource" json:"resource,omitempty"`
+ HttpVersion *string `protobuf:"bytes,12,req,name=http_version,json=httpVersion" json:"http_version,omitempty"`
+ Status *int32 `protobuf:"varint,13,req,name=status" json:"status,omitempty"`
+ ResponseSize *int64 `protobuf:"varint,14,req,name=response_size,json=responseSize" json:"response_size,omitempty"`
+ Referrer *string `protobuf:"bytes,15,opt,name=referrer" json:"referrer,omitempty"`
+ UserAgent *string `protobuf:"bytes,16,opt,name=user_agent,json=userAgent" json:"user_agent,omitempty"`
+ UrlMapEntry *string `protobuf:"bytes,17,req,name=url_map_entry,json=urlMapEntry" json:"url_map_entry,omitempty"`
+ Combined *string `protobuf:"bytes,18,req,name=combined" json:"combined,omitempty"`
+ ApiMcycles *int64 `protobuf:"varint,19,opt,name=api_mcycles,json=apiMcycles" json:"api_mcycles,omitempty"`
+ Host *string `protobuf:"bytes,20,opt,name=host" json:"host,omitempty"`
+ Cost *float64 `protobuf:"fixed64,21,opt,name=cost" json:"cost,omitempty"`
+ TaskQueueName *string `protobuf:"bytes,22,opt,name=task_queue_name,json=taskQueueName" json:"task_queue_name,omitempty"`
+ TaskName *string `protobuf:"bytes,23,opt,name=task_name,json=taskName" json:"task_name,omitempty"`
+ WasLoadingRequest *bool `protobuf:"varint,24,opt,name=was_loading_request,json=wasLoadingRequest" json:"was_loading_request,omitempty"`
+ PendingTime *int64 `protobuf:"varint,25,opt,name=pending_time,json=pendingTime" json:"pending_time,omitempty"`
+ ReplicaIndex *int32 `protobuf:"varint,26,opt,name=replica_index,json=replicaIndex,def=-1" json:"replica_index,omitempty"`
+ Finished *bool `protobuf:"varint,27,opt,name=finished,def=1" json:"finished,omitempty"`
+ CloneKey []byte `protobuf:"bytes,28,opt,name=clone_key,json=cloneKey" json:"clone_key,omitempty"`
+ Line []*LogLine `protobuf:"bytes,29,rep,name=line" json:"line,omitempty"`
+ LinesIncomplete *bool `protobuf:"varint,36,opt,name=lines_incomplete,json=linesIncomplete" json:"lines_incomplete,omitempty"`
+ AppEngineRelease []byte `protobuf:"bytes,38,opt,name=app_engine_release,json=appEngineRelease" json:"app_engine_release,omitempty"`
+ ExitReason *int32 `protobuf:"varint,30,opt,name=exit_reason,json=exitReason" json:"exit_reason,omitempty"`
+ WasThrottledForTime *bool `protobuf:"varint,31,opt,name=was_throttled_for_time,json=wasThrottledForTime" json:"was_throttled_for_time,omitempty"`
+ WasThrottledForRequests *bool `protobuf:"varint,32,opt,name=was_throttled_for_requests,json=wasThrottledForRequests" json:"was_throttled_for_requests,omitempty"`
+ ThrottledTime *int64 `protobuf:"varint,33,opt,name=throttled_time,json=throttledTime" json:"throttled_time,omitempty"`
+ ServerName []byte `protobuf:"bytes,34,opt,name=server_name,json=serverName" json:"server_name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *RequestLog) Reset() { *m = RequestLog{} }
+func (m *RequestLog) String() string { return proto.CompactTextString(m) }
+func (*RequestLog) ProtoMessage() {}
+func (*RequestLog) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{7}
+}
+func (m *RequestLog) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_RequestLog.Unmarshal(m, b)
+}
+func (m *RequestLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_RequestLog.Marshal(b, m, deterministic)
+}
+func (dst *RequestLog) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_RequestLog.Merge(dst, src)
+}
+func (m *RequestLog) XXX_Size() int {
+ return xxx_messageInfo_RequestLog.Size(m)
+}
+func (m *RequestLog) XXX_DiscardUnknown() {
+ xxx_messageInfo_RequestLog.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_RequestLog proto.InternalMessageInfo
+
+const Default_RequestLog_ModuleId string = "default"
+const Default_RequestLog_ReplicaIndex int32 = -1
+const Default_RequestLog_Finished bool = true
+
+func (m *RequestLog) GetAppId() string {
+ if m != nil && m.AppId != nil {
+ return *m.AppId
+ }
+ return ""
+}
+
+func (m *RequestLog) GetModuleId() string {
+ if m != nil && m.ModuleId != nil {
+ return *m.ModuleId
+ }
+ return Default_RequestLog_ModuleId
+}
+
+func (m *RequestLog) GetVersionId() string {
+ if m != nil && m.VersionId != nil {
+ return *m.VersionId
+ }
+ return ""
+}
+
+func (m *RequestLog) GetRequestId() []byte {
+ if m != nil {
+ return m.RequestId
+ }
+ return nil
+}
+
+func (m *RequestLog) GetOffset() *LogOffset {
+ if m != nil {
+ return m.Offset
+ }
+ return nil
+}
+
+func (m *RequestLog) GetIp() string {
+ if m != nil && m.Ip != nil {
+ return *m.Ip
+ }
+ return ""
+}
+
+func (m *RequestLog) GetNickname() string {
+ if m != nil && m.Nickname != nil {
+ return *m.Nickname
+ }
+ return ""
+}
+
+func (m *RequestLog) GetStartTime() int64 {
+ if m != nil && m.StartTime != nil {
+ return *m.StartTime
+ }
+ return 0
+}
+
+func (m *RequestLog) GetEndTime() int64 {
+ if m != nil && m.EndTime != nil {
+ return *m.EndTime
+ }
+ return 0
+}
+
+func (m *RequestLog) GetLatency() int64 {
+ if m != nil && m.Latency != nil {
+ return *m.Latency
+ }
+ return 0
+}
+
+func (m *RequestLog) GetMcycles() int64 {
+ if m != nil && m.Mcycles != nil {
+ return *m.Mcycles
+ }
+ return 0
+}
+
+func (m *RequestLog) GetMethod() string {
+ if m != nil && m.Method != nil {
+ return *m.Method
+ }
+ return ""
+}
+
+func (m *RequestLog) GetResource() string {
+ if m != nil && m.Resource != nil {
+ return *m.Resource
+ }
+ return ""
+}
+
+func (m *RequestLog) GetHttpVersion() string {
+ if m != nil && m.HttpVersion != nil {
+ return *m.HttpVersion
+ }
+ return ""
+}
+
+func (m *RequestLog) GetStatus() int32 {
+ if m != nil && m.Status != nil {
+ return *m.Status
+ }
+ return 0
+}
+
+func (m *RequestLog) GetResponseSize() int64 {
+ if m != nil && m.ResponseSize != nil {
+ return *m.ResponseSize
+ }
+ return 0
+}
+
+func (m *RequestLog) GetReferrer() string {
+ if m != nil && m.Referrer != nil {
+ return *m.Referrer
+ }
+ return ""
+}
+
+func (m *RequestLog) GetUserAgent() string {
+ if m != nil && m.UserAgent != nil {
+ return *m.UserAgent
+ }
+ return ""
+}
+
+func (m *RequestLog) GetUrlMapEntry() string {
+ if m != nil && m.UrlMapEntry != nil {
+ return *m.UrlMapEntry
+ }
+ return ""
+}
+
+func (m *RequestLog) GetCombined() string {
+ if m != nil && m.Combined != nil {
+ return *m.Combined
+ }
+ return ""
+}
+
+func (m *RequestLog) GetApiMcycles() int64 {
+ if m != nil && m.ApiMcycles != nil {
+ return *m.ApiMcycles
+ }
+ return 0
+}
+
+func (m *RequestLog) GetHost() string {
+ if m != nil && m.Host != nil {
+ return *m.Host
+ }
+ return ""
+}
+
+func (m *RequestLog) GetCost() float64 {
+ if m != nil && m.Cost != nil {
+ return *m.Cost
+ }
+ return 0
+}
+
+func (m *RequestLog) GetTaskQueueName() string {
+ if m != nil && m.TaskQueueName != nil {
+ return *m.TaskQueueName
+ }
+ return ""
+}
+
+func (m *RequestLog) GetTaskName() string {
+ if m != nil && m.TaskName != nil {
+ return *m.TaskName
+ }
+ return ""
+}
+
+func (m *RequestLog) GetWasLoadingRequest() bool {
+ if m != nil && m.WasLoadingRequest != nil {
+ return *m.WasLoadingRequest
+ }
+ return false
+}
+
+func (m *RequestLog) GetPendingTime() int64 {
+ if m != nil && m.PendingTime != nil {
+ return *m.PendingTime
+ }
+ return 0
+}
+
+func (m *RequestLog) GetReplicaIndex() int32 {
+ if m != nil && m.ReplicaIndex != nil {
+ return *m.ReplicaIndex
+ }
+ return Default_RequestLog_ReplicaIndex
+}
+
+func (m *RequestLog) GetFinished() bool {
+ if m != nil && m.Finished != nil {
+ return *m.Finished
+ }
+ return Default_RequestLog_Finished
+}
+
+func (m *RequestLog) GetCloneKey() []byte {
+ if m != nil {
+ return m.CloneKey
+ }
+ return nil
+}
+
+func (m *RequestLog) GetLine() []*LogLine {
+ if m != nil {
+ return m.Line
+ }
+ return nil
+}
+
+func (m *RequestLog) GetLinesIncomplete() bool {
+ if m != nil && m.LinesIncomplete != nil {
+ return *m.LinesIncomplete
+ }
+ return false
+}
+
+func (m *RequestLog) GetAppEngineRelease() []byte {
+ if m != nil {
+ return m.AppEngineRelease
+ }
+ return nil
+}
+
+func (m *RequestLog) GetExitReason() int32 {
+ if m != nil && m.ExitReason != nil {
+ return *m.ExitReason
+ }
+ return 0
+}
+
+func (m *RequestLog) GetWasThrottledForTime() bool {
+ if m != nil && m.WasThrottledForTime != nil {
+ return *m.WasThrottledForTime
+ }
+ return false
+}
+
+func (m *RequestLog) GetWasThrottledForRequests() bool {
+ if m != nil && m.WasThrottledForRequests != nil {
+ return *m.WasThrottledForRequests
+ }
+ return false
+}
+
+func (m *RequestLog) GetThrottledTime() int64 {
+ if m != nil && m.ThrottledTime != nil {
+ return *m.ThrottledTime
+ }
+ return 0
+}
+
+func (m *RequestLog) GetServerName() []byte {
+ if m != nil {
+ return m.ServerName
+ }
+ return nil
+}
+
+type LogModuleVersion struct {
+ ModuleId *string `protobuf:"bytes,1,opt,name=module_id,json=moduleId,def=default" json:"module_id,omitempty"`
+ VersionId *string `protobuf:"bytes,2,opt,name=version_id,json=versionId" json:"version_id,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogModuleVersion) Reset() { *m = LogModuleVersion{} }
+func (m *LogModuleVersion) String() string { return proto.CompactTextString(m) }
+func (*LogModuleVersion) ProtoMessage() {}
+func (*LogModuleVersion) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{8}
+}
+func (m *LogModuleVersion) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogModuleVersion.Unmarshal(m, b)
+}
+func (m *LogModuleVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogModuleVersion.Marshal(b, m, deterministic)
+}
+func (dst *LogModuleVersion) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogModuleVersion.Merge(dst, src)
+}
+func (m *LogModuleVersion) XXX_Size() int {
+ return xxx_messageInfo_LogModuleVersion.Size(m)
+}
+func (m *LogModuleVersion) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogModuleVersion.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogModuleVersion proto.InternalMessageInfo
+
+const Default_LogModuleVersion_ModuleId string = "default"
+
+func (m *LogModuleVersion) GetModuleId() string {
+ if m != nil && m.ModuleId != nil {
+ return *m.ModuleId
+ }
+ return Default_LogModuleVersion_ModuleId
+}
+
+func (m *LogModuleVersion) GetVersionId() string {
+ if m != nil && m.VersionId != nil {
+ return *m.VersionId
+ }
+ return ""
+}
+
+type LogReadRequest struct {
+ AppId *string `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+ VersionId []string `protobuf:"bytes,2,rep,name=version_id,json=versionId" json:"version_id,omitempty"`
+ ModuleVersion []*LogModuleVersion `protobuf:"bytes,19,rep,name=module_version,json=moduleVersion" json:"module_version,omitempty"`
+ StartTime *int64 `protobuf:"varint,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"`
+ EndTime *int64 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"`
+ Offset *LogOffset `protobuf:"bytes,5,opt,name=offset" json:"offset,omitempty"`
+ RequestId [][]byte `protobuf:"bytes,6,rep,name=request_id,json=requestId" json:"request_id,omitempty"`
+ MinimumLogLevel *int32 `protobuf:"varint,7,opt,name=minimum_log_level,json=minimumLogLevel" json:"minimum_log_level,omitempty"`
+ IncludeIncomplete *bool `protobuf:"varint,8,opt,name=include_incomplete,json=includeIncomplete" json:"include_incomplete,omitempty"`
+ Count *int64 `protobuf:"varint,9,opt,name=count" json:"count,omitempty"`
+ CombinedLogRegex *string `protobuf:"bytes,14,opt,name=combined_log_regex,json=combinedLogRegex" json:"combined_log_regex,omitempty"`
+ HostRegex *string `protobuf:"bytes,15,opt,name=host_regex,json=hostRegex" json:"host_regex,omitempty"`
+ ReplicaIndex *int32 `protobuf:"varint,16,opt,name=replica_index,json=replicaIndex" json:"replica_index,omitempty"`
+ IncludeAppLogs *bool `protobuf:"varint,10,opt,name=include_app_logs,json=includeAppLogs" json:"include_app_logs,omitempty"`
+ AppLogsPerRequest *int32 `protobuf:"varint,17,opt,name=app_logs_per_request,json=appLogsPerRequest" json:"app_logs_per_request,omitempty"`
+ IncludeHost *bool `protobuf:"varint,11,opt,name=include_host,json=includeHost" json:"include_host,omitempty"`
+ IncludeAll *bool `protobuf:"varint,12,opt,name=include_all,json=includeAll" json:"include_all,omitempty"`
+ CacheIterator *bool `protobuf:"varint,13,opt,name=cache_iterator,json=cacheIterator" json:"cache_iterator,omitempty"`
+ NumShards *int32 `protobuf:"varint,18,opt,name=num_shards,json=numShards" json:"num_shards,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogReadRequest) Reset() { *m = LogReadRequest{} }
+func (m *LogReadRequest) String() string { return proto.CompactTextString(m) }
+func (*LogReadRequest) ProtoMessage() {}
+func (*LogReadRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{9}
+}
+func (m *LogReadRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogReadRequest.Unmarshal(m, b)
+}
+func (m *LogReadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogReadRequest.Marshal(b, m, deterministic)
+}
+func (dst *LogReadRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogReadRequest.Merge(dst, src)
+}
+func (m *LogReadRequest) XXX_Size() int {
+ return xxx_messageInfo_LogReadRequest.Size(m)
+}
+func (m *LogReadRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogReadRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogReadRequest proto.InternalMessageInfo
+
+func (m *LogReadRequest) GetAppId() string {
+ if m != nil && m.AppId != nil {
+ return *m.AppId
+ }
+ return ""
+}
+
+func (m *LogReadRequest) GetVersionId() []string {
+ if m != nil {
+ return m.VersionId
+ }
+ return nil
+}
+
+func (m *LogReadRequest) GetModuleVersion() []*LogModuleVersion {
+ if m != nil {
+ return m.ModuleVersion
+ }
+ return nil
+}
+
+func (m *LogReadRequest) GetStartTime() int64 {
+ if m != nil && m.StartTime != nil {
+ return *m.StartTime
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetEndTime() int64 {
+ if m != nil && m.EndTime != nil {
+ return *m.EndTime
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetOffset() *LogOffset {
+ if m != nil {
+ return m.Offset
+ }
+ return nil
+}
+
+func (m *LogReadRequest) GetRequestId() [][]byte {
+ if m != nil {
+ return m.RequestId
+ }
+ return nil
+}
+
+func (m *LogReadRequest) GetMinimumLogLevel() int32 {
+ if m != nil && m.MinimumLogLevel != nil {
+ return *m.MinimumLogLevel
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetIncludeIncomplete() bool {
+ if m != nil && m.IncludeIncomplete != nil {
+ return *m.IncludeIncomplete
+ }
+ return false
+}
+
+func (m *LogReadRequest) GetCount() int64 {
+ if m != nil && m.Count != nil {
+ return *m.Count
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetCombinedLogRegex() string {
+ if m != nil && m.CombinedLogRegex != nil {
+ return *m.CombinedLogRegex
+ }
+ return ""
+}
+
+func (m *LogReadRequest) GetHostRegex() string {
+ if m != nil && m.HostRegex != nil {
+ return *m.HostRegex
+ }
+ return ""
+}
+
+func (m *LogReadRequest) GetReplicaIndex() int32 {
+ if m != nil && m.ReplicaIndex != nil {
+ return *m.ReplicaIndex
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetIncludeAppLogs() bool {
+ if m != nil && m.IncludeAppLogs != nil {
+ return *m.IncludeAppLogs
+ }
+ return false
+}
+
+func (m *LogReadRequest) GetAppLogsPerRequest() int32 {
+ if m != nil && m.AppLogsPerRequest != nil {
+ return *m.AppLogsPerRequest
+ }
+ return 0
+}
+
+func (m *LogReadRequest) GetIncludeHost() bool {
+ if m != nil && m.IncludeHost != nil {
+ return *m.IncludeHost
+ }
+ return false
+}
+
+func (m *LogReadRequest) GetIncludeAll() bool {
+ if m != nil && m.IncludeAll != nil {
+ return *m.IncludeAll
+ }
+ return false
+}
+
+func (m *LogReadRequest) GetCacheIterator() bool {
+ if m != nil && m.CacheIterator != nil {
+ return *m.CacheIterator
+ }
+ return false
+}
+
+func (m *LogReadRequest) GetNumShards() int32 {
+ if m != nil && m.NumShards != nil {
+ return *m.NumShards
+ }
+ return 0
+}
+
+type LogReadResponse struct {
+ Log []*RequestLog `protobuf:"bytes,1,rep,name=log" json:"log,omitempty"`
+ Offset *LogOffset `protobuf:"bytes,2,opt,name=offset" json:"offset,omitempty"`
+ LastEndTime *int64 `protobuf:"varint,3,opt,name=last_end_time,json=lastEndTime" json:"last_end_time,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogReadResponse) Reset() { *m = LogReadResponse{} }
+func (m *LogReadResponse) String() string { return proto.CompactTextString(m) }
+func (*LogReadResponse) ProtoMessage() {}
+func (*LogReadResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{10}
+}
+func (m *LogReadResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogReadResponse.Unmarshal(m, b)
+}
+func (m *LogReadResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogReadResponse.Marshal(b, m, deterministic)
+}
+func (dst *LogReadResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogReadResponse.Merge(dst, src)
+}
+func (m *LogReadResponse) XXX_Size() int {
+ return xxx_messageInfo_LogReadResponse.Size(m)
+}
+func (m *LogReadResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogReadResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogReadResponse proto.InternalMessageInfo
+
+func (m *LogReadResponse) GetLog() []*RequestLog {
+ if m != nil {
+ return m.Log
+ }
+ return nil
+}
+
+func (m *LogReadResponse) GetOffset() *LogOffset {
+ if m != nil {
+ return m.Offset
+ }
+ return nil
+}
+
+func (m *LogReadResponse) GetLastEndTime() int64 {
+ if m != nil && m.LastEndTime != nil {
+ return *m.LastEndTime
+ }
+ return 0
+}
+
+type LogUsageRecord struct {
+ VersionId *string `protobuf:"bytes,1,opt,name=version_id,json=versionId" json:"version_id,omitempty"`
+ StartTime *int32 `protobuf:"varint,2,opt,name=start_time,json=startTime" json:"start_time,omitempty"`
+ EndTime *int32 `protobuf:"varint,3,opt,name=end_time,json=endTime" json:"end_time,omitempty"`
+ Count *int64 `protobuf:"varint,4,opt,name=count" json:"count,omitempty"`
+ TotalSize *int64 `protobuf:"varint,5,opt,name=total_size,json=totalSize" json:"total_size,omitempty"`
+ Records *int32 `protobuf:"varint,6,opt,name=records" json:"records,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogUsageRecord) Reset() { *m = LogUsageRecord{} }
+func (m *LogUsageRecord) String() string { return proto.CompactTextString(m) }
+func (*LogUsageRecord) ProtoMessage() {}
+func (*LogUsageRecord) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{11}
+}
+func (m *LogUsageRecord) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogUsageRecord.Unmarshal(m, b)
+}
+func (m *LogUsageRecord) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogUsageRecord.Marshal(b, m, deterministic)
+}
+func (dst *LogUsageRecord) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogUsageRecord.Merge(dst, src)
+}
+func (m *LogUsageRecord) XXX_Size() int {
+ return xxx_messageInfo_LogUsageRecord.Size(m)
+}
+func (m *LogUsageRecord) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogUsageRecord.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogUsageRecord proto.InternalMessageInfo
+
+func (m *LogUsageRecord) GetVersionId() string {
+ if m != nil && m.VersionId != nil {
+ return *m.VersionId
+ }
+ return ""
+}
+
+func (m *LogUsageRecord) GetStartTime() int32 {
+ if m != nil && m.StartTime != nil {
+ return *m.StartTime
+ }
+ return 0
+}
+
+func (m *LogUsageRecord) GetEndTime() int32 {
+ if m != nil && m.EndTime != nil {
+ return *m.EndTime
+ }
+ return 0
+}
+
+func (m *LogUsageRecord) GetCount() int64 {
+ if m != nil && m.Count != nil {
+ return *m.Count
+ }
+ return 0
+}
+
+func (m *LogUsageRecord) GetTotalSize() int64 {
+ if m != nil && m.TotalSize != nil {
+ return *m.TotalSize
+ }
+ return 0
+}
+
+func (m *LogUsageRecord) GetRecords() int32 {
+ if m != nil && m.Records != nil {
+ return *m.Records
+ }
+ return 0
+}
+
+type LogUsageRequest struct {
+ AppId *string `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+ VersionId []string `protobuf:"bytes,2,rep,name=version_id,json=versionId" json:"version_id,omitempty"`
+ StartTime *int32 `protobuf:"varint,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"`
+ EndTime *int32 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"`
+ ResolutionHours *uint32 `protobuf:"varint,5,opt,name=resolution_hours,json=resolutionHours,def=1" json:"resolution_hours,omitempty"`
+ CombineVersions *bool `protobuf:"varint,6,opt,name=combine_versions,json=combineVersions" json:"combine_versions,omitempty"`
+ UsageVersion *int32 `protobuf:"varint,7,opt,name=usage_version,json=usageVersion" json:"usage_version,omitempty"`
+ VersionsOnly *bool `protobuf:"varint,8,opt,name=versions_only,json=versionsOnly" json:"versions_only,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogUsageRequest) Reset() { *m = LogUsageRequest{} }
+func (m *LogUsageRequest) String() string { return proto.CompactTextString(m) }
+func (*LogUsageRequest) ProtoMessage() {}
+func (*LogUsageRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{12}
+}
+func (m *LogUsageRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogUsageRequest.Unmarshal(m, b)
+}
+func (m *LogUsageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogUsageRequest.Marshal(b, m, deterministic)
+}
+func (dst *LogUsageRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogUsageRequest.Merge(dst, src)
+}
+func (m *LogUsageRequest) XXX_Size() int {
+ return xxx_messageInfo_LogUsageRequest.Size(m)
+}
+func (m *LogUsageRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogUsageRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogUsageRequest proto.InternalMessageInfo
+
+const Default_LogUsageRequest_ResolutionHours uint32 = 1
+
+func (m *LogUsageRequest) GetAppId() string {
+ if m != nil && m.AppId != nil {
+ return *m.AppId
+ }
+ return ""
+}
+
+func (m *LogUsageRequest) GetVersionId() []string {
+ if m != nil {
+ return m.VersionId
+ }
+ return nil
+}
+
+func (m *LogUsageRequest) GetStartTime() int32 {
+ if m != nil && m.StartTime != nil {
+ return *m.StartTime
+ }
+ return 0
+}
+
+func (m *LogUsageRequest) GetEndTime() int32 {
+ if m != nil && m.EndTime != nil {
+ return *m.EndTime
+ }
+ return 0
+}
+
+func (m *LogUsageRequest) GetResolutionHours() uint32 {
+ if m != nil && m.ResolutionHours != nil {
+ return *m.ResolutionHours
+ }
+ return Default_LogUsageRequest_ResolutionHours
+}
+
+func (m *LogUsageRequest) GetCombineVersions() bool {
+ if m != nil && m.CombineVersions != nil {
+ return *m.CombineVersions
+ }
+ return false
+}
+
+func (m *LogUsageRequest) GetUsageVersion() int32 {
+ if m != nil && m.UsageVersion != nil {
+ return *m.UsageVersion
+ }
+ return 0
+}
+
+func (m *LogUsageRequest) GetVersionsOnly() bool {
+ if m != nil && m.VersionsOnly != nil {
+ return *m.VersionsOnly
+ }
+ return false
+}
+
+type LogUsageResponse struct {
+ Usage []*LogUsageRecord `protobuf:"bytes,1,rep,name=usage" json:"usage,omitempty"`
+ Summary *LogUsageRecord `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *LogUsageResponse) Reset() { *m = LogUsageResponse{} }
+func (m *LogUsageResponse) String() string { return proto.CompactTextString(m) }
+func (*LogUsageResponse) ProtoMessage() {}
+func (*LogUsageResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_log_service_f054fd4b5012319d, []int{13}
+}
+func (m *LogUsageResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_LogUsageResponse.Unmarshal(m, b)
+}
+func (m *LogUsageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_LogUsageResponse.Marshal(b, m, deterministic)
+}
+func (dst *LogUsageResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_LogUsageResponse.Merge(dst, src)
+}
+func (m *LogUsageResponse) XXX_Size() int {
+ return xxx_messageInfo_LogUsageResponse.Size(m)
+}
+func (m *LogUsageResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_LogUsageResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LogUsageResponse proto.InternalMessageInfo
+
+func (m *LogUsageResponse) GetUsage() []*LogUsageRecord {
+ if m != nil {
+ return m.Usage
+ }
+ return nil
+}
+
+func (m *LogUsageResponse) GetSummary() *LogUsageRecord {
+ if m != nil {
+ return m.Summary
+ }
+ return nil
+}
+
+func init() {
+ proto.RegisterType((*LogServiceError)(nil), "appengine.LogServiceError")
+ proto.RegisterType((*UserAppLogLine)(nil), "appengine.UserAppLogLine")
+ proto.RegisterType((*UserAppLogGroup)(nil), "appengine.UserAppLogGroup")
+ proto.RegisterType((*FlushRequest)(nil), "appengine.FlushRequest")
+ proto.RegisterType((*SetStatusRequest)(nil), "appengine.SetStatusRequest")
+ proto.RegisterType((*LogOffset)(nil), "appengine.LogOffset")
+ proto.RegisterType((*LogLine)(nil), "appengine.LogLine")
+ proto.RegisterType((*RequestLog)(nil), "appengine.RequestLog")
+ proto.RegisterType((*LogModuleVersion)(nil), "appengine.LogModuleVersion")
+ proto.RegisterType((*LogReadRequest)(nil), "appengine.LogReadRequest")
+ proto.RegisterType((*LogReadResponse)(nil), "appengine.LogReadResponse")
+ proto.RegisterType((*LogUsageRecord)(nil), "appengine.LogUsageRecord")
+ proto.RegisterType((*LogUsageRequest)(nil), "appengine.LogUsageRequest")
+ proto.RegisterType((*LogUsageResponse)(nil), "appengine.LogUsageResponse")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/log/log_service.proto", fileDescriptor_log_service_f054fd4b5012319d)
+}
+
+var fileDescriptor_log_service_f054fd4b5012319d = []byte{
+ // 1553 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x72, 0xdb, 0xc6,
+ 0x15, 0x2e, 0x48, 0x51, 0x24, 0x0f, 0x49, 0x91, 0x5a, 0xcb, 0xce, 0xda, 0xae, 0x6b, 0x1a, 0x4e,
+ 0x1c, 0xd6, 0x93, 0x48, 0x93, 0xa4, 0x57, 0xca, 0x95, 0xd3, 0x2a, 0x8e, 0x26, 0xb4, 0xd5, 0x40,
+ 0x72, 0x3a, 0xd3, 0x1b, 0x0c, 0x0a, 0x1c, 0x81, 0x18, 0x2f, 0xb1, 0xc8, 0xee, 0xc2, 0x91, 0x72,
+ 0xdb, 0xdb, 0x3e, 0x46, 0x1f, 0xa2, 0xaf, 0xd2, 0xb7, 0xe9, 0xec, 0xd9, 0x05, 0x44, 0x2a, 0x4d,
+ 0xc6, 0x33, 0xb9, 0xe0, 0x10, 0xfb, 0x9d, 0x83, 0xdd, 0xf3, 0xf3, 0x9d, 0x6f, 0x01, 0xc7, 0xb9,
+ 0x94, 0xb9, 0xc0, 0xc3, 0x5c, 0x8a, 0xa4, 0xcc, 0x0f, 0xa5, 0xca, 0x8f, 0x92, 0xaa, 0xc2, 0x32,
+ 0x2f, 0x4a, 0x3c, 0x2a, 0x4a, 0x83, 0xaa, 0x4c, 0xc4, 0x91, 0x90, 0xb9, 0xfd, 0xc5, 0x1a, 0xd5,
+ 0xbb, 0x22, 0xc5, 0xc3, 0x4a, 0x49, 0x23, 0xd9, 0xb0, 0xf5, 0x0c, 0x5f, 0xc3, 0x74, 0x29, 0xf3,
+ 0x73, 0x67, 0x3e, 0x51, 0x4a, 0xaa, 0xf0, 0x4b, 0x18, 0xd2, 0xc3, 0x9f, 0x65, 0x86, 0x6c, 0x17,
+ 0x3a, 0x67, 0xdf, 0xce, 0x7e, 0xc7, 0xee, 0xc0, 0xf4, 0xf4, 0xf5, 0xf7, 0x2f, 0x96, 0xa7, 0x7f,
+ 0x89, 0xa3, 0x93, 0xef, 0xde, 0x9c, 0x9c, 0x5f, 0xcc, 0x02, 0xb6, 0x0f, 0x93, 0xf3, 0x8b, 0xb3,
+ 0xe8, 0xc5, 0xcb, 0x93, 0xf8, 0x24, 0x8a, 0xce, 0xa2, 0x59, 0x27, 0xcc, 0x61, 0xef, 0x8d, 0x46,
+ 0xf5, 0xa2, 0xaa, 0x96, 0x32, 0x5f, 0x16, 0x25, 0xb2, 0x8f, 0x60, 0xcf, 0x14, 0x6b, 0xd4, 0x26,
+ 0x59, 0x57, 0x71, 0xad, 0x31, 0xe5, 0xc1, 0xbc, 0xb3, 0xe8, 0x46, 0x93, 0x16, 0x7d, 0xa3, 0x31,
+ 0x65, 0x07, 0xd0, 0x13, 0xf8, 0x0e, 0x05, 0xef, 0x90, 0xd5, 0x2d, 0x18, 0x87, 0xfe, 0x1a, 0xb5,
+ 0x4e, 0x72, 0xe4, 0xdd, 0x79, 0x67, 0x31, 0x8c, 0x9a, 0x65, 0xf8, 0x12, 0xa6, 0x37, 0x07, 0xbd,
+ 0x54, 0xb2, 0xae, 0xd8, 0x9f, 0x60, 0x60, 0x73, 0x15, 0x45, 0x89, 0xbc, 0x33, 0xef, 0x2e, 0x46,
+ 0x9f, 0xdf, 0x3f, 0x6c, 0x33, 0x3d, 0xdc, 0x0e, 0x2b, 0xea, 0x0b, 0xf7, 0x10, 0x86, 0x30, 0xfe,
+ 0x5a, 0xd4, 0x7a, 0x15, 0xe1, 0x0f, 0x35, 0x6a, 0xc3, 0x18, 0xec, 0x08, 0x99, 0x6b, 0x1e, 0xcc,
+ 0x83, 0xc5, 0x38, 0xa2, 0xe7, 0xf0, 0x39, 0xcc, 0xce, 0xd1, 0x9c, 0x9b, 0xc4, 0xd4, 0xba, 0xf1,
+ 0xbb, 0x07, 0xbb, 0x9a, 0x00, 0xca, 0x67, 0x18, 0xf9, 0x55, 0xf8, 0x1c, 0x86, 0x4b, 0x99, 0x9f,
+ 0x5d, 0x5e, 0x6a, 0x34, 0xec, 0x11, 0x80, 0x72, 0xfe, 0x71, 0x91, 0xf9, 0x2d, 0x87, 0x1e, 0x39,
+ 0xcd, 0xc2, 0x0b, 0xe8, 0x37, 0x65, 0x62, 0xb0, 0x63, 0x0b, 0xe2, 0x8b, 0x43, 0xcf, 0xdb, 0x35,
+ 0xe9, 0x35, 0x35, 0x79, 0x0c, 0x23, 0x9b, 0xe6, 0x76, 0x5d, 0x40, 0xc8, 0xfc, 0x95, 0x2f, 0xcd,
+ 0x3f, 0x01, 0xc0, 0x47, 0xb9, 0x94, 0x39, 0xbb, 0x0b, 0xbb, 0x49, 0x55, 0xb9, 0xf3, 0xad, 0x6b,
+ 0x2f, 0xa9, 0xaa, 0xd3, 0x8c, 0x7d, 0x08, 0xc3, 0xb5, 0xcc, 0x6a, 0x81, 0xd6, 0xf2, 0xd1, 0x3c,
+ 0x58, 0x0c, 0x8f, 0xfb, 0x19, 0x5e, 0x26, 0xb5, 0x30, 0xd1, 0xc0, 0x59, 0x4e, 0x33, 0x9b, 0xc0,
+ 0x3b, 0x54, 0xba, 0x90, 0xa5, 0x75, 0xeb, 0xd0, 0x06, 0x43, 0x8f, 0x38, 0xf3, 0x46, 0x7e, 0x36,
+ 0x94, 0xcd, 0xfc, 0xd8, 0x27, 0xb0, 0x2b, 0xa9, 0x10, 0xfc, 0xe9, 0x3c, 0x58, 0x8c, 0x3e, 0x3f,
+ 0xd8, 0xe8, 0x47, 0x5b, 0xa4, 0xc8, 0xfb, 0xb0, 0x3d, 0xe8, 0x14, 0x15, 0xdf, 0xa1, 0x33, 0x3a,
+ 0x45, 0xc5, 0x1e, 0xc0, 0xa0, 0x2c, 0xd2, 0xb7, 0x65, 0xb2, 0x46, 0xde, 0xb3, 0x01, 0x46, 0xed,
+ 0xda, 0x1e, 0xac, 0x4d, 0xa2, 0x4c, 0x4c, 0x45, 0xdb, 0xa5, 0xa2, 0x0d, 0x09, 0xb9, 0xb0, 0x95,
+ 0xbb, 0x0f, 0x03, 0x2c, 0x33, 0x67, 0xec, 0x93, 0xb1, 0x8f, 0x65, 0x46, 0x26, 0x0e, 0x7d, 0x91,
+ 0x18, 0x2c, 0xd3, 0x6b, 0x3e, 0x70, 0x16, 0xbf, 0x24, 0xb2, 0xa5, 0xd7, 0xa9, 0x40, 0xcd, 0x87,
+ 0xce, 0xe2, 0x97, 0xb6, 0xd7, 0x6b, 0x34, 0x2b, 0x99, 0x71, 0x70, 0xbd, 0x76, 0x2b, 0x1b, 0xa1,
+ 0x42, 0x2d, 0x6b, 0x95, 0x22, 0x1f, 0x91, 0xa5, 0x5d, 0xb3, 0x27, 0x30, 0x5e, 0x19, 0x53, 0xc5,
+ 0xbe, 0x58, 0x7c, 0x4c, 0xf6, 0x91, 0xc5, 0xbe, 0x77, 0xd0, 0x06, 0x85, 0x26, 0xd4, 0x60, 0xbf,
+ 0x62, 0x4f, 0x61, 0xa2, 0x50, 0x57, 0xb2, 0xd4, 0x18, 0xeb, 0xe2, 0x27, 0xe4, 0x7b, 0x14, 0xce,
+ 0xb8, 0x01, 0xcf, 0x8b, 0x9f, 0xd0, 0x9d, 0x7d, 0x89, 0x4a, 0xa1, 0xe2, 0x53, 0x57, 0x9d, 0x66,
+ 0x6d, 0xab, 0x53, 0x6b, 0x54, 0x71, 0x92, 0x63, 0x69, 0xf8, 0x8c, 0xac, 0x43, 0x8b, 0xbc, 0xb0,
+ 0x00, 0x0b, 0x61, 0x52, 0x2b, 0x11, 0xaf, 0x93, 0x2a, 0xc6, 0xd2, 0xa8, 0x6b, 0xbe, 0xef, 0x62,
+ 0xab, 0x95, 0x78, 0x95, 0x54, 0x27, 0x16, 0xb2, 0xdb, 0xa7, 0x72, 0xfd, 0x8f, 0xa2, 0xc4, 0x8c,
+ 0x33, 0x97, 0x5a, 0xb3, 0xb6, 0x0c, 0x4c, 0xaa, 0x22, 0x6e, 0x8a, 0x75, 0x67, 0x1e, 0x2c, 0xba,
+ 0x11, 0x24, 0x55, 0xf1, 0xca, 0xd7, 0x8b, 0xc1, 0xce, 0x4a, 0x6a, 0xc3, 0x0f, 0xe8, 0x64, 0x7a,
+ 0xb6, 0x58, 0x6a, 0xb1, 0xbb, 0xf3, 0x60, 0x11, 0x44, 0xf4, 0xcc, 0x9e, 0xc1, 0xd4, 0x24, 0xfa,
+ 0x6d, 0xfc, 0x43, 0x8d, 0x35, 0xc6, 0xd4, 0xe8, 0x7b, 0xf4, 0xca, 0xc4, 0xc2, 0xdf, 0x59, 0xf4,
+ 0xb5, 0xed, 0xf6, 0x43, 0x18, 0x92, 0x1f, 0x79, 0x7c, 0xe0, 0x92, 0xb5, 0x00, 0x19, 0x0f, 0xe1,
+ 0xce, 0x8f, 0x89, 0x8e, 0x85, 0x4c, 0xb2, 0xa2, 0xcc, 0x63, 0xcf, 0x3e, 0xce, 0xe7, 0xc1, 0x62,
+ 0x10, 0xed, 0xff, 0x98, 0xe8, 0xa5, 0xb3, 0x34, 0x83, 0xfb, 0x04, 0xc6, 0x15, 0x96, 0xe4, 0x4b,
+ 0xfc, 0xb8, 0x4f, 0xe1, 0x8f, 0x3c, 0x46, 0x1c, 0xf9, 0xd8, 0x36, 0xa0, 0x12, 0x45, 0x9a, 0xc4,
+ 0x45, 0x99, 0xe1, 0x15, 0x7f, 0x30, 0x0f, 0x16, 0xbd, 0xe3, 0xce, 0xa7, 0x9f, 0xd9, 0x26, 0x90,
+ 0xe1, 0xd4, 0xe2, 0x6c, 0x0e, 0x83, 0xcb, 0xa2, 0x2c, 0xf4, 0x0a, 0x33, 0xfe, 0xd0, 0x1e, 0x78,
+ 0xbc, 0x63, 0x54, 0x8d, 0x51, 0x8b, 0xda, 0xd0, 0x53, 0x21, 0x4b, 0x8c, 0xdf, 0xe2, 0x35, 0xff,
+ 0x3d, 0x09, 0xc0, 0x80, 0x80, 0x6f, 0xf1, 0x9a, 0x3d, 0x83, 0x1d, 0x52, 0xab, 0x47, 0xa4, 0x56,
+ 0x6c, 0x7b, 0x3a, 0x48, 0xa6, 0xc8, 0xce, 0xfe, 0x08, 0x33, 0xfb, 0xaf, 0xe3, 0xa2, 0x4c, 0xe5,
+ 0xba, 0x12, 0x68, 0x90, 0x7f, 0x48, 0xf9, 0x4d, 0x09, 0x3f, 0x6d, 0x61, 0xf6, 0x09, 0x30, 0x3b,
+ 0xed, 0x6e, 0x9b, 0x58, 0xa1, 0xc0, 0x44, 0x23, 0x7f, 0x46, 0x07, 0xcf, 0x92, 0xaa, 0x3a, 0x21,
+ 0x43, 0xe4, 0x70, 0xdb, 0x49, 0xbc, 0x2a, 0x4c, 0xac, 0x30, 0xd1, 0xb2, 0xe4, 0x7f, 0xb0, 0x69,
+ 0x46, 0x60, 0xa1, 0x88, 0x10, 0xf6, 0x05, 0xdc, 0xb3, 0xc5, 0x35, 0x2b, 0x25, 0x8d, 0x11, 0x98,
+ 0xc5, 0x97, 0x52, 0xb9, 0xb2, 0x3d, 0xa6, 0xf3, 0x6d, 0xe9, 0x2f, 0x1a, 0xe3, 0xd7, 0x52, 0x51,
+ 0xf9, 0xbe, 0x84, 0x07, 0x3f, 0x7f, 0xc9, 0xf7, 0x45, 0xf3, 0x39, 0xbd, 0xf8, 0xc1, 0xad, 0x17,
+ 0x7d, 0x77, 0x34, 0xdd, 0x17, 0xed, 0x8b, 0x74, 0xd2, 0x13, 0x6a, 0xd0, 0xa4, 0x45, 0xe9, 0x8c,
+ 0xc7, 0x30, 0xb2, 0x97, 0x1a, 0x2a, 0x47, 0x8a, 0x90, 0x12, 0x04, 0x07, 0x59, 0x5a, 0x84, 0x7f,
+ 0x83, 0xd9, 0x52, 0xe6, 0xaf, 0x48, 0xc8, 0x9a, 0x81, 0xdb, 0xd2, 0xbc, 0xe0, 0x7d, 0x35, 0x2f,
+ 0xd8, 0xd2, 0xbc, 0xf0, 0xbf, 0x3d, 0xd8, 0x5b, 0xca, 0x3c, 0xc2, 0x24, 0x6b, 0x28, 0xf5, 0x0b,
+ 0x12, 0x7b, 0x7b, 0xa3, 0xee, 0xb6, 0x78, 0x7e, 0x05, 0x7b, 0x3e, 0x9a, 0x46, 0x23, 0xee, 0x10,
+ 0x0f, 0x1e, 0x6e, 0xf3, 0x60, 0x2b, 0x85, 0x68, 0xb2, 0xde, 0xca, 0x68, 0x5b, 0x07, 0xbb, 0x54,
+ 0xa9, 0x5f, 0xd0, 0xc1, 0x1d, 0x32, 0xb6, 0x3a, 0x78, 0xa3, 0xcd, 0xbd, 0xf7, 0xd0, 0xe6, 0x6d,
+ 0xa1, 0xdf, 0x9d, 0x77, 0xb7, 0x85, 0xfe, 0x39, 0xec, 0xaf, 0x8b, 0xb2, 0x58, 0xd7, 0xeb, 0x98,
+ 0xae, 0x60, 0xba, 0xb5, 0xfa, 0xc4, 0xa6, 0xa9, 0x37, 0x58, 0x46, 0xd3, 0xfd, 0xf5, 0x29, 0xb0,
+ 0xa2, 0x4c, 0x45, 0x9d, 0xe1, 0x26, 0x9d, 0x07, 0x6e, 0x5c, 0xbd, 0x65, 0x83, 0xd0, 0x07, 0xd0,
+ 0x4b, 0x65, 0x5d, 0x1a, 0x3e, 0xa4, 0xf8, 0xdd, 0xc2, 0xd2, 0xbc, 0x91, 0x23, 0x3a, 0x51, 0x61,
+ 0x8e, 0x57, 0x7c, 0x8f, 0x7a, 0x35, 0x6b, 0x2c, 0xd4, 0xa5, 0x1c, 0xaf, 0x6c, 0xf4, 0x56, 0x83,
+ 0xbc, 0x97, 0x53, 0xcb, 0xa1, 0x45, 0x9c, 0xf9, 0xe9, 0xed, 0x71, 0x9f, 0x51, 0xe4, 0xdb, 0xa3,
+ 0xbe, 0x80, 0x59, 0x13, 0xb6, 0xed, 0x35, 0x7d, 0x23, 0x00, 0x05, 0xbd, 0xe7, 0x71, 0xf7, 0x75,
+ 0xa1, 0xd9, 0x11, 0x1c, 0x34, 0x1e, 0x71, 0x85, 0x2d, 0xf3, 0xf9, 0x3e, 0xed, 0xba, 0x9f, 0x38,
+ 0xb7, 0xbf, 0xa2, 0xda, 0x50, 0xa4, 0x66, 0x6b, 0x92, 0xcd, 0x11, 0x6d, 0x3b, 0xf2, 0xd8, 0x37,
+ 0x56, 0x29, 0x1f, 0xc3, 0xa8, 0x3d, 0x5d, 0x08, 0x3e, 0x26, 0x0f, 0x68, 0x0e, 0x16, 0xc2, 0x8e,
+ 0x4d, 0x9a, 0xa4, 0x2b, 0x8c, 0x0b, 0x83, 0x2a, 0x31, 0x52, 0xf1, 0x09, 0xf9, 0x4c, 0x08, 0x3d,
+ 0xf5, 0xa0, 0xad, 0x44, 0x59, 0xaf, 0x63, 0xbd, 0x4a, 0x54, 0xa6, 0x39, 0xa3, 0x88, 0x86, 0x65,
+ 0xbd, 0x3e, 0x27, 0x20, 0xfc, 0x57, 0x40, 0xdf, 0x83, 0x8e, 0xdb, 0xee, 0xb2, 0x61, 0x1f, 0x43,
+ 0x57, 0xc8, 0x9c, 0x07, 0xc4, 0xcd, 0xbb, 0x1b, 0x2c, 0xb9, 0xf9, 0xc6, 0x88, 0xac, 0xc7, 0x06,
+ 0xa3, 0x3a, 0xef, 0xc1, 0xa8, 0x10, 0x26, 0x22, 0xd1, 0x26, 0x6e, 0xf9, 0xe9, 0xc8, 0x3b, 0xb2,
+ 0xe0, 0x89, 0xe3, 0x68, 0xf8, 0x9f, 0x80, 0x46, 0xed, 0x8d, 0xfd, 0xac, 0x89, 0x30, 0x95, 0xea,
+ 0xf6, 0x4c, 0x05, 0xb7, 0x86, 0xf3, 0xd6, 0x3c, 0x74, 0x5c, 0x7e, 0xff, 0x7f, 0x1e, 0xba, 0x64,
+ 0x6c, 0xe7, 0xa1, 0xe5, 0xd9, 0xce, 0x26, 0xcf, 0x1e, 0x01, 0x18, 0x69, 0x12, 0xe1, 0xee, 0xe1,
+ 0x9e, 0x9b, 0x2f, 0x42, 0xe8, 0x12, 0xe6, 0xd0, 0x57, 0x14, 0x97, 0xe6, 0xbb, 0x6e, 0x3b, 0xbf,
+ 0x0c, 0xff, 0xdd, 0xa1, 0x4a, 0xfa, 0xd0, 0x7f, 0x8b, 0x4c, 0xfc, 0x7c, 0xc4, 0x7b, 0xbf, 0x36,
+ 0xe2, 0xbd, 0xcd, 0x11, 0x9f, 0xd9, 0xcf, 0x11, 0x51, 0x1b, 0xbb, 0xf7, 0x4a, 0xd6, 0x4a, 0x53,
+ 0x0a, 0x93, 0xe3, 0xe0, 0xb3, 0x68, 0x7a, 0x63, 0xfa, 0xc6, 0x5a, 0xec, 0x25, 0xe3, 0x07, 0xa7,
+ 0xd1, 0x23, 0x97, 0xd4, 0x20, 0x9a, 0x7a, 0xdc, 0x8b, 0x0e, 0x7d, 0xa0, 0xd4, 0x36, 0xb1, 0x56,
+ 0xb8, 0xdc, 0xa8, 0x8f, 0x09, 0x6c, 0xa4, 0xe9, 0x29, 0x4c, 0x9a, 0x7d, 0x62, 0x59, 0x8a, 0x6b,
+ 0x3f, 0xe2, 0xe3, 0x06, 0x3c, 0x2b, 0xc5, 0x75, 0x78, 0x45, 0x2a, 0xed, 0xab, 0xe4, 0x09, 0x77,
+ 0x04, 0x3d, 0xda, 0xc8, 0x53, 0xee, 0xfe, 0x36, 0x8d, 0x36, 0xc8, 0x10, 0x39, 0x3f, 0xf6, 0x05,
+ 0xf4, 0x75, 0xbd, 0x5e, 0x27, 0xea, 0xda, 0x33, 0xef, 0x57, 0x5e, 0x69, 0x3c, 0xbf, 0xea, 0xfd,
+ 0xdd, 0x92, 0xf6, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x70, 0xd9, 0xa0, 0xf8, 0x48, 0x0d, 0x00,
+ 0x00,
+}
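
Because this is proto2-generated code, the getters return the declared field defaults whenever a field is unset, which matters for RequestLog (module_id defaults to "default", replica_index to -1, finished to true). A minimal sketch, assuming the vendored import path google.golang.org/appengine/internal/log:

package main

import (
    "fmt"

    logpb "google.golang.org/appengine/internal/log"
)

func main() {
    var rl logpb.RequestLog // zero value: no optional fields set
    fmt.Println(rl.GetModuleId())     // "default"
    fmt.Println(rl.GetReplicaIndex()) // -1
    fmt.Println(rl.GetFinished())     // true
}
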
diff --git a/vendor/google.golang.org/appengine/internal/log/log_service.proto b/vendor/google.golang.org/appengine/internal/log/log_service.proto
new file mode 100644
index 0000000..8981dc4
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/log/log_service.proto
@@ -0,0 +1,150 @@
+syntax = "proto2";
+option go_package = "log";
+
+package appengine;
+
+message LogServiceError {
+ enum ErrorCode {
+ OK = 0;
+ INVALID_REQUEST = 1;
+ STORAGE_ERROR = 2;
+ }
+}
+
+message UserAppLogLine {
+ required int64 timestamp_usec = 1;
+ required int64 level = 2;
+ required string message = 3;
+}
+
+message UserAppLogGroup {
+ repeated UserAppLogLine log_line = 2;
+}
+
+message FlushRequest {
+ optional bytes logs = 1;
+}
+
+message SetStatusRequest {
+ required string status = 1;
+}
+
+
+message LogOffset {
+ optional bytes request_id = 1;
+}
+
+message LogLine {
+ required int64 time = 1;
+ required int32 level = 2;
+ required string log_message = 3;
+}
+
+message RequestLog {
+ required string app_id = 1;
+ optional string module_id = 37 [default="default"];
+ required string version_id = 2;
+ required bytes request_id = 3;
+ optional LogOffset offset = 35;
+ required string ip = 4;
+ optional string nickname = 5;
+ required int64 start_time = 6;
+ required int64 end_time = 7;
+ required int64 latency = 8;
+ required int64 mcycles = 9;
+ required string method = 10;
+ required string resource = 11;
+ required string http_version = 12;
+ required int32 status = 13;
+ required int64 response_size = 14;
+ optional string referrer = 15;
+ optional string user_agent = 16;
+ required string url_map_entry = 17;
+ required string combined = 18;
+ optional int64 api_mcycles = 19;
+ optional string host = 20;
+ optional double cost = 21;
+
+ optional string task_queue_name = 22;
+ optional string task_name = 23;
+
+ optional bool was_loading_request = 24;
+ optional int64 pending_time = 25;
+ optional int32 replica_index = 26 [default = -1];
+ optional bool finished = 27 [default = true];
+ optional bytes clone_key = 28;
+
+ repeated LogLine line = 29;
+
+ optional bool lines_incomplete = 36;
+ optional bytes app_engine_release = 38;
+
+ optional int32 exit_reason = 30;
+ optional bool was_throttled_for_time = 31;
+ optional bool was_throttled_for_requests = 32;
+ optional int64 throttled_time = 33;
+
+ optional bytes server_name = 34;
+}
+
+message LogModuleVersion {
+ optional string module_id = 1 [default="default"];
+ optional string version_id = 2;
+}
+
+message LogReadRequest {
+ required string app_id = 1;
+ repeated string version_id = 2;
+ repeated LogModuleVersion module_version = 19;
+
+ optional int64 start_time = 3;
+ optional int64 end_time = 4;
+ optional LogOffset offset = 5;
+ repeated bytes request_id = 6;
+
+ optional int32 minimum_log_level = 7;
+ optional bool include_incomplete = 8;
+ optional int64 count = 9;
+
+ optional string combined_log_regex = 14;
+ optional string host_regex = 15;
+ optional int32 replica_index = 16;
+
+ optional bool include_app_logs = 10;
+ optional int32 app_logs_per_request = 17;
+ optional bool include_host = 11;
+ optional bool include_all = 12;
+ optional bool cache_iterator = 13;
+ optional int32 num_shards = 18;
+}
+
+message LogReadResponse {
+ repeated RequestLog log = 1;
+ optional LogOffset offset = 2;
+ optional int64 last_end_time = 3;
+}
+
+message LogUsageRecord {
+ optional string version_id = 1;
+ optional int32 start_time = 2;
+ optional int32 end_time = 3;
+ optional int64 count = 4;
+ optional int64 total_size = 5;
+ optional int32 records = 6;
+}
+
+message LogUsageRequest {
+ required string app_id = 1;
+ repeated string version_id = 2;
+ optional int32 start_time = 3;
+ optional int32 end_time = 4;
+ optional uint32 resolution_hours = 5 [default = 1];
+ optional bool combine_versions = 6;
+ optional int32 usage_version = 7;
+ optional bool versions_only = 8;
+}
+
+message LogUsageResponse {
+ repeated LogUsageRecord usage = 1;
+ optional LogUsageRecord summary = 2;
+}
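
Reviewer note: the messages above define the log service wire protocol; the flush path batches UserAppLogLine entries into a UserAppLogGroup, serializes it, and ships the bytes in FlushRequest.logs. Below is a rough sketch of that packaging only, assuming the generated types from log_service.pb.go and the internal Call helper; the "logservice"/"Flush" service and method names are an assumption here, not something this diff establishes.

    package internal

    // Hypothetical helper, not part of the vendored file: shows how the
    // messages defined in log_service.proto appear to fit together when
    // flushing buffered app log lines.
    import (
    	"github.com/golang/protobuf/proto"
    	netcontext "golang.org/x/net/context"

    	basepb "google.golang.org/appengine/internal/base"
    	logpb "google.golang.org/appengine/internal/log"
    )

    func flushAppLogs(ctx netcontext.Context, lines []*logpb.UserAppLogLine) error {
    	// Batch the lines into a group and serialize it; FlushRequest carries
    	// the serialized group as opaque bytes.
    	buf, err := proto.Marshal(&logpb.UserAppLogGroup{LogLine: lines})
    	if err != nil {
    		return err
    	}
    	// Treat the "logservice"/"Flush" names as an assumption of this sketch.
    	return Call(ctx, "logservice", "Flush", &logpb.FlushRequest{Logs: buf}, &basepb.VoidProto{})
    }
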
diff --git a/vendor/google.golang.org/appengine/internal/main.go b/vendor/google.golang.org/appengine/internal/main.go
new file mode 100644
index 0000000..1e76531
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/main.go
@@ -0,0 +1,16 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build appengine
+
+package internal
+
+import (
+ "appengine_internal"
+)
+
+func Main() {
+ MainPath = ""
+ appengine_internal.Main()
+}
diff --git a/vendor/google.golang.org/appengine/internal/main_common.go b/vendor/google.golang.org/appengine/internal/main_common.go
new file mode 100644
index 0000000..357dce4
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/main_common.go
@@ -0,0 +1,7 @@
+package internal
+
+// MainPath stores the file path of the main package. On App Engine Standard
+// using Go version 1.9 and below, this will be unset. On App Engine Flex and
+// App Engine Standard second-gen (Go 1.11 and above), this will be the
+// filepath to package main.
+var MainPath string
diff --git a/vendor/google.golang.org/appengine/internal/main_vm.go b/vendor/google.golang.org/appengine/internal/main_vm.go
new file mode 100644
index 0000000..ddb79a3
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/main_vm.go
@@ -0,0 +1,69 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// +build !appengine
+
+package internal
+
+import (
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+func Main() {
+ MainPath = filepath.Dir(findMainPath())
+ installHealthChecker(http.DefaultServeMux)
+
+ port := "8080"
+ if s := os.Getenv("PORT"); s != "" {
+ port = s
+ }
+
+ host := ""
+ if IsDevAppServer() {
+ host = "127.0.0.1"
+ }
+ if err := http.ListenAndServe(host+":"+port, http.HandlerFunc(handleHTTP)); err != nil {
+ log.Fatalf("http.ListenAndServe: %v", err)
+ }
+}
+
+// Find the path to package main by looking at the root Caller.
+func findMainPath() string {
+ pc := make([]uintptr, 100)
+ n := runtime.Callers(2, pc)
+ frames := runtime.CallersFrames(pc[:n])
+ for {
+ frame, more := frames.Next()
+ // Tests won't have package main, instead they have testing.tRunner
+ if frame.Function == "main.main" || frame.Function == "testing.tRunner" {
+ return frame.File
+ }
+ if !more {
+ break
+ }
+ }
+ return ""
+}
+
+func installHealthChecker(mux *http.ServeMux) {
+ // If no health check handler has been installed by this point, add a trivial one.
+ const healthPath = "/_ah/health"
+ hreq := &http.Request{
+ Method: "GET",
+ URL: &url.URL{
+ Path: healthPath,
+ },
+ }
+ if _, pat := mux.Handler(hreq); pat != healthPath {
+ mux.HandleFunc(healthPath, func(w http.ResponseWriter, r *http.Request) {
+ io.WriteString(w, "ok")
+ })
+ }
+}
diff --git a/vendor/google.golang.org/appengine/internal/metadata.go b/vendor/google.golang.org/appengine/internal/metadata.go
new file mode 100644
index 0000000..c4ba63b
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/metadata.go
@@ -0,0 +1,60 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file has code for accessing metadata.
+//
+// References:
+// https://cloud.google.com/compute/docs/metadata
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+)
+
+const (
+ metadataHost = "metadata"
+ metadataPath = "/computeMetadata/v1/"
+)
+
+var (
+ metadataRequestHeaders = http.Header{
+ "Metadata-Flavor": []string{"Google"},
+ }
+)
+
+// TODO(dsymonds): Do we need to support default values, like Python?
+func mustGetMetadata(key string) []byte {
+ b, err := getMetadata(key)
+ if err != nil {
+ panic(fmt.Sprintf("Metadata fetch failed for '%s': %v", key, err))
+ }
+ return b
+}
+
+func getMetadata(key string) ([]byte, error) {
+ // TODO(dsymonds): May need to use url.Parse to support keys with query args.
+ req := &http.Request{
+ Method: "GET",
+ URL: &url.URL{
+ Scheme: "http",
+ Host: metadataHost,
+ Path: metadataPath + key,
+ },
+ Header: metadataRequestHeaders,
+ Host: metadataHost,
+ }
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("metadata server returned HTTP %d", resp.StatusCode)
+ }
+ return ioutil.ReadAll(resp.Body)
+}
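
Reviewer note: getMetadata above issues a plain GET against the internal metadata host and sets the Metadata-Flavor: Google header, which the metadata server requires. The same request, written as a standalone program with the ordinary net/http helpers; the project/project-id key is only an example path.

    package main

    // Standalone sketch of the metadata-fetch pattern used in metadata.go.
    import (
    	"fmt"
    	"io/ioutil"
    	"log"
    	"net/http"
    )

    func main() {
    	req, err := http.NewRequest("GET", "http://metadata/computeMetadata/v1/project/project-id", nil)
    	if err != nil {
    		log.Fatal(err)
    	}
    	req.Header.Set("Metadata-Flavor", "Google") // required by the metadata server
    	resp, err := http.DefaultClient.Do(req)
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode != http.StatusOK {
    		log.Fatalf("metadata server returned HTTP %d", resp.StatusCode)
    	}
    	b, err := ioutil.ReadAll(resp.Body)
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(string(b))
    }
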
diff --git a/vendor/google.golang.org/appengine/internal/net.go b/vendor/google.golang.org/appengine/internal/net.go
new file mode 100644
index 0000000..3b94cf0
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/net.go
@@ -0,0 +1,56 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file implements a network dialer that limits the number of concurrent connections.
+// It is only used for API calls.
+
+import (
+ "log"
+ "net"
+ "runtime"
+ "sync"
+ "time"
+)
+
+var limitSem = make(chan int, 100) // TODO(dsymonds): Use environment variable.
+
+func limitRelease() {
+ // non-blocking
+ select {
+ case <-limitSem:
+ default:
+ // This should not normally happen.
+ log.Print("appengine: unbalanced limitSem release!")
+ }
+}
+
+func limitDial(network, addr string) (net.Conn, error) {
+ limitSem <- 1
+
+ // Dial with a timeout in case the API host is MIA.
+ // The connection should normally be very fast.
+ conn, err := net.DialTimeout(network, addr, 500*time.Millisecond)
+ if err != nil {
+ limitRelease()
+ return nil, err
+ }
+ lc := &limitConn{Conn: conn}
+ runtime.SetFinalizer(lc, (*limitConn).Close) // shouldn't usually be required
+ return lc, nil
+}
+
+type limitConn struct {
+ close sync.Once
+ net.Conn
+}
+
+func (lc *limitConn) Close() error {
+ defer lc.close.Do(func() {
+ limitRelease()
+ runtime.SetFinalizer(lc, nil)
+ })
+ return lc.Conn.Close()
+}
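
Reviewer note: the dialer above caps concurrent API connections with a buffered channel used as a counting semaphore: limitDial acquires a slot, and limitConn releases it exactly once on Close, with a finalizer as a backstop. A minimal, generic sketch of that pattern, independent of this package; the limit of 3 and the target address are arbitrary.

    package main

    // Generic counting-semaphore dialer, mirroring the shape of net.go above.
    import (
    	"fmt"
    	"net"
    	"sync"
    	"time"
    )

    type limitedDialer struct {
    	sem chan struct{}
    }

    func (d *limitedDialer) Dial(network, addr string) (net.Conn, error) {
    	d.sem <- struct{}{} // acquire a slot; blocks once the limit is reached
    	conn, err := net.DialTimeout(network, addr, 500*time.Millisecond)
    	if err != nil {
    		<-d.sem // release the slot on failure
    		return nil, err
    	}
    	return &countedConn{Conn: conn, release: func() { <-d.sem }}, nil
    }

    type countedConn struct {
    	net.Conn
    	once    sync.Once
    	release func()
    }

    func (c *countedConn) Close() error {
    	c.once.Do(c.release) // release exactly once, even if Close is called twice
    	return c.Conn.Close()
    }

    func main() {
    	d := &limitedDialer{sem: make(chan struct{}, 3)}
    	conn, err := d.Dial("tcp", "example.com:80")
    	if err != nil {
    		fmt.Println("dial:", err)
    		return
    	}
    	defer conn.Close()
    	fmt.Println("connected to", conn.RemoteAddr())
    }
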
diff --git a/vendor/google.golang.org/appengine/internal/regen.sh b/vendor/google.golang.org/appengine/internal/regen.sh
new file mode 100755
index 0000000..2fdb546
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/regen.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -e
+#
+# This script rebuilds the generated code for the protocol buffers.
+# To run this you will need protoc and goprotobuf installed;
+# see https://github.com/golang/protobuf for instructions.
+
+PKG=google.golang.org/appengine
+
+function die() {
+ echo 1>&2 $*
+ exit 1
+}
+
+# Sanity check that the right tools are accessible.
+for tool in go protoc protoc-gen-go; do
+ q=$(which $tool) || die "didn't find $tool"
+ echo 1>&2 "$tool: $q"
+done
+
+echo -n 1>&2 "finding package dir... "
+pkgdir=$(go list -f '{{.Dir}}' $PKG)
+echo 1>&2 $pkgdir
+base=$(echo $pkgdir | sed "s,/$PKG\$,,")
+echo 1>&2 "base: $base"
+cd $base
+
+# Run protoc once per package.
+for dir in $(find $PKG/internal -name '*.proto' | xargs dirname | sort | uniq); do
+ echo 1>&2 "* $dir"
+ protoc --go_out=. $dir/*.proto
+done
+
+for f in $(find $PKG/internal -name '*.pb.go'); do
+ # Remove proto.RegisterEnum calls.
+ # These cause duplicate registration panics when these packages
+ # are used on classic App Engine. proto.RegisterEnum only affects
+ # parsing the text format; we don't care about that.
+ # https://code.google.com/p/googleappengine/issues/detail?id=11670#c17
+ sed -i '/proto.RegisterEnum/d' $f
+done
diff --git a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go
new file mode 100644
index 0000000..8d782a3
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go
@@ -0,0 +1,361 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/remote_api/remote_api.proto
+
+package remote_api
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type RpcError_ErrorCode int32
+
+const (
+ RpcError_UNKNOWN RpcError_ErrorCode = 0
+ RpcError_CALL_NOT_FOUND RpcError_ErrorCode = 1
+ RpcError_PARSE_ERROR RpcError_ErrorCode = 2
+ RpcError_SECURITY_VIOLATION RpcError_ErrorCode = 3
+ RpcError_OVER_QUOTA RpcError_ErrorCode = 4
+ RpcError_REQUEST_TOO_LARGE RpcError_ErrorCode = 5
+ RpcError_CAPABILITY_DISABLED RpcError_ErrorCode = 6
+ RpcError_FEATURE_DISABLED RpcError_ErrorCode = 7
+ RpcError_BAD_REQUEST RpcError_ErrorCode = 8
+ RpcError_RESPONSE_TOO_LARGE RpcError_ErrorCode = 9
+ RpcError_CANCELLED RpcError_ErrorCode = 10
+ RpcError_REPLAY_ERROR RpcError_ErrorCode = 11
+ RpcError_DEADLINE_EXCEEDED RpcError_ErrorCode = 12
+)
+
+var RpcError_ErrorCode_name = map[int32]string{
+ 0: "UNKNOWN",
+ 1: "CALL_NOT_FOUND",
+ 2: "PARSE_ERROR",
+ 3: "SECURITY_VIOLATION",
+ 4: "OVER_QUOTA",
+ 5: "REQUEST_TOO_LARGE",
+ 6: "CAPABILITY_DISABLED",
+ 7: "FEATURE_DISABLED",
+ 8: "BAD_REQUEST",
+ 9: "RESPONSE_TOO_LARGE",
+ 10: "CANCELLED",
+ 11: "REPLAY_ERROR",
+ 12: "DEADLINE_EXCEEDED",
+}
+var RpcError_ErrorCode_value = map[string]int32{
+ "UNKNOWN": 0,
+ "CALL_NOT_FOUND": 1,
+ "PARSE_ERROR": 2,
+ "SECURITY_VIOLATION": 3,
+ "OVER_QUOTA": 4,
+ "REQUEST_TOO_LARGE": 5,
+ "CAPABILITY_DISABLED": 6,
+ "FEATURE_DISABLED": 7,
+ "BAD_REQUEST": 8,
+ "RESPONSE_TOO_LARGE": 9,
+ "CANCELLED": 10,
+ "REPLAY_ERROR": 11,
+ "DEADLINE_EXCEEDED": 12,
+}
+
+func (x RpcError_ErrorCode) Enum() *RpcError_ErrorCode {
+ p := new(RpcError_ErrorCode)
+ *p = x
+ return p
+}
+func (x RpcError_ErrorCode) String() string {
+ return proto.EnumName(RpcError_ErrorCode_name, int32(x))
+}
+func (x *RpcError_ErrorCode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(RpcError_ErrorCode_value, data, "RpcError_ErrorCode")
+ if err != nil {
+ return err
+ }
+ *x = RpcError_ErrorCode(value)
+ return nil
+}
+func (RpcError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_remote_api_1978114ec33a273d, []int{2, 0}
+}
+
+type Request struct {
+ ServiceName *string `protobuf:"bytes,2,req,name=service_name,json=serviceName" json:"service_name,omitempty"`
+ Method *string `protobuf:"bytes,3,req,name=method" json:"method,omitempty"`
+ Request []byte `protobuf:"bytes,4,req,name=request" json:"request,omitempty"`
+ RequestId *string `protobuf:"bytes,5,opt,name=request_id,json=requestId" json:"request_id,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Request) Reset() { *m = Request{} }
+func (m *Request) String() string { return proto.CompactTextString(m) }
+func (*Request) ProtoMessage() {}
+func (*Request) Descriptor() ([]byte, []int) {
+ return fileDescriptor_remote_api_1978114ec33a273d, []int{0}
+}
+func (m *Request) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Request.Unmarshal(m, b)
+}
+func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Request.Marshal(b, m, deterministic)
+}
+func (dst *Request) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Request.Merge(dst, src)
+}
+func (m *Request) XXX_Size() int {
+ return xxx_messageInfo_Request.Size(m)
+}
+func (m *Request) XXX_DiscardUnknown() {
+ xxx_messageInfo_Request.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Request proto.InternalMessageInfo
+
+func (m *Request) GetServiceName() string {
+ if m != nil && m.ServiceName != nil {
+ return *m.ServiceName
+ }
+ return ""
+}
+
+func (m *Request) GetMethod() string {
+ if m != nil && m.Method != nil {
+ return *m.Method
+ }
+ return ""
+}
+
+func (m *Request) GetRequest() []byte {
+ if m != nil {
+ return m.Request
+ }
+ return nil
+}
+
+func (m *Request) GetRequestId() string {
+ if m != nil && m.RequestId != nil {
+ return *m.RequestId
+ }
+ return ""
+}
+
+type ApplicationError struct {
+ Code *int32 `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
+ Detail *string `protobuf:"bytes,2,req,name=detail" json:"detail,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ApplicationError) Reset() { *m = ApplicationError{} }
+func (m *ApplicationError) String() string { return proto.CompactTextString(m) }
+func (*ApplicationError) ProtoMessage() {}
+func (*ApplicationError) Descriptor() ([]byte, []int) {
+ return fileDescriptor_remote_api_1978114ec33a273d, []int{1}
+}
+func (m *ApplicationError) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_ApplicationError.Unmarshal(m, b)
+}
+func (m *ApplicationError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_ApplicationError.Marshal(b, m, deterministic)
+}
+func (dst *ApplicationError) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_ApplicationError.Merge(dst, src)
+}
+func (m *ApplicationError) XXX_Size() int {
+ return xxx_messageInfo_ApplicationError.Size(m)
+}
+func (m *ApplicationError) XXX_DiscardUnknown() {
+ xxx_messageInfo_ApplicationError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ApplicationError proto.InternalMessageInfo
+
+func (m *ApplicationError) GetCode() int32 {
+ if m != nil && m.Code != nil {
+ return *m.Code
+ }
+ return 0
+}
+
+func (m *ApplicationError) GetDetail() string {
+ if m != nil && m.Detail != nil {
+ return *m.Detail
+ }
+ return ""
+}
+
+type RpcError struct {
+ Code *int32 `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
+ Detail *string `protobuf:"bytes,2,opt,name=detail" json:"detail,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *RpcError) Reset() { *m = RpcError{} }
+func (m *RpcError) String() string { return proto.CompactTextString(m) }
+func (*RpcError) ProtoMessage() {}
+func (*RpcError) Descriptor() ([]byte, []int) {
+ return fileDescriptor_remote_api_1978114ec33a273d, []int{2}
+}
+func (m *RpcError) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_RpcError.Unmarshal(m, b)
+}
+func (m *RpcError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_RpcError.Marshal(b, m, deterministic)
+}
+func (dst *RpcError) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_RpcError.Merge(dst, src)
+}
+func (m *RpcError) XXX_Size() int {
+ return xxx_messageInfo_RpcError.Size(m)
+}
+func (m *RpcError) XXX_DiscardUnknown() {
+ xxx_messageInfo_RpcError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_RpcError proto.InternalMessageInfo
+
+func (m *RpcError) GetCode() int32 {
+ if m != nil && m.Code != nil {
+ return *m.Code
+ }
+ return 0
+}
+
+func (m *RpcError) GetDetail() string {
+ if m != nil && m.Detail != nil {
+ return *m.Detail
+ }
+ return ""
+}
+
+type Response struct {
+ Response []byte `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
+ Exception []byte `protobuf:"bytes,2,opt,name=exception" json:"exception,omitempty"`
+ ApplicationError *ApplicationError `protobuf:"bytes,3,opt,name=application_error,json=applicationError" json:"application_error,omitempty"`
+ JavaException []byte `protobuf:"bytes,4,opt,name=java_exception,json=javaException" json:"java_exception,omitempty"`
+ RpcError *RpcError `protobuf:"bytes,5,opt,name=rpc_error,json=rpcError" json:"rpc_error,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *Response) Reset() { *m = Response{} }
+func (m *Response) String() string { return proto.CompactTextString(m) }
+func (*Response) ProtoMessage() {}
+func (*Response) Descriptor() ([]byte, []int) {
+ return fileDescriptor_remote_api_1978114ec33a273d, []int{3}
+}
+func (m *Response) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_Response.Unmarshal(m, b)
+}
+func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_Response.Marshal(b, m, deterministic)
+}
+func (dst *Response) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_Response.Merge(dst, src)
+}
+func (m *Response) XXX_Size() int {
+ return xxx_messageInfo_Response.Size(m)
+}
+func (m *Response) XXX_DiscardUnknown() {
+ xxx_messageInfo_Response.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Response proto.InternalMessageInfo
+
+func (m *Response) GetResponse() []byte {
+ if m != nil {
+ return m.Response
+ }
+ return nil
+}
+
+func (m *Response) GetException() []byte {
+ if m != nil {
+ return m.Exception
+ }
+ return nil
+}
+
+func (m *Response) GetApplicationError() *ApplicationError {
+ if m != nil {
+ return m.ApplicationError
+ }
+ return nil
+}
+
+func (m *Response) GetJavaException() []byte {
+ if m != nil {
+ return m.JavaException
+ }
+ return nil
+}
+
+func (m *Response) GetRpcError() *RpcError {
+ if m != nil {
+ return m.RpcError
+ }
+ return nil
+}
+
+func init() {
+ proto.RegisterType((*Request)(nil), "remote_api.Request")
+ proto.RegisterType((*ApplicationError)(nil), "remote_api.ApplicationError")
+ proto.RegisterType((*RpcError)(nil), "remote_api.RpcError")
+ proto.RegisterType((*Response)(nil), "remote_api.Response")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/remote_api/remote_api.proto", fileDescriptor_remote_api_1978114ec33a273d)
+}
+
+var fileDescriptor_remote_api_1978114ec33a273d = []byte{
+ // 531 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x51, 0x6e, 0xd3, 0x40,
+ 0x10, 0x86, 0xb1, 0x9b, 0x34, 0xf1, 0xc4, 0x2d, 0xdb, 0xa5, 0x14, 0x0b, 0x15, 0x29, 0x44, 0x42,
+ 0xca, 0x53, 0x2a, 0x38, 0x00, 0x62, 0x63, 0x6f, 0x91, 0x85, 0x65, 0xa7, 0x6b, 0xbb, 0x50, 0x5e,
+ 0x56, 0x2b, 0x67, 0x65, 0x8c, 0x12, 0xaf, 0xd9, 0x98, 0x8a, 0x17, 0x6e, 0xc0, 0xb5, 0x38, 0x0c,
+ 0xb7, 0x40, 0x36, 0x6e, 0x63, 0xf5, 0x89, 0xb7, 0x7f, 0x7e, 0x7b, 0xe6, 0x1b, 0xcd, 0xcc, 0xc2,
+ 0xbb, 0x5c, 0xa9, 0x7c, 0x23, 0x17, 0xb9, 0xda, 0x88, 0x32, 0x5f, 0x28, 0x9d, 0x5f, 0x88, 0xaa,
+ 0x92, 0x65, 0x5e, 0x94, 0xf2, 0xa2, 0x28, 0x6b, 0xa9, 0x4b, 0xb1, 0xb9, 0xd0, 0x72, 0xab, 0x6a,
+ 0xc9, 0x45, 0x55, 0xf4, 0xe4, 0xa2, 0xd2, 0xaa, 0x56, 0x18, 0xf6, 0xce, 0xec, 0x27, 0x8c, 0x98,
+ 0xfc, 0xf6, 0x5d, 0xee, 0x6a, 0xfc, 0x12, 0xec, 0x9d, 0xd4, 0xb7, 0x45, 0x26, 0x79, 0x29, 0xb6,
+ 0xd2, 0x31, 0xa7, 0xe6, 0xdc, 0x62, 0x93, 0xce, 0x0b, 0xc5, 0x56, 0xe2, 0x33, 0x38, 0xdc, 0xca,
+ 0xfa, 0x8b, 0x5a, 0x3b, 0x07, 0xed, 0xc7, 0x2e, 0xc2, 0x0e, 0x8c, 0xf4, 0xbf, 0x2a, 0xce, 0x60,
+ 0x6a, 0xce, 0x6d, 0x76, 0x17, 0xe2, 0x17, 0x00, 0x9d, 0xe4, 0xc5, 0xda, 0x19, 0x4e, 0x8d, 0xb9,
+ 0xc5, 0xac, 0xce, 0xf1, 0xd7, 0xb3, 0xb7, 0x80, 0x48, 0x55, 0x6d, 0x8a, 0x4c, 0xd4, 0x85, 0x2a,
+ 0xa9, 0xd6, 0x4a, 0x63, 0x0c, 0x83, 0x4c, 0xad, 0xa5, 0x63, 0x4c, 0xcd, 0xf9, 0x90, 0xb5, 0xba,
+ 0x01, 0xaf, 0x65, 0x2d, 0x8a, 0x4d, 0xd7, 0x55, 0x17, 0xcd, 0x7e, 0x9b, 0x30, 0x66, 0x55, 0xf6,
+ 0x7f, 0x89, 0x46, 0x2f, 0xf1, 0x97, 0x09, 0x56, 0x9b, 0xe5, 0x36, 0x7f, 0x4d, 0x60, 0x94, 0x86,
+ 0x1f, 0xc2, 0xe8, 0x63, 0x88, 0x1e, 0x61, 0x0c, 0xc7, 0x2e, 0x09, 0x02, 0x1e, 0x46, 0x09, 0xbf,
+ 0x8c, 0xd2, 0xd0, 0x43, 0x06, 0x7e, 0x0c, 0x93, 0x15, 0x61, 0x31, 0xe5, 0x94, 0xb1, 0x88, 0x21,
+ 0x13, 0x9f, 0x01, 0x8e, 0xa9, 0x9b, 0x32, 0x3f, 0xb9, 0xe1, 0xd7, 0x7e, 0x14, 0x90, 0xc4, 0x8f,
+ 0x42, 0x74, 0x80, 0x8f, 0x01, 0xa2, 0x6b, 0xca, 0xf8, 0x55, 0x1a, 0x25, 0x04, 0x0d, 0xf0, 0x53,
+ 0x38, 0x61, 0xf4, 0x2a, 0xa5, 0x71, 0xc2, 0x93, 0x28, 0xe2, 0x01, 0x61, 0xef, 0x29, 0x1a, 0xe2,
+ 0x67, 0xf0, 0xc4, 0x25, 0x2b, 0xb2, 0xf4, 0x83, 0xa6, 0x80, 0xe7, 0xc7, 0x64, 0x19, 0x50, 0x0f,
+ 0x1d, 0xe2, 0x53, 0x40, 0x97, 0x94, 0x24, 0x29, 0xa3, 0x7b, 0x77, 0xd4, 0xe0, 0x97, 0xc4, 0xe3,
+ 0x5d, 0x25, 0x34, 0x6e, 0xf0, 0x8c, 0xc6, 0xab, 0x28, 0x8c, 0x69, 0xaf, 0xae, 0x85, 0x8f, 0xc0,
+ 0x72, 0x49, 0xe8, 0xd2, 0xa0, 0xc9, 0x03, 0x8c, 0xc0, 0x66, 0x74, 0x15, 0x90, 0x9b, 0xae, 0xef,
+ 0x49, 0xd3, 0x8f, 0x47, 0x89, 0x17, 0xf8, 0x21, 0xe5, 0xf4, 0x93, 0x4b, 0xa9, 0x47, 0x3d, 0x64,
+ 0xcf, 0xfe, 0x18, 0x30, 0x66, 0x72, 0x57, 0xa9, 0x72, 0x27, 0xf1, 0x73, 0x18, 0xeb, 0x4e, 0x3b,
+ 0xc6, 0xd4, 0x98, 0xdb, 0xec, 0x3e, 0xc6, 0xe7, 0x60, 0xc9, 0x1f, 0x99, 0xac, 0x9a, 0x75, 0xb5,
+ 0x23, 0xb5, 0xd9, 0xde, 0xc0, 0x3e, 0x9c, 0x88, 0xfd, 0x3a, 0xb9, 0x6c, 0x06, 0xec, 0x1c, 0x4c,
+ 0x8d, 0xf9, 0xe4, 0xcd, 0xf9, 0xa2, 0x77, 0x87, 0x0f, 0x77, 0xce, 0x90, 0x78, 0x78, 0x05, 0xaf,
+ 0xe0, 0xf8, 0xab, 0xb8, 0x15, 0x7c, 0x4f, 0x1b, 0xb4, 0xb4, 0xa3, 0xc6, 0xa5, 0xf7, 0xc4, 0xd7,
+ 0x60, 0xe9, 0x2a, 0xeb, 0x48, 0xc3, 0x96, 0x74, 0xda, 0x27, 0xdd, 0x1d, 0x07, 0x1b, 0xeb, 0x4e,
+ 0x2d, 0xed, 0xcf, 0xbd, 0x07, 0xf0, 0x37, 0x00, 0x00, 0xff, 0xff, 0x38, 0xd1, 0x0f, 0x22, 0x4f,
+ 0x03, 0x00, 0x00,
+}
diff --git a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto
new file mode 100644
index 0000000..f21763a
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto
@@ -0,0 +1,44 @@
+syntax = "proto2";
+option go_package = "remote_api";
+
+package remote_api;
+
+message Request {
+ required string service_name = 2;
+ required string method = 3;
+ required bytes request = 4;
+ optional string request_id = 5;
+}
+
+message ApplicationError {
+ required int32 code = 1;
+ required string detail = 2;
+}
+
+message RpcError {
+ enum ErrorCode {
+ UNKNOWN = 0;
+ CALL_NOT_FOUND = 1;
+ PARSE_ERROR = 2;
+ SECURITY_VIOLATION = 3;
+ OVER_QUOTA = 4;
+ REQUEST_TOO_LARGE = 5;
+ CAPABILITY_DISABLED = 6;
+ FEATURE_DISABLED = 7;
+ BAD_REQUEST = 8;
+ RESPONSE_TOO_LARGE = 9;
+ CANCELLED = 10;
+ REPLAY_ERROR = 11;
+ DEADLINE_EXCEEDED = 12;
+ }
+ required int32 code = 1;
+ optional string detail = 2;
+}
+
+message Response {
+ optional bytes response = 1;
+ optional bytes exception = 2;
+ optional ApplicationError application_error = 3;
+ optional bytes java_exception = 4;
+ optional RpcError rpc_error = 5;
+}
diff --git a/vendor/google.golang.org/appengine/internal/transaction.go b/vendor/google.golang.org/appengine/internal/transaction.go
new file mode 100644
index 0000000..9006ae6
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/transaction.go
@@ -0,0 +1,115 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file implements hooks for applying datastore transactions.
+
+import (
+ "errors"
+ "reflect"
+
+ "github.com/golang/protobuf/proto"
+ netcontext "golang.org/x/net/context"
+
+ basepb "google.golang.org/appengine/internal/base"
+ pb "google.golang.org/appengine/internal/datastore"
+)
+
+var transactionSetters = make(map[reflect.Type]reflect.Value)
+
+// RegisterTransactionSetter registers a function that sets transaction information
+// in a protocol buffer message. f should be a function with two arguments,
+// the first being a protocol buffer type, and the second being *datastore.Transaction.
+func RegisterTransactionSetter(f interface{}) {
+ v := reflect.ValueOf(f)
+ transactionSetters[v.Type().In(0)] = v
+}
+
+// applyTransaction applies the transaction t to message pb
+// by using the relevant setter passed to RegisterTransactionSetter.
+func applyTransaction(pb proto.Message, t *pb.Transaction) {
+ v := reflect.ValueOf(pb)
+ if f, ok := transactionSetters[v.Type()]; ok {
+ f.Call([]reflect.Value{v, reflect.ValueOf(t)})
+ }
+}
+
+var transactionKey = "used for *Transaction"
+
+func transactionFromContext(ctx netcontext.Context) *transaction {
+ t, _ := ctx.Value(&transactionKey).(*transaction)
+ return t
+}
+
+func withTransaction(ctx netcontext.Context, t *transaction) netcontext.Context {
+ return netcontext.WithValue(ctx, &transactionKey, t)
+}
+
+type transaction struct {
+ transaction pb.Transaction
+ finished bool
+}
+
+var ErrConcurrentTransaction = errors.New("internal: concurrent transaction")
+
+func RunTransactionOnce(c netcontext.Context, f func(netcontext.Context) error, xg bool, readOnly bool, previousTransaction *pb.Transaction) (*pb.Transaction, error) {
+ if transactionFromContext(c) != nil {
+ return nil, errors.New("nested transactions are not supported")
+ }
+
+ // Begin the transaction.
+ t := &transaction{}
+ req := &pb.BeginTransactionRequest{
+ App: proto.String(FullyQualifiedAppID(c)),
+ }
+ if xg {
+ req.AllowMultipleEg = proto.Bool(true)
+ }
+ if previousTransaction != nil {
+ req.PreviousTransaction = previousTransaction
+ }
+ if readOnly {
+ req.Mode = pb.BeginTransactionRequest_READ_ONLY.Enum()
+ } else {
+ req.Mode = pb.BeginTransactionRequest_READ_WRITE.Enum()
+ }
+ if err := Call(c, "datastore_v3", "BeginTransaction", req, &t.transaction); err != nil {
+ return nil, err
+ }
+
+ // Call f, rolling back the transaction if f returns a non-nil error, or panics.
+ // The panic is not recovered.
+ defer func() {
+ if t.finished {
+ return
+ }
+ t.finished = true
+ // Ignore the error return value, since we are already returning a non-nil
+ // error (or we're panicking).
+ Call(c, "datastore_v3", "Rollback", &t.transaction, &basepb.VoidProto{})
+ }()
+ if err := f(withTransaction(c, t)); err != nil {
+ return &t.transaction, err
+ }
+ t.finished = true
+
+ // Commit the transaction.
+ res := &pb.CommitResponse{}
+ err := Call(c, "datastore_v3", "Commit", &t.transaction, res)
+ if ae, ok := err.(*APIError); ok {
+ /* TODO: restore this conditional
+ if appengine.IsDevAppServer() {
+ */
+ // The Python Dev AppServer raises an ApplicationError with error code 2 (which is
+ // Error.CONCURRENT_TRANSACTION) and message "Concurrency exception.".
+ if ae.Code == int32(pb.Error_BAD_REQUEST) && ae.Detail == "ApplicationError: 2 Concurrency exception." {
+ return &t.transaction, ErrConcurrentTransaction
+ }
+ if ae.Code == int32(pb.Error_CONCURRENT_TRANSACTION) {
+ return &t.transaction, ErrConcurrentTransaction
+ }
+ }
+ return &t.transaction, err
+}
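
Reviewer note: RunTransactionOnce deliberately runs f a single time and surfaces ErrConcurrentTransaction; the retry policy is left to callers such as the datastore package. A hypothetical wrapper, not part of this diff, showing how a caller might loop, feeding the previous transaction back in and retrying only on that error; the attempt count of 3 is arbitrary.

    package internal

    // Hypothetical retry wrapper around RunTransactionOnce, for illustration.
    import (
    	netcontext "golang.org/x/net/context"

    	pb "google.golang.org/appengine/internal/datastore"
    )

    func runTransactionWithRetry(c netcontext.Context, f func(netcontext.Context) error, xg, readOnly bool) error {
    	var prev *pb.Transaction
    	var err error
    	for attempt := 0; attempt < 3; attempt++ {
    		// Pass the previous transaction back in so the datastore can
    		// detect a retry of the same logical transaction.
    		prev, err = RunTransactionOnce(c, f, xg, readOnly, prev)
    		if err != ErrConcurrentTransaction {
    			return err
    		}
    	}
    	return ErrConcurrentTransaction
    }
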
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
new file mode 100644
index 0000000..5f72775
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
@@ -0,0 +1,527 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
+
+package urlfetch
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type URLFetchServiceError_ErrorCode int32
+
+const (
+ URLFetchServiceError_OK URLFetchServiceError_ErrorCode = 0
+ URLFetchServiceError_INVALID_URL URLFetchServiceError_ErrorCode = 1
+ URLFetchServiceError_FETCH_ERROR URLFetchServiceError_ErrorCode = 2
+ URLFetchServiceError_UNSPECIFIED_ERROR URLFetchServiceError_ErrorCode = 3
+ URLFetchServiceError_RESPONSE_TOO_LARGE URLFetchServiceError_ErrorCode = 4
+ URLFetchServiceError_DEADLINE_EXCEEDED URLFetchServiceError_ErrorCode = 5
+ URLFetchServiceError_SSL_CERTIFICATE_ERROR URLFetchServiceError_ErrorCode = 6
+ URLFetchServiceError_DNS_ERROR URLFetchServiceError_ErrorCode = 7
+ URLFetchServiceError_CLOSED URLFetchServiceError_ErrorCode = 8
+ URLFetchServiceError_INTERNAL_TRANSIENT_ERROR URLFetchServiceError_ErrorCode = 9
+ URLFetchServiceError_TOO_MANY_REDIRECTS URLFetchServiceError_ErrorCode = 10
+ URLFetchServiceError_MALFORMED_REPLY URLFetchServiceError_ErrorCode = 11
+ URLFetchServiceError_CONNECTION_ERROR URLFetchServiceError_ErrorCode = 12
+)
+
+var URLFetchServiceError_ErrorCode_name = map[int32]string{
+ 0: "OK",
+ 1: "INVALID_URL",
+ 2: "FETCH_ERROR",
+ 3: "UNSPECIFIED_ERROR",
+ 4: "RESPONSE_TOO_LARGE",
+ 5: "DEADLINE_EXCEEDED",
+ 6: "SSL_CERTIFICATE_ERROR",
+ 7: "DNS_ERROR",
+ 8: "CLOSED",
+ 9: "INTERNAL_TRANSIENT_ERROR",
+ 10: "TOO_MANY_REDIRECTS",
+ 11: "MALFORMED_REPLY",
+ 12: "CONNECTION_ERROR",
+}
+var URLFetchServiceError_ErrorCode_value = map[string]int32{
+ "OK": 0,
+ "INVALID_URL": 1,
+ "FETCH_ERROR": 2,
+ "UNSPECIFIED_ERROR": 3,
+ "RESPONSE_TOO_LARGE": 4,
+ "DEADLINE_EXCEEDED": 5,
+ "SSL_CERTIFICATE_ERROR": 6,
+ "DNS_ERROR": 7,
+ "CLOSED": 8,
+ "INTERNAL_TRANSIENT_ERROR": 9,
+ "TOO_MANY_REDIRECTS": 10,
+ "MALFORMED_REPLY": 11,
+ "CONNECTION_ERROR": 12,
+}
+
+func (x URLFetchServiceError_ErrorCode) Enum() *URLFetchServiceError_ErrorCode {
+ p := new(URLFetchServiceError_ErrorCode)
+ *p = x
+ return p
+}
+func (x URLFetchServiceError_ErrorCode) String() string {
+ return proto.EnumName(URLFetchServiceError_ErrorCode_name, int32(x))
+}
+func (x *URLFetchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(URLFetchServiceError_ErrorCode_value, data, "URLFetchServiceError_ErrorCode")
+ if err != nil {
+ return err
+ }
+ *x = URLFetchServiceError_ErrorCode(value)
+ return nil
+}
+func (URLFetchServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0, 0}
+}
+
+type URLFetchRequest_RequestMethod int32
+
+const (
+ URLFetchRequest_GET URLFetchRequest_RequestMethod = 1
+ URLFetchRequest_POST URLFetchRequest_RequestMethod = 2
+ URLFetchRequest_HEAD URLFetchRequest_RequestMethod = 3
+ URLFetchRequest_PUT URLFetchRequest_RequestMethod = 4
+ URLFetchRequest_DELETE URLFetchRequest_RequestMethod = 5
+ URLFetchRequest_PATCH URLFetchRequest_RequestMethod = 6
+)
+
+var URLFetchRequest_RequestMethod_name = map[int32]string{
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ 6: "PATCH",
+}
+var URLFetchRequest_RequestMethod_value = map[string]int32{
+ "GET": 1,
+ "POST": 2,
+ "HEAD": 3,
+ "PUT": 4,
+ "DELETE": 5,
+ "PATCH": 6,
+}
+
+func (x URLFetchRequest_RequestMethod) Enum() *URLFetchRequest_RequestMethod {
+ p := new(URLFetchRequest_RequestMethod)
+ *p = x
+ return p
+}
+func (x URLFetchRequest_RequestMethod) String() string {
+ return proto.EnumName(URLFetchRequest_RequestMethod_name, int32(x))
+}
+func (x *URLFetchRequest_RequestMethod) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(URLFetchRequest_RequestMethod_value, data, "URLFetchRequest_RequestMethod")
+ if err != nil {
+ return err
+ }
+ *x = URLFetchRequest_RequestMethod(value)
+ return nil
+}
+func (URLFetchRequest_RequestMethod) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+
+type URLFetchServiceError struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchServiceError) Reset() { *m = URLFetchServiceError{} }
+func (m *URLFetchServiceError) String() string { return proto.CompactTextString(m) }
+func (*URLFetchServiceError) ProtoMessage() {}
+func (*URLFetchServiceError) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0}
+}
+func (m *URLFetchServiceError) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchServiceError.Unmarshal(m, b)
+}
+func (m *URLFetchServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchServiceError.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchServiceError) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchServiceError.Merge(dst, src)
+}
+func (m *URLFetchServiceError) XXX_Size() int {
+ return xxx_messageInfo_URLFetchServiceError.Size(m)
+}
+func (m *URLFetchServiceError) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchServiceError proto.InternalMessageInfo
+
+type URLFetchRequest struct {
+ Method *URLFetchRequest_RequestMethod `protobuf:"varint,1,req,name=Method,enum=appengine.URLFetchRequest_RequestMethod" json:"Method,omitempty"`
+ Url *string `protobuf:"bytes,2,req,name=Url" json:"Url,omitempty"`
+ Header []*URLFetchRequest_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+ Payload []byte `protobuf:"bytes,6,opt,name=Payload" json:"Payload,omitempty"`
+ FollowRedirects *bool `protobuf:"varint,7,opt,name=FollowRedirects,def=1" json:"FollowRedirects,omitempty"`
+ Deadline *float64 `protobuf:"fixed64,8,opt,name=Deadline" json:"Deadline,omitempty"`
+ MustValidateServerCertificate *bool `protobuf:"varint,9,opt,name=MustValidateServerCertificate,def=1" json:"MustValidateServerCertificate,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchRequest) Reset() { *m = URLFetchRequest{} }
+func (m *URLFetchRequest) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest) ProtoMessage() {}
+func (*URLFetchRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1}
+}
+func (m *URLFetchRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchRequest.Unmarshal(m, b)
+}
+func (m *URLFetchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchRequest.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchRequest.Merge(dst, src)
+}
+func (m *URLFetchRequest) XXX_Size() int {
+ return xxx_messageInfo_URLFetchRequest.Size(m)
+}
+func (m *URLFetchRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest proto.InternalMessageInfo
+
+const Default_URLFetchRequest_FollowRedirects bool = true
+const Default_URLFetchRequest_MustValidateServerCertificate bool = true
+
+func (m *URLFetchRequest) GetMethod() URLFetchRequest_RequestMethod {
+ if m != nil && m.Method != nil {
+ return *m.Method
+ }
+ return URLFetchRequest_GET
+}
+
+func (m *URLFetchRequest) GetUrl() string {
+ if m != nil && m.Url != nil {
+ return *m.Url
+ }
+ return ""
+}
+
+func (m *URLFetchRequest) GetHeader() []*URLFetchRequest_Header {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *URLFetchRequest) GetPayload() []byte {
+ if m != nil {
+ return m.Payload
+ }
+ return nil
+}
+
+func (m *URLFetchRequest) GetFollowRedirects() bool {
+ if m != nil && m.FollowRedirects != nil {
+ return *m.FollowRedirects
+ }
+ return Default_URLFetchRequest_FollowRedirects
+}
+
+func (m *URLFetchRequest) GetDeadline() float64 {
+ if m != nil && m.Deadline != nil {
+ return *m.Deadline
+ }
+ return 0
+}
+
+func (m *URLFetchRequest) GetMustValidateServerCertificate() bool {
+ if m != nil && m.MustValidateServerCertificate != nil {
+ return *m.MustValidateServerCertificate
+ }
+ return Default_URLFetchRequest_MustValidateServerCertificate
+}
+
+type URLFetchRequest_Header struct {
+ Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+ Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchRequest_Header) Reset() { *m = URLFetchRequest_Header{} }
+func (m *URLFetchRequest_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest_Header) ProtoMessage() {}
+func (*URLFetchRequest_Header) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+func (m *URLFetchRequest_Header) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchRequest_Header.Unmarshal(m, b)
+}
+func (m *URLFetchRequest_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchRequest_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest_Header) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchRequest_Header.Merge(dst, src)
+}
+func (m *URLFetchRequest_Header) XXX_Size() int {
+ return xxx_messageInfo_URLFetchRequest_Header.Size(m)
+}
+func (m *URLFetchRequest_Header) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchRequest_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest_Header proto.InternalMessageInfo
+
+func (m *URLFetchRequest_Header) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+func (m *URLFetchRequest_Header) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+type URLFetchResponse struct {
+ Content []byte `protobuf:"bytes,1,opt,name=Content" json:"Content,omitempty"`
+ StatusCode *int32 `protobuf:"varint,2,req,name=StatusCode" json:"StatusCode,omitempty"`
+ Header []*URLFetchResponse_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+ ContentWasTruncated *bool `protobuf:"varint,6,opt,name=ContentWasTruncated,def=0" json:"ContentWasTruncated,omitempty"`
+ ExternalBytesSent *int64 `protobuf:"varint,7,opt,name=ExternalBytesSent" json:"ExternalBytesSent,omitempty"`
+ ExternalBytesReceived *int64 `protobuf:"varint,8,opt,name=ExternalBytesReceived" json:"ExternalBytesReceived,omitempty"`
+ FinalUrl *string `protobuf:"bytes,9,opt,name=FinalUrl" json:"FinalUrl,omitempty"`
+ ApiCpuMilliseconds *int64 `protobuf:"varint,10,opt,name=ApiCpuMilliseconds,def=0" json:"ApiCpuMilliseconds,omitempty"`
+ ApiBytesSent *int64 `protobuf:"varint,11,opt,name=ApiBytesSent,def=0" json:"ApiBytesSent,omitempty"`
+ ApiBytesReceived *int64 `protobuf:"varint,12,opt,name=ApiBytesReceived,def=0" json:"ApiBytesReceived,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchResponse) Reset() { *m = URLFetchResponse{} }
+func (m *URLFetchResponse) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse) ProtoMessage() {}
+func (*URLFetchResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2}
+}
+func (m *URLFetchResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchResponse.Unmarshal(m, b)
+}
+func (m *URLFetchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchResponse.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchResponse.Merge(dst, src)
+}
+func (m *URLFetchResponse) XXX_Size() int {
+ return xxx_messageInfo_URLFetchResponse.Size(m)
+}
+func (m *URLFetchResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse proto.InternalMessageInfo
+
+const Default_URLFetchResponse_ContentWasTruncated bool = false
+const Default_URLFetchResponse_ApiCpuMilliseconds int64 = 0
+const Default_URLFetchResponse_ApiBytesSent int64 = 0
+const Default_URLFetchResponse_ApiBytesReceived int64 = 0
+
+func (m *URLFetchResponse) GetContent() []byte {
+ if m != nil {
+ return m.Content
+ }
+ return nil
+}
+
+func (m *URLFetchResponse) GetStatusCode() int32 {
+ if m != nil && m.StatusCode != nil {
+ return *m.StatusCode
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetHeader() []*URLFetchResponse_Header {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *URLFetchResponse) GetContentWasTruncated() bool {
+ if m != nil && m.ContentWasTruncated != nil {
+ return *m.ContentWasTruncated
+ }
+ return Default_URLFetchResponse_ContentWasTruncated
+}
+
+func (m *URLFetchResponse) GetExternalBytesSent() int64 {
+ if m != nil && m.ExternalBytesSent != nil {
+ return *m.ExternalBytesSent
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetExternalBytesReceived() int64 {
+ if m != nil && m.ExternalBytesReceived != nil {
+ return *m.ExternalBytesReceived
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetFinalUrl() string {
+ if m != nil && m.FinalUrl != nil {
+ return *m.FinalUrl
+ }
+ return ""
+}
+
+func (m *URLFetchResponse) GetApiCpuMilliseconds() int64 {
+ if m != nil && m.ApiCpuMilliseconds != nil {
+ return *m.ApiCpuMilliseconds
+ }
+ return Default_URLFetchResponse_ApiCpuMilliseconds
+}
+
+func (m *URLFetchResponse) GetApiBytesSent() int64 {
+ if m != nil && m.ApiBytesSent != nil {
+ return *m.ApiBytesSent
+ }
+ return Default_URLFetchResponse_ApiBytesSent
+}
+
+func (m *URLFetchResponse) GetApiBytesReceived() int64 {
+ if m != nil && m.ApiBytesReceived != nil {
+ return *m.ApiBytesReceived
+ }
+ return Default_URLFetchResponse_ApiBytesReceived
+}
+
+type URLFetchResponse_Header struct {
+ Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+ Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchResponse_Header) Reset() { *m = URLFetchResponse_Header{} }
+func (m *URLFetchResponse_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse_Header) ProtoMessage() {}
+func (*URLFetchResponse_Header) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2, 0}
+}
+func (m *URLFetchResponse_Header) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchResponse_Header.Unmarshal(m, b)
+}
+func (m *URLFetchResponse_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchResponse_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse_Header) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchResponse_Header.Merge(dst, src)
+}
+func (m *URLFetchResponse_Header) XXX_Size() int {
+ return xxx_messageInfo_URLFetchResponse_Header.Size(m)
+}
+func (m *URLFetchResponse_Header) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchResponse_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse_Header proto.InternalMessageInfo
+
+func (m *URLFetchResponse_Header) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+func (m *URLFetchResponse_Header) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+func init() {
+ proto.RegisterType((*URLFetchServiceError)(nil), "appengine.URLFetchServiceError")
+ proto.RegisterType((*URLFetchRequest)(nil), "appengine.URLFetchRequest")
+ proto.RegisterType((*URLFetchRequest_Header)(nil), "appengine.URLFetchRequest.Header")
+ proto.RegisterType((*URLFetchResponse)(nil), "appengine.URLFetchResponse")
+ proto.RegisterType((*URLFetchResponse_Header)(nil), "appengine.URLFetchResponse.Header")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto", fileDescriptor_urlfetch_service_b245a7065f33bced)
+}
+
+var fileDescriptor_urlfetch_service_b245a7065f33bced = []byte{
+ // 770 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6e, 0xe3, 0x54,
+ 0x10, 0xc6, 0x76, 0x7e, 0xa7, 0x5d, 0x7a, 0x76, 0xb6, 0x45, 0x66, 0xb5, 0xa0, 0x10, 0x09, 0x29,
+ 0x17, 0x90, 0x2e, 0x2b, 0x24, 0x44, 0xaf, 0x70, 0xed, 0x93, 0xad, 0xa9, 0x63, 0x47, 0xc7, 0x4e,
+ 0x61, 0xb9, 0xb1, 0xac, 0x78, 0x9a, 0x5a, 0xb2, 0xec, 0x60, 0x9f, 0x2c, 0xf4, 0x35, 0x78, 0x0d,
+ 0xde, 0x87, 0xa7, 0xe1, 0x02, 0x9d, 0xc4, 0xc9, 0x6e, 0xbb, 0xd1, 0x4a, 0x5c, 0x65, 0xe6, 0x9b,
+ 0xef, 0xcc, 0x99, 0x7c, 0xdf, 0xf8, 0x80, 0xb3, 0x2c, 0xcb, 0x65, 0x4e, 0xe3, 0x65, 0x99, 0x27,
+ 0xc5, 0x72, 0x5c, 0x56, 0xcb, 0xf3, 0x64, 0xb5, 0xa2, 0x62, 0x99, 0x15, 0x74, 0x9e, 0x15, 0x92,
+ 0xaa, 0x22, 0xc9, 0xcf, 0xd7, 0x55, 0x7e, 0x4b, 0x72, 0x71, 0xb7, 0x0f, 0xe2, 0x9a, 0xaa, 0xb7,
+ 0xd9, 0x82, 0xc6, 0xab, 0xaa, 0x94, 0x25, 0xf6, 0xf7, 0x67, 0x86, 0x7f, 0xeb, 0x70, 0x3a, 0x17,
+ 0xde, 0x44, 0xb1, 0xc2, 0x2d, 0x89, 0x57, 0x55, 0x59, 0x0d, 0xff, 0xd2, 0xa1, 0xbf, 0x89, 0xec,
+ 0x32, 0x25, 0xec, 0x80, 0x1e, 0x5c, 0xb3, 0x4f, 0xf0, 0x04, 0x8e, 0x5c, 0xff, 0xc6, 0xf2, 0x5c,
+ 0x27, 0x9e, 0x0b, 0x8f, 0x69, 0x0a, 0x98, 0xf0, 0xc8, 0xbe, 0x8a, 0xb9, 0x10, 0x81, 0x60, 0x3a,
+ 0x9e, 0xc1, 0xd3, 0xb9, 0x1f, 0xce, 0xb8, 0xed, 0x4e, 0x5c, 0xee, 0x34, 0xb0, 0x81, 0x9f, 0x01,
+ 0x0a, 0x1e, 0xce, 0x02, 0x3f, 0xe4, 0x71, 0x14, 0x04, 0xb1, 0x67, 0x89, 0xd7, 0x9c, 0xb5, 0x14,
+ 0xdd, 0xe1, 0x96, 0xe3, 0xb9, 0x3e, 0x8f, 0xf9, 0xaf, 0x36, 0xe7, 0x0e, 0x77, 0x58, 0x1b, 0x3f,
+ 0x87, 0xb3, 0x30, 0xf4, 0x62, 0x9b, 0x8b, 0xc8, 0x9d, 0xb8, 0xb6, 0x15, 0xf1, 0xa6, 0x53, 0x07,
+ 0x9f, 0x40, 0xdf, 0xf1, 0xc3, 0x26, 0xed, 0x22, 0x40, 0xc7, 0xf6, 0x82, 0x90, 0x3b, 0xac, 0x87,
+ 0x2f, 0xc0, 0x74, 0xfd, 0x88, 0x0b, 0xdf, 0xf2, 0xe2, 0x48, 0x58, 0x7e, 0xe8, 0x72, 0x3f, 0x6a,
+ 0x98, 0x7d, 0x35, 0x82, 0xba, 0x79, 0x6a, 0xf9, 0x6f, 0x62, 0xc1, 0x1d, 0x57, 0x70, 0x3b, 0x0a,
+ 0x19, 0xe0, 0x33, 0x38, 0x99, 0x5a, 0xde, 0x24, 0x10, 0x53, 0xee, 0xc4, 0x82, 0xcf, 0xbc, 0x37,
+ 0xec, 0x08, 0x4f, 0x81, 0xd9, 0x81, 0xef, 0x73, 0x3b, 0x72, 0x03, 0xbf, 0x69, 0x71, 0x3c, 0xfc,
+ 0xc7, 0x80, 0x93, 0x9d, 0x5a, 0x82, 0x7e, 0x5f, 0x53, 0x2d, 0xf1, 0x27, 0xe8, 0x4c, 0x49, 0xde,
+ 0x95, 0xa9, 0xa9, 0x0d, 0xf4, 0xd1, 0xa7, 0xaf, 0x46, 0xe3, 0xbd, 0xba, 0xe3, 0x47, 0xdc, 0x71,
+ 0xf3, 0xbb, 0xe5, 0x8b, 0xe6, 0x1c, 0x32, 0x30, 0xe6, 0x55, 0x6e, 0xea, 0x03, 0x7d, 0xd4, 0x17,
+ 0x2a, 0xc4, 0x1f, 0xa1, 0x73, 0x47, 0x49, 0x4a, 0x95, 0x69, 0x0c, 0x8c, 0x11, 0xbc, 0xfa, 0xea,
+ 0x23, 0x3d, 0xaf, 0x36, 0x44, 0xd1, 0x1c, 0xc0, 0x17, 0xd0, 0x9d, 0x25, 0xf7, 0x79, 0x99, 0xa4,
+ 0x66, 0x67, 0xa0, 0x8d, 0x8e, 0x2f, 0xf5, 0x9e, 0x26, 0x76, 0x10, 0x8e, 0xe1, 0x64, 0x52, 0xe6,
+ 0x79, 0xf9, 0x87, 0xa0, 0x34, 0xab, 0x68, 0x21, 0x6b, 0xb3, 0x3b, 0xd0, 0x46, 0xbd, 0x8b, 0x96,
+ 0xac, 0xd6, 0x24, 0x1e, 0x17, 0xf1, 0x39, 0xf4, 0x1c, 0x4a, 0xd2, 0x3c, 0x2b, 0xc8, 0xec, 0x0d,
+ 0xb4, 0x91, 0x26, 0xf6, 0x39, 0xfe, 0x0c, 0x5f, 0x4c, 0xd7, 0xb5, 0xbc, 0x49, 0xf2, 0x2c, 0x4d,
+ 0x24, 0xa9, 0xed, 0xa1, 0xca, 0xa6, 0x4a, 0x66, 0xb7, 0xd9, 0x22, 0x91, 0x64, 0xf6, 0xdf, 0xeb,
+ 0xfc, 0x71, 0xea, 0xf3, 0x97, 0xd0, 0xd9, 0xfe, 0x0f, 0x25, 0xc6, 0x35, 0xdd, 0x9b, 0xad, 0xad,
+ 0x18, 0xd7, 0x74, 0x8f, 0xa7, 0xd0, 0xbe, 0x49, 0xf2, 0x35, 0x99, 0xed, 0x0d, 0xb6, 0x4d, 0x86,
+ 0x1e, 0x3c, 0x79, 0xa0, 0x26, 0x76, 0xc1, 0x78, 0xcd, 0x23, 0xa6, 0x61, 0x0f, 0x5a, 0xb3, 0x20,
+ 0x8c, 0x98, 0xae, 0xa2, 0x2b, 0x6e, 0x39, 0xcc, 0x50, 0xc5, 0xd9, 0x3c, 0x62, 0x2d, 0xb5, 0x2e,
+ 0x0e, 0xf7, 0x78, 0xc4, 0x59, 0x1b, 0xfb, 0xd0, 0x9e, 0x59, 0x91, 0x7d, 0xc5, 0x3a, 0xc3, 0x7f,
+ 0x0d, 0x60, 0xef, 0x84, 0xad, 0x57, 0x65, 0x51, 0x13, 0x9a, 0xd0, 0xb5, 0xcb, 0x42, 0x52, 0x21,
+ 0x4d, 0x4d, 0x49, 0x29, 0x76, 0x29, 0x7e, 0x09, 0x10, 0xca, 0x44, 0xae, 0x6b, 0xf5, 0x71, 0x6c,
+ 0x8c, 0x6b, 0x8b, 0xf7, 0x10, 0xbc, 0x78, 0xe4, 0xdf, 0xf0, 0xa0, 0x7f, 0xdb, 0x6b, 0x1e, 0x1b,
+ 0xf8, 0x03, 0x3c, 0x6b, 0xae, 0xf9, 0x25, 0xa9, 0xa3, 0x6a, 0x5d, 0x28, 0x81, 0xb6, 0x66, 0xf6,
+ 0x2e, 0xda, 0xb7, 0x49, 0x5e, 0x93, 0x38, 0xc4, 0xc0, 0x6f, 0xe0, 0x29, 0xff, 0x73, 0xfb, 0x02,
+ 0x5c, 0xde, 0x4b, 0xaa, 0x43, 0x35, 0xb8, 0x72, 0xd7, 0x10, 0x1f, 0x16, 0xf0, 0x7b, 0x38, 0x7b,
+ 0x00, 0x0a, 0x5a, 0x50, 0xf6, 0x96, 0xd2, 0x8d, 0xcd, 0x86, 0x38, 0x5c, 0x54, 0xfb, 0x30, 0xc9,
+ 0x8a, 0x24, 0x57, 0xfb, 0xaa, 0xec, 0xed, 0x8b, 0x7d, 0x8e, 0xdf, 0x01, 0x5a, 0xab, 0xcc, 0x5e,
+ 0xad, 0xa7, 0x59, 0x9e, 0x67, 0x35, 0x2d, 0xca, 0x22, 0xad, 0x4d, 0x50, 0xed, 0x2e, 0xb4, 0x97,
+ 0xe2, 0x40, 0x11, 0xbf, 0x86, 0x63, 0x6b, 0x95, 0xbd, 0x9b, 0xf6, 0x68, 0x47, 0x7e, 0x00, 0xe3,
+ 0xb7, 0xc0, 0x76, 0xf9, 0x7e, 0xcc, 0xe3, 0x1d, 0xf5, 0x83, 0xd2, 0xff, 0x5f, 0xa6, 0x4b, 0xf8,
+ 0xad, 0xb7, 0x7b, 0x2a, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x9f, 0x6d, 0x24, 0x63, 0x05,
+ 0x00, 0x00,
+}
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
new file mode 100644
index 0000000..f695edf
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
@@ -0,0 +1,64 @@
+syntax = "proto2";
+option go_package = "urlfetch";
+
+package appengine;
+
+message URLFetchServiceError {
+ enum ErrorCode {
+ OK = 0;
+ INVALID_URL = 1;
+ FETCH_ERROR = 2;
+ UNSPECIFIED_ERROR = 3;
+ RESPONSE_TOO_LARGE = 4;
+ DEADLINE_EXCEEDED = 5;
+ SSL_CERTIFICATE_ERROR = 6;
+ DNS_ERROR = 7;
+ CLOSED = 8;
+ INTERNAL_TRANSIENT_ERROR = 9;
+ TOO_MANY_REDIRECTS = 10;
+ MALFORMED_REPLY = 11;
+ CONNECTION_ERROR = 12;
+ }
+}
+
+message URLFetchRequest {
+ enum RequestMethod {
+ GET = 1;
+ POST = 2;
+ HEAD = 3;
+ PUT = 4;
+ DELETE = 5;
+ PATCH = 6;
+ }
+ required RequestMethod Method = 1;
+ required string Url = 2;
+ repeated group Header = 3 {
+ required string Key = 4;
+ required string Value = 5;
+ }
+ optional bytes Payload = 6 [ctype=CORD];
+
+ optional bool FollowRedirects = 7 [default=true];
+
+ optional double Deadline = 8;
+
+ optional bool MustValidateServerCertificate = 9 [default=true];
+}
+
+message URLFetchResponse {
+ optional bytes Content = 1;
+ required int32 StatusCode = 2;
+ repeated group Header = 3 {
+ required string Key = 4;
+ required string Value = 5;
+ }
+ optional bool ContentWasTruncated = 6 [default=false];
+ optional int64 ExternalBytesSent = 7;
+ optional int64 ExternalBytesReceived = 8;
+
+ optional string FinalUrl = 9;
+
+ optional int64 ApiCpuMilliseconds = 10 [default=0];
+ optional int64 ApiBytesSent = 11 [default=0];
+ optional int64 ApiBytesReceived = 12 [default=0];
+}
diff --git a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
new file mode 100644
index 0000000..6ffe1e6
--- /dev/null
+++ b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
@@ -0,0 +1,210 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package urlfetch provides an http.RoundTripper implementation
+// for fetching URLs via App Engine's urlfetch service.
+package urlfetch // import "google.golang.org/appengine/urlfetch"
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golang/protobuf/proto"
+ "golang.org/x/net/context"
+
+ "google.golang.org/appengine/internal"
+ pb "google.golang.org/appengine/internal/urlfetch"
+)
+
+// Transport is an implementation of http.RoundTripper for
+// App Engine. Users should generally create an http.Client using
+// this transport and use the Client rather than using this transport
+// directly.
+type Transport struct {
+ Context context.Context
+
+ // Controls whether the application checks the validity of SSL certificates
+ // over HTTPS connections. A value of false (the default) instructs the
+ // application to send a request to the server only if the certificate is
+ // valid and signed by a trusted certificate authority (CA), and also
+ // includes a hostname that matches the certificate. A value of true
+ // instructs the application to perform no certificate validation.
+ AllowInvalidServerCertificate bool
+}
+
+// Verify statically that *Transport implements http.RoundTripper.
+var _ http.RoundTripper = (*Transport)(nil)
+
+// Client returns an *http.Client using a default urlfetch Transport. This
+// client will have the default deadline of 5 seconds, and will check the
+// validity of SSL certificates.
+//
+// Any deadline of the provided context will be used for requests through this client;
+// if the client does not have a deadline then a 5 second default is used.
+func Client(ctx context.Context) *http.Client {
+ return &http.Client{
+ Transport: &Transport{
+ Context: ctx,
+ },
+ }
+}
+
+type bodyReader struct {
+ content []byte
+ truncated bool
+ closed bool
+}
+
+// ErrTruncatedBody is the error returned after the final Read() from a
+// response's Body if the body has been truncated by App Engine's proxy.
+var ErrTruncatedBody = errors.New("urlfetch: truncated body")
+
+func statusCodeToText(code int) string {
+ if t := http.StatusText(code); t != "" {
+ return t
+ }
+ return strconv.Itoa(code)
+}
+
+func (br *bodyReader) Read(p []byte) (n int, err error) {
+ if br.closed {
+ if br.truncated {
+ return 0, ErrTruncatedBody
+ }
+ return 0, io.EOF
+ }
+ n = copy(p, br.content)
+ if n > 0 {
+ br.content = br.content[n:]
+ return
+ }
+ if br.truncated {
+ br.closed = true
+ return 0, ErrTruncatedBody
+ }
+ return 0, io.EOF
+}
+
+func (br *bodyReader) Close() error {
+ br.closed = true
+ br.content = nil
+ return nil
+}
+
+// A map of the URL Fetch-accepted methods that take a request body.
+var methodAcceptsRequestBody = map[string]bool{
+ "POST": true,
+ "PUT": true,
+ "PATCH": true,
+}
+
+// urlString returns a valid string given a URL. This function is necessary because
+// the String method of URL doesn't correctly handle URLs with non-empty Opaque values.
+// See http://code.google.com/p/go/issues/detail?id=4860.
+func urlString(u *url.URL) string {
+ if u.Opaque == "" || strings.HasPrefix(u.Opaque, "//") {
+ return u.String()
+ }
+ aux := *u
+ aux.Opaque = "//" + aux.Host + aux.Opaque
+ return aux.String()
+}
+
+// RoundTrip issues a single HTTP request and returns its response. Per the
+// http.RoundTripper interface, RoundTrip only returns an error if there
+// was an unsupported request or the URL Fetch proxy fails.
+// Note that HTTP response codes such as 5xx, 403, 404, etc are not
+// errors as far as the transport is concerned and will be returned
+// with err set to nil.
+func (t *Transport) RoundTrip(req *http.Request) (res *http.Response, err error) {
+ methNum, ok := pb.URLFetchRequest_RequestMethod_value[req.Method]
+ if !ok {
+ return nil, fmt.Errorf("urlfetch: unsupported HTTP method %q", req.Method)
+ }
+
+ method := pb.URLFetchRequest_RequestMethod(methNum)
+
+ freq := &pb.URLFetchRequest{
+ Method: &method,
+ Url: proto.String(urlString(req.URL)),
+ FollowRedirects: proto.Bool(false), // http.Client's responsibility
+ MustValidateServerCertificate: proto.Bool(!t.AllowInvalidServerCertificate),
+ }
+ if deadline, ok := t.Context.Deadline(); ok {
+ freq.Deadline = proto.Float64(deadline.Sub(time.Now()).Seconds())
+ }
+
+ for k, vals := range req.Header {
+ for _, val := range vals {
+ freq.Header = append(freq.Header, &pb.URLFetchRequest_Header{
+ Key: proto.String(k),
+ Value: proto.String(val),
+ })
+ }
+ }
+ if methodAcceptsRequestBody[req.Method] && req.Body != nil {
+ // Avoid a []byte copy if req.Body has a Bytes method.
+ switch b := req.Body.(type) {
+ case interface {
+ Bytes() []byte
+ }:
+ freq.Payload = b.Bytes()
+ default:
+ freq.Payload, err = ioutil.ReadAll(req.Body)
+ if err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ fres := &pb.URLFetchResponse{}
+ if err := internal.Call(t.Context, "urlfetch", "Fetch", freq, fres); err != nil {
+ return nil, err
+ }
+
+ res = &http.Response{}
+ res.StatusCode = int(*fres.StatusCode)
+ res.Status = fmt.Sprintf("%d %s", res.StatusCode, statusCodeToText(res.StatusCode))
+ res.Header = make(http.Header)
+ res.Request = req
+
+ // Faked:
+ res.ProtoMajor = 1
+ res.ProtoMinor = 1
+ res.Proto = "HTTP/1.1"
+ res.Close = true
+
+ for _, h := range fres.Header {
+ hkey := http.CanonicalHeaderKey(*h.Key)
+ hval := *h.Value
+ if hkey == "Content-Length" {
+ // Will get filled in below for all but HEAD requests.
+ if req.Method == "HEAD" {
+ res.ContentLength, _ = strconv.ParseInt(hval, 10, 64)
+ }
+ continue
+ }
+ res.Header.Add(hkey, hval)
+ }
+
+ if req.Method != "HEAD" {
+ res.ContentLength = int64(len(fres.Content))
+ }
+
+ truncated := fres.GetContentWasTruncated()
+ res.Body = &bodyReader{content: fres.Content, truncated: truncated}
+ return
+}
+
+func init() {
+ internal.RegisterErrorCodeMap("urlfetch", pb.URLFetchServiceError_ErrorCode_name)
+ internal.RegisterTimeoutErrorCode("urlfetch", int32(pb.URLFetchServiceError_DEADLINE_EXCEEDED))
+}
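
Reviewer note: typical use of the urlfetch package added above is indirect: build an *http.Client from the request context with Client(ctx) and issue ordinary requests through it, so RoundTrip is never called directly. A short usage sketch for an App Engine handler; the handler path and target URL are only examples.

    package main

    // Example handler that fetches an external URL through the urlfetch transport.
    import (
    	"fmt"
    	"net/http"

    	"google.golang.org/appengine"
    	"google.golang.org/appengine/urlfetch"
    )

    func handler(w http.ResponseWriter, r *http.Request) {
    	ctx := appengine.NewContext(r)
    	client := urlfetch.Client(ctx)
    	resp, err := client.Get("https://www.example.com/")
    	if err != nil {
    		http.Error(w, err.Error(), http.StatusInternalServerError)
    		return
    	}
    	defer resp.Body.Close()
    	fmt.Fprintf(w, "HTTP GET returned status %v", resp.Status)
    }

    func main() {
    	http.HandleFunc("/fetch", handler)
    	appengine.Main()
    }
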