diff --git a/.gitignore b/.gitignore index 8c2e171..7808b27 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ .glide/ bin/ +.idea/ diff --git a/cfg/config.go b/cfg/config.go index e9356ad..725cc39 100644 --- a/cfg/config.go +++ b/cfg/config.go @@ -21,33 +21,25 @@ import ( ) // dateFormat is the format used for the `since` configuration parameter -const dateFormat = "2006-01-02T15:04:05-0700" +const DateFormat = "2006-01-02T15:04:05-0700" // defaultLogLevel is the level logrus should default to if the configured option can't be parsed const defaultLogLevel = logrus.InfoLevel // fieldKey is an enum-like type to represent the customfield ID keys -type fieldKey int +type FieldKey int const ( - GitHubID fieldKey = iota - GitHubNumber fieldKey = iota - GitHubLabels fieldKey = iota - GitHubStatus fieldKey = iota - GitHubReporter fieldKey = iota - LastISUpdate fieldKey = iota + GitHubID FieldKey = iota + GitHubNumber FieldKey = iota + GitHubLabels FieldKey = iota + GitHubStatus FieldKey = iota + GitHubReporter FieldKey = iota + GitHubCommits FieldKey = iota + LastISUpdate FieldKey = iota + GitHubIssueData FieldKey = iota ) -// fields represents the custom field IDs of the JIRA custom fields we care about -type fields struct { - githubID string - githubNumber string - githubLabels string - githubReporter string - githubStatus string - lastUpdate string -} - // Config is the root configuration object the application creates. type Config struct { // cmdFile is the file Viper is using for its configuration (default $HOME/.issue-sync.json). @@ -62,7 +54,7 @@ type Config struct { basicAuth bool // fieldIDs is the list of custom fields we pulled from the `fields` JIRA endpoint. - fieldIDs fields + fieldIDs map[FieldKey]string // project represents the JIRA project the user has requested. project jira.Project @@ -70,6 +62,8 @@ type Config struct { // since is the parsed value of the `since` configuration parameter, which is the earliest that // a GitHub issue can have been updated to be retrieved. since time.Time + + fieldMapper FieldMapper } // NewConfig creates a new, immutable configuration object. This object @@ -89,7 +83,7 @@ func NewConfig(cmd *cobra.Command) (Config, error) { config.cmdFile = config.cmdConfig.ConfigFileUsed() - config.log = *newLogger("issue-sync", config.cmdConfig.GetString("log-level")) + config.log = *NewLogger("issue-sync", config.cmdConfig.GetString("log-level")) if err := config.validateConfig(); err != nil { return Config{}, err @@ -116,7 +110,18 @@ func (c *Config) LoadJIRAConfig(client jira.Client) error { } c.project = *proj - c.fieldIDs, err = c.getFieldIDs(client) + + if c.IsUsingGitHubIssueDataField() { + c.fieldMapper = JsonFieldMapper{ + Config: c, + } + } else { + c.fieldMapper = DefaultFieldMapper{ + Config: c, + } + } + + c.fieldIDs, err = c.fieldMapper.GetFieldIDs(client) if err != nil { return err } @@ -155,6 +160,15 @@ func (c Config) IsDryRun() bool { return c.cmdConfig.GetBool("dry-run") } +// FullSyncAlways returns whether the application should rewrite the config file to set "since" to now or if it should +// always do a full sync from the given sync. Full sync is required for GitHubProjectBoard -> Jira State syncing. This +// is becuase moving a GitHub issue on the project board does not change the last updated time of the issue itself - +// only that of the project card. 
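The intended consumer of this flag appears further down in cmd/root.go. As a rough, hand-written sketch of that interaction (runOnce and runSync are hypothetical stand-ins, not part of this change):

package main

import "github.com/coreos/issue-sync/cfg"

// runOnce is a hypothetical wrapper around one sync pass; runSync stands in
// for lib.CompareIssues. Only the save-cursor decision matters here.
func runOnce(config cfg.Config, runSync func(cfg.Config) error) error {
	if err := runSync(config); err != nil {
		return err
	}
	// Moving a card on a GitHub project board does not bump the issue's own
	// updated timestamp, so advancing "since" would hide those moves from the
	// next run; keep the old cursor whenever full sync is always requested.
	if !config.IsDryRun() && !config.FullSyncAlways() {
		return config.SaveConfig()
	}
	return nil
}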
+func (c Config) FullSyncAlways() bool { + return c.cmdConfig.GetBool("full-sync-always") +} + + // IsDaemon returns whether the application is running as a daemon func (c Config) IsDaemon() bool { return c.cmdConfig.GetDuration("period") != 0 @@ -171,27 +185,20 @@ func (c Config) GetTimeout() time.Duration { } // GetFieldID returns the customfield ID of a JIRA custom field. -func (c Config) GetFieldID(key fieldKey) string { - switch key { - case GitHubID: - return c.fieldIDs.githubID - case GitHubNumber: - return c.fieldIDs.githubNumber - case GitHubLabels: - return c.fieldIDs.githubLabels - case GitHubReporter: - return c.fieldIDs.githubReporter - case GitHubStatus: - return c.fieldIDs.githubStatus - case LastISUpdate: - return c.fieldIDs.lastUpdate - default: +func (c Config) GetFieldID(key FieldKey) string { + v, ok := c.fieldIDs[key] + if !ok { return "" } + return v } -// GetFieldKey returns customfield_XXXXX, where XXXXX is the custom field ID (see GetFieldID). -func (c Config) GetFieldKey(key fieldKey) string { +func (c Config) IsUsingGitHubIssueDataField() bool { + return c.cmdConfig.GetString("github-to-jira-field-mapper") == "json-field-mapper" +} + +// GetCompleteFieldKey returns customfield_XXXXX, where XXXXX is the custom field ID (see GetFieldID). +func (c Config) GetCompleteFieldKey(key FieldKey) string { return fmt.Sprintf("customfield_%s", c.GetFieldID(key)) } @@ -213,6 +220,10 @@ func (c Config) GetRepo() (string, string) { return parts[0], parts[1] } +func (c Config) GetFieldMapper() FieldMapper { + return c.fieldMapper +} + // SetJIRAToken adds the JIRA OAuth tokens in the Viper configuration, ensuring that they // are saved for future runs. func (c Config) SetJIRAToken(token *oauth1.Token) { @@ -224,11 +235,12 @@ func (c Config) SetJIRAToken(token *oauth1.Token) { type configFile struct { LogLevel string `json:"log-level" mapstructure:"log-level"` GithubToken string `json:"github-token" mapstructure:"github-token"` - JIRAUser string `json:"jira-user" mapstructure:"jira-user"` - JIRAToken string `json:"jira-token" mapstructure:"jira-token"` - JIRASecret string `json:"jira-secret" mapstructure:"jira-secret"` - JIRAKey string `json:"jira-private-key-path" mapstructure:"jira-private-key-path"` - JIRACKey string `json:"jira-consumer-key" mapstructure:"jira-consumer-key"` + JIRAUser string `json:"jira-user,omitempty" mapstructure:"jira-user"` + JIRAPass string `json:"jira-pass,omitempty" mapstructure:"jira-pass"` + JIRAToken string `json:"jira-token,omitempty" mapstructure:"jira-token"` + JIRASecret string `json:"jira-secret,omitempty" mapstructure:"jira-secret"` + JIRAKey string `json:"jira-private-key-path,omitempty" mapstructure:"jira-private-key-path"` + JIRACKey string `json:"jira-consumer-key,omitempty" mapstructure:"jira-consumer-key"` RepoName string `json:"repo-name" mapstructure:"repo-name"` JIRAURI string `json:"jira-uri" mapstructure:"jira-uri"` JIRAProject string `json:"jira-project" mapstructure:"jira-project"` @@ -238,12 +250,14 @@ type configFile struct { // SaveConfig updates the `since` parameter to now, then saves the configuration file. 
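With the field IDs now held in a map keyed by FieldKey, the accessor surface is small. A brief illustrative sketch follows; printFieldKeys is hypothetical, the IDs in the comments are invented, and conf is assumed to have already been through NewConfig and LoadJIRAConfig:

package main

import (
	"fmt"

	"github.com/coreos/issue-sync/cfg"
)

// printFieldKeys is a hypothetical helper, not part of the diff.
func printFieldKeys(conf cfg.Config) {
	// Default mapper: one custom field per GitHub attribute.
	fmt.Println(conf.GetFieldID(cfg.GitHubID))          // raw ID, e.g. "10105" (invented)
	fmt.Println(conf.GetCompleteFieldKey(cfg.GitHubID)) // e.g. "customfield_10105" (invented)

	// github-to-jira-field-mapper = "json-field-mapper": all GitHub metadata
	// is packed into the single "GitHub Issue Data" custom field instead.
	if conf.IsUsingGitHubIssueDataField() {
		fmt.Println(conf.GetCompleteFieldKey(cfg.GitHubIssueData))
	}
}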
func (c *Config) SaveConfig() error { - c.cmdConfig.Set("since", time.Now().Format(dateFormat)) + c.cmdConfig.Set("since", time.Now().Format(DateFormat)) var cf configFile - c.cmdConfig.Unmarshal(&cf) + var err error + err = c.cmdConfig.Unmarshal(&cf) + var b []byte - b, err := json.MarshalIndent(cf, "", " ") + b, err = json.MarshalIndent(cf, "", " ") if err != nil { return err } @@ -317,7 +331,7 @@ func parseLogLevel(level string) logrus.Level { // newLogger uses the log level provided in the configuration // to create a new logrus logger and set fields on it to make // it easy to use. -func newLogger(app, level string) *logrus.Entry { +func NewLogger(app, level string) *logrus.Entry { logger := logrus.New() logger.Level = parseLogLevel(level) logEntry := logrus.NewEntry(logger).WithFields(logrus.Fields{ @@ -416,7 +430,7 @@ func (c *Config) validateConfig() error { c.cmdConfig.Set("since", "1970-01-01T00:00:00+0000") } - since, err := time.Parse(dateFormat, sinceStr) + since, err := time.Parse(DateFormat, sinceStr) if err != nil { return errors.New("Since date must be in ISO-8601 format") } @@ -445,57 +459,4 @@ type jiraField struct { Custom string `json:"custom,omitempty"` CustomID int `json:"customId,omitempty"` } `json:"schema,omitempty"` -} - -// getFieldIDs requests the metadata of every issue field in the JIRA -// project, and saves the IDs of the custom fields used by issue-sync. -func (c Config) getFieldIDs(client jira.Client) (fields, error) { - c.log.Debug("Collecting field IDs.") - req, err := client.NewRequest("GET", "/rest/api/2/field", nil) - if err != nil { - return fields{}, err - } - jFields := new([]jiraField) - - _, err = client.Do(req, jFields) - if err != nil { - return fields{}, err - } - - fieldIDs := fields{} - - for _, field := range *jFields { - switch field.Name { - case "GitHub ID": - fieldIDs.githubID = fmt.Sprint(field.Schema.CustomID) - case "GitHub Number": - fieldIDs.githubNumber = fmt.Sprint(field.Schema.CustomID) - case "GitHub Labels": - fieldIDs.githubLabels = fmt.Sprint(field.Schema.CustomID) - case "GitHub Status": - fieldIDs.githubStatus = fmt.Sprint(field.Schema.CustomID) - case "GitHub Reporter": - fieldIDs.githubReporter = fmt.Sprint(field.Schema.CustomID) - case "Last Issue-Sync Update": - fieldIDs.lastUpdate = fmt.Sprint(field.Schema.CustomID) - } - } - - if fieldIDs.githubID == "" { - return fieldIDs, errors.New("could not find ID of 'GitHub ID' custom field; check that it is named correctly") - } else if fieldIDs.githubNumber == "" { - return fieldIDs, errors.New("could not find ID of 'GitHub Number' custom field; check that it is named correctly") - } else if fieldIDs.githubLabels == "" { - return fieldIDs, errors.New("could not find ID of 'Github Labels' custom field; check that it is named correctly") - } else if fieldIDs.githubStatus == "" { - return fieldIDs, errors.New("could not find ID of 'Github Status' custom field; check that it is named correctly") - } else if fieldIDs.githubReporter == "" { - return fieldIDs, errors.New("could not find ID of 'Github Reporter' custom field; check that it is named correctly") - } else if fieldIDs.lastUpdate == "" { - return fieldIDs, errors.New("could not find ID of 'Last Issue-Sync Update' custom field; check that it is named correctly") - } - - c.log.Debug("All fields have been checked.") - - return fieldIDs, nil -} +} \ No newline at end of file diff --git a/cfg/githubfieldmapper.go b/cfg/githubfieldmapper.go new file mode 100644 index 0000000..83b7080 --- /dev/null +++ 
b/cfg/githubfieldmapper.go @@ -0,0 +1,239 @@ +package cfg + +import ( + "encoding/json" + "errors" + "fmt" + "github.com/andygrunwald/go-jira" + "github.com/coreos/issue-sync/lib/models" + "strings" + "time" +) + +type FieldMapper interface { + MapFields(issue *models.ExtendedGithubIssue) jira.IssueFields + + GetFieldValue(jIssue *jira.Issue, fieldKey FieldKey) (interface{}, error) + + // getFieldIDs requests the metadata of every issue field in the JIRA + // project, and saves the IDs of the custom fields used by issue-sync. + GetFieldIDs(client jira.Client) (map[FieldKey]string, error) +} + +type DefaultFieldMapper struct { + Config *Config +} + +type JsonFieldMapper struct { + Config *Config +} + +func (m DefaultFieldMapper) GetFieldValue(jIssue *jira.Issue, fieldKey FieldKey) (interface{}, error) { + switch fieldKey { + case GitHubID: + fallthrough + case GitHubNumber: + return jIssue.Fields.Unknowns.Int(m.Config.GetCompleteFieldKey(fieldKey)) + default: + result, exists := jIssue.Fields.Unknowns.Value(m.Config.GetCompleteFieldKey(fieldKey)) + if !exists { + return nil, errors.New("Field not found") + } + return result, nil + } +} + +func (m DefaultFieldMapper) MapFields(issue *models.ExtendedGithubIssue) jira.IssueFields { + fields := jira.IssueFields{ + Type: jira.IssueType{ + Name: "Task", // TODO: Determine issue type + }, + Project: m.Config.GetProject(), + Summary: issue.GetTitle(), + Description: issue.GetBody(), + Unknowns: map[string]interface{}{}, + } + + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubID)] = issue.GetID() + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubNumber)] = issue.GetNumber() + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubStatus)] = issue.GetState() + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubReporter)] = issue.User.GetLogin() + + githubLabels := make([]string, len(issue.Labels)) + for i, v := range issue.Labels { + githubLabels[i] = *v.Name + } + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubLabels)] = strings.Join(githubLabels, ",") + + fields.Unknowns[m.Config.GetCompleteFieldKey(LastISUpdate)] = time.Now().Format(DateFormat) + + return fields +} + +func (m DefaultFieldMapper) GetFieldIDs(client jira.Client) (map[FieldKey]string, error) { + m.Config.log.Debug("Collecting field IDs.") + req, err := client.NewRequest("GET", "/rest/api/2/field", nil) + if err != nil { + return map[FieldKey]string{}, err + } + jFields := new([]jiraField) + + _, err = client.Do(req, jFields) + if err != nil { + return map[FieldKey]string{}, err + } + + fieldIDs := map[FieldKey]string{} + + for _, field := range *jFields { + switch field.Name { + case "GitHub ID": + fieldIDs[GitHubID] = fmt.Sprint(field.Schema.CustomID) + case "GitHub Number": + fieldIDs[GitHubNumber] = fmt.Sprint(field.Schema.CustomID) + case "GitHub Labels": + fieldIDs[GitHubLabels] = fmt.Sprint(field.Schema.CustomID) + case "GitHub Status": + fieldIDs[GitHubStatus] = fmt.Sprint(field.Schema.CustomID) + case "GitHub Reporter": + fieldIDs[GitHubReporter] = fmt.Sprint(field.Schema.CustomID) + case "Last Issue-Sync Update": + fieldIDs[LastISUpdate] = fmt.Sprint(field.Schema.CustomID) + } + } + + _, ok := fieldIDs[GitHubID] + if !ok { + return fieldIDs, errors.New("could not find ID of 'GitHub ID' custom field; check that it is named correctly") + } + + _, ok = fieldIDs[GitHubNumber] + if !ok { + return fieldIDs, errors.New("could not find ID of 'GitHub Number' custom field; check that it is named correctly") + } + + _, ok = fieldIDs[GitHubLabels] + if !ok { + return fieldIDs, 
errors.New("could not find ID of 'Github Labels' custom field; check that it is named correctly") + } + + _, ok = fieldIDs[GitHubStatus] + if !ok { + return fieldIDs, errors.New("could not find ID of 'Github Status' custom field; check that it is named correctly") + } + + _, ok = fieldIDs[GitHubReporter] + if !ok { + return fieldIDs, errors.New("could not find ID of 'Github Reporter' custom field; check that it is named correctly") + } + + _, ok = fieldIDs[LastISUpdate] + if !ok { + return fieldIDs, errors.New("could not find ID of 'Last Issue-Sync Update' custom field; check that it is named correctly") + } + + m.Config.log.Debug("All fields have been checked.") + + return fieldIDs, nil +} + +func (m JsonFieldMapper) GetFieldValue(jIssue *jira.Issue, fieldKey FieldKey) (interface{}, error) { + + var result interface{} + + jsonGithubData, err := jIssue.Fields.Unknowns.String(m.Config.GetCompleteFieldKey(GitHubIssueData)) + if err != nil { + return nil, err + } + + var parsedJson map[string]interface{} + err = json.Unmarshal([]byte(jsonGithubData), &parsedJson) + if err != nil { + return nil, err + } + + switch fieldKey { + case GitHubID: + result = int64(parsedJson["githubId"].(float64)) + case GitHubNumber: + result = int64(parsedJson["githubNumber"].(float64)) + case GitHubLabels: + result = parsedJson["githubLabels"] + case GitHubStatus: + result = parsedJson["githubStatus"] + case GitHubReporter: + result = parsedJson["githubReporter"] + case GitHubCommits: + result = parsedJson["githubCommits"] + if result == nil { + result = make([]interface{}, 0) + } + case LastISUpdate: + result = parsedJson["lastIssueSyncUpdate"] + } + return result, nil +} + +func (m JsonFieldMapper) MapFields(issue *models.ExtendedGithubIssue) jira.IssueFields { + fields := jira.IssueFields{ + Type: jira.IssueType{ + Name: "Task", // TODO: Determine issue type + }, + Project: m.Config.GetProject(), + Summary: issue.GetTitle(), + Description: issue.GetBody(), + Unknowns: map[string]interface{}{}, + } + + githubLabels := make([]string, len(issue.Labels)) + for i, v := range issue.Labels { + githubLabels[i] = *v.Name + } + + data := map[string]interface{}{ + "githubId": issue.GetID(), + "githubNumber": issue.GetNumber(), + "githubStatus":issue.GetState(), + "githubReporter": issue.User.GetLogin(), + "githubLabels": strings.Join(githubLabels, ","), + "githubCommits": issue.CommitIds, + "lastIssueSyncUpdate": time.Now().Format(DateFormat), + } + + j, _ := json.Marshal(data) + fields.Unknowns[m.Config.GetCompleteFieldKey(GitHubIssueData)] = string(j) + + return fields +} + +func (m JsonFieldMapper) GetFieldIDs(client jira.Client) (map[FieldKey]string, error) { + m.Config.log.Debug("Collecting field IDs.") + req, err := client.NewRequest("GET", "/rest/api/2/field", nil) + if err != nil { + return map[FieldKey]string{}, err + } + jFields := new([]jiraField) + + _, err = client.Do(req, jFields) + if err != nil { + return map[FieldKey]string{}, err + } + + fieldIDs := map[FieldKey]string{} + + for _, field := range *jFields { + switch field.Name { + case "GitHub Issue Data": + fieldIDs[GitHubIssueData] = fmt.Sprint(field.Schema.CustomID) + } + } + + _, ok := fieldIDs[GitHubIssueData] + if !ok { + return fieldIDs, errors.New("could not find ID of 'GitHub Issue Data' custom field; check that it is named correctly") + } + + m.Config.log.Debug("All fields have been checked.") + + return fieldIDs, nil +} \ No newline at end of file diff --git a/cmd/root.go b/cmd/root.go index 9d7a29e..c612c42 100644 --- a/cmd/root.go +++ 
b/cmd/root.go @@ -1,12 +1,13 @@ package cmd import ( + "github.com/coreos/issue-sync/lib/issuesyncgithub" + "github.com/coreos/issue-sync/lib/issuesyncjira" "time" "github.com/Sirupsen/logrus" "github.com/coreos/issue-sync/cfg" "github.com/coreos/issue-sync/lib" - "github.com/coreos/issue-sync/lib/clients" "github.com/spf13/cobra" ) @@ -32,11 +33,11 @@ var RootCmd = &cobra.Command{ log := config.GetLogger() - jiraClient, err := clients.NewJIRAClient(&config) + jiraClient, err := issuesyncjira.NewClient(&config) if err != nil { return err } - ghClient, err := clients.NewGitHubClient(config) + ghClient, err := issuesyncgithub.NewClient(config) if err != nil { return err } @@ -45,7 +46,7 @@ var RootCmd = &cobra.Command{ if err := lib.CompareIssues(config, ghClient, jiraClient); err != nil { log.Error(err) } - if !config.IsDryRun() { + if !config.IsDryRun() && !config.FullSyncAlways() { if err := config.SaveConfig(); err != nil { log.Error(err) } diff --git a/glide.lock b/glide.lock index 1e57af6..36a96e9 100644 --- a/glide.lock +++ b/glide.lock @@ -1,8 +1,8 @@ hash: ca3d2b839b59106eb3850fc211958a5dd057e096e01aae7dbec995e4505a305b -updated: 2017-08-11T11:44:55.363517889-07:00 +updated: 2019-04-01T15:33:35.969889-07:00 imports: - name: github.com/andygrunwald/go-jira - version: aaa2d02b3ad37877e523eab9978638b107bdc821 + version: 53c468062dcbcce3d44d6c031e3cb31292ffcc31 - name: github.com/cenkalti/backoff version: 32cd0c5b3aef12c76ed64aaf678f6c79736be7dc - name: github.com/coreos/pkg @@ -10,26 +10,27 @@ imports: - name: github.com/dghubble/oauth1 version: 70562a5920ad9b6ff03ef697c0f90ae569abbd2b - name: github.com/fatih/structs - version: 7e5a8eef611ee84dd359503f3969f80df4c50723 + version: 878a968ab22548362a09bdb3322f98b00f470d46 - name: github.com/fsnotify/fsnotify - version: 4da3e2cfbabc9f751898f250b49f2439785783a1 + version: 1485a34d5d5723fea214f5710708e19a831720e4 - name: github.com/golang/protobuf - version: 0a4f71a498b7c4812f64969510bcb4eca251e33a + version: d3c38a4eb4970272b87a425ae00ccc4548e2f9bb subpackages: - proto - name: github.com/google/go-github - version: 00874eae3fbfc6750bfff364b1897101684d2f30 + version: 14c49e15859cee6aa620bfac2dad50899379c20e subpackages: - github - name: github.com/google/go-querystring - version: 53e6ce116135b80d037921a7fdd5138cf32d7a8a + version: c8c88dbee036db4e4808d1f2ec8c2e15e11c3f80 subpackages: - query - name: github.com/hashicorp/hcl - version: 392dba7d905ed5d04a5794ba89f558b27e2ba1ca + version: 65a6292f0157eff210d03ed1bf6c59b190b8b906 subpackages: - hcl/ast - hcl/parser + - hcl/printer - hcl/scanner - hcl/strconv - hcl/token @@ -39,67 +40,67 @@ imports: - name: github.com/inconshreveable/mousetrap version: 76626ae9c91c4f2a10f34cad8ce83ea42c93bb75 - name: github.com/magiconair/properties - version: be5ece7dd465ab0765a9682137865547526d1dfb + version: 7757cc9fdb852f7579b24170bcacda2c7471bb6a - name: github.com/mitchellh/mapstructure - version: d0303fe809921458f417bcf828397a65db30a7e4 + version: 3536a929edddb9a5b34bd6861dc4a9647cb459fe - name: github.com/pelletier/go-toml - version: 69d355db5304c0f7f809a2edc054553e7142f016 + version: f9070d3b400d7bf7c9b4e4e05916f92fd0b79e0c +- name: github.com/pkg/errors + version: 27936f6d90f9c8e1145f11ed52ffffbfdb9e0af7 - name: github.com/Sirupsen/logrus version: 202f25545ea4cf9b191ff7f846df5d87c9382c2b - name: github.com/spf13/afero - version: 9be650865eab0c12963d8753212f4f9c66cdcf12 + version: 588a75ec4f32903aa5e39a2619ba6a4631e28424 subpackages: - mem - name: github.com/spf13/cast - version: 
acbeb36b902d72a7a4c18e8f3241075e7ab763e4 + version: 8c9545af88b134710ab1cd196795e7f2388358d7 - name: github.com/spf13/cobra - version: d994347edadc56d6a7f863775fb6887606685ae6 + version: 67fc4837d267bc9bfd6e47f77783fcc3dffc68de - name: github.com/spf13/jwalterweatherman - version: 0efa5202c04663c757d84f90f5219c1250baf94f + version: 94f6ae3ed3bceceafa716478c5fbf8d29ca601a1 - name: github.com/spf13/pflag - version: e57e3eeb33f795204c1ca35f56c44f83227c6e66 + version: 24fa6976df40757dce6aea913e7b81ade90530e1 - name: github.com/spf13/viper - version: c1de95864d73a5465492829d7cb2dd422b19ac96 + version: 9e56dacc08fbbf8c9ee2dbc717553c758ce42bc9 - name: github.com/trivago/tgo - version: 8a6fc6a04f07d8623086322a7b8d805ddf7263eb + version: f8e8d39e118a11f4a45001822c4e806dedec7b50 subpackages: - tcontainer - treflect - name: golang.org/x/crypto - version: b176d7def5d71bdd214203491f89843ed217f420 + version: a5d413f7728c81fb97d96a2b722368945f651e78 subpackages: - ssh/terminal - name: golang.org/x/net - version: b3756b4b77d7b13260a0a2ec658753cf48922eac + version: 74de082e2cca95839e88aa0aeee5aadf6ce7710f subpackages: - context - context/ctxhttp - name: golang.org/x/oauth2 - version: cce311a261e6fcf29de72ca96827bdb0b7d9c9e6 + version: c85d3e98c914e3a33234ad863dcbff5dbc425bb8 subpackages: - internal - name: golang.org/x/sys - version: 4cd6d1a821c7175768725b55ca82f14683a29ea4 + version: 9eb1bfa1ce65ae8a6ff3114b0aaf9a41a6cf3560 subpackages: - unix - windows - name: golang.org/x/text - version: 836efe42bb4aa16aaa17b9c155d8813d336ed720 + version: e3703dcdd614d2d7488fff034c75c551ea25da95 subpackages: - transform - unicode/norm - name: google.golang.org/appengine - version: ad2570cd3913654e00c5f0183b39d2f998e54046 + version: 54a98f90d1c46b7731eb8fb305d2a321c30ef610 subpackages: - internal - - internal/app_identity - internal/base - internal/datastore - internal/log - - internal/modules - internal/remote_api - internal/urlfetch - urlfetch - name: gopkg.in/yaml.v2 - version: 3b4ad1db5b2a649883ff3782f5f9f6fb52be71af + version: 51d6538a90f86fe93ac480b35f37b2be17fef232 testImports: [] diff --git a/issue-sync b/issue-sync new file mode 100755 index 0000000..9b8cfbb Binary files /dev/null and b/issue-sync differ diff --git a/lib/clients/github.go b/lib/clients/github.go deleted file mode 100644 index e3178c2..0000000 --- a/lib/clients/github.go +++ /dev/null @@ -1,218 +0,0 @@ -package clients - -import ( - "context" - "fmt" - - "time" - - "github.com/cenkalti/backoff" - "github.com/coreos/issue-sync/cfg" - "github.com/google/go-github/github" - "golang.org/x/oauth2" -) - -// GitHubClient is a wrapper around the GitHub API Client library we -// use. It allows us to swap in other implementations, such as a dry run -// clients, or mock clients for testing. -type GitHubClient interface { - ListIssues() ([]github.Issue, error) - ListComments(issue github.Issue) ([]*github.IssueComment, error) - GetUser(login string) (github.User, error) - GetRateLimits() (github.RateLimits, error) -} - -// realGHClient is a standard GitHub clients, that actually makes all of the -// requests against the GitHub REST API. It is the canonical implementation -// of GitHubClient. -type realGHClient struct { - config cfg.Config - client github.Client -} - -// ListIssues returns the list of GitHub issues since the last run of the tool. 
-func (g realGHClient) ListIssues() ([]github.Issue, error) { - log := g.config.GetLogger() - - ctx := context.Background() - - user, repo := g.config.GetRepo() - - // Set it so that it will run the loop once, and it'll be updated in the loop. - pages := 1 - var issues []github.Issue - - for page := 1; page <= pages; page++ { - is, res, err := g.request(func() (interface{}, *github.Response, error) { - return g.client.Issues.ListByRepo(ctx, user, repo, &github.IssueListByRepoOptions{ - Since: g.config.GetSinceParam(), - State: "all", - Sort: "created", - Direction: "asc", - ListOptions: github.ListOptions{ - Page: page, - PerPage: 100, - }, - }) - }) - if err != nil { - return nil, err - } - issuePointers, ok := is.([]*github.Issue) - if !ok { - log.Errorf("Get GitHub issues did not return issues! Got: %v", is) - return nil, fmt.Errorf("get GitHub issues failed: expected []*github.Issue; got %T", is) - } - - var issuePage []github.Issue - for _, v := range issuePointers { - // If PullRequestLinks is not nil, it's a Pull Request - if v.PullRequestLinks == nil { - issuePage = append(issuePage, *v) - } - } - - pages = res.LastPage - issues = append(issues, issuePage...) - } - - log.Debug("Collected all GitHub issues") - - return issues, nil -} - -// ListComments returns the list of all comments on a GitHub issue in -// ascending order of creation. -func (g realGHClient) ListComments(issue github.Issue) ([]*github.IssueComment, error) { - log := g.config.GetLogger() - - ctx := context.Background() - user, repo := g.config.GetRepo() - c, _, err := g.request(func() (interface{}, *github.Response, error) { - return g.client.Issues.ListComments(ctx, user, repo, issue.GetNumber(), &github.IssueListCommentsOptions{ - Sort: "created", - Direction: "asc", - }) - }) - if err != nil { - log.Errorf("Error retrieving GitHub comments for issue #%d. Error: %v.", issue.GetNumber(), err) - return nil, err - } - comments, ok := c.([]*github.IssueComment) - if !ok { - log.Errorf("Get GitHub comments did not return comments! Got: %v", c) - return nil, fmt.Errorf("Get GitHub comments failed: expected []*github.IssueComment; got %T", c) - } - - return comments, nil -} - -// GetUser returns a GitHub user from its login. -func (g realGHClient) GetUser(login string) (github.User, error) { - log := g.config.GetLogger() - - u, _, err := g.request(func() (interface{}, *github.Response, error) { - return g.client.Users.Get(context.Background(), login) - }) - - if err != nil { - log.Errorf("Error retrieving GitHub user %s. Error: %v", login, err) - } - - user, ok := u.(*github.User) - if !ok { - log.Errorf("Get GitHub user did not return user! Got: %v", u) - return github.User{}, fmt.Errorf("Get GitHub user failed: expected *github.User; got %T", u) - } - - return *user, nil -} - -// GetRateLimits returns the current rate limits on the GitHub API. This is a -// simple and lightweight request that can also be used simply for testing the API. -func (g realGHClient) GetRateLimits() (github.RateLimits, error) { - log := g.config.GetLogger() - - ctx := context.Background() - - rl, _, err := g.request(func() (interface{}, *github.Response, error) { - return g.client.RateLimits(ctx) - }) - if err != nil { - log.Errorf("Error connecting to GitHub; check your token. Error: %v", err) - return github.RateLimits{}, err - } - rate, ok := rl.(*github.RateLimits) - if !ok { - log.Errorf("Get GitHub rate limits did not return rate limits! 
Got: %v", rl) - return github.RateLimits{}, fmt.Errorf("Get GitHub rate limits failed: expected *github.RateLimits; got %T", rl) - } - - return *rate, nil -} - -const retryBackoffRoundRatio = time.Millisecond / time.Nanosecond - -// request takes an API function from the GitHub library -// and calls it with exponential backoff. If the function succeeds, it -// returns the expected value and the GitHub API response, as well as a nil -// error. If it continues to fail until a maximum time is reached, it returns -// a nil result as well as the returned HTTP response and a timeout error. -func (g realGHClient) request(f func() (interface{}, *github.Response, error)) (interface{}, *github.Response, error) { - log := g.config.GetLogger() - - var ret interface{} - var res *github.Response - - op := func() error { - var err error - ret, res, err = f() - return err - } - - b := backoff.NewExponentialBackOff() - b.MaxElapsedTime = g.config.GetTimeout() - - backoffErr := backoff.RetryNotify(op, b, func(err error, duration time.Duration) { - // Round to a whole number of milliseconds - duration /= retryBackoffRoundRatio // Convert nanoseconds to milliseconds - duration *= retryBackoffRoundRatio // Convert back so it appears correct - - log.Errorf("Error performing operation; retrying in %v: %v", duration, err) - }) - - return ret, res, backoffErr -} - -// NewGitHubClient creates a GitHubClient and returns it; which -// implementation it uses depends on the configuration of this -// run. For example, a dry-run clients may be created which does -// not make any requests that would change anything on the server, -// but instead simply prints out the actions that it's asked to take. -func NewGitHubClient(config cfg.Config) (GitHubClient, error) { - var ret GitHubClient - - log := config.GetLogger() - - ctx := context.Background() - ts := oauth2.StaticTokenSource( - &oauth2.Token{AccessToken: config.GetConfigString("github-token")}, - ) - tc := oauth2.NewClient(ctx, ts) - - client := github.NewClient(tc) - - ret = realGHClient{ - config: config, - client: *client, - } - - // Make a request so we can check that we can connect fine. - _, err := ret.GetRateLimits() - if err != nil { - return realGHClient{}, err - } - log.Debug("Successfully connected to GitHub.") - - return ret, nil -} diff --git a/lib/clients/jira.go b/lib/clients/jira.go deleted file mode 100644 index 3c5c130..0000000 --- a/lib/clients/jira.go +++ /dev/null @@ -1,634 +0,0 @@ -package clients - -import ( - "errors" - "fmt" - "io/ioutil" - "net/http" - "regexp" - "strings" - - "time" - - "github.com/andygrunwald/go-jira" - "github.com/cenkalti/backoff" - "github.com/coreos/issue-sync/cfg" - "github.com/google/go-github/github" -) - -// commentDateFormat is the format used in the headers of JIRA comments. -const commentDateFormat = "15:04 PM, January 2 2006" - -// maxJQLIssueLength is the maximum number of GitHub issues we can -// use before we need to stop using JQL and filter issues ourself. -const maxJQLIssueLength = 100 - -// getErrorBody reads the HTTP response body of a JIRA API response, -// logs it as an error, and returns an error object with the contents -// of the body. If an error occurs during reading, that error is -// instead printed and returned. This function closes the body for -// further reading. 
-func getErrorBody(config cfg.Config, res *jira.Response) error { - log := config.GetLogger() - defer res.Body.Close() - body, err := ioutil.ReadAll(res.Body) - if err != nil { - log.Errorf("Error occured trying to read error body: %v", err) - return err - } - log.Debugf("Error body: %s", body) - return errors.New(string(body)) -} - -// JIRAClient is a wrapper around the JIRA API clients library we -// use. It allows us to hide implementation details such as backoff -// as well as swap in other implementations, such as for dry run -// or test mocking. -type JIRAClient interface { - ListIssues(ids []int) ([]jira.Issue, error) - GetIssue(key string) (jira.Issue, error) - CreateIssue(issue jira.Issue) (jira.Issue, error) - UpdateIssue(issue jira.Issue) (jira.Issue, error) - CreateComment(issue jira.Issue, comment github.IssueComment, github GitHubClient) (jira.Comment, error) - UpdateComment(issue jira.Issue, id string, comment github.IssueComment, github GitHubClient) (jira.Comment, error) -} - -// NewJIRAClient creates a new JIRAClient and configures it with -// the config object provided. The type of clients created depends -// on the configuration; currently, it creates either a standard -// clients, or a dry-run clients. -func NewJIRAClient(config *cfg.Config) (JIRAClient, error) { - log := config.GetLogger() - - var oauth *http.Client - var err error - if !config.IsBasicAuth() { - oauth, err = newJIRAHTTPClient(*config) - if err != nil { - log.Errorf("Error getting OAuth config: %v", err) - return dryrunJIRAClient{}, err - } - } - - var j JIRAClient - - client, err := jira.NewClient(oauth, config.GetConfigString("jira-uri")) - if err != nil { - log.Errorf("Error initializing JIRA clients; check your base URI. Error: %v", err) - return dryrunJIRAClient{}, err - } - - if config.IsBasicAuth() { - client.Authentication.SetBasicAuth(config.GetConfigString("jira-user"), config.GetConfigString("jira-pass")) - } - - log.Debug("JIRA clients initialized") - - config.LoadJIRAConfig(*client) - - if config.IsDryRun() { - j = dryrunJIRAClient{ - config: *config, - client: *client, - } - } else { - j = realJIRAClient{ - config: *config, - client: *client, - } - } - - return j, nil -} - -// realJIRAClient is a standard JIRA clients, which actually makes -// of the requests against the JIRA REST API. It is the canonical -// implementation of JIRAClient. -type realJIRAClient struct { - config cfg.Config - client jira.Client -} - -// ListIssues returns a list of JIRA issues on the configured project which -// have GitHub IDs in the provided list. `ids` should be a comma-separated -// list of GitHub IDs. -func (j realJIRAClient) ListIssues(ids []int) ([]jira.Issue, error) { - log := j.config.GetLogger() - - idStrs := make([]string, len(ids)) - for i, v := range ids { - idStrs[i] = fmt.Sprint(v) - } - - var jql string - // If the list of IDs is too long, we get a 414 Request-URI Too Large, so in that case, - // we'll need to do the filtering ourselves. 
- if len(ids) < maxJQLIssueLength { - jql = fmt.Sprintf("project='%s' AND cf[%s] in (%s)", - j.config.GetProjectKey(), j.config.GetFieldID(cfg.GitHubID), strings.Join(idStrs, ",")) - } else { - jql = fmt.Sprintf("project='%s'", j.config.GetProjectKey()) - } - - ji, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Search(jql, nil) - }) - if err != nil { - log.Errorf("Error retrieving JIRA issues: %v", err) - return nil, getErrorBody(j.config, res) - } - jiraIssues, ok := ji.([]jira.Issue) - if !ok { - log.Errorf("Get JIRA issues did not return issues! Got: %v", ji) - return nil, fmt.Errorf("get JIRA issues failed: expected []jira.Issue; got %T", ji) - } - - var issues []jira.Issue - if len(ids) < maxJQLIssueLength { - // The issues were already filtered by our JQL, so use as is - issues = jiraIssues - } else { - // Filter only issues which have a defined GitHub ID in the list of IDs - for _, v := range jiraIssues { - if id, err := v.Fields.Unknowns.Int(j.config.GetFieldKey(cfg.GitHubID)); err == nil { - for _, idOpt := range ids { - if id == int64(idOpt) { - issues = append(issues, v) - break - } - } - } - } - } - - return issues, nil -} - -// GetIssue returns a single JIRA issue within the configured project -// according to the issue key (e.g. "PROJ-13"). -func (j realJIRAClient) GetIssue(key string) (jira.Issue, error) { - log := j.config.GetLogger() - - i, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Get(key, nil) - }) - if err != nil { - log.Errorf("Error retrieving JIRA issue: %v", err) - return jira.Issue{}, getErrorBody(j.config, res) - } - issue, ok := i.(*jira.Issue) - if !ok { - log.Errorf("Get JIRA issue did not return issue! Got %v", i) - return jira.Issue{}, fmt.Errorf("Get JIRA issue failed: expected *jira.Issue; got %T", i) - } - - return *issue, nil -} - -// CreateIssue creates a new JIRA issue according to the fields provided in -// the provided issue object. It returns the created issue, with all the -// fields provided (including e.g. ID and Key). -func (j realJIRAClient) CreateIssue(issue jira.Issue) (jira.Issue, error) { - log := j.config.GetLogger() - - i, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Create(&issue) - }) - if err != nil { - log.Errorf("Error creating JIRA issue: %v", err) - return jira.Issue{}, getErrorBody(j.config, res) - } - is, ok := i.(*jira.Issue) - if !ok { - log.Errorf("Create JIRA issue did not return issue! Got: %v", i) - return jira.Issue{}, fmt.Errorf("Create JIRA issue failed: expected *jira.Issue; got %T", i) - } - - return *is, nil -} - -// UpdateIssue updates a given issue (identified by the Key field of the provided -// issue object) with the fields on the provided issue. It returns the updated -// issue as it exists on JIRA. -func (j realJIRAClient) UpdateIssue(issue jira.Issue) (jira.Issue, error) { - log := j.config.GetLogger() - - i, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Update(&issue) - }) - if err != nil { - log.Errorf("Error updating JIRA issue %s: %v", issue.Key, err) - return jira.Issue{}, getErrorBody(j.config, res) - } - is, ok := i.(*jira.Issue) - if !ok { - log.Errorf("Update JIRA issue did not return issue! Got: %v", i) - return jira.Issue{}, fmt.Errorf("Update JIRA issue failed: expected *jira.Issue; got %T", i) - } - - return *is, nil -} - -// maxBodyLength is the maximum length of a JIRA comment body, which is currently -// 2^15-1. 
-const maxBodyLength = 1 << 15 - -// CreateComment adds a comment to the provided JIRA issue using the fields from -// the provided GitHub comment. It then returns the created comment. -func (j realJIRAClient) CreateComment(issue jira.Issue, comment github.IssueComment, github GitHubClient) (jira.Comment, error) { - log := j.config.GetLogger() - - user, err := github.GetUser(comment.User.GetLogin()) - if err != nil { - return jira.Comment{}, err - } - - body := fmt.Sprintf("Comment [(ID %d)|%s]", comment.GetID(), comment.GetHTMLURL()) - body = fmt.Sprintf("%s from GitHub user [%s|%s]", body, user.GetLogin(), user.GetHTMLURL()) - if user.GetName() != "" { - body = fmt.Sprintf("%s (%s)", body, user.GetName()) - } - body = fmt.Sprintf( - "%s at %s:\n\n%s", - body, - comment.CreatedAt.Format(commentDateFormat), - comment.GetBody(), - ) - - if len(body) > maxBodyLength { - body = body[:maxBodyLength] - } - - jComment := jira.Comment{ - Body: body, - } - - com, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.AddComment(issue.ID, &jComment) - }) - if err != nil { - log.Errorf("Error creating JIRA comment on issue %s. Error: %v", issue.Key, err) - return jira.Comment{}, getErrorBody(j.config, res) - } - co, ok := com.(*jira.Comment) - if !ok { - log.Errorf("Create JIRA comment did not return comment! Got: %v", com) - return jira.Comment{}, fmt.Errorf("Create JIRA comment failed: expected *jira.Comment; got %T", com) - } - return *co, nil -} - -// UpdateComment updates a comment (identified by the `id` parameter) on a given -// JIRA with a new body from the fields of the given GitHub comment. It returns -// the updated comment. -func (j realJIRAClient) UpdateComment(issue jira.Issue, id string, comment github.IssueComment, github GitHubClient) (jira.Comment, error) { - log := j.config.GetLogger() - - user, err := github.GetUser(comment.User.GetLogin()) - if err != nil { - return jira.Comment{}, err - } - - body := fmt.Sprintf("Comment [(ID %d)|%s]", comment.GetID(), comment.GetHTMLURL()) - body = fmt.Sprintf("%s from GitHub user [%s|%s]", body, user.GetLogin(), user.GetHTMLURL()) - if user.GetName() != "" { - body = fmt.Sprintf("%s (%s)", body, user.GetName()) - } - body = fmt.Sprintf( - "%s at %s:\n\n%s", - body, - comment.CreatedAt.Format(commentDateFormat), - comment.GetBody(), - ) - - if len(body) > maxBodyLength { - body = body[:maxBodyLength] - } - - // As it is, the JIRA API we're using doesn't have any way to update comments natively. - // So, we have to build the request ourselves. - request := struct { - Body string `json:"body"` - }{ - Body: body, - } - - req, err := j.client.NewRequest("PUT", fmt.Sprintf("rest/api/2/issue/%s/comment/%s", issue.Key, id), request) - if err != nil { - log.Errorf("Error creating comment update request: %s", err) - return jira.Comment{}, err - } - - com, res, err := j.request(func() (interface{}, *jira.Response, error) { - res, err := j.client.Do(req, nil) - return nil, res, err - }) - if err != nil { - log.Errorf("Error updating comment: %v", err) - return jira.Comment{}, getErrorBody(j.config, res) - } - co, ok := com.(*jira.Comment) - if !ok { - log.Errorf("Update JIRA comment did not return comment! Got: %v", com) - return jira.Comment{}, fmt.Errorf("Update JIRA comment failed: expected *jira.Comment; got %T", com) - } - return *co, nil -} - -// request takes an API function from the JIRA library -// and calls it with exponential backoff. 
If the function succeeds, it -// returns the expected value and the JIRA API response, as well as a nil -// error. If it continues to fail until a maximum time is reached, it returns -// a nil result as well as the returned HTTP response and a timeout error. -func (j realJIRAClient) request(f func() (interface{}, *jira.Response, error)) (interface{}, *jira.Response, error) { - log := j.config.GetLogger() - - var ret interface{} - var res *jira.Response - - op := func() error { - var err error - ret, res, err = f() - return err - } - - b := backoff.NewExponentialBackOff() - b.MaxElapsedTime = j.config.GetTimeout() - - backoffErr := backoff.RetryNotify(op, b, func(err error, duration time.Duration) { - // Round to a whole number of milliseconds - duration /= retryBackoffRoundRatio // Convert nanoseconds to milliseconds - duration *= retryBackoffRoundRatio // Convert back so it appears correct - - log.Errorf("Error performing operation; retrying in %v: %v", duration, err) - }) - - return ret, res, backoffErr -} - -// dryrunJIRAClient is an implementation of JIRAClient which performs all -// GET requests the same as the realJIRAClient, but does not perform any -// unsafe requests which may modify server data, instead printing out the -// actions it is asked to perform without making the request. -type dryrunJIRAClient struct { - config cfg.Config - client jira.Client -} - -// newlineReplaceRegex is a regex to match both "\r\n" and just "\n" newline styles, -// in order to allow us to escape both sequences cleanly in the output of a dry run. -var newlineReplaceRegex = regexp.MustCompile("\r?\n") - -// truncate is a utility function to replace all the newlines in -// the string with the characters "\n", then truncate it to no -// more than 50 characters -func truncate(s string, length int) string { - if s == "" { - return "empty" - } - - s = newlineReplaceRegex.ReplaceAllString(s, "\\n") - if len(s) <= length { - return s - } - return fmt.Sprintf("%s...", s[0:length]) -} - -// ListIssues returns a list of JIRA issues on the configured project which -// have GitHub IDs in the provided list. `ids` should be a comma-separated -// list of GitHub IDs. -// -// This function is identical to that in realJIRAClient. -func (j dryrunJIRAClient) ListIssues(ids []int) ([]jira.Issue, error) { - log := j.config.GetLogger() - - idStrs := make([]string, len(ids)) - for i, v := range ids { - idStrs[i] = fmt.Sprint(v) - } - - var jql string - // If the list of IDs is too long, we get a 414 Request-URI Too Large, so in that case, - // we'll need to do the filtering ourselves. - if len(ids) < maxJQLIssueLength { - jql = fmt.Sprintf("project='%s' AND cf[%s] in (%s)", - j.config.GetProjectKey(), j.config.GetFieldID(cfg.GitHubID), strings.Join(idStrs, ",")) - } else { - jql = fmt.Sprintf("project='%s'", j.config.GetProjectKey()) - } - - ji, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Search(jql, nil) - }) - if err != nil { - log.Errorf("Error retrieving JIRA issues: %v", err) - return nil, getErrorBody(j.config, res) - } - jiraIssues, ok := ji.([]jira.Issue) - if !ok { - log.Errorf("Get JIRA issues did not return issues! 
Got: %v", ji) - return nil, fmt.Errorf("get JIRA issues failed: expected []jira.Issue; got %T", ji) - } - - var issues []jira.Issue - if len(ids) < maxJQLIssueLength { - // The issues were already filtered by our JQL, so use as is - issues = jiraIssues - } else { - // Filter only issues which have a defined GitHub ID in the list of IDs - for _, v := range jiraIssues { - if id, err := v.Fields.Unknowns.Int(j.config.GetFieldKey(cfg.GitHubID)); err == nil { - for _, idOpt := range ids { - if id == int64(idOpt) { - issues = append(issues, v) - break - } - } - } - } - } - - return issues, nil -} - -// GetIssue returns a single JIRA issue within the configured project -// according to the issue key (e.g. "PROJ-13"). -// -// This function is identical to that in realJIRAClient. -func (j dryrunJIRAClient) GetIssue(key string) (jira.Issue, error) { - log := j.config.GetLogger() - - i, res, err := j.request(func() (interface{}, *jira.Response, error) { - return j.client.Issue.Get(key, nil) - }) - if err != nil { - log.Errorf("Error retrieving JIRA issue: %v", err) - return jira.Issue{}, getErrorBody(j.config, res) - } - issue, ok := i.(*jira.Issue) - if !ok { - log.Errorf("Get JIRA issue did not return issue! Got %v", i) - return jira.Issue{}, fmt.Errorf("Get JIRA issue failed: expected *jira.Issue; got %T", i) - } - - return *issue, nil -} - -// CreateIssue prints out the fields that would be set on a new issue were -// it to be created according to the provided issue object. It returns the -// provided issue object as-is. -func (j dryrunJIRAClient) CreateIssue(issue jira.Issue) (jira.Issue, error) { - log := j.config.GetLogger() - - fields := issue.Fields - - log.Info("") - log.Info("Create new JIRA issue:") - log.Infof(" Summary: %s", fields.Summary) - log.Infof(" Description: %s", truncate(fields.Description, 50)) - log.Infof(" GitHub ID: %d", fields.Unknowns[j.config.GetFieldKey(cfg.GitHubID)]) - log.Infof(" GitHub Number: %d", fields.Unknowns[j.config.GetFieldKey(cfg.GitHubNumber)]) - log.Infof(" Labels: %s", fields.Unknowns[j.config.GetFieldKey(cfg.GitHubLabels)]) - log.Infof(" State: %s", fields.Unknowns[j.config.GetFieldKey(cfg.GitHubStatus)]) - log.Infof(" Reporter: %s", fields.Unknowns[j.config.GetFieldKey(cfg.GitHubReporter)]) - log.Info("") - - return issue, nil -} - -// UpdateIssue prints out the fields that would be set on a JIRA issue -// (identified by issue.Key) were it to be updated according to the issue -// object. It then returns the provided issue object as-is. -func (j dryrunJIRAClient) UpdateIssue(issue jira.Issue) (jira.Issue, error) { - log := j.config.GetLogger() - - fields := issue.Fields - - log.Info("") - log.Infof("Update JIRA issue %s:", issue.Key) - log.Infof(" Summary: %s", fields.Summary) - log.Infof(" Description: %s", truncate(fields.Description, 50)) - key := j.config.GetFieldKey(cfg.GitHubLabels) - if labels, err := fields.Unknowns.String(key); err == nil { - log.Infof(" Labels: %s", labels) - } - key = j.config.GetFieldKey(cfg.GitHubStatus) - if state, err := fields.Unknowns.String(key); err == nil { - log.Infof(" State: %s", state) - } - log.Info("") - - return issue, nil -} - -// CreateComment prints the body that would be set on a new comment if it were -// to be created according to the fields of the provided GitHub comment. It then -// returns a comment object containing the body that would be used. 
-func (j dryrunJIRAClient) CreateComment(issue jira.Issue, comment github.IssueComment, github GitHubClient) (jira.Comment, error) { - log := j.config.GetLogger() - - user, err := github.GetUser(comment.User.GetLogin()) - if err != nil { - return jira.Comment{}, err - } - - body := fmt.Sprintf("Comment (ID %d) from GitHub user %s", comment.GetID(), user.GetLogin()) - if user.GetName() != "" { - body = fmt.Sprintf("%s (%s)", body, user.GetName()) - } - body = fmt.Sprintf( - "%s at %s:\n\n%s", - body, - comment.CreatedAt.Format(commentDateFormat), - comment.GetBody(), - ) - - log.Info("") - log.Infof("Create comment on JIRA issue %s:", issue.Key) - log.Infof(" GitHub ID: %d", comment.GetID()) - if user.GetName() != "" { - log.Infof(" User: %s (%s)", user.GetLogin(), user.GetName()) - } else { - log.Infof(" User: %s", user.GetLogin()) - } - log.Infof(" Posted at: %s", comment.CreatedAt.Format(commentDateFormat)) - log.Infof(" Body: %s", truncate(comment.GetBody(), 100)) - log.Info("") - - return jira.Comment{ - Body: body, - }, nil -} - -// UpdateComment prints the body that would be set on a comment were it to be -// updated according to the provided GitHub comment. It then returns a comment -// object containing the body that would be used. -func (j dryrunJIRAClient) UpdateComment(issue jira.Issue, id string, comment github.IssueComment, github GitHubClient) (jira.Comment, error) { - log := j.config.GetLogger() - - user, err := github.GetUser(comment.User.GetLogin()) - if err != nil { - return jira.Comment{}, err - } - - body := fmt.Sprintf("Comment (ID %d) from GitHub user %s", comment.GetID(), user.GetLogin()) - if user.GetName() != "" { - body = fmt.Sprintf("%s (%s)", body, user.GetName()) - } - body = fmt.Sprintf( - "%s at %s:\n\n%s", - body, - comment.CreatedAt.Format(commentDateFormat), - comment.GetBody(), - ) - - log.Info("") - log.Infof("Update JIRA comment %s on issue %s:", id, issue.Key) - log.Infof(" GitHub ID: %d", comment.GetID()) - if user.GetName() != "" { - log.Infof(" User: %s (%s)", user.GetLogin(), user.GetName()) - } else { - log.Infof(" User: %s", user.GetLogin()) - } - log.Infof(" Posted at: %s", comment.CreatedAt.Format(commentDateFormat)) - log.Infof(" Body: %s", truncate(comment.GetBody(), 100)) - log.Info("") - - return jira.Comment{ - ID: id, - Body: body, - }, nil -} - -// request takes an API function from the JIRA library -// and calls it with exponential backoff. If the function succeeds, it -// returns the expected value and the JIRA API response, as well as a nil -// error. If it continues to fail until a maximum time is reached, it returns -// a nil result as well as the returned HTTP response and a timeout error. -// -// This function is identical to that in realJIRAClient. 
-func (j dryrunJIRAClient) request(f func() (interface{}, *jira.Response, error)) (interface{}, *jira.Response, error) { - log := j.config.GetLogger() - - var ret interface{} - var res *jira.Response - - op := func() error { - var err error - ret, res, err = f() - return err - } - - b := backoff.NewExponentialBackOff() - b.MaxElapsedTime = j.config.GetTimeout() - - backoffErr := backoff.RetryNotify(op, b, func(err error, duration time.Duration) { - // Round to a whole number of milliseconds - duration /= retryBackoffRoundRatio // Convert nanoseconds to milliseconds - duration *= retryBackoffRoundRatio // Convert back so it appears correct - - log.Errorf("Error performing operation; retrying in %v: %v", duration, err) - }) - - return ret, res, backoffErr -} diff --git a/lib/comments.go b/lib/comments.go index 1b2ae6a..21a9c95 100644 --- a/lib/comments.go +++ b/lib/comments.go @@ -1,19 +1,20 @@ package lib import ( + "github.com/coreos/issue-sync/lib/issuesyncgithub" + "github.com/coreos/issue-sync/lib/issuesyncjira" "regexp" "strconv" "github.com/andygrunwald/go-jira" "github.com/coreos/issue-sync/cfg" - "github.com/coreos/issue-sync/lib/clients" "github.com/google/go-github/github" ) // jCommentRegex matches a generated JIRA comment. It has matching groups to retrieve the // GitHub Comment ID (\1), the GitHub username (\2), the GitHub real name (\3, if it exists), // the time the comment was posted (\3 or \4), and the body of the comment (\4 or \5). -var jCommentRegex = regexp.MustCompile("^Comment \\[\\(ID (\\d+)\\)\\|.*?] from GitHub user \\[(\\w+)\\|.*?] \\((.+)\\)? at (.+):\\n\\n(.+)$") +var jCommentRegex = regexp.MustCompile("^Comment \\[\\(ID (\\d+)\\)\\|.*?] from GitHub user \\[(.+)\\|.*?] \\((.+)\\) at (.+):\\n\\n(.+)$") // jCommentIDRegex just matches the beginning of a generated JIRA comment. It's a smaller, // simpler, and more efficient regex, to quickly filter only generated comments and retrieve @@ -23,15 +24,16 @@ var jCommentIDRegex = regexp.MustCompile("^Comment \\[\\(ID (\\d+)\\)\\|") // CompareComments takes a GitHub issue, and retrieves all of its comments. It then // matches each one to a comment in `existing`. If it finds a match, it calls // UpdateComment; if it doesn't, it calls CreateComment. 
-func CompareComments(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghClient clients.GitHubClient, jClient clients.JIRAClient) error { +func CompareComments(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghClient issuesyncgithub.Client, jClient issuesyncjira.Client) error { log := config.GetLogger() + user, repoName := config.GetRepo() if ghIssue.GetComments() == 0 { log.Debugf("Issue #%d has no comments, skipping.", *ghIssue.Number) return nil } - ghComments, err := ghClient.ListComments(ghIssue) + ghComments, err := issuesyncgithub.ListComments(ghClient, log, config.GetTimeout(), user, repoName, ghIssue) if err != nil { return err } @@ -56,7 +58,7 @@ func CompareComments(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, } // matches[0] is the whole string, matches[1] is the ID matches := jCommentIDRegex.FindStringSubmatch(jComment.Body) - id, _ := strconv.Atoi(matches[1]) + id, _ := strconv.ParseInt(matches[1], 10, 64) if *ghComment.ID != id { continue } @@ -69,7 +71,7 @@ func CompareComments(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, continue } - comment, err := jClient.CreateComment(jIssue, *ghComment, ghClient) + comment, err := issuesyncjira.CreateComment(jClient, jIssue, *ghComment, ghClient) if err != nil { return err } @@ -83,7 +85,7 @@ func CompareComments(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, // UpdateComment compares the body of a GitHub comment with the body (minus header) // of the JIRA comment, and updates the JIRA comment if necessary. -func UpdateComment(config cfg.Config, ghComment github.IssueComment, jComment jira.Comment, jIssue jira.Issue, ghClient clients.GitHubClient, jClient clients.JIRAClient) error { +func UpdateComment(config cfg.Config, ghComment github.IssueComment, jComment jira.Comment, jIssue jira.Issue, ghClient issuesyncgithub.Client, jClient issuesyncjira.Client) error { log := config.GetLogger() // fields[0] is the whole body, 1 is the ID, 2 is the username, 3 is the real name (or "" if none) @@ -94,12 +96,12 @@ func UpdateComment(config cfg.Config, ghComment github.IssueComment, jComment ji return nil } - comment, err := jClient.UpdateComment(jIssue, jComment.ID, ghComment, ghClient) + comment, err := issuesyncjira.UpdateComment(jClient, jIssue, jComment.ID, ghComment, ghClient) if err != nil { return err } - log.Debug("Updated JIRA comment %s.", comment.ID) + log.Debugf("Updated JIRA comment %s.", comment.ID) return nil } diff --git a/lib/comments_test.go b/lib/comments_test.go new file mode 100644 index 0000000..5d154da --- /dev/null +++ b/lib/comments_test.go @@ -0,0 +1,33 @@ +package lib + +import "testing" + +func TestJiraCommentRegex(t *testing.T) { + var fields = jCommentRegex.FindStringSubmatch(`Comment [(ID 484163403)|https://github.com] from GitHub user [bilbo-baggins|https://github.com/bilbo-baggins] (Bilbo Baggins) at 16:27 PM, April 17 2019: + +Bla blibidy bloo bla`) + + if len(fields) != 6 { + t.Fatalf("Regex failed to parse fields %v", fields) + } + + if fields[1] != "484163403" { + t.Fatalf("Expected field[1] = 484163403; Got field[1] = %s", fields[1]) + } + + if fields[2] != "bilbo-baggins" { + t.Fatalf("Expected field[2] = bilbo-baggins; Got field[2] = %s", fields[2]) + } + + if fields[3] != "Bilbo Baggins" { + t.Fatalf("Expected field[3] = Bilbo Baggins; Got field[3] = %s", fields[3]) + } + + if fields[4] != "16:27 PM, April 17 2019" { + t.Fatalf("Expected field[4] = 16:27 PM, April 17 2019; Got field[4] = %s", fields[4]) + } + + if fields[5] != "Bla blibidy 
bloo bla" { + t.Fatalf("Expected field[5] = Bla blibidy bloo bla; Got field[5] = %s", fields[5]) + } +} diff --git a/lib/issues.go b/lib/issues.go index 51d1b71..51a53f4 100644 --- a/lib/issues.go +++ b/lib/issues.go @@ -1,28 +1,26 @@ package lib import ( - "strings" - "time" - + "fmt" "github.com/andygrunwald/go-jira" "github.com/coreos/issue-sync/cfg" - "github.com/coreos/issue-sync/lib/clients" - "github.com/google/go-github/github" + "github.com/coreos/issue-sync/lib/issuesyncgithub" + "github.com/coreos/issue-sync/lib/issuesyncjira" + "github.com/coreos/issue-sync/lib/models" + "strings" ) -// dateFormat is the format used for the Last IS Update field -const dateFormat = "2006-01-02T15:04:05.0-0700" - // CompareIssues gets the list of GitHub issues updated since the `since` date, // gets the list of JIRA issues which have GitHub ID custom fields in that list, // then matches each one. If a JIRA issue already exists for a given GitHub issue, // it calls UpdateIssue; if no JIRA issue already exists, it calls CreateIssue. -func CompareIssues(config cfg.Config, ghClient clients.GitHubClient, jiraClient clients.JIRAClient) error { +func CompareIssues(config cfg.Config, ghClient issuesyncgithub.Client, jiraClient issuesyncjira.Client) error { log := config.GetLogger() + user, repoName := config.GetRepo() log.Debug("Collecting issues") - ghIssues, err := ghClient.ListIssues() + ghIssues, err := issuesyncgithub.ListIssues(ghClient, log, config.GetTimeout(), user, repoName, config.GetSinceParam()) if err != nil { return err } @@ -32,12 +30,12 @@ func CompareIssues(config cfg.Config, ghClient clients.GitHubClient, jiraClient return nil } - ids := make([]int, len(ghIssues)) + ids := make([]int64, len(ghIssues)) for i, v := range ghIssues { ids[i] = v.GetID() } - jiraIssues, err := jiraClient.ListIssues(ids) + jiraIssues, err := issuesyncjira.ListIssues(jiraClient, ids) if err != nil { return err } @@ -47,10 +45,11 @@ func CompareIssues(config cfg.Config, ghClient clients.GitHubClient, jiraClient for _, ghIssue := range ghIssues { found := false for _, jIssue := range jiraIssues { - id, _ := jIssue.Fields.Unknowns.Int(config.GetFieldKey(cfg.GitHubID)) - if int64(*ghIssue.ID) == id { + + id, _ := config.GetFieldMapper().GetFieldValue(&jIssue, cfg.GitHubID) + if int64(*ghIssue.ID) == id.(int64) { found = true - if err := UpdateIssue(config, ghIssue, jIssue, ghClient, jiraClient); err != nil { + if err := UpdateIssue(config, ghIssue, jIssue, ghClient, jiraClient,); err != nil { log.Errorf("Error updating issue %s. 
Error: %v", jIssue.Key, err) } break @@ -66,9 +65,45 @@ func CompareIssues(config cfg.Config, ghClient clients.GitHubClient, jiraClient return nil } +func jiraCustomFieldsNeedUpdate(config cfg.Config, jIssue jira.Issue, fieldKey cfg.FieldKey, githubFieldValue string) bool { + jiraField, err := config.GetFieldMapper().GetFieldValue(&jIssue, fieldKey) + + // update if there was an error retrieving the value + if err != nil { + return true + } + + if jiraField == nil { + // treat empty string and nil as equal + return len(githubFieldValue) != 0 + } else { + // update if there is any difference between actual values + return jiraField.(string) != githubFieldValue + } +} + +func sliceStringsEq(a []string, b []string) bool { + + if (a == nil) != (b == nil) { + return false; + } + + if len(a) != len(b) { + return false + } + + for i := range a { + if a[i] != b[i] { + return false + } + } + + return true +} + // DidIssueChange tests each of the relevant fields on the provided JIRA and GitHub issue // and returns whether or not they differ. -func DidIssueChange(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue) bool { +func DidIssueChange(config cfg.Config, ghIssue models.ExtendedGithubIssue, jIssue jira.Issue) bool { log := config.GetLogger() log.Debugf("Comparing GitHub issue #%d and JIRA issue %s", ghIssue.GetNumber(), jIssue.Key) @@ -77,31 +112,30 @@ func DidIssueChange(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue) anyDifferent = anyDifferent || (ghIssue.GetTitle() != jIssue.Fields.Summary) anyDifferent = anyDifferent || (ghIssue.GetBody() != jIssue.Fields.Description) + anyDifferent = anyDifferent || jiraCustomFieldsNeedUpdate(config, jIssue, cfg.GitHubStatus, ghIssue.GetState()) + anyDifferent = anyDifferent || jiraCustomFieldsNeedUpdate(config, jIssue, cfg.GitHubReporter, ghIssue.User.GetLogin()) + commits, err := config.GetFieldMapper().GetFieldValue(&jIssue, cfg.GitHubCommits) + commitIdsInJiraUntyped := commits.([]interface{}) - key := config.GetFieldKey(cfg.GitHubStatus) - field, err := jIssue.Fields.Unknowns.String(key) - if err != nil || *ghIssue.State != field { - anyDifferent = true + if err != nil { + return true } - key = config.GetFieldKey(cfg.GitHubReporter) - field, err = jIssue.Fields.Unknowns.String(key) - if err != nil || *ghIssue.User.Login != field { - anyDifferent = true + commitIdsInJira := make([]string, len(commitIdsInJiraUntyped)) + for i, v := range commitIdsInJiraUntyped { + commitIdsInJira[i] = fmt.Sprint(v) } - labels := make([]string, len(ghIssue.Labels)) + anyDifferent = anyDifferent || sliceStringsEq(commitIdsInJira, ghIssue.CommitIds) + ghLabels := make([]string, len(ghIssue.Labels)) for i, l := range ghIssue.Labels { - labels[i] = *l.Name - } - - key = config.GetFieldKey(cfg.GitHubLabels) - field, err = jIssue.Fields.Unknowns.String(key) - if err != nil && strings.Join(labels, ",") != field { - anyDifferent = true + ghLabels[i] = *l.Name } + ghLabelsString := strings.Join(ghLabels, ",") - log.Debugf("Issues have any differences: %b", anyDifferent) + anyDifferent = anyDifferent || jiraCustomFieldsNeedUpdate(config, jIssue, cfg.GitHubLabels, ghLabelsString) + anyDifferent = anyDifferent || (ghIssue.ProjectCard != nil && strings.ToLower(jIssue.Fields.Status.Name) != strings.ToLower(ghIssue.ProjectCard.GetColumnName())) + log.Debugf("Issues have any differences: %t", anyDifferent) return anyDifferent } @@ -109,7 +143,7 @@ func DidIssueChange(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue) // UpdateIssue compares each field of a 
GitHub issue to a JIRA issue; if any of them // differ, the differing fields of the JIRA issue are updated to match the GitHub // issue. -func UpdateIssue(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghClient clients.GitHubClient, jClient clients.JIRAClient) error { +func UpdateIssue(config cfg.Config, ghIssue models.ExtendedGithubIssue, jIssue jira.Issue, ghClient issuesyncgithub.Client, jClient issuesyncjira.Client) error { log := config.GetLogger() log.Debugf("Updating JIRA %s with GitHub #%d", jIssue.Key, *ghIssue.Number) @@ -117,23 +151,7 @@ func UpdateIssue(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghC var issue jira.Issue if DidIssueChange(config, ghIssue, jIssue) { - fields := jira.IssueFields{} - fields.Unknowns = map[string]interface{}{} - - fields.Summary = ghIssue.GetTitle() - fields.Description = ghIssue.GetBody() - fields.Unknowns[config.GetFieldKey(cfg.GitHubStatus)] = ghIssue.GetState() - fields.Unknowns[config.GetFieldKey(cfg.GitHubReporter)] = ghIssue.User.GetLogin() - - labels := make([]string, len(ghIssue.Labels)) - for i, l := range ghIssue.Labels { - labels[i] = l.GetName() - } - fields.Unknowns[config.GetFieldKey(cfg.GitHubLabels)] = strings.Join(labels, ",") - - fields.Unknowns[config.GetFieldKey(cfg.LastISUpdate)] = time.Now().Format(dateFormat) - - fields.Type = jIssue.Fields.Type + fields := config.GetFieldMapper().MapFields(&ghIssue) issue = jira.Issue{ Fields: &fields, @@ -142,7 +160,15 @@ func UpdateIssue(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghC } var err error - issue, err = jClient.UpdateIssue(issue) + + if ghIssue.ProjectCard != nil { + err = issuesyncjira.TryApplyTransitionWithName(jClient, jIssue, ghIssue.ProjectCard.GetColumnName()) + if err != nil { + return err + } + } + + issue, err = issuesyncjira.UpdateIssue(jClient, issue) if err != nil { return err } @@ -152,13 +178,13 @@ func UpdateIssue(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghC log.Debugf("JIRA issue %s is already up to date!", jIssue.Key) } - issue, err := jClient.GetIssue(jIssue.Key) + issue, err := issuesyncjira.GetIssue(jClient, jIssue.Key) if err != nil { log.Debugf("Failed to retrieve JIRA issue %s!", jIssue.Key) return err } - if err := CompareComments(config, ghIssue, issue, ghClient, jClient); err != nil { + if err := CompareComments(config, ghIssue.Issue, issue, ghClient, jClient); err != nil { return err } @@ -167,51 +193,37 @@ func UpdateIssue(config cfg.Config, ghIssue github.Issue, jIssue jira.Issue, ghC // CreateIssue generates a JIRA issue from the various fields on the given GitHub issue, then // sends it to the JIRA API. 
-func CreateIssue(config cfg.Config, issue github.Issue, ghClient clients.GitHubClient, jClient clients.JIRAClient) error { +func CreateIssue(config cfg.Config, ghIssue models.ExtendedGithubIssue, ghClient issuesyncgithub.Client, jClient issuesyncjira.Client) error { log := config.GetLogger() - log.Debugf("Creating JIRA issue based on GitHub issue #%d", *issue.Number) - - fields := jira.IssueFields{ - Type: jira.IssueType{ - Name: "Task", // TODO: Determine issue type - }, - Project: config.GetProject(), - Summary: issue.GetTitle(), - Description: issue.GetBody(), - Unknowns: map[string]interface{}{}, - } - - fields.Unknowns[config.GetFieldKey(cfg.GitHubID)] = issue.GetID() - fields.Unknowns[config.GetFieldKey(cfg.GitHubNumber)] = issue.GetNumber() - fields.Unknowns[config.GetFieldKey(cfg.GitHubStatus)] = issue.GetState() - fields.Unknowns[config.GetFieldKey(cfg.GitHubReporter)] = issue.User.GetLogin() - - strs := make([]string, len(issue.Labels)) - for i, v := range issue.Labels { - strs[i] = *v.Name - } - fields.Unknowns[config.GetFieldKey(cfg.GitHubLabels)] = strings.Join(strs, ",") + log.Debugf("Creating JIRA issue based on GitHub issue #%d", *ghIssue.Issue.Number) - fields.Unknowns[config.GetFieldKey(cfg.LastISUpdate)] = time.Now().Format(dateFormat) + fields := config.GetFieldMapper().MapFields(&ghIssue) jIssue := jira.Issue{ Fields: &fields, } - jIssue, err := jClient.CreateIssue(jIssue) + jIssue, err := issuesyncjira.CreateIssue(jClient, jIssue) if err != nil { return err } - jIssue, err = jClient.GetIssue(jIssue.Key) + if ghIssue.ProjectCard != nil { + err = issuesyncjira.TryApplyTransitionWithName(jClient, jIssue, ghIssue.ProjectCard.GetColumnName()) + if err != nil { + return err + } + } + + jIssue, err = issuesyncjira.GetIssue(jClient, jIssue.Key) if err != nil { return err } log.Debugf("Created JIRA issue %s!", jIssue.Key) - if err := CompareComments(config, issue, jIssue, ghClient, jClient); err != nil { + if err := CompareComments(config, ghIssue.Issue, jIssue, ghClient, jClient); err != nil { return err } diff --git a/lib/issuesyncgithub/github.go b/lib/issuesyncgithub/github.go new file mode 100644 index 0000000..904adf2 --- /dev/null +++ b/lib/issuesyncgithub/github.go @@ -0,0 +1,325 @@ +package issuesyncgithub + +import ( + "context" + "fmt" + "github.com/Sirupsen/logrus" + "github.com/coreos/issue-sync/lib/models" + "time" + + "github.com/cenkalti/backoff" + "github.com/coreos/issue-sync/cfg" + "github.com/google/go-github/github" + "golang.org/x/oauth2" +) + +// Client is a wrapper around the GitHub API Client library we +// use. It allows us to swap in other implementations, such as a dry run +// clients, or mock clients for testing. +type Client interface { + listIssueEvents(ctx context.Context, owner, repo string, number int, page int) ([]*github.IssueEvent, *github.Response, error) + listByRepo(ctx context.Context, owner string, repo string, page int, since time.Time) ([]*github.Issue, *github.Response, error) + getRepository(ctx context.Context, owner string, repo string) (*github.Repository, *github.Response, error) + listComments(ctx context.Context, owner string, repo string, number int) ([]*github.IssueComment, *github.Response, error) + getUser(ctx context.Context, user string) (*github.User, *github.Response, error) + getRateLimits(ctx context.Context) (*github.RateLimits, *github.Response, error) +} + +// realGHClient is a standard GitHub clients, that actually makes all of the +// requests against the GitHub REST API. 
It is the canonical implementation +// of Client. +type realGHClient struct { + client github.Client +} + +func (g realGHClient) listIssueEvents(ctx context.Context, owner, repo string, number int, page int) ([]*github.IssueEvent, *github.Response, error) { + return g.client.Issues.ListIssueEvents(ctx, owner, repo, number, &github.ListOptions{ + Page: page, + PerPage: 100, + }) +} + +func (g realGHClient) getRepository(ctx context.Context, owner string, repo string) (*github.Repository, *github.Response, error) { + return g.client.Repositories.Get(ctx, owner, repo) +} + +func (g realGHClient) listByRepo(ctx context.Context, owner string, repo string, page int, since time.Time) ([]*github.Issue, *github.Response, error) { + return g.client.Issues.ListByRepo(ctx, owner, repo, &github.IssueListByRepoOptions{ + Since: since, + State: "all", + Sort: "created", + Direction: "asc", + ListOptions: github.ListOptions{ + Page: page, + PerPage: 100, + }, + }) +} + +func (g realGHClient) listComments(ctx context.Context, owner string, repo string, number int) ([]*github.IssueComment, *github.Response, error) { + return g.client.Issues.ListComments(ctx, owner, repo, number, &github.IssueListCommentsOptions{ + Sort: "created", + Direction: "asc", + }) +} + +func (g realGHClient) getUser(ctx context.Context, user string) (*github.User, *github.Response, error) { + return g.client.Users.Get(ctx, user) +} + +func (g realGHClient) getRateLimits(ctx context.Context) (*github.RateLimits, *github.Response, error) { + return g.client.RateLimits(ctx) +} + +type TestGHClient struct { + handleListIssueEvents func(ctx context.Context, owner, repo string, number int, page int) ([]*github.IssueEvent, *github.Response, error) + handleListByRepo func(ctx context.Context, owner string, repo string, page int, since time.Time) ([]*github.Issue, *github.Response, error) + handleGetRepository func(ctx context.Context, owner string, repo string) (*github.Repository, *github.Response, error) + handleListComments func(ctx context.Context, owner string, repo string, number int) ([]*github.IssueComment, *github.Response, error) + handleGetUser func(ctx context.Context, user string) (*github.User, *github.Response, error) + handleGetRateLimits func(ctx context.Context) (*github.RateLimits, *github.Response, error) +} + +func (g TestGHClient) listIssueEvents(ctx context.Context, owner, repo string, number int, page int) ([]*github.IssueEvent, *github.Response, error) { + return g.handleListIssueEvents(ctx, owner, repo, number, page) +} + +func (g TestGHClient) getRepository(ctx context.Context, owner string, repo string) (*github.Repository, *github.Response, error) { + return g.handleGetRepository(ctx, owner, repo) +} + +func (g TestGHClient) listByRepo(ctx context.Context, owner string, repo string, page int, since time.Time) ([]*github.Issue, *github.Response, error) { + return g.handleListByRepo(ctx, owner, repo, page, since) +} + +func (g TestGHClient) listComments(ctx context.Context, owner string, repo string, number int) ([]*github.IssueComment, *github.Response, error) { + return g.handleListComments(ctx, owner, repo, number) +} + +func (g TestGHClient) getUser(ctx context.Context, user string) (*github.User, *github.Response, error) { + return g.handleGetUser(ctx, user) +} + +func (g TestGHClient) getRateLimits(ctx context.Context) (*github.RateLimits, *github.Response, error) { + return g.handleGetRateLimits(ctx) +} + +func getCurrentProjectCardAndCommitIds(g Client, log logrus.Entry, timeout time.Duration, 
user string, repoName string, issue *github.Issue) (*github.ProjectCard, []string, error) { + ctx := context.Background() + pages := 1 + + var currentProjectCard *github.ProjectCard + + var commitIds []string + + // search for the current project card + for page := 1; page <= pages; page++ { + is, res, err := request(log, timeout, func() (interface{}, *github.Response, error) { + return g.listIssueEvents(ctx, user, repoName, issue.GetNumber(), page) + }) + if err != nil { + return nil, nil, err + } + issueEventPointers, ok := is.([]*github.IssueEvent) + if !ok { + log.Errorf("Get GitHub issue events did not return issue events! Got: %v", is) + return nil, nil, fmt.Errorf("get GitHub issues events failed: expected []*github.IssueEvent; got %T", is) + } + + for _, v := range issueEventPointers { + if v.ProjectCard != nil { + if v.GetEvent() == "added_to_project" || v.GetEvent() == "moved_columns_in_project" { + currentProjectCard = v.ProjectCard + } else if *v.Event == "removed_from_project" { + currentProjectCard = nil + } + } + + if v.CommitID != nil { + commitIds = append(commitIds, *v.CommitID) + } + } + pages = res.LastPage + } + + log.Debugf("Found current Project Card for issue #%d", issue.GetNumber()) + + return currentProjectCard, commitIds, nil +} + +// ListIssues returns the list of GitHub issues since the last run of the tool. +func ListIssues(g Client, log logrus.Entry, timeout time.Duration, user string, repoName string, since time.Time) ([]models.ExtendedGithubIssue, error) { + ctx := context.Background() + + repo, _, _ := g.getRepository(ctx, user, repoName) + + // Set it so that it will run the loop once, and it'll be updated in the loop. + pages := 1 + var issues []models.ExtendedGithubIssue + + for page := 1; page <= pages; page++ { + is, res, err := request(log, timeout, func() (interface{}, *github.Response, error) { + return g.listByRepo(ctx, user, repoName, page, since) + }) + if err != nil { + return nil, err + } + issuePointers, ok := is.([]*github.Issue) + if !ok { + log.Errorf("Get GitHub issues did not return issues! Got: %v", is) + return nil, fmt.Errorf("get GitHub issues failed: expected []*github.Issue; got %T", is) + } + + var issuePage []models.ExtendedGithubIssue + for _, v := range issuePointers { + // If PullRequestLinks is not nil, it's a Pull Request + if v.PullRequestLinks == nil { + + var currentProjectCard *github.ProjectCard + var commitIds []string + if repo.GetHasProjects() { + currentProjectCard, commitIds, _ = getCurrentProjectCardAndCommitIds(g, log, timeout, user, repoName, v) + } + + issuePage = append(issuePage, models.ExtendedGithubIssue{Issue: *v, ProjectCard: currentProjectCard, CommitIds: commitIds}) + } + } + + pages = res.LastPage + issues = append(issues, issuePage...) + } + + log.Debug("Collected all GitHub issues") + + return issues, nil +} + +// ListComments returns the list of all comments on a GitHub issue in +// ascending order of creation. +func ListComments(g Client, log logrus.Entry, timeout time.Duration, user string, repoName string, issue github.Issue) ([]*github.IssueComment, error) { + + ctx := context.Background() + c, _, err := request(log, timeout, func() (interface{}, *github.Response, error) { + return g.listComments(ctx, user, repoName, issue.GetNumber()) + }) + if err != nil { + log.Errorf("Error retrieving GitHub comments for issue #%d. Error: %v.", issue.GetNumber(), err) + return nil, err + } + comments, ok := c.([]*github.IssueComment) + if !ok { + log.Errorf("Get GitHub comments did not return comments! 
Got: %v", c) + return nil, fmt.Errorf("Get GitHub comments failed: expected []*github.IssueComment; got %T", c) + } + + return comments, nil +} + +// GetUser returns a GitHub user from its login. +func GetUser(g Client, log logrus.Entry, timeout time.Duration, userName string) (github.User, error) { + u, _, err := request(log, timeout, func() (interface{}, *github.Response, error) { + return g.getUser(context.Background(), userName) + }) + + if err != nil { + log.Errorf("Error retrieving GitHub user %s. Error: %v", userName, err) + } + + user, ok := u.(*github.User) + if !ok { + log.Errorf("Get GitHub user did not return user! Got: %v", u) + return github.User{}, fmt.Errorf("Get GitHub user failed: expected *github.User; got %T", u) + } + + return *user, nil +} + +// GetRateLimits returns the current rate limits on the GitHub API. This is a +// simple and lightweight request that can also be used simply for testing the API. +func GetRateLimits(g Client, log logrus.Entry, timeout time.Duration) (github.RateLimits, error) { + ctx := context.Background() + + rl, _, err := request(log, timeout, func() (interface{}, *github.Response, error) { + return g.getRateLimits(ctx) + }) + if err != nil { + log.Errorf("Error connecting to GitHub; check your token. Error: %v", err) + return github.RateLimits{}, err + } + rate, ok := rl.(*github.RateLimits) + if !ok { + log.Errorf("Get GitHub rate limits did not return rate limits! Got: %v", rl) + return github.RateLimits{}, fmt.Errorf("Get GitHub rate limits failed: expected *github.RateLimits; got %T", rl) + } + + return *rate, nil +} + +const retryBackoffRoundRatio = time.Millisecond / time.Nanosecond + +// request takes an API function from the GitHub library +// and calls it with exponential backoff. If the function succeeds, it +// returns the expected value and the GitHub API response, as well as a nil +// error. If it continues to fail until a maximum time is reached, it returns +// a nil result as well as the returned HTTP response and a timeout error. +func request(log logrus.Entry, timeout time.Duration, f func() (interface{}, *github.Response, error)) (interface{}, *github.Response, error) { + + var ret interface{} + var res *github.Response + + op := func() error { + var err error + ret, res, err = f() + return err + } + + b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = timeout + + backoffErr := backoff.RetryNotify(op, b, func(err error, duration time.Duration) { + // Round to a whole number of milliseconds + duration /= retryBackoffRoundRatio // Convert nanoseconds to milliseconds + duration *= retryBackoffRoundRatio // Convert back so it appears correct + + log.Errorf("Error performing operation; retrying in %v: %v", duration, err) + }) + + return ret, res, backoffErr +} + +// NewClient creates a Client and returns it; which +// implementation it uses depends on the configuration of this +// run. For example, a dry-run clients may be created which does +// not make any requests that would change anything on the server, +// but instead simply prints out the actions that it's asked to take. +func NewClient(config cfg.Config) (Client, error) { + var ret Client + + log := config.GetLogger() + + ctx := context.Background() + ts := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: config.GetConfigString("github-token")}, + ) + tc := oauth2.NewClient(ctx, ts) + + client := github.NewClient(tc) + + ret = realGHClient{ + client: *client, + } + + // Make a request so we can check that we can connect fine. 
+ _, _, err := ret.getRateLimits(context.Background()) + if err != nil { + return realGHClient{}, err + } + log.Debug("Successfully connected to GitHub.") + + return ret, nil +} + +func NewTestClient() TestGHClient { + return TestGHClient {} +} + diff --git a/lib/issuesyncgithub/github_test.go b/lib/issuesyncgithub/github_test.go new file mode 100644 index 0000000..2836464 --- /dev/null +++ b/lib/issuesyncgithub/github_test.go @@ -0,0 +1,27 @@ +package issuesyncgithub + +import ( + "context" + "github.com/coreos/issue-sync/cfg" + "github.com/google/go-github/github" + "testing" + "time" +) + + +func TestListIssues(t *testing.T) { + + client := NewTestClient() + + client.handleGetRepository = func(ctx context.Context, owner string, repo string) (repository *github.Repository, response *github.Response, e error) { + hasProject := true + return &github.Repository{ HasProjects: &hasProject}, nil, nil + } + + log := *cfg.NewLogger("test", "debug") + + issues, err := ListIssues(client, log, 10, "", "", time.Now()) + + print(issues) + print(err) +} diff --git a/lib/issuesyncjira/jira.go b/lib/issuesyncjira/jira.go new file mode 100644 index 0000000..8a23a4e --- /dev/null +++ b/lib/issuesyncjira/jira.go @@ -0,0 +1,624 @@ +package issuesyncjira + +import ( + "errors" + "fmt" + "github.com/coreos/issue-sync/lib/issuesyncgithub" + "io/ioutil" + "net/http" + "regexp" + "strings" + + "time" + + "github.com/andygrunwald/go-jira" + "github.com/cenkalti/backoff" + "github.com/coreos/issue-sync/cfg" + "github.com/coreos/issue-sync/lib/utils" + "github.com/google/go-github/github" +) + +// commentDateFormat is the format used in the headers of JIRA comments. +const commentDateFormat = "15:04 PM, January 2 2006" + +// maxJQLIssueLength is the maximum number of GitHub issues we can +// use before we need to stop using JQL and filter issues ourself. +const maxJQLIssueLength = 100 + +// getErrorBody reads the HTTP response body of a JIRA API response, +// logs it as an error, and returns an error object with the contents +// of the body. If an error occurs during reading, that error is +// instead printed and returned. This function closes the body for +// further reading. +func getErrorBody(config cfg.Config, res *jira.Response) error { + log := config.GetLogger() + defer res.Body.Close() + body, err := ioutil.ReadAll(res.Body) + if err != nil { + log.Errorf("Error occured trying to read error body: %v", err) + return err + } + log.Debugf("Error body: %s", body) + return errors.New(string(body)) +} + +// Client is a wrapper around the JIRA API clients library we +// use. It allows us to hide implementation details such as backoff +// as well as swap in other implementations, such as for dry run +// or test mocking. 
+type Client interface { + getConfig() cfg.Config + searchIssues(jql string) (interface{}, *jira.Response, error) + do(method string, url string, body interface{}, out interface{}) (*jira.Response, error) + getIssue(key string) (*jira.Issue, *jira.Response, error) + createIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) + updateIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) + addComment(id string, jComment *jira.Comment, jIssue *jira.Issue, ghComment *github.IssueComment, ghUser *github.User) (*jira.Comment, *jira.Response, error) + getTransitions(issue jira.Issue) ([]jira.Transition, *jira.Response, error) + applyTransition(issue jira.Issue, transition jira.Transition) (*jira.Response, error) +} + +// realJIRAClient is a standard JIRA client, which actually makes all +// of the requests against the JIRA REST API. It is the canonical +// implementation of Client. +type realJIRAClient struct { + config cfg.Config + client jira.Client +} + +// dryrunJIRAClient is an implementation of Client which performs all +// GET requests the same as the realJIRAClient, but does not perform any +// unsafe requests which may modify server data, instead printing out the +// actions it is asked to perform without making the request. +type dryrunJIRAClient struct { + config cfg.Config + client jira.Client +} + +func (j realJIRAClient) getConfig() cfg.Config { + return j.config +} + +func (j realJIRAClient) getTransitions(issue jira.Issue) ([]jira.Transition, *jira.Response, error) { + return j.client.Issue.GetTransitions(issue.ID) +} + +func (j realJIRAClient) applyTransition(issue jira.Issue, transition jira.Transition) (*jira.Response, error) { + return j.client.Issue.DoTransition(issue.ID, transition.ID) +} + +func (j realJIRAClient) searchIssues(jql string) (interface{}, *jira.Response, error) { + return j.client.Issue.Search(jql, nil) +} + +func (j realJIRAClient) getIssue(key string) (*jira.Issue, *jira.Response, error) { + return j.client.Issue.Get(key, nil) +} + +func (j realJIRAClient) createIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) { + return j.client.Issue.Create(issue) +} + +func (j realJIRAClient) updateIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) { + return j.client.Issue.Update(issue) +} + +func (j realJIRAClient) addComment(id string, jComment *jira.Comment, jIssue *jira.Issue, ghComment *github.IssueComment, ghUser *github.User) (*jira.Comment, *jira.Response, error) { + return j.client.Issue.AddComment(id, jComment) +} + +func (j realJIRAClient) do(method string, url string, body interface{}, out interface{}) (*jira.Response, error) { + req, _ := j.client.NewRequest(method, url, body) + return j.client.Do(req, out) +} + +// DRY RUN CLIENT + +func (j dryrunJIRAClient) getTransitions(issue jira.Issue) ([]jira.Transition, *jira.Response, error) { + return j.client.Issue.GetTransitions(issue.ID) +} + +func (j dryrunJIRAClient) applyTransition(issue jira.Issue, transition jira.Transition) (*jira.Response, error) { + log := j.config.GetLogger() + log.Info("") + log.Info("Applying Transition:") + log.Infof(" Jira Issue ID: %s", issue.ID) + log.Infof(" Transition Id: %s", transition.ID) + log.Infof(" Old Status: %s", issue.Fields.Status.Name) + log.Infof(" New Status: %s", transition.To.Name) + log.Info("") + return nil, nil +} + +func (j dryrunJIRAClient) getConfig() cfg.Config { + return j.config +} + +func (j dryrunJIRAClient) searchIssues(jql string) (interface{}, *jira.Response, error) { + return j.client.Issue.Search(jql, nil) +} + 
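The point of funnelling every JIRA call through this small unexported interface is that the real and dry-run clients become interchangeable at the call site. A minimal, self-contained sketch of that pattern follows; the `issueCreator` interface, its two implementations, and the `createIssue` signature are simplified stand-ins invented for illustration, not the actual issue-sync types:

```go
package main

import "fmt"

// issueCreator is a simplified stand-in for the Client interface above.
type issueCreator interface {
	createIssue(summary string) error
}

// realClient would talk to the JIRA REST API in the real tool.
type realClient struct{}

func (realClient) createIssue(summary string) error {
	return fmt.Errorf("this sketch is not wired to a real server (would create %q)", summary)
}

// dryRunClient only reports what it would have done.
type dryRunClient struct{}

func (dryRunClient) createIssue(summary string) error {
	fmt.Printf("DRY RUN: would create JIRA issue %q\n", summary)
	return nil
}

func main() {
	dryRun := true // in issue-sync this decision comes from config.IsDryRun()

	var client issueCreator = realClient{}
	if dryRun {
		client = dryRunClient{}
	}

	if err := client.createIssue("Example issue from GitHub"); err != nil {
		fmt.Println("error:", err)
	}
}
```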
+func (j dryrunJIRAClient) getIssue(key string) (*jira.Issue, *jira.Response, error) { + return j.client.Issue.Get(key, nil) +} + +func (j dryrunJIRAClient) createIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) { + log := j.config.GetLogger() + + fields := issue.Fields + + log.Info("") + log.Info("Create new JIRA issue:") + log.Infof(" Summary: %s", fields.Summary) + log.Infof(" Description: %s", truncate(fields.Description, 50)) + log.Infof(" GitHub ID: %d", utils.GetOrElse(j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubID))(0)) + log.Infof(" GitHub Number: %d", utils.GetOrElse(j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubNumber))(0)) + log.Infof(" GitHub Labels: %s", utils.GetOrElse(j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubLabels))("")) + log.Infof(" GitHub Status: %s", utils.GetOrElse(j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubStatus))("")) + log.Infof(" GitHub Reporter: %s", utils.GetOrElse(j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubReporter))("")) + log.Info("") + + return issue, nil, nil +} + +func (j dryrunJIRAClient) updateIssue(issue *jira.Issue) (*jira.Issue, *jira.Response, error) { + log := j.config.GetLogger() + + fields := issue.Fields + + log.Info("") + log.Infof("Update JIRA issue %s:", issue.Key) + log.Infof(" Summary: %s", fields.Summary) + log.Infof(" Description: %s", truncate(fields.Description, 50)) + if labels, err := j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubLabels); err == nil { + log.Infof(" Labels: %s", labels) + } + if state, err := j.config.GetFieldMapper().GetFieldValue(issue, cfg.GitHubStatus); err == nil { + log.Infof(" State: %s", state) + } + log.Info("") + + return issue, nil, nil +} + +func (j dryrunJIRAClient) addComment(id string, jComment *jira.Comment, jIssue *jira.Issue, ghComment *github.IssueComment, ghUser *github.User) (*jira.Comment, *jira.Response, error) { + log := j.config.GetLogger() + + body := fmt.Sprintf("Comment (ID %d) from GitHub user %s", ghComment.GetID(), ghUser.GetLogin()) + if ghUser.GetName() != "" { + body = fmt.Sprintf("%s (%s)", body, ghUser.GetName()) + } + body = fmt.Sprintf( + "%s at %s:\n\n%s", + body, + ghComment.CreatedAt.Format(commentDateFormat), + ghComment.GetBody(), + ) + + log.Info("") + log.Infof("Create comment on JIRA issue %s:", jIssue.Key) + log.Infof(" GitHub ID: %d", ghComment.GetID()) + if ghUser.GetName() != "" { + log.Infof(" User: %s (%s)", ghUser.GetLogin(), ghUser.GetName()) + } else { + log.Infof(" User: %s", ghUser.GetLogin()) + } + log.Infof(" Posted at: %s", ghComment.CreatedAt.Format(commentDateFormat)) + log.Infof(" Body: %s", truncate(ghComment.GetBody(), 100)) + log.Info("") + + return &jira.Comment{ + Body: body, + }, nil, nil +} + +func (j dryrunJIRAClient) do(method string, url string, body interface{}, out interface{}) (*jira.Response, error) { + disallowedMethods := map[string]bool{"POST": true, "PUT": true} + + if disallowedMethods[method] { + return nil, nil + } + req, _ := j.client.NewRequest(method, url, nil) + return j.client.Do(req, out) +} + + + +const retryBackoffRoundRatio = time.Millisecond / time.Nanosecond + +// request takes an API function from the JIRA library +// and calls it with exponential backoff. If the function succeeds, it +// returns the expected value and the JIRA API response, as well as a nil +// error. If it continues to fail until a maximum time is reached, it returns +// a nil result as well as the returned HTTP response and a timeout error. 
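Every JIRA call in this file goes through the `request` helper defined just below. As a standalone sketch of the same retry pattern, here is the cenkalti/backoff usage it relies on, with a fabricated operation that fails twice before succeeding in place of a real API call (the five-second limit is also made up; the real helper uses `config.GetTimeout()`):

```go
package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/cenkalti/backoff"
)

func main() {
	attempts := 0

	// op stands in for a single JIRA API call that fails transiently.
	op := func() error {
		attempts++
		if attempts < 3 {
			return errors.New("transient error")
		}
		return nil
	}

	b := backoff.NewExponentialBackOff()
	b.MaxElapsedTime = 5 * time.Second // the real helper uses config.GetTimeout()

	err := backoff.RetryNotify(op, b, func(err error, d time.Duration) {
		fmt.Printf("Error performing operation; retrying in %v: %v\n", d, err)
	})

	fmt.Printf("finished after %d attempts, err = %v\n", attempts, err)
}
```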
+func request(config cfg.Config, f func() (interface{}, *jira.Response, error)) (interface{}, *jira.Response, error) { + log := config.GetLogger() + + var ret interface{} + var res *jira.Response + + op := func() error { + var err error + ret, res, err = f() + return err + } + + b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = config.GetTimeout() + + backoffErr := backoff.RetryNotify(op, b, func(err error, duration time.Duration) { + // Round to a whole number of milliseconds + duration /= retryBackoffRoundRatio // Convert nanoseconds to milliseconds + duration *= retryBackoffRoundRatio // Convert back so it appears correct + + log.Errorf("Error performing operation; retrying in %v: %v", duration, err) + }) + + return ret, res, backoffErr +} + +// NewClient creates a new Client and configures it with +// the config object provided. The type of clients created depends +// on the configuration; currently, it creates either a standard +// clients, or a dry-run clients. +func NewClient(config *cfg.Config) (Client, error) { + log := config.GetLogger() + + var oauth *http.Client + var err error + if !config.IsBasicAuth() { + oauth, err = newJIRAHTTPClient(*config) + if err != nil { + log.Errorf("Error getting OAuth config: %v", err) + return dryrunJIRAClient{}, err + } + } + + var j Client + + client, err := jira.NewClient(oauth, config.GetConfigString("jira-uri")) + if err != nil { + log.Errorf("Error initializing JIRA clients; check your base URI. Error: %v", err) + return dryrunJIRAClient{}, err + } + + if config.IsBasicAuth() { + client.Authentication.SetBasicAuth(config.GetConfigString("jira-user"), config.GetConfigString("jira-pass")) + } + + log.Debug("JIRA clients initialized") + + err = config.LoadJIRAConfig(*client) + + if err != nil { + log.Errorf("Error loading config", err) + return dryrunJIRAClient{}, err + } + + if config.IsDryRun() { + j = dryrunJIRAClient{ + config: *config, + client: *client, + } + } else { + j = realJIRAClient{ + config: *config, + client: *client, + } + } + + return j, nil +} + +func TryApplyTransitionWithName(j Client, issue jira.Issue, statusName string) error { + log := j.getConfig().GetLogger() + + + var currentStatusName string + if issue.Fields == nil || issue.Fields.Status == nil { + currentStatusName = "" + } else { + currentStatusName = strings.ToLower(issue.Fields.Status.Name) + } + + targetStatusName := strings.ToLower(statusName) + if currentStatusName == targetStatusName { + log.Debug("Issue Status is already in sync") + return nil + } + + transitions, res, err := j.getTransitions(issue) + + if err != nil { + log.Errorf("Error retrieving JIRA transitions: %v", err) + return getErrorBody(j.getConfig(), res) + } + + for _, v := range transitions { + if strings.ToLower(v.To.Name) == targetStatusName { + log.Info(fmt.Sprintf("Applying transition %s -> %s on issue %s", currentStatusName, v.To.Name, issue.ID)) + _, err = j.applyTransition(issue, v) + if err != nil { + log.Errorf("Error applying JIRA transitions: %v", err) + return getErrorBody(j.getConfig(), res) + } else { + return nil + } + } + } + // TODO: This is where we can decide what to do about invalid transitions + return errors.New(fmt.Sprintf("No transition from '%s' to '%s' found for issue %s", currentStatusName, statusName, issue.ID)) +} + +// ListIssues returns a list of JIRA issues on the configured project which +// have GitHub IDs in the provided list. `ids` should be a comma-separated +// list of GitHub IDs. 
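The query described in the comment above and built in the function just below takes one of two shapes: a server-side filter on the GitHub ID custom field when the ID list is short enough for a URL, or a whole-project query (with client-side filtering) otherwise. A rough illustration of the two strings, using an invented project key, custom-field ID, and issue IDs:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical values, for illustration only.
	projectKey := "PROJ"
	githubIDFieldID := "10042"
	ghIssueIDs := []string{"4815", "1623", "4248"}

	// Short list: let JIRA filter by the GitHub ID custom field.
	filtered := fmt.Sprintf("project='%s' AND cf[%s] in (%s)",
		projectKey, githubIDFieldID, strings.Join(ghIssueIDs, ","))

	// Long list (or the JSON field mapper): fetch the whole project and filter locally.
	unfiltered := fmt.Sprintf("project='%s'", projectKey)

	fmt.Println(filtered)
	fmt.Println(unfiltered)
}
```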
+func ListIssues(j Client, ghIssueIds []int64) ([]jira.Issue, error) { + log := j.getConfig().GetLogger() + + ghIssueIdStrs := make([]string, len(ghIssueIds)) + for i, v := range ghIssueIds { + ghIssueIdStrs[i] = fmt.Sprint(v) + } + + var jql string + // If the list of IDs is too long, we get a 414 Request-URI Too Large, so in that case, + // we'll need to do the filtering ourselves. + if !j.getConfig().IsUsingGitHubIssueDataField() && len(ghIssueIds) < maxJQLIssueLength { + jql = fmt.Sprintf("project='%s' AND cf[%s] in (%s)", + j.getConfig().GetProjectKey(), j.getConfig().GetFieldID(cfg.GitHubID), strings.Join(ghIssueIdStrs, ",")) + } else { + jql = fmt.Sprintf("project='%s'", j.getConfig().GetProjectKey()) + } + + ji, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + return j.searchIssues(jql) + }) + + if err != nil { + log.Errorf("Error retrieving JIRA issues: %v", err) + return nil, getErrorBody(j.getConfig(), res) + } + + var jiraIssues []jira.Issue + // simulating a set + projectKeySet := make(map[string] bool) + jis, ok := ji.([]jira.Issue) + for _, v := range jis { + projectKeySet[v.Fields.Project.Key] = true + } + + projectKeys := make([]string, 0, len(projectKeySet)) + for k := range projectKeySet { + projectKeys = append(projectKeys, k) + } + + for _, v := range jis { + jiraIssues = append(jiraIssues, v) + } + + if !ok { + log.Errorf("Get JIRA issues did not return issues! Got: %v", ji) + return nil, fmt.Errorf("get JIRA issues failed: expected []jira.Issue; got %T", ji) + } + + var issues []jira.Issue + if !j.getConfig().IsUsingGitHubIssueDataField() && len(ghIssueIds) < maxJQLIssueLength { + // The issues were already filtered by our JQL, so use as is + issues = jiraIssues + } else { + // Filter only issues which have a defined GitHub ID in the list of IDs + for _, v := range jiraIssues { + if id, err := j.getConfig().GetFieldMapper().GetFieldValue(&v, cfg.GitHubID); err == nil { + for _, idOpt := range ghIssueIds { + if id.(int64) == int64(idOpt) { + issues = append(issues, v) + break + } + } + } + } + } + return issues, nil +} + +// GetIssue returns a single JIRA issue within the configured project +// according to the issue key (e.g. "PROJ-13"). +func GetIssue(j Client, key string) (jira.Issue, error) { + log := j.getConfig().GetLogger() + + i, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + return j.getIssue(key) + }) + if err != nil { + log.Errorf("Error retrieving JIRA issue: %v", err) + return jira.Issue{}, getErrorBody(j.getConfig(), res) + } + issue, ok := i.(*jira.Issue) + if !ok { + log.Errorf("Get JIRA issue did not return issue! Got %v", i) + return jira.Issue{}, fmt.Errorf("Get JIRA issue failed: expected *jira.Issue; got %T", i) + } + + return *issue, nil +} + +// CreateIssue creates a new JIRA issue according to the fields provided in +// the provided issue object. It returns the created issue, with all the +// fields provided (including e.g. ID and Key). +func CreateIssue(j Client, issue jira.Issue) (jira.Issue, error) { + log := j.getConfig().GetLogger() + + i, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + return j.createIssue(&issue) + }) + if err != nil { + log.Errorf("Error creating JIRA issue: %v", err) + return jira.Issue{}, getErrorBody(j.getConfig(), res) + } + is, ok := i.(*jira.Issue) + if !ok { + log.Errorf("Create JIRA issue did not return issue! 
Got: %v", i) + return jira.Issue{}, fmt.Errorf("Create JIRA issue failed: expected *jira.Issue; got %T", i) + } + + return *is, nil +} + +// UpdateIssue updates a given issue (identified by the Key field of the provided +// issue object) with the fields on the provided issue. It returns the updated +// issue as it exists on JIRA. +func UpdateIssue(j Client, issue jira.Issue) (jira.Issue, error) { + log := j.getConfig().GetLogger() + + i, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + return j.updateIssue(&issue) + }) + if err != nil { + log.Errorf("Error updating JIRA issue %s: %v", issue.Key, err) + return jira.Issue{}, getErrorBody(j.getConfig(), res) + } + is, ok := i.(*jira.Issue) + if !ok { + log.Errorf("Update JIRA issue did not return issue! Got: %v", i) + return jira.Issue{}, fmt.Errorf("Update JIRA issue failed: expected *jira.Issue; got %T", i) + } + + return *is, nil +} + +// maxBodyLength is the maximum length of a JIRA comment body, which is currently +// 2^15-1. +const maxBodyLength = 1 << 15 + +// CreateComment adds a comment to the provided JIRA issue using the fields from +// the provided GitHub comment. It then returns the created comment. +func CreateComment(j Client, jIssue jira.Issue, ghComment github.IssueComment, g issuesyncgithub.Client) (jira.Comment, error) { + config := j.getConfig() + log := config.GetLogger() + + user, err := issuesyncgithub.GetUser(g, log, config.GetTimeout(), ghComment.User.GetLogin()) + if err != nil { + return jira.Comment{}, err + } + + body := fmt.Sprintf("Comment [(ID %d)|%s]", ghComment.GetID(), ghComment.GetHTMLURL()) + body = fmt.Sprintf("%s from GitHub user [%s|%s]", body, user.GetLogin(), user.GetHTMLURL()) + if user.GetName() != "" { + body = fmt.Sprintf("%s (%s)", body, user.GetName()) + } + body = fmt.Sprintf( + "%s at %s:\n\n%s", + body, + ghComment.CreatedAt.Format(commentDateFormat), + ghComment.GetBody(), + ) + + if len(body) > maxBodyLength { + body = body[:maxBodyLength] + } + + jComment := jira.Comment{ + Body: body, + } + + ghUser, err := issuesyncgithub.GetUser(g, log, config.GetTimeout(), ghComment.User.GetLogin()) + if err != nil { + return jira.Comment{}, err + } + + com, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + return j.addComment(jIssue.ID, &jComment, &jIssue, &ghComment, &ghUser) + }) + if err != nil { + log.Errorf("Error creating JIRA ghComment on jIssue %s. Error: %v", jIssue.Key, err) + return jira.Comment{}, getErrorBody(j.getConfig(), res) + } + co, ok := com.(*jira.Comment) + if !ok { + log.Errorf("Create JIRA ghComment did not return ghComment! Got: %v", com) + return jira.Comment{}, fmt.Errorf("Create JIRA ghComment failed: expected *jira.Comment; got %T", com) + } + return *co, nil +} + +// UpdateComment updates a comment (identified by the `id` parameter) on a given +// JIRA with a new body from the fields of the given GitHub comment. It returns +// the updated comment. 
+func UpdateComment(j Client, issue jira.Issue, id string, comment github.IssueComment, g issuesyncgithub.Client) (jira.Comment, error) { + config := j.getConfig() + log := config.GetLogger() + + user, err := issuesyncgithub.GetUser(g, log, config.GetTimeout(), comment.User.GetLogin()) + if err != nil { + return jira.Comment{}, err + } + + body := fmt.Sprintf("Comment [(ID %d)|%s]", comment.GetID(), comment.GetHTMLURL()) + body = fmt.Sprintf("%s from GitHub user [%s|%s]", body, user.GetLogin(), user.GetHTMLURL()) + if user.GetName() != "" { + body = fmt.Sprintf("%s (%s)", body, user.GetName()) + } + body = fmt.Sprintf( + "%s at %s:\n\n%s", + body, + comment.CreatedAt.Format(commentDateFormat), + comment.GetBody(), + ) + + if len(body) > maxBodyLength { + body = body[:maxBodyLength] + } + + // As it is, the JIRA API we're using doesn't have any way to update comments natively. + // So, we have to build the request ourselves. + requestBody := struct { + Body string `json:"body"` + }{ + Body: body, + } + + jComment := new(jira.Comment) + + if err != nil { + log.Errorf("Error creating comment update request: %s", err) + return jira.Comment{}, err + } + + com, res, err := request(j.getConfig(), func() (interface{}, *jira.Response, error) { + url := fmt.Sprintf("rest/api/2/issue/%s/comment/%s", issue.Key, id) + res, err := j.do("PUT", url, requestBody, nil) + return jComment, res, err + }) + if err != nil { + log.Errorf("Error updating comment: %v", err) + return jira.Comment{}, getErrorBody(j.getConfig(), res) + } + co, ok := com.(*jira.Comment) + if !ok { + log.Errorf("Update JIRA comment did not return comment! Got: %v", com) + return jira.Comment{}, fmt.Errorf("Update JIRA comment failed: expected *jira.Comment; got %T", com) + } + return *co, nil +} + +// newlineReplaceRegex is a regex to match both "\r\n" and just "\n" newline styles, +// in order to allow us to escape both sequences cleanly in the output of a dry run. 
+var newlineReplaceRegex = regexp.MustCompile("\r?\n") + +// truncate is a utility function to replace all the newlines in +// the string with the characters "\n", then truncate it to no +// more than 50 characters +func truncate(s string, length int) string { + if s == "" { + return "empty" + } + + s = newlineReplaceRegex.ReplaceAllString(s, "\\n") + if len(s) <= length { + return s + } + return fmt.Sprintf("%s...", s[0:length]) +} + + diff --git a/lib/clients/oauth.go b/lib/issuesyncjira/oauth.go similarity index 99% rename from lib/clients/oauth.go rename to lib/issuesyncjira/oauth.go index d0d0643..4013620 100644 --- a/lib/clients/oauth.go +++ b/lib/issuesyncjira/oauth.go @@ -1,4 +1,4 @@ -package clients +package issuesyncjira import ( "context" diff --git a/lib/models/extensions.go b/lib/models/extensions.go new file mode 100644 index 0000000..3aa1f57 --- /dev/null +++ b/lib/models/extensions.go @@ -0,0 +1,11 @@ +package models + +import ( + "github.com/google/go-github/github" +) + +type ExtendedGithubIssue struct { + github.Issue + ProjectCard *github.ProjectCard + CommitIds []string +} \ No newline at end of file diff --git a/lib/utils/utils.go b/lib/utils/utils.go new file mode 100644 index 0000000..37ed77f --- /dev/null +++ b/lib/utils/utils.go @@ -0,0 +1,11 @@ +package utils + +func GetOrElse(value interface{}, err error) func(defaultValue interface{}) interface{} { + return func(defaultValue interface{}) interface{} { + if err != nil { + return defaultValue + } else { + return value + } + } +} \ No newline at end of file diff --git a/vendor/github.com/andygrunwald/go-jira/.gitignore b/vendor/github.com/andygrunwald/go-jira/.gitignore index dfbdb5e..1e57f8a 100644 --- a/vendor/github.com/andygrunwald/go-jira/.gitignore +++ b/vendor/github.com/andygrunwald/go-jira/.gitignore @@ -3,6 +3,9 @@ *.a *.so +# Don't check in vendor +vendor/ + # Folders _obj _test diff --git a/vendor/github.com/andygrunwald/go-jira/.travis.yml b/vendor/github.com/andygrunwald/go-jira/.travis.yml index 962d319..54fb29c 100644 --- a/vendor/github.com/andygrunwald/go-jira/.travis.yml +++ b/vendor/github.com/andygrunwald/go-jira/.travis.yml @@ -3,11 +3,11 @@ language: go sudo: false go: - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - 1.8 + - "1.7.x" + - "1.8.x" + - "1.9.x" + - "1.10.x" + - "1.11.x" before_install: - go get -t ./... diff --git a/vendor/github.com/andygrunwald/go-jira/Gopkg.lock b/vendor/github.com/andygrunwald/go-jira/Gopkg.lock new file mode 100644 index 0000000..00b3051 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/Gopkg.lock @@ -0,0 +1,36 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/fatih/structs" + packages = ["."] + revision = "a720dfa8df582c51dee1b36feabb906bde1588bd" + version = "v1.0" + +[[projects]] + branch = "master" + name = "github.com/google/go-querystring" + packages = ["query"] + revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a" + +[[projects]] + name = "github.com/pkg/errors" + packages = ["."] + revision = "645ef00459ed84a119197bfb8d8205042c6df63d" + version = "v0.8.0" + +[[projects]] + name = "github.com/trivago/tgo" + packages = [ + "tcontainer", + "treflect" + ] + revision = "e4d1ddd28c17dd89ed26327cf69fded22060671b" + version = "v1.0.1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "e84ca9eea6d233e0947b0d760913db2983fd4cbf6fd0d8690c737a71affb635c" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/andygrunwald/go-jira/Gopkg.toml b/vendor/github.com/andygrunwald/go-jira/Gopkg.toml new file mode 100644 index 0000000..5ebe7d4 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/Gopkg.toml @@ -0,0 +1,46 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" +# +# [prune] +# non-go = false +# go-tests = true +# unused-packages = true + + +[[constraint]] + name = "github.com/fatih/structs" + version = "1.0.0" + +[[constraint]] + branch = "master" + name = "github.com/google/go-querystring" + +[[constraint]] + name = "github.com/pkg/errors" + version = "0.8.0" + +[[constraint]] + name = "github.com/trivago/tgo" + version = "1.0.1" + +[prune] + go-tests = true + unused-packages = true diff --git a/vendor/github.com/andygrunwald/go-jira/README.md b/vendor/github.com/andygrunwald/go-jira/README.md index 1981bcb..da5851e 100644 --- a/vendor/github.com/andygrunwald/go-jira/README.md +++ b/vendor/github.com/andygrunwald/go-jira/README.md @@ -6,16 +6,16 @@ [Go](https://golang.org/) client library for [Atlassian JIRA](https://www.atlassian.com/software/jira). -![Go client library for Atlassian JIRA](./img/go-jira-compressed.png "Go client library for Atlassian JIRA.") +![Go client library for Atlassian JIRA](./img/logo_small.png "Go client library for Atlassian JIRA.") ## Features * Authentication (HTTP Basic, OAuth, Session Cookie) -* Create and receive issues +* Create and retrieve issues * Create and retrieve issue transitions (status updates) -* Call every API endpoint of the JIRA, even it is not directly implemented in this library +* Call every API endpoint of the JIRA, even if it is not directly implemented in this library -This package is not JIRA API complete (yet), but you can call every API endpoint you want. See [Call a not implemented API endpoint](#call-a-not-implemented-api-endpoint) how to do this. For all possible API endpoints of JRIA have a look at [latest JIRA REST API documentation](https://docs.atlassian.com/jira/REST/latest/). +This package is not JIRA API complete (yet), but you can call every API endpoint you want. See [Call a not implemented API endpoint](#call-a-not-implemented-api-endpoint) how to do this. 
For all possible API endpoints of JIRA have a look at [latest JIRA REST API documentation](https://docs.atlassian.com/jira/REST/latest/). ## Compatible JIRA versions @@ -27,6 +27,17 @@ It is go gettable $ go get github.com/andygrunwald/go-jira +For stable versions you can use one of our tags with [gopkg.in](http://labix.org/gopkg.in). E.g. + +```go +package main + +import ( + jira "gopkg.in/andygrunwald/go-jira.v1" +) +... +``` + (optional) to run unit / example tests: $ cd $GOPATH/src/github.com/andygrunwald/go-jira @@ -69,70 +80,36 @@ func main() { } ``` -### Authenticate with jira +### Authentication -Some actions require an authenticated user. +The `go-jira` library does not handle most authentication directly. Instead, authentication should be handled within +an `http.Client`. That client can then be passed into the `NewClient` function when creating a jira client. -#### Authenticate with basic auth +For convenience, capability for basic and cookie-based authentication is included in the main library. -Here is an example with a basic auth authentification. +#### Basic auth example -```go -package main - -import ( - "fmt" - "github.com/andygrunwald/go-jira" -) +A more thorough, [runnable example](examples/basicauth/main.go) is provided in the examples directory. **It's worth noting that using passwords in basic auth is now deprecated and will be removed.** Jira gives you the ability to [create tokens now.](https://confluence.atlassian.com/cloud/api-tokens-938839638.html) +```go func main() { - jiraClient, err := jira.NewClient(nil, "https://your.jira-instance.com/") - if err != nil { - panic(err) + tp := jira.BasicAuthTransport{ + Username: "username", + Password: "token", } - jiraClient.Authentication.SetBasicAuth("username", "password") - issue, _, err := jiraClient.Issue.Get("SYS-5156", nil) - if err != nil { - panic(err) - } + client, err := jira.NewClient(tp.Client(), "https://my.jira.com") - fmt.Printf("%s: %+v\n", issue.Key, issue.Fields.Summary) + u, _, err := client.User.Get("some_user") + + fmt.Printf("\nEmail: %v\nSuccess!\n", u.EmailAddress) } ``` -#### Authenticate with session cookie +#### Authenticate with session cookie [DEPRECATED] -Here is an example with a session cookie authentification. +JIRA [deprecated this authentication method.](https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-basic-auth-and-cookie-based-auth/) It's not longer available for use. 
-```go -package main - -import ( - "fmt" - "github.com/andygrunwald/go-jira" -) - -func main() { - jiraClient, err := jira.NewClient(nil, "https://your.jira-instance.com/") - if err != nil { - panic(err) - } - - res, err := jiraClient.Authentication.AcquireSessionCookie("username", "password") - if err != nil || res == false { - fmt.Printf("Result: %v\n", res) - panic(err) - } - - issue, _, err := jiraClient.Issue.Get("SYS-5156", nil) - if err != nil { - panic(err) - } - - fmt.Printf("%s: %+v\n", issue.Key, issue.Fields.Summary) -} -``` #### Authenticate with OAuth @@ -153,14 +130,14 @@ import ( ) func main() { - jiraClient, err := jira.NewClient(nil, "https://your.jira-instance.com/") - if err != nil { - panic(err) + base := "https://my.jira.com" + tp := jira.BasicAuthTransport{ + Username: "username", + Password: "token", } - res, err := jiraClient.Authentication.AcquireSessionCookie("username", "password") - if err != nil || res == false { - fmt.Printf("Result: %v\n", res) + jiraClient, err := jira.NewClient(tp.Client(), base) + if err != nil { panic(err) } @@ -206,11 +183,17 @@ import ( ) func main() { - jiraClient, _ := jira.NewClient(nil, "https://jira.atlassian.com/") - req, _ := jiraClient.NewRequest("GET", "/rest/api/2/project", nil) + base := "https://my.jira.com" + tp := jira.BasicAuthTransport{ + Username: "username", + Password: "token", + } + + jiraClient, err := jira.NewClient(tp.Client(), base) + req, _ := jiraClient.NewRequest("GET", "rest/api/2/project", nil) projects := new([]jira.Project) - _, err := jiraClient.Do(req, projects) + _, err = jiraClient.Do(req, projects) if err != nil { panic(err) } @@ -250,10 +233,22 @@ A few examples: * Correct typos in the README / documentation * Reporting bugs * Implement a new feature or endpoint -* Sharing the love if [go-jira](https://github.com/andygrunwald/go-jira) and help people to get use to it +* Sharing the love of [go-jira](https://github.com/andygrunwald/go-jira) and help people to get use to it If you are new to pull requests, checkout [Collaborating on projects using issues and pull requests / Creating a pull request](https://help.github.com/articles/creating-a-pull-request/). +### Dependency management + +`go-jira` uses `dep` for dependency management. After cloning the repo, it's easy to make sure you have the correct dependencies by running `dep ensure`. + +For adding new dependencies, updating dependencies, and other operations, the [Daily Dep](https://golang.github.io/dep/docs/daily-dep.html) is a good place to start. + +### Sandbox environment for testing + +Jira offers sandbox test environments at http://go.atlassian.com/cloud-dev. + +You can read more about them at https://developer.atlassian.com/blog/2016/04/cloud-ecosystem-dev-env/. + ## License This project is released under the terms of the [MIT license](http://en.wikipedia.org/wiki/MIT_License). diff --git a/vendor/github.com/andygrunwald/go-jira/authentication.go b/vendor/github.com/andygrunwald/go-jira/authentication.go index 56df3ad..f848a1d 100644 --- a/vendor/github.com/andygrunwald/go-jira/authentication.go +++ b/vendor/github.com/andygrunwald/go-jira/authentication.go @@ -54,6 +54,8 @@ type Session struct { // However, this resource may be used to mimic the behaviour of JIRA's log-in page (e.g. to display log-in errors to a user). 
// // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#auth/1/session +// +// Deprecated: Use CookieAuthTransport instead func (s *AuthenticationService) AcquireSessionCookie(username, password string) (bool, error) { apiEndpoint := "rest/auth/1/session" body := struct { @@ -90,6 +92,8 @@ func (s *AuthenticationService) AcquireSessionCookie(username, password string) } // SetBasicAuth sets username and password for the basic auth against the JIRA instance. +// +// Deprecated: Use BasicAuthTransport instead func (s *AuthenticationService) SetBasicAuth(username, password string) { s.username = username s.password = password @@ -112,9 +116,12 @@ func (s *AuthenticationService) Authenticated() bool { // Logout logs out the current user that has been authenticated and the session in the client is destroyed. // // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#auth/1/session +// +// Deprecated: Use CookieAuthTransport to create base client. Logging out is as simple as not using the +// client anymore func (s *AuthenticationService) Logout() error { if s.authType != authTypeSession || s.client.session == nil { - return fmt.Errorf("No user is authenticated yet.") + return fmt.Errorf("no user is authenticated") } apiEndpoint := "rest/auth/1/session" diff --git a/vendor/github.com/andygrunwald/go-jira/board.go b/vendor/github.com/andygrunwald/go-jira/board.go index a79ed93..091797d 100644 --- a/vendor/github.com/andygrunwald/go-jira/board.go +++ b/vendor/github.com/andygrunwald/go-jira/board.go @@ -2,6 +2,7 @@ package jira import ( "fmt" + "strconv" "time" ) @@ -44,9 +45,21 @@ type BoardListOptions struct { SearchOptions } -// Wrapper struct for search result -type sprintsResult struct { - Sprints []Sprint `json:"values" structs:"values"` +// GetAllSprintsOptions specifies the optional parameters to the BoardService.GetList +type GetAllSprintsOptions struct { + // State filters results to sprints in the specified states, comma-separate list + State string `url:"state,omitempty"` + + SearchOptions +} + +// SprintsList reflects a list of agile sprints +type SprintsList struct { + MaxResults int `json:"maxResults" structs:"maxResults"` + StartAt int `json:"startAt" structs:"startAt"` + Total int `json:"total" structs:"total"` + IsLast bool `json:"isLast" structs:"isLast"` + Values []Sprint `json:"values" structs:"values"` } // Sprint represents a sprint on JIRA agile board @@ -67,6 +80,9 @@ type Sprint struct { func (s *BoardService) GetAllBoards(opt *BoardListOptions) (*BoardsList, *Response, error) { apiEndpoint := "rest/agile/1.0/board" url, err := addOptions(apiEndpoint, opt) + if err != nil { + return nil, nil, err + } req, err := s.client.NewRequest("GET", url, nil) if err != nil { return nil, nil, err @@ -75,7 +91,8 @@ func (s *BoardService) GetAllBoards(opt *BoardListOptions) (*BoardsList, *Respon boards := new(BoardsList) resp, err := s.client.Do(req, boards) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } return boards, resp, err @@ -95,8 +112,10 @@ func (s *BoardService) GetBoard(boardID int) (*Board, *Response, error) { board := new(Board) resp, err := s.client.Do(req, board) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } + return board, resp, nil } @@ -118,7 +137,8 @@ func (s *BoardService) CreateBoard(board *Board) (*Board, *Response, error) { responseBoard := new(Board) resp, err := s.client.Do(req, responseBoard) if err != nil { - return nil, resp, err + 
jerr := NewJiraError(resp, err) + return nil, resp, jerr } return responseBoard, resp, nil @@ -135,21 +155,50 @@ func (s *BoardService) DeleteBoard(boardID int) (*Board, *Response, error) { } resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } return nil, resp, err } -// GetAllSprints will returns all sprints from a board, for a given board Id. +// GetAllSprints will return all sprints from a board, for a given board Id. // This only includes sprints that the user has permission to view. // // JIRA API docs: https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/sprint func (s *BoardService) GetAllSprints(boardID string) ([]Sprint, *Response, error) { - apiEndpoint := fmt.Sprintf("rest/agile/1.0/board/%s/sprint", boardID) - req, err := s.client.NewRequest("GET", apiEndpoint, nil) + id, err := strconv.Atoi(boardID) + if err != nil { + return nil, nil, err + } + + result, response, err := s.GetAllSprintsWithOptions(id, &GetAllSprintsOptions{}) if err != nil { return nil, nil, err } - result := new(sprintsResult) + return result.Values, response, nil +} + +// GetAllSprintsWithOptions will return sprints from a board, for a given board Id and filtering options +// This only includes sprints that the user has permission to view. +// +// JIRA API docs: https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/sprint +func (s *BoardService) GetAllSprintsWithOptions(boardID int, options *GetAllSprintsOptions) (*SprintsList, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/agile/1.0/board/%d/sprint", boardID) + url, err := addOptions(apiEndpoint, options) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + result := new(SprintsList) resp, err := s.client.Do(req, result) - return result.Sprints, resp, err + if err != nil { + err = NewJiraError(resp, err) + } + + return result, resp, err } diff --git a/vendor/github.com/andygrunwald/go-jira/board_test.go b/vendor/github.com/andygrunwald/go-jira/board_test.go index 454fc44..192caf0 100644 --- a/vendor/github.com/andygrunwald/go-jira/board_test.go +++ b/vendor/github.com/andygrunwald/go-jira/board_test.go @@ -184,3 +184,35 @@ func TestBoardService_GetAllSprints(t *testing.T) { t.Errorf("Expected 4 transitions. Got %d", len(sprints)) } } + +func TestBoardService_GetAllSprintsWithOptions(t *testing.T) { + setup() + defer teardown() + + testAPIEndpoint := "/rest/agile/1.0/board/123/sprint" + + raw, err := ioutil.ReadFile("./mocks/sprints_filtered.json") + if err != nil { + t.Error(err.Error()) + } + + testMux.HandleFunc(testAPIEndpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEndpoint) + fmt.Fprint(w, string(raw)) + }) + + sprints, _, err := testClient.Board.GetAllSprintsWithOptions(123, &GetAllSprintsOptions{State: "active,future"}) + + if err != nil { + t.Errorf("Got error: %v", err) + } + + if sprints == nil { + t.Error("Expected sprint list. Got nil.") + } + + if len(sprints.Values) != 1 { + t.Errorf("Expected 1 transition. Got %d", len(sprints.Values)) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/component.go b/vendor/github.com/andygrunwald/go-jira/component.go new file mode 100644 index 0000000..407ad36 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/component.go @@ -0,0 +1,38 @@ +package jira + +// ComponentService handles components for the JIRA instance / API. 
+// +// JIRA API docs: https://docs.atlassian.com/software/jira/docs/api/REST/7.10.1/#api/2/component +type ComponentService struct { + client *Client +} + +// CreateComponentOptions are passed to the ComponentService.Create function to create a new JIRA component +type CreateComponentOptions struct { + Name string `json:"name,omitempty" structs:"name,omitempty"` + Description string `json:"description,omitempty" structs:"description,omitempty"` + Lead *User `json:"lead,omitempty" structs:"lead,omitempty"` + LeadUserName string `json:"leadUserName,omitempty" structs:"leadUserName,omitempty"` + AssigneeType string `json:"assigneeType,omitempty" structs:"assigneeType,omitempty"` + Assignee *User `json:"assignee,omitempty" structs:"assignee,omitempty"` + Project string `json:"project,omitempty" structs:"project,omitempty"` + ProjectID int `json:"projectId,omitempty" structs:"projectId,omitempty"` +} + +// Create creates a new JIRA component based on the given options. +func (s *ComponentService) Create(options *CreateComponentOptions) (*ProjectComponent, *Response, error) { + apiEndpoint := "rest/api/2/component" + req, err := s.client.NewRequest("POST", apiEndpoint, options) + if err != nil { + return nil, nil, err + } + + component := new(ProjectComponent) + resp, err := s.client.Do(req, component) + + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + + return component, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/component_test.go b/vendor/github.com/andygrunwald/go-jira/component_test.go new file mode 100644 index 0000000..d92c27e --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/component_test.go @@ -0,0 +1,29 @@ +package jira + +import ( + "fmt" + "net/http" + "testing" +) + +func TestComponentService_Create_Success(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/component", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testRequestURL(t, r, "/rest/api/2/component") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{ "self": "http://www.example.com/jira/rest/api/2/component/10000", "id": "10000", "name": "Component 1", "description": "This is a JIRA component", "lead": { "self": "http://www.example.com/jira/rest/api/2/user?username=fred", "name": "fred", "avatarUrls": { "48x48": "http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", "24x24": "http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred", "16x16": "http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", "32x32": "http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred" }, "displayName": "Fred F. User", "active": false }, "assigneeType": "PROJECT_LEAD", "assignee": { "self": "http://www.example.com/jira/rest/api/2/user?username=fred", "name": "fred", "avatarUrls": { "48x48": "http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", "24x24": "http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred", "16x16": "http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", "32x32": "http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred" }, "displayName": "Fred F. 
User", "active": false }, "realAssigneeType": "PROJECT_LEAD", "realAssignee": { "self": "http://www.example.com/jira/rest/api/2/user?username=fred", "name": "fred", "avatarUrls": { "48x48": "http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", "24x24": "http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred", "16x16": "http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", "32x32": "http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred" }, "displayName": "Fred F. User", "active": false }, "isAssigneeTypeValid": false, "project": "HSP", "projectId": 10000 }`) + }) + + component, _, err := testClient.Component.Create(&CreateComponentOptions{ + Name: "foo-bar", + }) + if component == nil { + t.Error("Expected component. Component is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/error.go b/vendor/github.com/andygrunwald/go-jira/error.go new file mode 100644 index 0000000..bd1a5b9 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/error.go @@ -0,0 +1,90 @@ +package jira + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "strings" + + "github.com/pkg/errors" +) + +// Error message from JIRA +// See https://docs.atlassian.com/jira/REST/cloud/#error-responses +type Error struct { + HTTPError error + ErrorMessages []string `json:"errorMessages"` + Errors map[string]string `json:"errors"` +} + +// NewJiraError creates a new jira Error +func NewJiraError(resp *Response, httpError error) error { + if resp == nil { + return errors.Wrap(httpError, "No response returned") + } + + defer resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return errors.Wrap(err, httpError.Error()) + } + jerr := Error{HTTPError: httpError} + contentType := resp.Header.Get("Content-Type") + if strings.HasPrefix(contentType, "application/json") { + err = json.Unmarshal(body, &jerr) + if err != nil { + httpError = errors.Wrap(errors.New("Could not parse JSON"), httpError.Error()) + return errors.Wrap(err, httpError.Error()) + } + } else { + if httpError == nil { + return fmt.Errorf("Got Response Status %s:%s", resp.Status, string(body)) + } + return errors.Wrap(httpError, fmt.Sprintf("%s: %s", resp.Status, string(body))) + } + + return &jerr +} + +// Error is a short string representing the error +func (e *Error) Error() string { + if len(e.ErrorMessages) > 0 { + // return fmt.Sprintf("%v", e.HTTPError) + return fmt.Sprintf("%s: %v", e.ErrorMessages[0], e.HTTPError) + } + if len(e.Errors) > 0 { + for key, value := range e.Errors { + return fmt.Sprintf("%s - %s: %v", key, value, e.HTTPError) + } + } + return e.HTTPError.Error() +} + +// LongError is a full representation of the error as a string +func (e *Error) LongError() string { + var msg bytes.Buffer + if e.HTTPError != nil { + msg.WriteString("Original:\n") + msg.WriteString(e.HTTPError.Error()) + msg.WriteString("\n") + } + if len(e.ErrorMessages) > 0 { + msg.WriteString("Messages:\n") + for _, v := range e.ErrorMessages { + msg.WriteString(" - ") + msg.WriteString(v) + msg.WriteString("\n") + } + } + if len(e.Errors) > 0 { + for key, value := range e.Errors { + msg.WriteString(" - ") + msg.WriteString(key) + msg.WriteString(" - ") + msg.WriteString(value) + msg.WriteString("\n") + } + } + return msg.String() +} diff --git a/vendor/github.com/andygrunwald/go-jira/error_test.go b/vendor/github.com/andygrunwald/go-jira/error_test.go new file mode 100644 index 0000000..f010ee0 --- 
/dev/null +++ b/vendor/github.com/andygrunwald/go-jira/error_test.go @@ -0,0 +1,205 @@ +package jira + +import ( + "errors" + "fmt" + "net/http" + "strings" + "testing" +) + +func TestError_NewJiraError(t *testing.T) { + setup() + defer teardown() + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + fmt.Fprint(w, `{"errorMessages":["Issue does not exist or you do not have permission to see it."],"errors":{}}`) + }) + + req, _ := testClient.NewRequest("GET", "/", nil) + resp, _ := testClient.Do(req, nil) + + err := NewJiraError(resp, errors.New("Original http error")) + if err, ok := err.(*Error); !ok { + t.Errorf("Expected jira Error. Got %s", err.Error()) + } + + if !strings.Contains(err.Error(), "Issue does not exist") { + t.Errorf("Expected issue message. Got: %s", err.Error()) + } +} + +func TestError_NoResponse(t *testing.T) { + err := NewJiraError(nil, errors.New("Original http error")) + + msg := err.Error() + if !strings.Contains(msg, "Original http error") { + t.Errorf("Expected the original error message: Got\n%s\n", msg) + } + + if !strings.Contains(msg, "No response") { + t.Errorf("Expected the 'No response' error message: Got\n%s\n", msg) + } +} + +func TestError_NoJSON(t *testing.T) { + setup() + defer teardown() + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `Original message body`) + }) + + req, _ := testClient.NewRequest("GET", "/", nil) + resp, _ := testClient.Do(req, nil) + + err := NewJiraError(resp, errors.New("Original http error")) + msg := err.Error() + + if !strings.Contains(msg, "200 OK: Original message body: Original http error") { + t.Errorf("Expected the HTTP status: Got\n%s\n", msg) + } +} + +func TestError_Unauthorized_NilError(t *testing.T) { + setup() + defer teardown() + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusUnauthorized) + fmt.Fprint(w, `User is not authorized`) + }) + + req, _ := testClient.NewRequest("GET", "/", nil) + resp, _ := testClient.Do(req, nil) + + err := NewJiraError(resp, nil) + msg := err.Error() + if !strings.Contains(msg, "401 Unauthorized:User is not authorized") { + t.Errorf("Expected Unauthorized HTTP status: Got\n%s\n", msg) + } +} + +func TestError_BadJSON(t *testing.T) { + setup() + defer teardown() + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + fmt.Fprint(w, `Not JSON`) + }) + + req, _ := testClient.NewRequest("GET", "/", nil) + resp, _ := testClient.Do(req, nil) + + err := NewJiraError(resp, errors.New("Original http error")) + msg := err.Error() + + if !strings.Contains(msg, "Could not parse JSON") { + t.Errorf("Expected the 'Could not parse JSON' error message: Got\n%s\n", msg) + } +} + +func TestError_NilOriginalMessage(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Errorf("Expected an error message. Got a panic (%v)", r) + } + }() + + msgErr := &Error{ + HTTPError: nil, + ErrorMessages: []string{"Issue does not exist"}, + Errors: map[string]string{ + "issuetype": "issue type is required", + "title": "title is required", + }, + } + + _ = msgErr.Error() +} + +func TestError_NilOriginalMessageLongError(t *testing.T) { + defer func() { + if r := recover(); r != nil { + t.Errorf("Expected an error message. 
Got a panic (%v)", r) + } + }() + + msgErr := &Error{ + HTTPError: nil, + ErrorMessages: []string{"Issue does not exist"}, + Errors: map[string]string{ + "issuetype": "issue type is required", + "title": "title is required", + }, + } + + _ = msgErr.LongError() +} + +func TestError_ShortMessage(t *testing.T) { + msgErr := &Error{ + HTTPError: errors.New("Original http error"), + ErrorMessages: []string{"Issue does not exist"}, + Errors: map[string]string{ + "issuetype": "issue type is required", + "title": "title is required", + }, + } + + mapErr := &Error{ + HTTPError: errors.New("Original http error"), + ErrorMessages: nil, + Errors: map[string]string{ + "issuetype": "issue type is required", + "title": "title is required", + }, + } + + noErr := &Error{ + HTTPError: errors.New("Original http error"), + ErrorMessages: nil, + Errors: nil, + } + + err := msgErr.Error() + if err != "Issue does not exist: Original http error" { + t.Errorf("Expected short message. Got %s", err) + } + + err = mapErr.Error() + if !(strings.Contains(err, "issue type is required") || strings.Contains(err, "title is required")) { + t.Errorf("Expected short message. Got %s", err) + } + + err = noErr.Error() + if err != "Original http error" { + t.Errorf("Expected original error message. Got %s", err) + } +} + +func TestError_LongMessage(t *testing.T) { + longError := &Error{ + HTTPError: errors.New("Original http error"), + ErrorMessages: []string{"Issue does not exist."}, + Errors: map[string]string{ + "issuetype": "issue type is required", + "title": "title is required", + }, + } + + msg := longError.LongError() + if !strings.Contains(msg, "Original http error") { + t.Errorf("Expected the error message: Got\n%s\n", msg) + } + + if !strings.Contains(msg, "Issue does not exist") { + t.Errorf("Expected the error message: Got\n%s\n", msg) + } + + if !strings.Contains(msg, "title - title is required") { + t.Errorf("Expected the error map: Got\n%s\n", msg) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/basicauth/main.go b/vendor/github.com/andygrunwald/go-jira/examples/basicauth/main.go new file mode 100644 index 0000000..54deebc --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/basicauth/main.go @@ -0,0 +1,47 @@ +package main + +import ( + "bufio" + "fmt" + "os" + "strings" + "syscall" + + jira "github.com/andygrunwald/go-jira" + "golang.org/x/crypto/ssh/terminal" +) + +func main() { + r := bufio.NewReader(os.Stdin) + + fmt.Print("Jira URL: ") + jiraURL, _ := r.ReadString('\n') + + fmt.Print("Jira Username: ") + username, _ := r.ReadString('\n') + + fmt.Print("Jira Password: ") + bytePassword, _ := terminal.ReadPassword(int(syscall.Stdin)) + password := string(bytePassword) + + tp := jira.BasicAuthTransport{ + Username: strings.TrimSpace(username), + Password: strings.TrimSpace(password), + } + + client, err := jira.NewClient(tp.Client(), strings.TrimSpace(jiraURL)) + if err != nil { + fmt.Printf("\nerror: %v\n", err) + return + } + + u, _, err := client.User.Get("admin") + + if err != nil { + fmt.Printf("\nerror: %v\n", err) + return + } + + fmt.Printf("\nEmail: %v\nSuccess!\n", u.EmailAddress) + +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/create/main.go b/vendor/github.com/andygrunwald/go-jira/examples/create/main.go new file mode 100644 index 0000000..3f4d8bb --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/create/main.go @@ -0,0 +1,63 @@ +package main + +import ( + "bufio" + "fmt" + "os" + "strings" + "syscall" + + jira 
"github.com/andygrunwald/go-jira" + "golang.org/x/crypto/ssh/terminal" +) + +func main() { + r := bufio.NewReader(os.Stdin) + + fmt.Print("Jira URL: ") + jiraURL, _ := r.ReadString('\n') + + fmt.Print("Jira Username: ") + username, _ := r.ReadString('\n') + + fmt.Print("Jira Password: ") + bytePassword, _ := terminal.ReadPassword(int(syscall.Stdin)) + password := string(bytePassword) + + tp := jira.BasicAuthTransport{ + Username: strings.TrimSpace(username), + Password: strings.TrimSpace(password), + } + + client, err := jira.NewClient(tp.Client(), strings.TrimSpace(jiraURL)) + if err != nil { + fmt.Printf("\nerror: %v\n", err) + return + } + + i := jira.Issue{ + Fields: &jira.IssueFields{ + Assignee: &jira.User{ + Name: "myuser", + }, + Reporter: &jira.User{ + Name: "youruser", + }, + Description: "Test Issue", + Type: jira.IssueType{ + Name: "Bug", + }, + Project: jira.Project{ + Key: "PROJ1", + }, + Summary: "Just a demo issue", + }, + } + + issue, _, err := client.Issue.Create(&i) + if err != nil { + panic(err) + } + + fmt.Printf("%s: %+v\n", issue.Key, issue.Fields.Summary) +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/do/main.go b/vendor/github.com/andygrunwald/go-jira/examples/do/main.go new file mode 100644 index 0000000..c6f9a11 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/do/main.go @@ -0,0 +1,22 @@ +package main + +import ( + "fmt" + + jira "github.com/andygrunwald/go-jira" +) + +func main() { + jiraClient, _ := jira.NewClient(nil, "https://jira.atlassian.com/") + req, _ := jiraClient.NewRequest("GET", "/rest/api/2/project", nil) + + projects := new([]jira.Project) + _, err := jiraClient.Do(req, projects) + if err != nil { + panic(err) + } + + for _, project := range *projects { + fmt.Printf("%s: %s\n", project.Key, project.Name) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/ignorecerts/main.go b/vendor/github.com/andygrunwald/go-jira/examples/ignorecerts/main.go new file mode 100644 index 0000000..6fcc602 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/ignorecerts/main.go @@ -0,0 +1,24 @@ +package main + +import ( + "crypto/tls" + "fmt" + "net/http" + + jira "github.com/andygrunwald/go-jira" +) + +func main() { + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + client := &http.Client{Transport: tr} + + jiraClient, _ := jira.NewClient(client, "https://issues.apache.org/jira/") + issue, _, _ := jiraClient.Issue.Get("MESOS-3325", nil) + + fmt.Printf("%s: %+v\n", issue.Key, issue.Fields.Summary) + fmt.Printf("Type: %s\n", issue.Fields.Type.Name) + fmt.Printf("Priority: %s\n", issue.Fields.Priority.Name) + +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/newclient/main.go b/vendor/github.com/andygrunwald/go-jira/examples/newclient/main.go new file mode 100644 index 0000000..e0bec52 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/newclient/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "fmt" + + jira "github.com/andygrunwald/go-jira" +) + +func main() { + jiraClient, _ := jira.NewClient(nil, "https://issues.apache.org/jira/") + issue, _, _ := jiraClient.Issue.Get("MESOS-3325", nil) + + fmt.Printf("%s: %+v\n", issue.Key, issue.Fields.Summary) + fmt.Printf("Type: %s\n", issue.Fields.Type.Name) + fmt.Printf("Priority: %s\n", issue.Fields.Priority.Name) +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/renderedfields/main.go b/vendor/github.com/andygrunwald/go-jira/examples/renderedfields/main.go new file mode 
100644 index 0000000..ab23420 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/renderedfields/main.go @@ -0,0 +1,66 @@ +package main + +import ( + "bufio" + "fmt" + "net/http" + "os" + "strings" + "syscall" + + jira "github.com/andygrunwald/go-jira" + "golang.org/x/crypto/ssh/terminal" +) + +func main() { + r := bufio.NewReader(os.Stdin) + + fmt.Print("Jira URL: ") + jiraURL, _ := r.ReadString('\n') + + fmt.Print("Jira Issue key: ") + key, _ := r.ReadString('\n') + key = strings.TrimSpace(key) + + fmt.Print("Jira Username: ") + username, _ := r.ReadString('\n') + + fmt.Print("Jira Password: ") + bytePassword, _ := terminal.ReadPassword(int(syscall.Stdin)) + password := string(bytePassword) + + var tp *http.Client + + if strings.TrimSpace(username) == "" { + tp = nil + } else { + + ba := jira.BasicAuthTransport{ + Username: strings.TrimSpace(username), + Password: strings.TrimSpace(password), + } + tp = ba.Client() + } + + client, err := jira.NewClient(tp, strings.TrimSpace(jiraURL)) + if err != nil { + fmt.Printf("\nerror: %v\n", err) + return + } + + fmt.Printf("Targeting %s for issue %s\n", strings.TrimSpace(jiraURL), key) + + options := &jira.GetQueryOptions{Expand: "renderedFields"} + u, _, err := client.Issue.Get(key, options) + + if err != nil { + fmt.Printf("\n==> error: %v\n", err) + return + } + + fmt.Printf("RenderedFields: %+v\n", *u.RenderedFields) + + for _, c := range u.RenderedFields.Comments.Comments { + fmt.Printf(" %+v\n", c) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/examples/searchpages/main.go b/vendor/github.com/andygrunwald/go-jira/examples/searchpages/main.go new file mode 100644 index 0000000..d3be76e --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/examples/searchpages/main.go @@ -0,0 +1,65 @@ +package main + +import ( + "bufio" + "fmt" + jira "github.com/andygrunwald/go-jira" + "golang.org/x/crypto/ssh/terminal" + "log" + "os" + "strings" + "syscall" + "time" +) + +func main() { + r := bufio.NewReader(os.Stdin) + + fmt.Print("Jira URL: ") + jiraURL, _ := r.ReadString('\n') + + fmt.Print("Jira Username: ") + username, _ := r.ReadString('\n') + + fmt.Print("Jira Password: ") + bytePassword, _ := terminal.ReadPassword(int(syscall.Stdin)) + password := string(bytePassword) + + fmt.Print("\nJira Project Key: ") // e.g. 
TES or WOW + jiraPK, _ := r.ReadString('\n') + + tp := jira.BasicAuthTransport{ + Username: strings.TrimSpace(username), + Password: strings.TrimSpace(password), + } + + client, err := jira.NewClient(tp.Client(), strings.TrimSpace(jiraURL)) + if err != nil { + log.Fatal(err) + } + + var issues []jira.Issue + + // appendFunc will append jira issues to []jira.Issue + appendFunc := func(i jira.Issue) (err error) { + issues = append(issues, i) + return err + } + + // SearchPages will page through results and pass each issue to appendFunc + // In this example, we'll search for all the issues in the target project + err = client.Issue.SearchPages(fmt.Sprintf(`project=%s`, strings.TrimSpace(jiraPK)), nil, appendFunc) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("%d issues found.\n", len(issues)) + + for _, i := range issues { + t := time.Time(i.Fields.Created) // convert go-jira.Time to time.Time for manipulation + date := t.Format("2006-01-02") + clock := t.Format("15:04") + fmt.Printf("Creation Date: %s\nCreation Time: %s\nIssue Key: %s\nIssue Summary: %s\n\n", date, clock, i.Key, i.Fields.Summary) + } + +} diff --git a/vendor/github.com/andygrunwald/go-jira/field.go b/vendor/github.com/andygrunwald/go-jira/field.go new file mode 100644 index 0000000..257d4f9 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/field.go @@ -0,0 +1,43 @@ +package jira + +// FieldService handles fields for the JIRA instance / API. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-Field +type FieldService struct { + client *Client +} + +// Field represents a field of a JIRA issue. +type Field struct { + ID string `json:"id,omitempty" structs:"id,omitempty"` + Key string `json:"key,omitempty" structs:"key,omitempty"` + Name string `json:"name,omitempty" structs:"name,omitempty"` + Custom bool `json:"custom,omitempty" structs:"custom,omitempty"` + Navigable bool `json:"navigable,omitempty" structs:"navigable,omitempty"` + Searchable bool `json:"searchable,omitempty" structs:"searchable,omitempty"` + ClauseNames []string `json:"clauseNames,omitempty" structs:"clauseNames,omitempty"` + Schema FieldSchema `json:"schema,omitempty" structs:"schema,omitempty"` +} + +type FieldSchema struct { + Type string `json:"type,omitempty" structs:"type,omitempty"` + System string `json:"system,omitempty" structs:"system,omitempty"` +} + +// GetList gets all fields from JIRA +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-field-get +func (s *FieldService) GetList() ([]Field, *Response, error) { + apiEndpoint := "rest/api/2/field" + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + fieldList := []Field{} + resp, err := s.client.Do(req, &fieldList) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return fieldList, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/field_test.go b/vendor/github.com/andygrunwald/go-jira/field_test.go new file mode 100644 index 0000000..a2deb53 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/field_test.go @@ -0,0 +1,32 @@ +package jira + +import ( + "fmt" + "io/ioutil" + "net/http" + "testing" +) + +func TestFieldService_GetList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/field" + + raw, err := ioutil.ReadFile("./mocks/all_fields.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, 
"GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, string(raw)) + }) + + fields, _, err := testClient.Field.GetList() + if fields == nil { + t.Error("Expected field list. Field list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/filter.go b/vendor/github.com/andygrunwald/go-jira/filter.go new file mode 100644 index 0000000..209e63b --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/filter.go @@ -0,0 +1,93 @@ +package jira + +import "github.com/google/go-querystring/query" +import "fmt" + +// FilterService handles fields for the JIRA instance / API. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-group-Filter +type FilterService struct { + client *Client +} + +// Filter represents a Filter in Jira +type Filter struct { + Self string `json:"self"` + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Owner User `json:"owner"` + Jql string `json:"jql"` + ViewURL string `json:"viewUrl"` + SearchURL string `json:"searchUrl"` + Favourite bool `json:"favourite"` + FavouritedCount int `json:"favouritedCount"` + SharePermissions []interface{} `json:"sharePermissions"` + Subscriptions struct { + Size int `json:"size"` + Items []interface{} `json:"items"` + MaxResults int `json:"max-results"` + StartIndex int `json:"start-index"` + EndIndex int `json:"end-index"` + } `json:"subscriptions"` +} + +// GetList retrieves all filters from Jira +func (fs *FilterService) GetList() ([]*Filter, *Response, error) { + + options := &GetQueryOptions{} + apiEndpoint := "rest/api/2/filter" + req, err := fs.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + if options != nil { + q, err := query.Values(options) + if err != nil { + return nil, nil, err + } + req.URL.RawQuery = q.Encode() + } + + filters := []*Filter{} + resp, err := fs.client.Do(req, &filters) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + return filters, resp, err +} + +// GetFavouriteList retrieves the user's favourited filters from Jira +func (fs *FilterService) GetFavouriteList() ([]*Filter, *Response, error) { + apiEndpoint := "rest/api/2/filter/favourite" + req, err := fs.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + filters := []*Filter{} + resp, err := fs.client.Do(req, &filters) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + return filters, resp, err +} + +// Get retrieves a single Filter from Jira +func (fs *FilterService) Get(filterID int) (*Filter, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/api/2/filter/%d", filterID) + req, err := fs.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + filter := new(Filter) + resp, err := fs.client.Do(req, filter) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return filter, resp, err +} diff --git a/vendor/github.com/andygrunwald/go-jira/filter_test.go b/vendor/github.com/andygrunwald/go-jira/filter_test.go new file mode 100644 index 0000000..9c94bcf --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/filter_test.go @@ -0,0 +1,78 @@ +package jira + +import ( + "fmt" + "io/ioutil" + "net/http" + "testing" +) + +func TestFilterService_GetList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/filter" + raw, err := ioutil.ReadFile("./mocks/all_filters.json") + 
if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(writer http.ResponseWriter, request *http.Request) { + testMethod(t, request, "GET") + testRequestURL(t, request, testAPIEdpoint) + fmt.Fprint(writer, string(raw)) + }) + + filters, _, err := testClient.Filter.GetList() + if filters == nil { + t.Error("Expected Filters list. Filters list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestFilterService_Get(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/filter/10000" + raw, err := ioutil.ReadFile("./mocks/filter.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(writer http.ResponseWriter, request *http.Request) { + testMethod(t, request, "GET") + testRequestURL(t, request, testAPIEdpoint) + fmt.Fprintf(writer, string(raw)) + }) + + filter, _, err := testClient.Filter.Get(10000) + if filter == nil { + t.Errorf("Expected Filter, got nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } + +} + +func TestFilterService_GetFavouriteList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/filter/favourite" + raw, err := ioutil.ReadFile("./mocks/favourite_filters.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(writer http.ResponseWriter, request *http.Request) { + testMethod(t, request, "GET") + testRequestURL(t, request, testAPIEdpoint) + fmt.Fprint(writer, string(raw)) + }) + + filters, _, err := testClient.Filter.GetFavouriteList() + if filters == nil { + t.Error("Expected Filters list. Filters list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/group.go b/vendor/github.com/andygrunwald/go-jira/group.go index e61b493..8ceadc9 100644 --- a/vendor/github.com/andygrunwald/go-jira/group.go +++ b/vendor/github.com/andygrunwald/go-jira/group.go @@ -2,6 +2,7 @@ package jira import ( "fmt" + "net/url" ) // GroupService handles Groups for the JIRA instance / API. @@ -20,6 +21,23 @@ type groupMembersResult struct { Members []GroupMember `json:"values"` } +// Group represents a JIRA group +type Group struct { + ID string `json:"id"` + Title string `json:"title"` + Type string `json:"type"` + Properties groupProperties `json:"properties"` + AdditionalProperties bool `json:"additionalProperties"` +} + +type groupProperties struct { + Name groupPropertiesName `json:"name"` +} + +type groupPropertiesName struct { + Type string `json:"type"` +} + // GroupMember reflects a single member of a group type GroupMember struct { Self string `json:"self,omitempty"` @@ -31,13 +49,22 @@ type GroupMember struct { TimeZone string `json:"timeZone,omitempty"` } +// GroupSearchOptions specifies the optional parameters for the Get Group methods +type GroupSearchOptions struct { + StartAt int + MaxResults int + IncludeInactiveUsers bool +} + // Get returns a paginated list of users who are members of the specified group and its subgroups. // Users in the page are ordered by user names. // User of this resource is required to have sysadmin or admin permissions. 
// // JIRA API docs: https://docs.atlassian.com/jira/REST/server/#api/2/group-getUsersFromGroup +// +// WARNING: This API only returns the first page of group members func (s *GroupService) Get(name string) ([]GroupMember, *Response, error) { - apiEndpoint := fmt.Sprintf("rest/api/2/group/member?groupname=%s", name) + apiEndpoint := fmt.Sprintf("/rest/api/2/group/member?groupname=%s", url.QueryEscape(name)) req, err := s.client.NewRequest("GET", apiEndpoint, nil) if err != nil { return nil, nil, err @@ -51,3 +78,77 @@ func (s *GroupService) Get(name string) ([]GroupMember, *Response, error) { return group.Members, resp, nil } + +// GetWithOptions returns a paginated list of members of the specified group and its subgroups. +// Users in the page are ordered by user names. +// User of this resource is required to have sysadmin or admin permissions. +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/server/#api/2/group-getUsersFromGroup +func (s *GroupService) GetWithOptions(name string, options *GroupSearchOptions) ([]GroupMember, *Response, error) { + var apiEndpoint string + if options == nil { + apiEndpoint = fmt.Sprintf("/rest/api/2/group/member?groupname=%s", url.QueryEscape(name)) + } else { + apiEndpoint = fmt.Sprintf( + "/rest/api/2/group/member?groupname=%s&startAt=%d&maxResults=%d&includeInactiveUsers=%t", + url.QueryEscape(name), + options.StartAt, + options.MaxResults, + options.IncludeInactiveUsers, + ) + } + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + group := new(groupMembersResult) + resp, err := s.client.Do(req, group) + if err != nil { + return nil, resp, err + } + return group.Members, resp, nil +} + +// Add adds user to group +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/group-addUserToGroup +func (s *GroupService) Add(groupname string, username string) (*Group, *Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/group/user?groupname=%s", groupname) + var user struct { + Name string `json:"name"` + } + user.Name = username + req, err := s.client.NewRequest("POST", apiEndpoint, &user) + if err != nil { + return nil, nil, err + } + + responseGroup := new(Group) + resp, err := s.client.Do(req, responseGroup) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return responseGroup, resp, nil +} + +// Remove removes user from group +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/group-removeUserFromGroup +func (s *GroupService) Remove(groupname string, username string) (*Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/group/user?groupname=%s&username=%s", groupname, username) + req, err := s.client.NewRequest("DELETE", apiEndpoint, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + jerr := NewJiraError(resp, err) + return resp, jerr + } + + return resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/group_test.go b/vendor/github.com/andygrunwald/go-jira/group_test.go new file mode 100644 index 0000000..e4503dd --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/group_test.go @@ -0,0 +1,111 @@ +package jira + +import ( + "fmt" + "net/http" + "testing" +) + +func TestGroupService_Get(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/group/member", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/group/member?groupname=default") + fmt.Fprint(w, 
`{"self":"http://www.example.com/jira/rest/api/2/group/member?includeInactiveUsers=false&maxResults=50&groupname=default&startAt=0","maxResults":50,"startAt":0,"total":2,"isLast":true,"values":[{"self":"http://www.example.com/jira/rest/api/2/user?username=michael","name":"michael","key":"michael","emailAddress":"michael@example.com","displayName":"MichaelScofield","active":true,"timeZone":"Australia/Sydney"},{"self":"http://www.example.com/jira/rest/api/2/user?username=alex","name":"alex","key":"alex","emailAddress":"alex@example.com","displayName":"AlexanderMahone","active":true,"timeZone":"Australia/Sydney"}]}`) + }) + if members, _, err := testClient.Group.Get("default"); err != nil { + t.Errorf("Error given: %s", err) + } else if members == nil { + t.Error("Expected members. Group.Members is nil") + } +} + +func TestGroupService_GetPage(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/group/member", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/group/member?groupname=default") + startAt := r.URL.Query().Get("startAt") + if startAt == "0" { + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/group/member?includeInactiveUsers=false&maxResults=2&groupname=default&startAt=0","nextPage":"`+testServer.URL+`/rest/api/2/group/member?groupname=default&includeInactiveUsers=false&maxResults=2&startAt=2","maxResults":2,"startAt":0,"total":4,"isLast":false,"values":[{"self":"http://www.example.com/jira/rest/api/2/user?username=michael","name":"michael","key":"michael","emailAddress":"michael@example.com","displayName":"MichaelScofield","active":true,"timeZone":"Australia/Sydney"},{"self":"http://www.example.com/jira/rest/api/2/user?username=alex","name":"alex","key":"alex","emailAddress":"alex@example.com","displayName":"AlexanderMahone","active":true,"timeZone":"Australia/Sydney"}]}`) + } else if startAt == "2" { + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/group/member?includeInactiveUsers=false&maxResults=2&groupname=default&startAt=2","maxResults":2,"startAt":2,"total":4,"isLast":true,"values":[{"self":"http://www.example.com/jira/rest/api/2/user?username=michael","name":"michael","key":"michael","emailAddress":"michael@example.com","displayName":"MichaelScofield","active":true,"timeZone":"Australia/Sydney"},{"self":"http://www.example.com/jira/rest/api/2/user?username=alex","name":"alex","key":"alex","emailAddress":"alex@example.com","displayName":"AlexanderMahone","active":true,"timeZone":"Australia/Sydney"}]}`) + } else { + t.Errorf("startAt %s", startAt) + } + }) + if page, resp, err := testClient.Group.GetWithOptions("default", &GroupSearchOptions{ + StartAt: 0, + MaxResults: 2, + IncludeInactiveUsers: false, + }); err != nil { + t.Errorf("Error given: %s %s", err, testServer.URL) + } else if page == nil || len(page) != 2 { + t.Error("Expected members. 
Group.Members is not 2 or is nil") + } else { + if resp.StartAt != 0 { + t.Errorf("Expect Result StartAt to be 0, but is %d", resp.StartAt) + } + if resp.MaxResults != 2 { + t.Errorf("Expect Result MaxResults to be 2, but is %d", resp.MaxResults) + } + if resp.Total != 4 { + t.Errorf("Expect Result Total to be 4, but is %d", resp.Total) + } + if page, resp, err := testClient.Group.GetWithOptions("default", &GroupSearchOptions{ + StartAt: 2, + MaxResults: 2, + IncludeInactiveUsers: false, + }); err != nil { + t.Errorf("Error give: %s %s", err, testServer.URL) + } else if page == nil || len(page) != 2 { + t.Error("Expected members. Group.Members is not 2 or is nil") + } else { + if resp.StartAt != 2 { + t.Errorf("Expect Result StartAt to be 2, but is %d", resp.StartAt) + } + if resp.MaxResults != 2 { + t.Errorf("Expect Result MaxResults to be 2, but is %d", resp.MaxResults) + } + if resp.Total != 4 { + t.Errorf("Expect Result Total to be 4, but is %d", resp.Total) + } + } + } +} + +func TestGroupService_Add(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/group/user", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testRequestURL(t, r, "/rest/api/2/group/user?groupname=default") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{"name":"default","self":"http://www.example.com/jira/rest/api/2/group?groupname=default","users":{"size":1,"items":[],"max-results":50,"start-index":0,"end-index":0},"expand":"users"}`) + }) + + if group, _, err := testClient.Group.Add("default", "theodore"); err != nil { + t.Errorf("Error given: %s", err) + } else if group == nil { + t.Error("Expected group. Group is nil") + } +} + +func TestGroupService_Remove(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/group/user", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testRequestURL(t, r, "/rest/api/2/group/user?groupname=default") + + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"name":"default","self":"http://www.example.com/jira/rest/api/2/group?groupname=default","users":{"size":1,"items":[],"max-results":50,"start-index":0,"end-index":0},"expand":"users"}`) + }) + + if _, err := testClient.Group.Remove("default", "theodore"); err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/img/logo.png b/vendor/github.com/andygrunwald/go-jira/img/logo.png new file mode 100644 index 0000000..5f02454 Binary files /dev/null and b/vendor/github.com/andygrunwald/go-jira/img/logo.png differ diff --git a/vendor/github.com/andygrunwald/go-jira/img/logo_small.png b/vendor/github.com/andygrunwald/go-jira/img/logo_small.png new file mode 100644 index 0000000..729e3f4 Binary files /dev/null and b/vendor/github.com/andygrunwald/go-jira/img/logo_small.png differ diff --git a/vendor/github.com/andygrunwald/go-jira/issue.go b/vendor/github.com/andygrunwald/go-jira/issue.go index 9d8bad1..61ba6a0 100644 --- a/vendor/github.com/andygrunwald/go-jira/issue.go +++ b/vendor/github.com/andygrunwald/go-jira/issue.go @@ -29,14 +29,22 @@ type IssueService struct { client *Client } +// UpdateQueryOptions specifies the optional parameters to the Edit issue +type UpdateQueryOptions struct { + NotifyUsers bool `url:"notifyUsers,omitempty"` + OverrideScreenSecurity bool `url:"overrideScreenSecurity,omitempty"` + OverrideEditableFlag bool `url:"overrideEditableFlag,omitempty"` +} + // Issue represents a JIRA issue. 
type Issue struct { - Expand string `json:"expand,omitempty" structs:"expand,omitempty"` - ID string `json:"id,omitempty" structs:"id,omitempty"` - Self string `json:"self,omitempty" structs:"self,omitempty"` - Key string `json:"key,omitempty" structs:"key,omitempty"` - Fields *IssueFields `json:"fields,omitempty" structs:"fields,omitempty"` - Changelog *Changelog `json:"changelog,omitempty" structs:"changelog,omitempty"` + Expand string `json:"expand,omitempty" structs:"expand,omitempty"` + ID string `json:"id,omitempty" structs:"id,omitempty"` + Self string `json:"self,omitempty" structs:"self,omitempty"` + Key string `json:"key,omitempty" structs:"key,omitempty"` + Fields *IssueFields `json:"fields,omitempty" structs:"fields,omitempty"` + RenderedFields *IssueRenderedFields `json:"renderedFields,omitempty" structs:"renderedFields,omitempty"` + Changelog *Changelog `json:"changelog,omitempty" structs:"changelog,omitempty"` } // ChangelogItems reflects one single changelog item of a history item @@ -90,45 +98,46 @@ type Epic struct { // Every JIRA issue has several fields attached. type IssueFields struct { // TODO Missing fields - // * "aggregatetimespent": null, // * "workratio": -1, // * "lastViewed": null, - // * "aggregatetimeoriginalestimate": null, - // * "aggregatetimeestimate": null, // * "environment": null, - Expand string `json:"expand,omitempty" structs:"expand,omitempty"` - Type IssueType `json:"issuetype" structs:"issuetype"` - Project Project `json:"project,omitempty" structs:"project,omitempty"` - Resolution *Resolution `json:"resolution,omitempty" structs:"resolution,omitempty"` - Priority *Priority `json:"priority,omitempty" structs:"priority,omitempty"` - Resolutiondate string `json:"resolutiondate,omitempty" structs:"resolutiondate,omitempty"` - Created string `json:"created,omitempty" structs:"created,omitempty"` - Duedate string `json:"duedate,omitempty" structs:"duedate,omitempty"` - Watches *Watches `json:"watches,omitempty" structs:"watches,omitempty"` - Assignee *User `json:"assignee,omitempty" structs:"assignee,omitempty"` - Updated string `json:"updated,omitempty" structs:"updated,omitempty"` - Description string `json:"description,omitempty" structs:"description,omitempty"` - Summary string `json:"summary" structs:"summary"` - Creator *User `json:"Creator,omitempty" structs:"Creator,omitempty"` - Reporter *User `json:"reporter,omitempty" structs:"reporter,omitempty"` - Components []*Component `json:"components,omitempty" structs:"components,omitempty"` - Status *Status `json:"status,omitempty" structs:"status,omitempty"` - Progress *Progress `json:"progress,omitempty" structs:"progress,omitempty"` - AggregateProgress *Progress `json:"aggregateprogress,omitempty" structs:"aggregateprogress,omitempty"` - TimeTracking *TimeTracking `json:"timetracking,omitempty" structs:"timetracking,omitempty"` - TimeSpent int `json:"timespent,omitempty" structs:"timespent,omitempty"` - TimeEstimate int `json:"timeestimate,omitempty" structs:"timeestimate,omitempty"` - TimeOriginalEstimate int `json:"timeoriginalestimate,omitempty" structs:"timeoriginalestimate,omitempty"` - Worklog *Worklog `json:"worklog,omitempty" structs:"worklog,omitempty"` - IssueLinks []*IssueLink `json:"issuelinks,omitempty" structs:"issuelinks,omitempty"` - Comments *Comments `json:"comment,omitempty" structs:"comment,omitempty"` - FixVersions []*FixVersion `json:"fixVersions,omitempty" structs:"fixVersions,omitempty"` - Labels []string `json:"labels,omitempty" structs:"labels,omitempty"` - Subtasks 
[]*Subtasks `json:"subtasks,omitempty" structs:"subtasks,omitempty"` - Attachments []*Attachment `json:"attachment,omitempty" structs:"attachment,omitempty"` - Epic *Epic `json:"epic,omitempty" structs:"epic,omitempty"` - Parent *Parent `json:"parent,omitempty" structs:"parent,omitempty"` - Unknowns tcontainer.MarshalMap + Expand string `json:"expand,omitempty" structs:"expand,omitempty"` + Type IssueType `json:"issuetype,omitempty" structs:"issuetype,omitempty"` + Project Project `json:"project,omitempty" structs:"project,omitempty"` + Resolution *Resolution `json:"resolution,omitempty" structs:"resolution,omitempty"` + Priority *Priority `json:"priority,omitempty" structs:"priority,omitempty"` + Resolutiondate Time `json:"resolutiondate,omitempty" structs:"resolutiondate,omitempty"` + Created Time `json:"created,omitempty" structs:"created,omitempty"` + Duedate Date `json:"duedate,omitempty" structs:"duedate,omitempty"` + Watches *Watches `json:"watches,omitempty" structs:"watches,omitempty"` + Assignee *User `json:"assignee,omitempty" structs:"assignee,omitempty"` + Updated Time `json:"updated,omitempty" structs:"updated,omitempty"` + Description string `json:"description,omitempty" structs:"description,omitempty"` + Summary string `json:"summary,omitempty" structs:"summary,omitempty"` + Creator *User `json:"Creator,omitempty" structs:"Creator,omitempty"` + Reporter *User `json:"reporter,omitempty" structs:"reporter,omitempty"` + Components []*Component `json:"components,omitempty" structs:"components,omitempty"` + Status *Status `json:"status,omitempty" structs:"status,omitempty"` + Progress *Progress `json:"progress,omitempty" structs:"progress,omitempty"` + AggregateProgress *Progress `json:"aggregateprogress,omitempty" structs:"aggregateprogress,omitempty"` + TimeTracking *TimeTracking `json:"timetracking,omitempty" structs:"timetracking,omitempty"` + TimeSpent int `json:"timespent,omitempty" structs:"timespent,omitempty"` + TimeEstimate int `json:"timeestimate,omitempty" structs:"timeestimate,omitempty"` + TimeOriginalEstimate int `json:"timeoriginalestimate,omitempty" structs:"timeoriginalestimate,omitempty"` + Worklog *Worklog `json:"worklog,omitempty" structs:"worklog,omitempty"` + IssueLinks []*IssueLink `json:"issuelinks,omitempty" structs:"issuelinks,omitempty"` + Comments *Comments `json:"comment,omitempty" structs:"comment,omitempty"` + FixVersions []*FixVersion `json:"fixVersions,omitempty" structs:"fixVersions,omitempty"` + Labels []string `json:"labels,omitempty" structs:"labels,omitempty"` + Subtasks []*Subtasks `json:"subtasks,omitempty" structs:"subtasks,omitempty"` + Attachments []*Attachment `json:"attachment,omitempty" structs:"attachment,omitempty"` + Epic *Epic `json:"epic,omitempty" structs:"epic,omitempty"` + Sprint *Sprint `json:"sprint,omitempty" structs:"sprint,omitempty"` + Parent *Parent `json:"parent,omitempty" structs:"parent,omitempty"` + AggregateTimeOriginalEstimate int `json:"aggregatetimeoriginalestimate,omitempty" structs:"aggregatetimeoriginalestimate,omitempty"` + AggregateTimeSpent int `json:"aggregatetimespent,omitempty" structs:"aggregatetimespent,omitempty"` + AggregateTimeEstimate int `json:"aggregatetimeestimate,omitempty" structs:"aggregatetimeestimate,omitempty"` + Unknowns tcontainer.MarshalMap } // MarshalJSON is a custom JSON marshal function for the IssueFields structs. @@ -195,6 +204,23 @@ func (i *IssueFields) UnmarshalJSON(data []byte) error { } +// IssueRenderedFields represents rendered fields of a JIRA issue. 
+// Not all IssueFields are rendered. +type IssueRenderedFields struct { + // TODO Missing fields + // * "aggregatetimespent": null, + // * "workratio": -1, + // * "lastViewed": null, + // * "aggregatetimeoriginalestimate": null, + // * "aggregatetimeestimate": null, + // * "environment": null, + Resolutiondate string `json:"resolutiondate,omitempty" structs:"resolutiondate,omitempty"` + Created string `json:"created,omitempty" structs:"created,omitempty"` + Duedate string `json:"duedate,omitempty" structs:"duedate,omitempty"` + Updated string `json:"updated,omitempty" structs:"updated,omitempty"` + Comments *Comments `json:"comment,omitempty" structs:"comment,omitempty"` +} + // IssueType represents a type of a JIRA issue. // Typical types are "Request", "Bug", "Story", ... type IssueType struct { @@ -207,29 +233,20 @@ type IssueType struct { AvatarID int `json:"avatarId,omitempty" structs:"avatarId,omitempty"` } -// Resolution represents a resolution of a JIRA issue. -// Typical types are "Fixed", "Suspended", "Won't Fix", ... -type Resolution struct { - Self string `json:"self" structs:"self"` - ID string `json:"id" structs:"id"` - Description string `json:"description" structs:"description"` - Name string `json:"name" structs:"name"` -} - -// Priority represents a priority of a JIRA issue. -// Typical types are "Normal", "Moderate", "Urgent", ... -type Priority struct { - Self string `json:"self,omitempty" structs:"self,omitempty"` - IconURL string `json:"iconUrl,omitempty" structs:"iconUrl,omitempty"` - Name string `json:"name,omitempty" structs:"name,omitempty"` - ID string `json:"id,omitempty" structs:"id,omitempty"` +// Watches represents a type of how many and which user are "observing" a JIRA issue to track the status / updates. +type Watches struct { + Self string `json:"self,omitempty" structs:"self,omitempty"` + WatchCount int `json:"watchCount,omitempty" structs:"watchCount,omitempty"` + IsWatching bool `json:"isWatching,omitempty" structs:"isWatching,omitempty"` + Watchers []*Watcher `json:"watchers,omitempty" structs:"watchers,omitempty"` } -// Watches represents a type of how many user are "observing" a JIRA issue to track the status / updates. -type Watches struct { - Self string `json:"self,omitempty" structs:"self,omitempty"` - WatchCount int `json:"watchCount,omitempty" structs:"watchCount,omitempty"` - IsWatching bool `json:"isWatching,omitempty" structs:"isWatching,omitempty"` +// Watcher represents a simplified user that "observes" the issue +type Watcher struct { + Self string `json:"self,omitempty" structs:"self,omitempty"` + Name string `json:"name,omitempty" structs:"name,omitempty"` + DisplayName string `json:"displayName,omitempty" structs:"displayName,omitempty"` + Active bool `json:"active,omitempty" structs:"active,omitempty"` } // AvatarUrls represents different dimensions of avatars / images @@ -260,20 +277,11 @@ type Status struct { StatusCategory StatusCategory `json:"statusCategory" structs:"statusCategory"` } -// StatusCategory represents the category a status belongs to. -// Those categories can be user defined in every JIRA instance. -type StatusCategory struct { - Self string `json:"self" structs:"self"` - ID int `json:"id" structs:"id"` - Name string `json:"name" structs:"name"` - Key string `json:"key" structs:"key"` - ColorName string `json:"colorName" structs:"colorName"` -} - // Progress represents the progress of a JIRA issue. 
type Progress struct { Progress int `json:"progress" structs:"progress"` Total int `json:"total" structs:"total"` + Percent int `json:"percent" structs:"percent"` } // Parent represents the parent of a JIRA issue, to be used with subtask issue types. @@ -285,6 +293,9 @@ type Parent struct { // Time represents the Time definition of JIRA as a time.Time of go type Time time.Time +// Date represents the Date definition of JIRA as a time.Time of go +type Date time.Time + // Wrapper struct for search result type transitionResult struct { Transitions []Transition `json:"transitions" structs:"transitions"` @@ -294,6 +305,7 @@ type transitionResult struct { type Transition struct { ID string `json:"id" structs:"id"` Name string `json:"name" structs:"name"` + To Status `json:"to" structs:"status"` Fields map[string]TransitionField `json:"fields" structs:"fields"` } @@ -304,7 +316,8 @@ type TransitionField struct { // CreateTransitionPayload is used for creating new issue transitions type CreateTransitionPayload struct { - Transition TransitionPayload `json:"transition" structs:"transition"` + Transition TransitionPayload `json:"transition" structs:"transition"` + Fields TransitionPayloadFields `json:"fields" structs:"fields"` } // TransitionPayload represents the request payload of Transition calls like DoTransition @@ -312,6 +325,11 @@ type TransitionPayload struct { ID string `json:"id" structs:"id"` } +// TransitionPayloadFields represents the fields that can be set when executing a transition +type TransitionPayloadFields struct { + Resolution *Resolution `json:"resolution,omitempty" structs:"resolution,omitempty"` +} + // Option represents an option value in a SelectList or MultiSelect // custom issue field type Option struct { @@ -321,6 +339,10 @@ type Option struct { // UnmarshalJSON will transform the JIRA time into a time.Time // during the transformation of the JIRA JSON response func (t *Time) UnmarshalJSON(b []byte) error { + // Ignore null, like in the main JSON package. + if string(b) == "null" { + return nil + } ti, err := time.Parse("\"2006-01-02T15:04:05.999-0700\"", string(b)) if err != nil { return err @@ -329,6 +351,35 @@ func (t *Time) UnmarshalJSON(b []byte) error { return nil } +// MarshalJSON will transform the time.Time into a JIRA time +// during the creation of a JIRA request +func (t Time) MarshalJSON() ([]byte, error) { + return []byte(time.Time(t).Format("\"2006-01-02T15:04:05.999-0700\"")), nil +} + +// UnmarshalJSON will transform the JIRA date into a time.Time +// during the transformation of the JIRA JSON response +func (t *Date) UnmarshalJSON(b []byte) error { + // Ignore null, like in the main JSON package. + if string(b) == "null" { + return nil + } + ti, err := time.Parse("\"2006-01-02\"", string(b)) + if err != nil { + return err + } + *t = Date(ti) + return nil +} + +// MarshalJSON will transform the Date object into a short +// date string as JIRA expects during the creation of a +// JIRA request +func (t Date) MarshalJSON() ([]byte, error) { + time := time.Time(t) + return []byte(time.Format("\"2006-01-02\"")), nil +} + // Worklog represents the work log of a JIRA issue. 
// One Worklog contains zero or n WorklogRecords // JIRA Wiki: https://confluence.atlassian.com/jira/logging-work-on-an-issue-185729605.html @@ -341,17 +392,17 @@ type Worklog struct { // WorklogRecord represents one entry of a Worklog type WorklogRecord struct { - Self string `json:"self" structs:"self"` - Author User `json:"author" structs:"author"` - UpdateAuthor User `json:"updateAuthor" structs:"updateAuthor"` - Comment string `json:"comment" structs:"comment"` - Created Time `json:"created" structs:"created"` - Updated Time `json:"updated" structs:"updated"` - Started Time `json:"started" structs:"started"` - TimeSpent string `json:"timeSpent" structs:"timeSpent"` - TimeSpentSeconds int `json:"timeSpentSeconds" structs:"timeSpentSeconds"` - ID string `json:"id" structs:"id"` - IssueID string `json:"issueId" structs:"issueId"` + Self string `json:"self,omitempty" structs:"self,omitempty"` + Author *User `json:"author,omitempty" structs:"author,omitempty"` + UpdateAuthor *User `json:"updateAuthor,omitempty" structs:"updateAuthor,omitempty"` + Comment string `json:"comment,omitempty" structs:"comment,omitempty"` + Created *Time `json:"created,omitempty" structs:"created,omitempty"` + Updated *Time `json:"updated,omitempty" structs:"updated,omitempty"` + Started *Time `json:"started,omitempty" structs:"started,omitempty"` + TimeSpent string `json:"timeSpent,omitempty" structs:"timeSpent,omitempty"` + TimeSpentSeconds int `json:"timeSpentSeconds,omitempty" structs:"timeSpentSeconds,omitempty"` + ID string `json:"id,omitempty" structs:"id,omitempty"` + IssueID string `json:"issueId,omitempty" structs:"issueId,omitempty"` } // TimeTracking represents the timetracking fields of a JIRA issue. @@ -443,6 +494,8 @@ type SearchOptions struct { // Expand: Expand specific sections in the returned issues Expand string `url:"expand,omitempty"` Fields []string + // ValidateQuery: The validateQuery param offers control over whether to validate and how strictly to treat the validation. Default: strict. + ValidateQuery string `url:"validateQuery,omitempty"` } // searchResult is only a small wrapper around the Search (with JQL) method @@ -462,8 +515,9 @@ type GetQueryOptions struct { // Properties is the list of properties to return for the issue. By default no properties are returned. 
Properties string `url:"properties,omitempty"` // FieldsByKeys if true then fields in issues will be referenced by keys instead of ids - FieldsByKeys bool `url:"fieldsByKeys,omitempty"` - UpdateHistory bool `url:"updateHistory,omitempty"` + FieldsByKeys bool `url:"fieldsByKeys,omitempty"` + UpdateHistory bool `url:"updateHistory,omitempty"` + ProjectKeys string `url:"projectKeys,omitempty"` } // CustomFields represents custom fields of JIRA @@ -496,7 +550,8 @@ func (s *IssueService) Get(issueID string, options *GetQueryOptions) (*Issue, *R issue := new(Issue) resp, err := s.client.Do(req, issue) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } return issue, resp, nil @@ -515,7 +570,8 @@ func (s *IssueService) DownloadAttachment(attachmentID string) (*Response, error resp, err := s.client.Do(req, nil) if err != nil { - return resp, err + jerr := NewJiraError(resp, err) + return resp, jerr } return resp, nil @@ -552,19 +608,37 @@ func (s *IssueService) PostAttachment(issueID string, r io.Reader, attachmentNam attachment := new([]Attachment) resp, err := s.client.Do(req, attachment) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } return attachment, resp, nil } +// GetWorklogs gets all the worklogs for an issue. +// This method is especially important if you need to read all the worklogs, not just the first page. +// +// https://docs.atlassian.com/jira/REST/cloud/#api/2/issue/{issueIdOrKey}/worklog-getIssueWorklog +func (s *IssueService) GetWorklogs(issueID string) (*Worklog, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/api/2/issue/%s/worklog", issueID) + + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + v := new(Worklog) + resp, err := s.client.Do(req, v) + return v, resp, err +} + // Create creates an issue or a sub-task from a JSON representation. // Creating a sub-task is similar to creating a regular issue, with two important differences: // The issueType field must correspond to a sub-task issue type and you must provide a parent field in the issue create request containing the id or key of the parent issue. // // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/issue-createIssues func (s *IssueService) Create(issue *Issue) (*Issue, *Response, error) { - apiEndpoint := "rest/api/2/issue/" + apiEndpoint := "rest/api/2/issue" req, err := s.client.NewRequest("POST", apiEndpoint, issue) if err != nil { return nil, nil, err @@ -588,18 +662,24 @@ func (s *IssueService) Create(issue *Issue) (*Issue, *Response, error) { return responseIssue, resp, nil } -// Update updates an issue from a JSON representation. The issue is found by key. +// UpdateWithOptions updates an issue from a JSON representation, +// while also specifying query params. The issue is found by key.
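The GetWorklogs method added above returns the whole worklog container for an issue. A hedged usage sketch follows; the instance URL and issue key are placeholders, and the Worklogs field name comes from the upstream Worklog struct, which this hunk does not show.

    package main

    import (
        "fmt"
        "log"

        "github.com/andygrunwald/go-jira"
    )

    func main() {
        client, err := jira.NewClient(nil, "https://jira.example.com/") // placeholder URL
        if err != nil {
            log.Fatal(err)
        }

        // Fetch every worklog entry recorded on the (hypothetical) issue EX-1.
        worklog, _, err := client.Issue.GetWorklogs("EX-1")
        if err != nil {
            log.Fatal(err)
        }

        for _, record := range worklog.Worklogs {
            fmt.Println(record.TimeSpent, record.Comment)
        }
    }
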
// // JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-editIssue -func (s *IssueService) Update(issue *Issue) (*Issue, *Response, error) { +func (s *IssueService) UpdateWithOptions(issue *Issue, opts *UpdateQueryOptions) (*Issue, *Response, error) { apiEndpoint := fmt.Sprintf("rest/api/2/issue/%v", issue.Key) - req, err := s.client.NewRequest("PUT", apiEndpoint, issue) + url, err := addOptions(apiEndpoint, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("PUT", url, issue) if err != nil { return nil, nil, err } resp, err := s.client.Do(req, nil) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } // This is just to follow the rest of the API's convention of returning an issue. @@ -608,6 +688,32 @@ func (s *IssueService) Update(issue *Issue) (*Issue, *Response, error) { return &ret, resp, nil } +// Update updates an issue from a JSON representation. The issue is found by key. +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-editIssue +func (s *IssueService) Update(issue *Issue) (*Issue, *Response, error) { + return s.UpdateWithOptions(issue, nil) +} + +// UpdateIssue updates an issue from a JSON representation. The issue is found by key. +// +// https://docs.atlassian.com/jira/REST/7.4.0/#api/2/issue-editIssue +func (s *IssueService) UpdateIssue(jiraID string, data map[string]interface{}) (*Response, error) { + apiEndpoint := fmt.Sprintf("rest/api/2/issue/%v", jiraID) + req, err := s.client.NewRequest("PUT", apiEndpoint, data) + if err != nil { + return nil, err + } + resp, err := s.client.Do(req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + // AddComment adds a new comment to issueID. // // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/issue-addComment @@ -618,6 +724,31 @@ func (s *IssueService) AddComment(issueID string, comment *Comment) (*Comment, * return nil, nil, err } + responseComment := new(Comment) + resp, err := s.client.Do(req, responseComment) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return responseComment, resp, nil +} + +// UpdateComment updates the body of a comment, identified by comment.ID, on the issueID. +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/issue/{issueIdOrKey}/comment-updateComment +func (s *IssueService) UpdateComment(issueID string, comment *Comment) (*Comment, *Response, error) { + reqBody := struct { + Body string `json:"body"` + }{ + Body: comment.Body, + } + apiEndpoint := fmt.Sprintf("rest/api/2/issue/%s/comment/%s", issueID, comment.ID) + req, err := s.client.NewRequest("PUT", apiEndpoint, reqBody) + if err != nil { + return nil, nil, err + } + responseComment := new(Comment) resp, err := s.client.Do(req, responseComment) if err != nil { @@ -627,6 +758,45 @@ func (s *IssueService) AddComment(issueID string, comment *Comment) (*Comment, * return responseComment, resp, nil } +// DeleteComment deletes a comment from an issueID.
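A hedged sketch of the map-based UpdateIssue added above, mirroring the request shape the new TestIssueService_UpdateIssue test sends; the instance URL, issue key, and summary value are placeholders.

    package main

    import (
        "log"

        "github.com/andygrunwald/go-jira"
    )

    func main() {
        client, err := jira.NewClient(nil, "https://jira.example.com/") // placeholder URL
        if err != nil {
            log.Fatal(err)
        }

        // Partial update: only the fields present in the map are touched.
        payload := map[string]interface{}{
            "fields": map[string]interface{}{
                "summary": "Updated summary (placeholder)",
            },
        }

        if _, err := client.Issue.UpdateIssue("PROJ-9001", payload); err != nil {
            log.Fatal(err)
        }
    }
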
+// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-api-3-issue-issueIdOrKey-comment-id-delete +func (s *IssueService) DeleteComment(issueID, commentID string) error { + apiEndpoint := fmt.Sprintf("rest/api/2/issue/%s/comment/%s", issueID, commentID) + req, err := s.client.NewRequest("DELETE", apiEndpoint, nil) + if err != nil { + return err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + jerr := NewJiraError(resp, err) + return jerr + } + + return nil +} + +// AddWorklogRecord adds a new worklog record to issueID. +// +// https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-issue-issueIdOrKey-worklog-post +func (s *IssueService) AddWorklogRecord(issueID string, record *WorklogRecord) (*WorklogRecord, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/api/2/issue/%s/worklog", issueID) + req, err := s.client.NewRequest("POST", apiEndpoint, record) + if err != nil { + return nil, nil, err + } + + responseRecord := new(WorklogRecord) + resp, err := s.client.Do(req, responseRecord) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return responseRecord, resp, nil +} + // AddLink adds a link between two issues. // // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/issueLink @@ -638,6 +808,10 @@ func (s *IssueService) AddLink(issueLink *IssueLink) (*Response, error) { } resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } + return resp, err } @@ -649,8 +823,8 @@ func (s *IssueService) Search(jql string, options *SearchOptions) ([]Issue, *Res if options == nil { u = fmt.Sprintf("rest/api/2/search?jql=%s", url.QueryEscape(jql)) } else { - u = fmt.Sprintf("rest/api/2/search?jql=%s&startAt=%d&maxResults=%d&expand=%s&fields=%s", url.QueryEscape(jql), - options.StartAt, options.MaxResults, options.Expand, strings.Join(options.Fields, ",")) + u = fmt.Sprintf("rest/api/2/search?jql=%s&startAt=%d&maxResults=%d&expand=%s&fields=%s&validateQuery=%s", url.QueryEscape(jql), + options.StartAt, options.MaxResults, options.Expand, strings.Join(options.Fields, ","), options.ValidateQuery) } req, err := s.client.NewRequest("GET", u, nil) @@ -660,9 +834,52 @@ func (s *IssueService) Search(jql string, options *SearchOptions) ([]Issue, *Res v := new(searchResult) resp, err := s.client.Do(req, v) + if err != nil { + err = NewJiraError(resp, err) + } return v.Issues, resp, err } +// SearchPages will get issues from all pages in a search +// +// JIRA API docs: https://developer.atlassian.com/jiradev/jira-apis/jira-rest-apis/jira-rest-api-tutorials/jira-rest-api-example-query-issues +func (s *IssueService) SearchPages(jql string, options *SearchOptions, f func(Issue) error) error { + if options == nil { + options = &SearchOptions{ + StartAt: 0, + MaxResults: 50, + } + } + + if options.MaxResults == 0 { + options.MaxResults = 50 + } + + issues, resp, err := s.Search(jql, options) + if err != nil { + return err + } + + for { + for _, issue := range issues { + err = f(issue) + if err != nil { + return err + } + } + + if resp.StartAt+resp.MaxResults >= resp.Total { + return nil + } + + options.StartAt += resp.MaxResults + issues, resp, err = s.Search(jql, options) + if err != nil { + return err + } + } +} + // GetCustomFields returns a map of customfield_* keys with string values func (s *IssueService) GetCustomFields(issueID string) (CustomFields, *Response, error) { apiEndpoint := fmt.Sprintf("rest/api/2/issue/%s", issueID) @@ -674,7 +891,8 @@ func (s *IssueService) 
GetCustomFields(issueID string) (CustomFields, *Response, issue := new(map[string]interface{}) resp, err := s.client.Do(req, issue) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } m := *issue @@ -712,6 +930,9 @@ func (s *IssueService) GetTransitions(id string) ([]Transition, *Response, error result := new(transitionResult) resp, err := s.client.Do(req, result) + if err != nil { + err = NewJiraError(resp, err) + } return result.Transitions, resp, err } @@ -742,10 +963,10 @@ func (s *IssueService) DoTransitionWithPayload(ticketID, payload interface{}) (* resp, err := s.client.Do(req, nil) if err != nil { - return nil, err + err = NewJiraError(resp, err) } - return resp, nil + return resp, err } // InitIssueWithMetaAndFields returns Issue with with values from fieldsConfig properly set. @@ -768,7 +989,7 @@ func InitIssueWithMetaAndFields(metaProject *MetaProject, metaIssuetype *MetaIss for key, value := range fieldsConfig { jiraKey, found := allFields[key] if !found { - return nil, fmt.Errorf("Key %s is not found in the list of fields.", key) + return nil, fmt.Errorf("key %s is not found in the list of fields", key) } valueType, err := metaIssuetype.Fields.String(jiraKey + "/schema/type") @@ -784,6 +1005,8 @@ func InitIssueWithMetaAndFields(metaProject *MetaProject, metaIssuetype *MetaIss switch elemType { case "component": issueFields.Unknowns[jiraKey] = []Component{{Name: value}} + case "option": + issueFields.Unknowns[jiraKey] = []map[string]string{{"value": value}} default: issueFields.Unknowns[jiraKey] = []string{value} } @@ -791,6 +1014,8 @@ func InitIssueWithMetaAndFields(metaProject *MetaProject, metaIssuetype *MetaIss issueFields.Unknowns[jiraKey] = value case "date": issueFields.Unknowns[jiraKey] = value + case "datetime": + issueFields.Unknowns[jiraKey] = value + case "any": // Treat any as string issueFields.Unknowns[jiraKey] = value @@ -840,3 +1065,100 @@ func (s *IssueService) Delete(issueID string) (*Response, error) { resp, err := s.client.Do(req, nil) return resp, err } + +// GetWatchers will return all the users watching/observing the given issue +// +// JIRA API docs: https://docs.atlassian.com/software/jira/docs/api/REST/latest/#api/2/issue-getIssueWatchers +func (s *IssueService) GetWatchers(issueID string) (*[]User, *Response, error) { + watchesAPIEndpoint := fmt.Sprintf("rest/api/2/issue/%s/watchers", issueID) + + req, err := s.client.NewRequest("GET", watchesAPIEndpoint, nil) + if err != nil { + return nil, nil, err + } + + watches := new(Watches) + resp, err := s.client.Do(req, watches) + if err != nil { + return nil, nil, NewJiraError(resp, err) + } + + result := []User{} + user := new(User) + for _, watcher := range watches.Watchers { + user, resp, err = s.client.User.Get(watcher.Name) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + result = append(result, *user) + } + + return &result, resp, nil +} + +// AddWatcher adds a watcher to the given issue +// +// JIRA API docs: https://docs.atlassian.com/software/jira/docs/api/REST/latest/#api/2/issue-addWatcher +func (s *IssueService) AddWatcher(issueID string, userName string) (*Response, error) { + apiEndPoint := fmt.Sprintf("rest/api/2/issue/%s/watchers", issueID) + + req, err := s.client.NewRequest("POST", apiEndPoint, userName) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } + + return resp, err +} +
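A hedged usage sketch for the watcher endpoints added above; note that GetWatchers resolves every watcher through the user service, so it issues one extra request per watcher. The instance URL, issue key, and user name are placeholders.

    package main

    import (
        "fmt"
        "log"

        "github.com/andygrunwald/go-jira"
    )

    func main() {
        client, err := jira.NewClient(nil, "https://jira.example.com/") // placeholder URL
        if err != nil {
            log.Fatal(err)
        }

        // Start watching the (hypothetical) issue EX-1 as user "fred".
        if _, err := client.Issue.AddWatcher("EX-1", "fred"); err != nil {
            log.Fatal(err)
        }

        // List the resolved users currently watching the issue.
        watchers, _, err := client.Issue.GetWatchers("EX-1")
        if err != nil {
            log.Fatal(err)
        }
        for _, u := range *watchers {
            fmt.Println(u.Name, u.DisplayName)
        }
    }

+// RemoveWatcher removes the given user from the given issue +// +// JIRA 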
API docs: https://docs.atlassian.com/software/jira/docs/api/REST/latest/#api/2/issue-removeWatcher +func (s *IssueService) RemoveWatcher(issueID string, userName string) (*Response, error) { + apiEndPoint := fmt.Sprintf("rest/api/2/issue/%s/watchers", issueID) + + req, err := s.client.NewRequest("DELETE", apiEndPoint, userName) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } + + return resp, err +} + +// UpdateAssignee updates the user assigned to work on the given issue +// +// JIRA API docs: https://docs.atlassian.com/software/jira/docs/api/REST/7.10.2/#api/2/issue-assign +func (s *IssueService) UpdateAssignee(issueID string, assignee *User) (*Response, error) { + apiEndPoint := fmt.Sprintf("rest/api/2/issue/%s/assignee", issueID) + + req, err := s.client.NewRequest("PUT", apiEndPoint, assignee) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } + + return resp, err +} + +func (c ChangelogHistory) CreatedTime() (time.Time, error) { + var t time.Time + // Ignore null + if string(c.Created) == "null" { + return t, nil + } + t, err := time.Parse("2006-01-02T15:04:05.999-0700", c.Created) + return t, err +} diff --git a/vendor/github.com/andygrunwald/go-jira/issue_test.go b/vendor/github.com/andygrunwald/go-jira/issue_test.go index 940a38b..5fc67a6 100644 --- a/vendor/github.com/andygrunwald/go-jira/issue_test.go +++ b/vendor/github.com/andygrunwald/go-jira/issue_test.go @@ -3,12 +3,15 @@ package jira import ( "encoding/json" "fmt" + "io" "io/ioutil" "net/http" "reflect" "strings" "testing" + "time" + "github.com/trivago/tgo/tcontainer" ) @@ -19,7 +22,7 @@ func TestIssueService_Get_Success(t *testing.T) { testMethod(t, r, "GET") testRequestURL(t, r, "/rest/api/2/issue/10002") - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) issue, _, err := testClient.Issue.Get("10002", nil) @@ -38,7 +41,7 @@ func TestIssueService_Get_WithQuerySuccess(t *testing.T) { testMethod(t, r, "GET") testRequestURL(t, r, "/rest/api/2/issue/10002?expand=foo") - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) opt := &GetQueryOptions{ @@ -56,12 +59,12 @@ func TestIssueService_Get_WithQuerySuccess(t *testing.T) { func TestIssueService_Create(t *testing.T) { setup() defer teardown() - testMux.HandleFunc("/rest/api/2/issue/", func(w http.ResponseWriter, r *http.Request) { + testMux.HandleFunc("/rest/api/2/issue", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") - testRequestURL(t, r, "/rest/api/2/issue/") + testRequestURL(t, r, "/rest/api/2/issue") w.WriteHeader(http.StatusCreated) - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) i := &Issue{ @@ -78,6 +81,54 @@ func TestIssueService_Create(t *testing.T) { } } +func TestIssueService_CreateThenGet(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testRequestURL(t, r, "/rest/api/2/issue") + + w.WriteHeader(http.StatusCreated) + io.Copy(w, r.Body) + }) + + i := &Issue{ + Fields: &IssueFields{ + Description: "example bug report", + Created: Time(time.Now()), + }, + } + issue, _, err := testClient.Issue.Create(i) + if issue == nil { + t.Error("Expected issue. Issue is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } + + testMux.HandleFunc("/rest/api/2/issue/10002", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/issue/10002") + + bytes, err := json.Marshal(issue) + if err != nil { + t.Errorf("Error marshaling issue: %s", err) + } + _, err = w.Write(bytes) + if err != nil { + t.Errorf("Error writing response: %s", err) + } + }) + + issue2, _, err := testClient.Issue.Get("10002", nil) + if issue2 == nil { + t.Error("Expected issue. 
Issue is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + func TestIssueService_Update(t *testing.T) { setup() defer teardown() @@ -103,6 +154,29 @@ func TestIssueService_Update(t *testing.T) { } } +func TestIssueService_UpdateIssue(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/PROJ-9001", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testRequestURL(t, r, "/rest/api/2/issue/PROJ-9001") + + w.WriteHeader(http.StatusNoContent) + }) + jID := "PROJ-9001" + i := make(map[string]interface{}) + fields := make(map[string]interface{}) + i["fields"] = fields + resp, err := testClient.Issue.UpdateIssue(jID, i) + if resp == nil { + t.Error("Expected resp. resp is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } + +} + func TestIssueService_AddComment(t *testing.T) { setup() defer teardown() @@ -130,6 +204,73 @@ func TestIssueService_AddComment(t *testing.T) { } } +func TestIssueService_UpdateComment(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10000/comment/10001", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testRequestURL(t, r, "/rest/api/2/issue/10000/comment/10001") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10001","id":"10001","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}`) + }) + + c := &Comment{ + ID: "10001", + Body: "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.", + Visibility: CommentVisibility{ + Type: "role", + Value: "Administrators", + }, + } + comment, _, err := testClient.Issue.UpdateComment("10000", c) + if comment == nil { + t.Error("Expected Comment. Comment is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestIssueService_DeleteComment(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10000/comment/10001", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testRequestURL(t, r, "/rest/api/2/issue/10000/comment/10001") + + w.WriteHeader(http.StatusNoContent) + fmt.Fprint(w, `{}`) + }) + + err := testClient.Issue.DeleteComment("10000", "10001") + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestIssueService_AddWorklogRecord(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10000/worklog", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testRequestURL(t, r, "/rest/api/2/issue/10000/worklog") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2018-02-14T22:14:46.003+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2018-02-14T22:14:46.003+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}`) + }) + r := &WorklogRecord{ + TimeSpent: "1h", + } + record, _, err := testClient.Issue.AddWorklogRecord("10000", r) + if record == nil { + t.Error("Expected Record. Record is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + func TestIssueService_AddLink(t *testing.T) { setup() defer teardown() @@ -177,7 +318,7 @@ func TestIssueService_Get_Fields(t *testing.T) { testMethod(t, r, "GET") testRequestURL(t, r, "/rest/api/2/issue/10002") - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"labels":["test"],"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"epic": {"id": 19415,"key": "EPIC-77","self": "https://example.atlassian.net/rest/agile/1.0/epic/19415","name": "Epic Name","summary": "Do it","color": {"key": "color_11"},"done": false},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"labels":["test"],"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"epic": {"id": 19415,"key": "EPIC-77","self": "https://example.atlassian.net/rest/agile/1.0/epic/19415","name": "Epic Name","summary": "Do it","color": {"key": "color_11"},"done": false},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) issue, _, err := testClient.Issue.Get("10002", nil) @@ -200,6 +341,37 @@ func TestIssueService_Get_Fields(t *testing.T) { } } +func TestIssueService_Get_RenderedFields(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10002", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/issue/10002") + + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"labels":["test"],"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"epic": {"id": 19415,"key": "EPIC-77","self": "https://example.atlassian.net/rest/agile/1.0/epic/19415","name": "Epic Name","summary": "Do it","color": {"key": "color_11"},"done": false},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{},"renderedFields":{"resolutiondate":"In 1 week","updated":"2 hours ago","comment":{"comments":[{"body":"This is HTML"}]}}}`) + }) + + issue, _, err := testClient.Issue.Get("10002", nil) + if issue == nil { + t.Error("Expected issue. Issue is nil") + } + if issue.RenderedFields.Updated != "2 hours ago" { + t.Error("Expected updated to equla '2 hours ago' for rendered field") + } + + if len(issue.RenderedFields.Comments.Comments) != 1 { + t.Errorf("Expected one comment, %v found", len(issue.RenderedFields.Comments.Comments)) + } + comment := issue.RenderedFields.Comments.Comments[0] + if comment.Body != "This is HTML" { + t.Errorf("Wrong comment body returned in RenderedField. Got %s", comment.Body) + } + + if err != nil { + t.Errorf("Error given: %s", err) + } +} + func TestIssueService_DownloadAttachment(t *testing.T) { var testAttachment = "Here is an attachment" @@ -427,13 +599,51 @@ func TestIssueService_Search_WithoutPaging(t *testing.T) { } } +func TestIssueService_SearchPages(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/search", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + if r.URL.String() == "/rest/api/2/search?jql=something&startAt=1&maxResults=2&expand=foo&fields=&validateQuery=warn" { + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"expand": "schema,names","startAt": 1,"maxResults": 2,"total": 6,"issues": [{"expand": "html","id": "10230","self": "http://kelpie9:8081/rest/api/2/issue/BULK-62","key": "BULK-62","fields": {"summary": "testing","timetracking": null,"issuetype": {"self": "http://kelpie9:8081/rest/api/2/issuetype/5","id": "5","description": "The sub-task of the issue","iconUrl": "http://kelpie9:8081/images/icons/issue_subtask.gif","name": "Sub-task","subtask": true},"customfield_10071": null}},{"expand": "html","id": "10004","self": "http://kelpie9:8081/rest/api/2/issue/BULK-47","key": "BULK-47","fields": {"summary": "Cheese v1 2.0 issue","timetracking": null,"issuetype": {"self": "http://kelpie9:8081/rest/api/2/issuetype/3","id": "3","description": "A task that needs to be done.","iconUrl": "http://kelpie9:8081/images/icons/task.gif","name": "Task","subtask": false}}}]}`) + return + } else if r.URL.String() == "/rest/api/2/search?jql=something&startAt=3&maxResults=2&expand=foo&fields=&validateQuery=warn" { + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"expand": "schema,names","startAt": 3,"maxResults": 2,"total": 6,"issues": [{"expand": "html","id": "10230","self": "http://kelpie9:8081/rest/api/2/issue/BULK-62","key": "BULK-62","fields": {"summary": "testing","timetracking": null,"issuetype": {"self": "http://kelpie9:8081/rest/api/2/issuetype/5","id": "5","description": "The sub-task of the issue","iconUrl": 
"http://kelpie9:8081/images/icons/issue_subtask.gif","name": "Sub-task","subtask": true},"customfield_10071": null}},{"expand": "html","id": "10004","self": "http://kelpie9:8081/rest/api/2/issue/BULK-47","key": "BULK-47","fields": {"summary": "Cheese v1 2.0 issue","timetracking": null,"issuetype": {"self": "http://kelpie9:8081/rest/api/2/issuetype/3","id": "3","description": "A task that needs to be done.","iconUrl": "http://kelpie9:8081/images/icons/task.gif","name": "Task","subtask": false}}}]}`) + return + } else if r.URL.String() == "/rest/api/2/search?jql=something&startAt=5&maxResults=2&expand=foo&fields=&validateQuery=warn" { + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"expand": "schema,names","startAt": 5,"maxResults": 2,"total": 6,"issues": [{"expand": "html","id": "10230","self": "http://kelpie9:8081/rest/api/2/issue/BULK-62","key": "BULK-62","fields": {"summary": "testing","timetracking": null,"issuetype": {"self": "http://kelpie9:8081/rest/api/2/issuetype/5","id": "5","description": "The sub-task of the issue","iconUrl": "http://kelpie9:8081/images/icons/issue_subtask.gif","name": "Sub-task","subtask": true},"customfield_10071": null}}]}`) + return + } + + t.Errorf("Unexpected URL: %v", r.URL) + }) + + opt := &SearchOptions{StartAt: 1, MaxResults: 2, Expand: "foo", ValidateQuery: "warn"} + issues := make([]Issue, 0) + err := testClient.Issue.SearchPages("something", opt, func(issue Issue) error { + issues = append(issues, issue) + return nil + }) + + if err != nil { + t.Errorf("Error given: %s", err) + } + + if len(issues) != 5 { + t.Errorf("Expected 5 issues, %v given", len(issues)) + } +} + func TestIssueService_GetCustomFields(t *testing.T) { setup() defer teardown() testMux.HandleFunc("/rest/api/2/issue/10002", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testRequestURL(t, r, "/rest/api/2/issue/10002") - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"customfield_123":"test","watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"customfield_123":"test","watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) issue, _, err := testClient.Issue.GetCustomFields("10002") @@ -455,7 +665,7 @@ func TestIssueService_GetComplexCustomFields(t *testing.T) { testMux.HandleFunc("/rest/api/2/issue/10002", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testRequestURL(t, r, "/rest/api/2/issue/10002") - fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"customfield_123":{"self":"http://www.example.com/jira/rest/api/2/customFieldOption/123","value":"test","id":"123"},"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"customfield_123":{"self":"http://www.example.com/jira/rest/api/2/customFieldOption/123","value":"test","id":"123"},"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. 
Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) }) issue, _, err := testClient.Issue.GetCustomFields("10002") @@ -1097,3 +1307,124 @@ func TestIssueService_Delete(t *testing.T) { t.Errorf("Error given: %s", err) } } + +func TestIssueService_GetWorklogs(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10002/worklog", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/issue/10002/worklog") + + fmt.Fprint(w, `{"startAt": 1,"maxResults": 40,"total": 1,"worklogs": [{"id": "3","self": "http://kelpie9:8081/rest/api/2/issue/10002/worklog/3","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","comment":"","started":"2016-03-16T04:22:37.356+0000","timeSpent": "1h","timeSpentSeconds": 3600,"issueId":"10002"}]}`) + }) + + worklog, _, err := testClient.Issue.GetWorklogs("10002") + if worklog == nil { + t.Error("Expected worklog. 
Worklog is nil") + } + + if len(worklog.Worklogs) != 1 { + t.Error("Expected 1 worklog") + } + + if worklog.Worklogs[0].Author.Name != "fred" { + t.Error("Expected worklog author to be fred") + } + + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestIssueService_GetWatchers(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10002/watchers", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/issue/10002/watchers") + + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]}`) + }) + + testMux.HandleFunc("/rest/api/2/user", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/user?username=fred") + + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","key":"fred", + "name":"fred","emailAddress":"fred@example.com","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", + "24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", + "32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":true,"timeZone":"Australia/Sydney","groups":{"size":3,"items":[ + {"name":"jira-user","self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-user"},{"name":"jira-admin", + "self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-admin"},{"name":"important","self":"http://www.example.com/jira/rest/api/2/group?groupname=important" + }]},"applicationRoles":{"size":1,"items":[]},"expand":"groups,applicationRoles"}`) + }) + + watchers, _, err := testClient.Issue.GetWatchers("10002") + if err != nil { + t.Errorf("Error given: %s", err) + return + } + if watchers == nil { + t.Error("Expected watchers. 
Watchers is nil") + return + } + if len(*watchers) != 1 { + t.Errorf("Expected 1 watcher, got: %d", len(*watchers)) + return + } + if (*watchers)[0].Name != "fred" { + t.Error("Expected watcher name fred") + } +} + +func TestIssueService_UpdateAssignee(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10002/assignee", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testRequestURL(t, r, "/rest/api/2/issue/10002/assignee") + + w.WriteHeader(http.StatusNoContent) + }) + + resp, err := testClient.Issue.UpdateAssignee("10002", &User{ + Name: "test-username", + }) + + if resp.StatusCode != 204 { + t.Error("Expected issue not re-assigned.") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestIssueService_Get_Fields_Changelog(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/issue/10002", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/issue/10002") + + fmt.Fprint(w, `{"expand":"changelog","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","changelog":{"startAt": 0,"maxResults": 1, "total": 1, "histories": [{"id": "10002", "author": {"self": "http://www.example.com/jira/rest/api/2/user?username=fred", "name": "fred", "key": "fred", "emailAddress": "fred@example.com", "avatarUrls": {"48x48": "http://www.example.com/secure/useravatar?ownerId=fred&avatarId=33072", "24x24": "http://www.example.com/secure/useravatar?size=small&ownerId=fred&avatarId=33072", "16x16": "http://www.example.com/secure/useravatar?size=xsmall&ownerId=fred&avatarId=33072", "32x32": "http://www.example.com/secure/useravatar?size=medium&ownerId=fred&avatarId=33072"},"displayName":"Fred","active": true,"timeZone":"Australia/Sydney"},"created":"2018-06-20T16:50:35.000+0300","items":[{"field":"Rank","fieldtype":"custom","from":"","fromString":"","to":"","toString":"Ranked higher"}]}]}}`) + }) + + issue, _, _ := testClient.Issue.Get("10002", &GetQueryOptions{Expand: "changelog"}) + if issue == nil { + t.Error("Expected issue. Issue is nil") + } + + if len(issue.Changelog.Histories) != 1 { + t.Errorf("Expected one history item, %v found", len(issue.Changelog.Histories)) + } + + if issue.Changelog.Histories[0].Created != "2018-06-20T16:50:35.000+0300" { + t.Errorf("Expected created time of history item 2018-06-20T16:50:35.000+0300, %v got", issue.Changelog.Histories[0].Created) + } + + tm, _ := time.Parse("2006-01-02T15:04:05.999-0700", "2018-06-20T16:50:35.000+0300") + + if ct, _ := issue.Changelog.Histories[0].CreatedTime(); !tm.Equal(ct) { + t.Errorf("Expected CreatedTime func return %v time, %v got", tm, ct) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/jira.go b/vendor/github.com/andygrunwald/go-jira/jira.go index f7430e9..d8d247d 100644 --- a/vendor/github.com/andygrunwald/go-jira/jira.go +++ b/vendor/github.com/andygrunwald/go-jira/jira.go @@ -8,8 +8,11 @@ import ( "net/http" "net/url" "reflect" + "strings" + "time" "github.com/google/go-querystring/query" + "github.com/pkg/errors" ) // A Client manages communication with the JIRA API. @@ -31,6 +34,13 @@ type Client struct { Sprint *SprintService User *UserService Group *GroupService + Version *VersionService + Priority *PriorityService + Field *FieldService + Component *ComponentService + Resolution *ResolutionService + StatusCategory *StatusCategoryService + Filter *FilterService } // NewClient returns a new JIRA API client. 
@@ -45,6 +55,11 @@ func NewClient(httpClient *http.Client, baseURL string) (*Client, error) { httpClient = http.DefaultClient } + // ensure the baseURL contains a trailing slash so that all paths are preserved in later calls + if !strings.HasSuffix(baseURL, "/") { + baseURL += "/" + } + parsedBaseURL, err := url.Parse(baseURL) if err != nil { return nil, err @@ -61,19 +76,27 @@ func NewClient(httpClient *http.Client, baseURL string) (*Client, error) { c.Sprint = &SprintService{client: c} c.User = &UserService{client: c} c.Group = &GroupService{client: c} + c.Version = &VersionService{client: c} + c.Priority = &PriorityService{client: c} + c.Field = &FieldService{client: c} + c.Component = &ComponentService{client: c} + c.Resolution = &ResolutionService{client: c} + c.StatusCategory = &StatusCategoryService{client: c} + c.Filter = &FilterService{client: c} return c, nil } // NewRawRequest creates an API request. // A relative URL can be provided in urlStr, in which case it is resolved relative to the baseURL of the Client. -// Relative URLs should always be specified without a preceding slash. // Allows using an optional native io.Reader for sourcing the request body. func (c *Client) NewRawRequest(method, urlStr string, body io.Reader) (*http.Request, error) { rel, err := url.Parse(urlStr) if err != nil { return nil, err } + // Relative URLs should be specified without a preceding slash since baseURL will have the trailing slash + rel.Path = strings.TrimLeft(rel.Path, "/") u := c.baseURL.ResolveReference(rel) @@ -104,13 +127,14 @@ func (c *Client) NewRawRequest(method, urlStr string, body io.Reader) (*http.Req // NewRequest creates an API request. // A relative URL can be provided in urlStr, in which case it is resolved relative to the baseURL of the Client. -// Relative URLs should always be specified without a preceding slash. // If specified, the value pointed to by body is JSON encoded and included as the request body. func (c *Client) NewRequest(method, urlStr string, body interface{}) (*http.Request, error) { rel, err := url.Parse(urlStr) if err != nil { return nil, err } + // Relative URLs should be specified without a preceding slash since baseURL will have the trailing slash + rel.Path = strings.TrimLeft(rel.Path, "/") u := c.baseURL.ResolveReference(rel) @@ -172,13 +196,14 @@ func addOptions(s string, opt interface{}) (string, error) { // NewMultiPartRequest creates an API request including a multi-part file. // A relative URL can be provided in urlStr, in which case it is resolved relative to the baseURL of the Client. -// Relative URLs should always be specified without a preceding slash. // If specified, the value pointed to by buf is a multipart form. func (c *Client) NewMultiPartRequest(method, urlStr string, buf *bytes.Buffer) (*http.Request, error) { rel, err := url.Parse(urlStr) if err != nil { return nil, err } + // Relative URLs should be specified without a preceding slash since baseURL will have the trailing slash + rel.Path = strings.TrimLeft(rel.Path, "/") u := c.baseURL.ResolveReference(rel) @@ -276,6 +301,156 @@ func (r *Response) populatePageValues(v interface{}) { r.StartAt = value.StartAt r.MaxResults = value.MaxResults r.Total = value.Total + case *groupMembersResult: + r.StartAt = value.StartAt + r.MaxResults = value.MaxResults + r.Total = value.Total } return } + +// BasicAuthTransport is an http.RoundTripper that authenticates all requests +// using HTTP Basic Authentication with the provided username and password. 
+type BasicAuthTransport struct { + Username string + Password string + + // Transport is the underlying HTTP transport to use when making requests. + // It will default to http.DefaultTransport if nil. + Transport http.RoundTripper +} + +// RoundTrip implements the RoundTripper interface. We just add the +// basic auth and return the RoundTripper for this transport type. +func (t *BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req2 := cloneRequest(req) // per RoundTripper contract + + req2.SetBasicAuth(t.Username, t.Password) + return t.transport().RoundTrip(req2) +} + +// Client returns an *http.Client that makes requests that are authenticated +// using HTTP Basic Authentication. This is a nice little bit of sugar +// so we can just get the client instead of creating the client in the calling code. +// If it's necessary to send more information on client init, the calling code can +// always skip this and set the transport itself. +func (t *BasicAuthTransport) Client() *http.Client { + return &http.Client{Transport: t} +} + +func (t *BasicAuthTransport) transport() http.RoundTripper { + if t.Transport != nil { + return t.Transport + } + return http.DefaultTransport +} + +// CookieAuthTransport is an http.RoundTripper that authenticates all requests +// using Jira's cookie-based authentication. +// +// Note that it is generally preferable to use HTTP BASIC authentication with the REST API. +// However, this resource may be used to mimic the behaviour of JIRA's log-in page (e.g. to display log-in errors to a user). +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#auth/1/session +type CookieAuthTransport struct { + Username string + Password string + AuthURL string + + // SessionObject is the authenticated cookie string(s). + // It's passed in each call to prove the client is authenticated. + SessionObject []*http.Cookie + + // Transport is the underlying HTTP transport to use when making requests. + // It will default to http.DefaultTransport if nil. + Transport http.RoundTripper +} + +// RoundTrip adds the session object to the request. +func (t *CookieAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { + if t.SessionObject == nil { + err := t.setSessionObject() + if err != nil { + return nil, errors.Wrap(err, "cookieauth: no session object has been set") + } + } + + req2 := cloneRequest(req) // per RoundTripper contract + for _, cookie := range t.SessionObject { + // Don't add an empty value cookie to the request + if cookie.Value != "" { + req2.AddCookie(cookie) + } + } + + return t.transport().RoundTrip(req2) +} + +// Client returns an *http.Client that makes requests that are authenticated +// using cookie authentication. +func (t *CookieAuthTransport) Client() *http.Client { + return &http.Client{Transport: t} +} + +// setSessionObject attempts to authenticate the user and set +// the session object (e.g. cookie) +func (t *CookieAuthTransport) setSessionObject() error { + req, err := t.buildAuthRequest() + if err != nil { + return err + } + + var authClient = &http.Client{ + Timeout: time.Second * 60, + } + resp, err := authClient.Do(req) + if err != nil { + return err + } + + t.SessionObject = resp.Cookies() + return nil +} + +// buildAuthRequest assembles the request to get the authenticated cookie +func (t *CookieAuthTransport) buildAuthRequest() (*http.Request, error) { + body := struct { + Username string `json:"username"` + Password string `json:"password"` + }{ + t.Username, + t.Password, + } + + b := new(bytes.Buffer) + json.NewEncoder(b).Encode(body) + + req, err := http.NewRequest("POST", t.AuthURL, b) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + return req, nil +} + +func (t *CookieAuthTransport) transport() http.RoundTripper { + if t.Transport != nil { + return t.Transport + } + return http.DefaultTransport +} + +// cloneRequest returns a clone of the provided *http.Request. +// The clone is a shallow copy of the struct and its Header map. +func cloneRequest(r *http.Request) *http.Request { + // shallow copy of the struct + r2 := new(http.Request) + *r2 = *r + // deep copy of the Header + r2.Header = make(http.Header, len(r.Header)) + for k, s := range r.Header { + r2.Header[k] = append([]string(nil), s...) + } + return r2 +} diff --git a/vendor/github.com/andygrunwald/go-jira/jira_test.go b/vendor/github.com/andygrunwald/go-jira/jira_test.go index b35aac1..a731fc5 100644 --- a/vendor/github.com/andygrunwald/go-jira/jira_test.go +++ b/vendor/github.com/andygrunwald/go-jira/jira_test.go @@ -112,6 +112,18 @@ func TestNewClient_WithServices(t *testing.T) { if c.Group == nil { t.Error("No GroupService provided") } + if c.Version == nil { + t.Error("No VersionService provided") + } + if c.Priority == nil { + t.Error("No PriorityService provided") + } + if c.Resolution == nil { + t.Error("No ResolutionService provided") + } + if c.StatusCategory == nil { + t.Error("No StatusCategoryService provided") + } } func TestCheckResponse(t *testing.T) { @@ -244,7 +256,7 @@ func TestClient_NewRequest_BasicAuth(t *testing.T) { } } -// If a nil body is passed to gerrit.NewRequest, make sure that nil is also passed to http.NewRequest. +// If a nil body is passed to jira.NewRequest, make sure that nil is also passed to http.NewRequest. // In most cases, passing an io.Reader that returns no content is fine, // since there is no difference between an HTTP request body that is an empty string versus one that is not set at all. // However in certain cases, intermediate systems may treat these differently resulting in subtle errors. @@ -389,7 +401,7 @@ func TestClient_Do_HTTPError(t *testing.T) { } // Test handling of an error caused by the internal http client's Do() function. -// A redirect loop is pretty unlikely to occur within the Gerrit API, but does allow us to exercise the right code path. +// A redirect loop is pretty unlikely to occur within the JIRA API, but does allow us to exercise the right code path. 
func TestClient_Do_RedirectLoop(t *testing.T) { setup() defer teardown() @@ -428,23 +440,159 @@ func TestClient_GetBaseURL_WithURL(t *testing.T) { } } -func TestClient_Do_PagingInfoEmptyByDefault(t *testing.T) { - c, _ := NewClient(nil, testJIRAInstanceURL) - req, _ := c.NewRequest("GET", "/", nil) - type foo struct { - A string +func TestBasicAuthTransport(t *testing.T) { + setup() + defer teardown() + + username, password := "username", "password" + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + u, p, ok := r.BasicAuth() + if !ok { + t.Errorf("request does not contain basic auth credentials") + } + if u != username { + t.Errorf("request contained basic auth username %q, want %q", u, username) + } + if p != password { + t.Errorf("request contained basic auth password %q, want %q", p, password) + } + }) + + tp := &BasicAuthTransport{ + Username: username, + Password: password, } - body := new(foo) - resp, _ := c.Do(req, body) + basicAuthClient, _ := NewClient(tp.Client(), testServer.URL) + req, _ := basicAuthClient.NewRequest("GET", ".", nil) + basicAuthClient.Do(req, nil) +} + +func TestBasicAuthTransport_transport(t *testing.T) { + // default transport + tp := &BasicAuthTransport{} + if tp.transport() != http.DefaultTransport { + t.Errorf("Expected http.DefaultTransport to be used.") + } - if resp.StartAt != 0 { - t.Errorf("StartAt not equal to 0") + // custom transport + tp = &BasicAuthTransport{ + Transport: &http.Transport{}, } - if resp.MaxResults != 0 { - t.Errorf("StartAt not equal to 0") + if tp.transport() == http.DefaultTransport { + t.Errorf("Expected custom transport to be used.") } - if resp.Total != 0 { - t.Errorf("StartAt not equal to 0") +} + +// Test that the cookie in the transport is the cookie returned in the header +func TestCookieAuthTransport_SessionObject_Exists(t *testing.T) { + setup() + defer teardown() + + testCookie := &http.Cookie{Name: "test", Value: "test"} + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + cookies := r.Cookies() + + if len(cookies) < 1 { + t.Errorf("No cookies set") + } + + if cookies[0].Name != testCookie.Name { + t.Errorf("Cookie names don't match, expected %v, got %v", testCookie.Name, cookies[0].Name) + } + + if cookies[0].Value != testCookie.Value { + t.Errorf("Cookie values don't match, expected %v, got %v", testCookie.Value, cookies[0].Value) + } + }) + + tp := &CookieAuthTransport{ + Username: "username", + Password: "password", + AuthURL: "https://some.jira.com/rest/auth/1/session", + SessionObject: []*http.Cookie{testCookie}, } + + basicAuthClient, _ := NewClient(tp.Client(), testServer.URL) + req, _ := basicAuthClient.NewRequest("GET", ".", nil) + basicAuthClient.Do(req, nil) +} + +// Test that an empty cookie in the transport is not returned in the header +func TestCookieAuthTransport_SessionObject_ExistsWithEmptyCookie(t *testing.T) { + setup() + defer teardown() + + emptyCookie := &http.Cookie{Name: "empty_cookie", Value: ""} + testCookie := &http.Cookie{Name: "test", Value: "test"} + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + cookies := r.Cookies() + + if len(cookies) > 1 { + t.Errorf("The empty cookie should not have been added") + } + + if cookies[0].Name != testCookie.Name { + t.Errorf("Cookie names don't match, expected %v, got %v", testCookie.Name, cookies[0].Name) + } + + if cookies[0].Value != testCookie.Value { + t.Errorf("Cookie values don't match, expected %v, got %v", testCookie.Value, cookies[0].Value) + } + }) + + tp := 
&CookieAuthTransport{ + Username: "username", + Password: "password", + AuthURL: "https://some.jira.com/rest/auth/1/session", + SessionObject: []*http.Cookie{emptyCookie, testCookie}, + } + + basicAuthClient, _ := NewClient(tp.Client(), testServer.URL) + req, _ := basicAuthClient.NewRequest("GET", ".", nil) + basicAuthClient.Do(req, nil) +} + +// Test that if no cookie is in the transport, it checks for a cookie +func TestCookieAuthTransport_SessionObject_DoesNotExist(t *testing.T) { + setup() + defer teardown() + + testCookie := &http.Cookie{Name: "does_not_exist", Value: "does_not_exist"} + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + http.SetCookie(w, testCookie) + w.Write([]byte(`OK`)) + })) + defer ts.Close() + + testMux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + cookies := r.Cookies() + + if len(cookies) < 1 { + t.Errorf("No cookies set") + } + + if cookies[0].Name != testCookie.Name { + t.Errorf("Cookie names don't match, expected %v, got %v", testCookie.Name, cookies[0].Name) + } + + if cookies[0].Value != testCookie.Value { + t.Errorf("Cookie values don't match, expected %v, got %v", testCookie.Value, cookies[0].Value) + } + }) + + tp := &CookieAuthTransport{ + Username: "username", + Password: "password", + AuthURL: ts.URL, + } + + basicAuthClient, _ := NewClient(tp.Client(), testServer.URL) + req, _ := basicAuthClient.NewRequest("GET", ".", nil) + basicAuthClient.Do(req, nil) } diff --git a/vendor/github.com/andygrunwald/go-jira/metaissue.go b/vendor/github.com/andygrunwald/go-jira/metaissue.go index e922d66..1981378 100644 --- a/vendor/github.com/andygrunwald/go-jira/metaissue.go +++ b/vendor/github.com/andygrunwald/go-jira/metaissue.go @@ -4,6 +4,7 @@ import ( "fmt" "strings" + "github.com/google/go-querystring/query" "github.com/trivago/tgo/tcontainer" ) @@ -16,7 +17,7 @@ type CreateMetaInfo struct { // MetaProject is the meta information about a project returned from createmeta api type MetaProject struct { Expand string `json:"expand,omitempty"` - Self string `json:"self, omitempty"` + Self string `json:"self,omitempty"` Id string `json:"id,omitempty"` Key string `json:"key,omitempty"` Name string `json:"name,omitempty"` @@ -42,14 +43,25 @@ type MetaIssueType struct { } // GetCreateMeta makes the api call to get the meta information required to create a ticket -func (s *IssueService) GetCreateMeta(projectkey string) (*CreateMetaInfo, *Response, error) { +func (s *IssueService) GetCreateMeta(projectkeys string) (*CreateMetaInfo, *Response, error) { + return s.GetCreateMetaWithOptions(&GetQueryOptions{ProjectKeys: projectkeys, Expand: "projects.issuetypes.fields"}) +} - apiEndpoint := fmt.Sprintf("rest/api/2/issue/createmeta?projectKeys=%s&expand=projects.issuetypes.fields", projectkey) +// GetCreateMetaWithOptions makes the api call to get the meta information without requiring to have a projectKey +func (s *IssueService) GetCreateMetaWithOptions(options *GetQueryOptions) (*CreateMetaInfo, *Response, error) { + apiEndpoint := "rest/api/2/issue/createmeta" req, err := s.client.NewRequest("GET", apiEndpoint, nil) if err != nil { return nil, nil, err } + if options != nil { + q, err := query.Values(options) + if err != nil { + return nil, nil, err + } + req.URL.RawQuery = q.Encode() + } meta := new(CreateMetaInfo) resp, err := s.client.Do(req, meta) diff --git a/vendor/github.com/andygrunwald/go-jira/metaissue_test.go 
b/vendor/github.com/andygrunwald/go-jira/metaissue_test.go index 3d5b144..d19149b 100644 --- a/vendor/github.com/andygrunwald/go-jira/metaissue_test.go +++ b/vendor/github.com/andygrunwald/go-jira/metaissue_test.go @@ -378,6 +378,377 @@ func TestIssueService_GetCreateMeta_Success(t *testing.T) { } +func TestMetaIssueType_GetCreateMetaWithOptions(t *testing.T) { + setup() + defer teardown() + + testAPIEndpoint := "/rest/api/2/issue/createmeta" + + testMux.HandleFunc(testAPIEndpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEndpoint) + + fmt.Fprint(w, `{ + "expand": "projects", + "projects": [{ + "expand": "issuetypes", + "self": "https://my.jira.com/rest/api/2/project/11300", + "id": "11300", + "key": "SPN", + "name": "Super Project Name", + "avatarUrls": { + "48x48": "https://my.jira.com/secure/projectavatar?pid=11300&avatarId=14405", + "24x24": "https://my.jira.com/secure/projectavatar?size=small&pid=11300&avatarId=14405", + "16x16": "https://my.jira.com/secure/projectavatar?size=xsmall&pid=11300&avatarId=14405", + "32x32": "https://my.jira.com/secure/projectavatar?size=medium&pid=11300&avatarId=14405" + }, + "issuetypes": [{ + "self": "https://my.jira.com/rest/api/2/issuetype/6", + "id": "6", + "description": "An issue which ideally should be able to be completed in one step", + "iconUrl": "https://my.jira.com/secure/viewavatar?size=xsmall&avatarId=14006&avatarType=issuetype", + "name": "Request", + "subtask": false, + "expand": "fields", + "fields": { + "summary": { + "required": true, + "schema": { + "type": "string", + "system": "summary" + }, + "name": "Summary", + "hasDefaultValue": false, + "operations": [ + "set" + ] + }, + "issuetype": { + "required": true, + "schema": { + "type": "issuetype", + "system": "issuetype" + }, + "name": "Issue Type", + "hasDefaultValue": false, + "operations": [ + + ], + "allowedValues": [{ + "self": "https://my.jira.com/rest/api/2/issuetype/6", + "id": "6", + "description": "An issue which ideally should be able to be completed in one step", + "iconUrl": "https://my.jira.com/secure/viewavatar?size=xsmall&avatarId=14006&avatarType=issuetype", + "name": "Request", + "subtask": false, + "avatarId": 14006 + }] + }, + "components": { + "required": true, + "schema": { + "type": "array", + "items": "component", + "system": "components" + }, + "name": "Component/s", + "hasDefaultValue": false, + "operations": [ + "add", + "set", + "remove" + ], + "allowedValues": [{ + "self": "https://my.jira.com/rest/api/2/component/14144", + "id": "14144", + "name": "Build automation", + "description": "Jenkins, webhooks, etc." 
+ }, { + "self": "https://my.jira.com/rest/api/2/component/14149", + "id": "14149", + "name": "Caches and noSQL", + "description": "Cassandra, Memcached, Redis, Twemproxy, Xcache" + }, { + "self": "https://my.jira.com/rest/api/2/component/14152", + "id": "14152", + "name": "Cloud services", + "description": "AWS and similar services" + }, { + "self": "https://my.jira.com/rest/api/2/component/14147", + "id": "14147", + "name": "Code quality tools", + "description": "Code sniffer, Sonar" + }, { + "self": "https://my.jira.com/rest/api/2/component/14156", + "id": "14156", + "name": "Configuration management and provisioning", + "description": "Apache/PHP modules, Consul, Salt" + }, { + "self": "https://my.jira.com/rest/api/2/component/13606", + "id": "13606", + "name": "Cronjobs", + "description": "Cronjobs in general" + }, { + "self": "https://my.jira.com/rest/api/2/component/14150", + "id": "14150", + "name": "Data pipelines and queues", + "description": "Kafka, RabbitMq" + }, { + "self": "https://my.jira.com/rest/api/2/component/14159", + "id": "14159", + "name": "Database", + "description": "MySQL related problems" + }, { + "self": "https://my.jira.com/rest/api/2/component/14314", + "id": "14314", + "name": "Documentation" + }, { + "self": "https://my.jira.com/rest/api/2/component/14151", + "id": "14151", + "name": "Git", + "description": "Bitbucket, GitHub, GitLab, Git in general" + }, { + "self": "https://my.jira.com/rest/api/2/component/14155", + "id": "14155", + "name": "HTTP services", + "description": "CDN, HaProxy, HTTP, Varnish" + }, { + "self": "https://my.jira.com/rest/api/2/component/14154", + "id": "14154", + "name": "Job and service scheduling", + "description": "Chronos, Docker, Marathon, Mesos" + }, { + "self": "https://my.jira.com/rest/api/2/component/14158", + "id": "14158", + "name": "Legacy", + "description": "Everything related to legacy" + }, { + "self": "https://my.jira.com/rest/api/2/component/14157", + "id": "14157", + "name": "Monitoring", + "description": "Collectd, Nagios, Monitoring in general" + }, { + "self": "https://my.jira.com/rest/api/2/component/14148", + "id": "14148", + "name": "Other services" + }, { + "self": "https://my.jira.com/rest/api/2/component/13602", + "id": "13602", + "name": "Package management", + "description": "Composer, Medusa, Satis" + }, { + "self": "https://my.jira.com/rest/api/2/component/14145", + "id": "14145", + "name": "Release", + "description": "Directory config, release queries, rewrite rules" + }, { + "self": "https://my.jira.com/rest/api/2/component/14146", + "id": "14146", + "name": "Staging systems and VMs", + "description": "Stage, QA machines, KVMs,Vagrant" + }, { + "self": "https://my.jira.com/rest/api/2/component/14153", + "id": "14153", + "name": "Blog" + }, { + "self": "https://my.jira.com/rest/api/2/component/14143", + "id": "14143", + "name": "Test automation", + "description": "Testing infrastructure in general" + }, { + "self": "https://my.jira.com/rest/api/2/component/14221", + "id": "14221", + "name": "Internal Infrastructure" + }] + }, + "attachment": { + "required": false, + "schema": { + "type": "array", + "items": "attachment", + "system": "attachment" + }, + "name": "Attachment", + "hasDefaultValue": false, + "operations": [ + + ] + }, + "duedate": { + "required": false, + "schema": { + "type": "date", + "system": "duedate" + }, + "name": "Due Date", + "hasDefaultValue": false, + "operations": [ + "set" + ] + }, + "description": { + "required": false, + "schema": { + "type": "string", + "system": 
"description" + }, + "name": "Description", + "hasDefaultValue": false, + "operations": [ + "set" + ] + }, + "customfield_10806": { + "required": false, + "schema": { + "type": "any", + "custom": "com.pyxis.greenhopper.jira:gh-epic-link", + "customId": 10806 + }, + "name": "Epic Link", + "hasDefaultValue": false, + "operations": [ + "set" + ] + }, + "project": { + "required": true, + "schema": { + "type": "project", + "system": "project" + }, + "name": "Project", + "hasDefaultValue": false, + "operations": [ + "set" + ], + "allowedValues": [{ + "self": "https://my.jira.com/rest/api/2/project/11300", + "id": "11300", + "key": "SPN", + "name": "Super Project Name", + "avatarUrls": { + "48x48": "https://my.jira.com/secure/projectavatar?pid=11300&avatarId=14405", + "24x24": "https://my.jira.com/secure/projectavatar?size=small&pid=11300&avatarId=14405", + "16x16": "https://my.jira.com/secure/projectavatar?size=xsmall&pid=11300&avatarId=14405", + "32x32": "https://my.jira.com/secure/projectavatar?size=medium&pid=11300&avatarId=14405" + }, + "projectCategory": { + "self": "https://my.jira.com/rest/api/2/projectCategory/10100", + "id": "10100", + "description": "", + "name": "Product & Development" + } + }] + }, + "assignee": { + "required": true, + "schema": { + "type": "user", + "system": "assignee" + }, + "name": "Assignee", + "autoCompleteUrl": "https://my.jira.com/rest/api/latest/user/assignable/search?issueKey=null&username=", + "hasDefaultValue": true, + "operations": [ + "set" + ] + }, + "priority": { + "required": false, + "schema": { + "type": "priority", + "system": "priority" + }, + "name": "Priority", + "hasDefaultValue": true, + "operations": [ + "set" + ], + "allowedValues": [{ + "self": "https://my.jira.com/rest/api/2/priority/1", + "iconUrl": "https://my.jira.com/images/icons/priorities/blocker.svg", + "name": "Immediate", + "id": "1" + }, { + "self": "https://my.jira.com/rest/api/2/priority/2", + "iconUrl": "https://my.jira.com/images/icons/priorities/critical.svg", + "name": "Urgent", + "id": "2" + }, { + "self": "https://my.jira.com/rest/api/2/priority/3", + "iconUrl": "https://my.jira.com/images/icons/priorities/major.svg", + "name": "High", + "id": "3" + }, { + "self": "https://my.jira.com/rest/api/2/priority/6", + "iconUrl": "https://my.jira.com/images/icons/priorities/moderate.svg", + "name": "Moderate", + "id": "6" + }, { + "self": "https://my.jira.com/rest/api/2/priority/4", + "iconUrl": "https://my.jira.com/images/icons/priorities/minor.svg", + "name": "Normal", + "id": "4" + }, { + "self": "https://my.jira.com/rest/api/2/priority/5", + "iconUrl": "https://my.jira.com/images/icons/priorities/trivial.svg", + "name": "Low", + "id": "5" + }] + }, + "labels": { + "required": false, + "schema": { + "type": "array", + "items": "string", + "system": "labels" + }, + "name": "Labels", + "autoCompleteUrl": "https://my.jira.com/rest/api/1.0/labels/suggest?query=", + "hasDefaultValue": false, + "operations": [ + "add", + "set", + "remove" + ] + } + } + }] + }] + }`) + }) + + issue, _, err := testClient.Issue.GetCreateMetaWithOptions(&GetQueryOptions{Expand: "projects.issuetypes.fields"}) + if err != nil { + t.Errorf("Expected nil error but got %s", err) + } + + if len(issue.Projects) != 1 { + t.Errorf("Expected 1 project, got %d", len(issue.Projects)) + } + for _, project := range issue.Projects { + if len(project.IssueTypes) != 1 { + t.Errorf("Expected 1 issueTypes, got %d", len(project.IssueTypes)) + } + for _, issueTypes := range project.IssueTypes { + requiredFields := 0 + 
fields := issueTypes.Fields + for _, value := range fields { + for key, value := range value.(map[string]interface{}) { + if key == "required" && value == true { + requiredFields = requiredFields + 1 + } + } + + } + if requiredFields != 5 { + t.Errorf("Expected 5 required fields from Create Meta information, got %d", requiredFields) + } + } + } +} + func TestMetaIssueType_GetMandatoryFields(t *testing.T) { data := make(map[string]interface{}) diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_boards.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_boards.json index 2065cb6..25768ff 100644 --- a/vendor/github.com/andygrunwald/go-jira/mocks/all_boards.json +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_boards.json @@ -1,6 +1,7 @@ { "maxResults": 50, "startAt": 0, + "total": 6, "isLast": true, "values": [ { diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_boards_filtered.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_boards_filtered.json index 545f8a8..05b3019 100644 --- a/vendor/github.com/andygrunwald/go-jira/mocks/all_boards_filtered.json +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_boards_filtered.json @@ -1,6 +1,7 @@ { "maxResults": 10, "startAt": 1, + "total": 3, "isLast": true, "values": [ { diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_fields.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_fields.json new file mode 100644 index 0000000..554ef56 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_fields.json @@ -0,0 +1,33 @@ +[ + { + "id": "description", + "name": "Description", + "custom": false, + "orderable": true, + "navigable": true, + "searchable": true, + "clauseNames": [ + "description" + ], + "schema": { + "type": "string", + "system": "description" + } + }, + { + "id": "summary", + "key": "summary", + "name": "Summary", + "custom": false, + "orderable": true, + "navigable": true, + "searchable": true, + "clauseNames": [ + "summary" + ], + "schema": { + "type": "string", + "system": "summary" + } + } +] diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_filters.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_filters.json new file mode 100644 index 0000000..50cea91 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_filters.json @@ -0,0 +1,35 @@ +[ + { + "self": "http://your-domain.atlassian.net/rest/api/2/filter/10000", + "id": "10000", + "name": "All Open Bugs", + "description": "Lists all open bugs", + "owner": { + "self": "http://your-domain.atlassian.net/rest/api/2/user?username=mia", + "key": "mia", + "accountId": "99:27935d01-92a7-4687-8272-a9b8d3b2ae2e", + "name": "mia", + "avatarUrls": { + "48x48": "http://your-domain.atlassian.net/secure/useravatar?size=large&ownerId=mia", + "24x24": "http://your-domain.atlassian.net/secure/useravatar?size=small&ownerId=mia", + "16x16": "http://your-domain.atlassian.net/secure/useravatar?size=xsmall&ownerId=mia", + "32x32": "http://your-domain.atlassian.net/secure/useravatar?size=medium&ownerId=mia" + }, + "displayName": "Mia Krystof", + "active": false + }, + "jql": "type = Bug and resolution is empty", + "viewUrl": "http://your-domain.atlassian.net/issues/?filter=10000", + "searchUrl": "http://your-domain.atlassian.net/rest/api/2/search?jql=type%20%3D%20Bug%20and%20resolutino%20is%20empty", + "favourite": true, + "favouritedCount": 0, + "sharePermissions": [], + "subscriptions": { + "size": 0, + "items": [], + "max-results": 0, + "start-index": 0, + "end-index": 0 + } + } +] diff --git 
a/vendor/github.com/andygrunwald/go-jira/mocks/all_priorities.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_priorities.json new file mode 100644 index 0000000..9237eb4 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_priorities.json @@ -0,0 +1,42 @@ +[ + { + "self": "https://issues.apache.org/jira/rest/api/2/priority/1", + "statusColor": "#cc0000", + "description": "Blocks development and/or testing work, production could not run", + "iconUrl": "https://issues.apache.org/jira/images/icons/priorities/blocker.svg", + "name": "Blocker", + "id": "1" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/priority/2", + "statusColor": "#ff0000", + "description": "Crashes, loss of data, severe memory leak.", + "iconUrl": "https://issues.apache.org/jira/images/icons/priorities/critical.svg", + "name": "Critical", + "id": "2" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/priority/3", + "statusColor": "#009900", + "description": "Major loss of function.", + "iconUrl": "https://issues.apache.org/jira/images/icons/priorities/major.svg", + "name": "Major", + "id": "3" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/priority/4", + "statusColor": "#006600", + "description": "Minor loss of function, or other problem where easy workaround is present.", + "iconUrl": "https://issues.apache.org/jira/images/icons/priorities/minor.svg", + "name": "Minor", + "id": "4" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/priority/5", + "statusColor": "#003300", + "description": "Cosmetic problem like misspelt words or misaligned text.", + "iconUrl": "https://issues.apache.org/jira/images/icons/priorities/trivial.svg", + "name": "Trivial", + "id": "5" + } +] \ No newline at end of file diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_resolutions.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_resolutions.json new file mode 100644 index 0000000..8d3d96b --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_resolutions.json @@ -0,0 +1,140 @@ +[ + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/1", + "id": "1", + "description": "A fix for this issue is checked into the tree and tested.", + "name": "Fixed" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/2", + "id": "2", + "description": "The problem described is an issue which will never be fixed.", + "name": "Won't Fix" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/3", + "id": "3", + "description": "The problem is a duplicate of an existing issue.", + "name": "Duplicate" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/6", + "id": "6", + "description": "The problem isn't valid and it can't be fixed.", + "name": "Invalid" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/4", + "id": "4", + "description": "The problem is not completely described.", + "name": "Incomplete" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/5", + "id": "5", + "description": "All attempts at reproducing this issue failed, or not enough information was available to reproduce the issue. Reading the code produces no clues as to why this behavior would occur. 
If more information appears later, please reopen the issue.", + "name": "Cannot Reproduce" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/7", + "id": "7", + "description": "Later", + "name": "Later" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/8", + "id": "8", + "description": "The described issue is not actually a problem - it is as designed.", + "name": "Not A Problem" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/9", + "id": "9", + "description": "Unresolved", + "name": "Unresolved" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10", + "id": "10", + "description": "As the name suggests", + "name": "Implemented" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/11", + "id": "11", + "description": "", + "name": "Done" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10000", + "id": "10000", + "description": "This issue was automatically closed", + "name": "Auto Closed" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10001", + "id": "10001", + "description": "", + "name": "Pending Closed" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10002", + "id": "10002", + "description": "REMIND", + "name": "REMIND" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10003", + "id": "10003", + "description": "Resolved", + "name": "Resolved" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10004", + "id": "10004", + "description": "Not A Bug", + "name": "Not A Bug" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10005", + "id": "10005", + "description": "Workaround", + "name": "Workaround" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10006", + "id": "10006", + "description": "Staged", + "name": "Staged" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10007", + "id": "10007", + "description": "Delivered", + "name": "Delivered" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10008", + "id": "10008", + "description": "Information Provided", + "name": "Information Provided" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10009", + "id": "10009", + "description": "Works for Me", + "name": "Works for Me" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10010", + "id": "10010", + "description": "Feedback Received", + "name": "Feedback Received" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/resolution/10011", + "id": "10011", + "description": "Won't Do", + "name": "Won't Do" + } +] diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/all_statuscategories.json b/vendor/github.com/andygrunwald/go-jira/mocks/all_statuscategories.json new file mode 100644 index 0000000..ec59711 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/all_statuscategories.json @@ -0,0 +1,30 @@ +[ + { + "self": "https://issues.apache.org/jira/rest/api/2/statuscategory/1", + "id": 1, + "key": "undefined", + "colorName": "medium-gray", + "name": "No Category" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/statuscategory/2", + "id": 2, + "key": "new", + "colorName": "blue-gray", + "name": "To Do" + }, + { + "self": "https://issues.apache.org/jira/rest/api/2/statuscategory/4", + "id": 4, + "key": "indeterminate", + "colorName": "yellow", + "name": "In Progress" + }, + { + "self": 
"https://issues.apache.org/jira/rest/api/2/statuscategory/3", + "id": 3, + "key": "done", + "colorName": "green", + "name": "Done" + } +] diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/favourite_filters.json b/vendor/github.com/andygrunwald/go-jira/mocks/favourite_filters.json new file mode 100644 index 0000000..50cea91 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/favourite_filters.json @@ -0,0 +1,35 @@ +[ + { + "self": "http://your-domain.atlassian.net/rest/api/2/filter/10000", + "id": "10000", + "name": "All Open Bugs", + "description": "Lists all open bugs", + "owner": { + "self": "http://your-domain.atlassian.net/rest/api/2/user?username=mia", + "key": "mia", + "accountId": "99:27935d01-92a7-4687-8272-a9b8d3b2ae2e", + "name": "mia", + "avatarUrls": { + "48x48": "http://your-domain.atlassian.net/secure/useravatar?size=large&ownerId=mia", + "24x24": "http://your-domain.atlassian.net/secure/useravatar?size=small&ownerId=mia", + "16x16": "http://your-domain.atlassian.net/secure/useravatar?size=xsmall&ownerId=mia", + "32x32": "http://your-domain.atlassian.net/secure/useravatar?size=medium&ownerId=mia" + }, + "displayName": "Mia Krystof", + "active": false + }, + "jql": "type = Bug and resolution is empty", + "viewUrl": "http://your-domain.atlassian.net/issues/?filter=10000", + "searchUrl": "http://your-domain.atlassian.net/rest/api/2/search?jql=type%20%3D%20Bug%20and%20resolutino%20is%20empty", + "favourite": true, + "favouritedCount": 0, + "sharePermissions": [], + "subscriptions": { + "size": 0, + "items": [], + "max-results": 0, + "start-index": 0, + "end-index": 0 + } + } +] diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/filter.json b/vendor/github.com/andygrunwald/go-jira/mocks/filter.json new file mode 100644 index 0000000..a47141a --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/filter.json @@ -0,0 +1,33 @@ +{ + "self": "http://your-domain.atlassian.net/rest/api/2/filter/10000", + "id": "10000", + "name": "All Open Bugs", + "description": "Lists all open bugs", + "owner": { + "self": "http://your-domain.atlassian.net/rest/api/2/user?username=mia", + "key": "mia", + "accountId": "99:27935d01-92a7-4687-8272-a9b8d3b2ae2e", + "name": "mia", + "avatarUrls": { + "48x48": "http://your-domain.atlassian.net/secure/useravatar?size=large&ownerId=mia", + "24x24": "http://your-domain.atlassian.net/secure/useravatar?size=small&ownerId=mia", + "16x16": "http://your-domain.atlassian.net/secure/useravatar?size=xsmall&ownerId=mia", + "32x32": "http://your-domain.atlassian.net/secure/useravatar?size=medium&ownerId=mia" + }, + "displayName": "Mia Krystof", + "active": false + }, + "jql": "type = Bug and resolution is empty", + "viewUrl": "http://your-domain.atlassian.net/issues/?filter=10000", + "searchUrl": "http://your-domain.atlassian.net/rest/api/2/search?jql=type%20%3D%20Bug%20and%20resolutino%20is%20empty", + "favourite": true, + "favouritedCount": 0, + "sharePermissions": [], + "subscriptions": { + "size": 0, + "items": [], + "max-results": 0, + "start-index": 0, + "end-index": 0 + } +} \ No newline at end of file diff --git a/vendor/github.com/andygrunwald/go-jira/mocks/sprints_filtered.json b/vendor/github.com/andygrunwald/go-jira/mocks/sprints_filtered.json new file mode 100644 index 0000000..29bdb23 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/mocks/sprints_filtered.json @@ -0,0 +1,16 @@ +{ + "isLast": true, + "maxResults": 50, + "startAt": 0, + "values": [ + { + "endDate": "2016-06-28T14:24:00.000-07:00", + "id": 832, + 
"name": "Iteration-13-2", + "originBoardId": 734, + "self": "https://jira.com/rest/agile/1.0/sprint/832", + "startDate": "2016-06-20T13:24:39.161-07:00", + "state": "active" + } + ] +} diff --git a/vendor/github.com/andygrunwald/go-jira/priority.go b/vendor/github.com/andygrunwald/go-jira/priority.go new file mode 100644 index 0000000..481f959 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/priority.go @@ -0,0 +1,37 @@ +package jira + +// PriorityService handles priorities for the JIRA instance / API. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-Priority +type PriorityService struct { + client *Client +} + +// Priority represents a priority of a JIRA issue. +// Typical types are "Normal", "Moderate", "Urgent", ... +type Priority struct { + Self string `json:"self,omitempty" structs:"self,omitempty"` + IconURL string `json:"iconUrl,omitempty" structs:"iconUrl,omitempty"` + Name string `json:"name,omitempty" structs:"name,omitempty"` + ID string `json:"id,omitempty" structs:"id,omitempty"` + StatusColor string `json:"statusColor,omitempty" structs:"statusColor,omitempty"` + Description string `json:"description,omitempty" structs:"description,omitempty"` +} + +// GetList gets all priorities from JIRA +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-priority-get +func (s *PriorityService) GetList() ([]Priority, *Response, error) { + apiEndpoint := "rest/api/2/priority" + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + priorityList := []Priority{} + resp, err := s.client.Do(req, &priorityList) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return priorityList, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/priority_test.go b/vendor/github.com/andygrunwald/go-jira/priority_test.go new file mode 100644 index 0000000..b91a236 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/priority_test.go @@ -0,0 +1,32 @@ +package jira + +import ( + "fmt" + "io/ioutil" + "net/http" + "testing" +) + +func TestPriorityService_GetList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/priority" + + raw, err := ioutil.ReadFile("./mocks/all_priorities.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, string(raw)) + }) + + priorities, _, err := testClient.Priority.GetList() + if priorities == nil { + t.Error("Expected priority list. Priority list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/project.go b/vendor/github.com/andygrunwald/go-jira/project.go index 8df4fc0..b71b5bb 100644 --- a/vendor/github.com/andygrunwald/go-jira/project.go +++ b/vendor/github.com/andygrunwald/go-jira/project.go @@ -2,6 +2,8 @@ package jira import ( "fmt" + + "github.com/google/go-querystring/query" ) // ProjectService handles projects for the JIRA instance / API. 
@@ -21,6 +23,7 @@ type ProjectList []struct { AvatarUrls AvatarUrls `json:"avatarUrls" structs:"avatarUrls"` ProjectTypeKey string `json:"projectTypeKey" structs:"projectTypeKey"` ProjectCategory ProjectCategory `json:"projectCategory,omitempty" structs:"projectsCategory,omitempty"` + IssueTypes []IssueType `json:"issueTypes,omitempty" structs:"issueTypes,omitempty"` } // ProjectCategory represents a single project category @@ -53,18 +56,6 @@ type Project struct { ProjectCategory ProjectCategory `json:"projectCategory,omitempty" structs:"projectCategory,omitempty"` } -// Version represents a single release version of a project -type Version struct { - Self string `json:"self" structs:"self,omitempty"` - ID string `json:"id" structs:"id,omitempty"` - Name string `json:"name" structs:"name,omitempty"` - Archived bool `json:"archived" structs:"archived,omitempty"` - Released bool `json:"released" structs:"released,omitempty"` - ReleaseDate string `json:"releaseDate" structs:"releaseDate,omitempty"` - UserReleaseDate string `json:"userReleaseDate" structs:"userReleaseDate,omitempty"` - ProjectID int `json:"projectId" structs:"projectId,omitempty"` // Unlike other IDs, this is returned as a number -} - // ProjectComponent represents a single component of a project type ProjectComponent struct { Self string `json:"self" structs:"self,omitempty"` @@ -81,21 +72,48 @@ type ProjectComponent struct { ProjectID int `json:"projectId" structs:"projectId,omitempty"` } +// PermissionScheme represents the permission scheme for the project +type PermissionScheme struct { + Expand string `json:"expand" structs:"expand,omitempty"` + Self string `json:"self" structs:"self,omitempty"` + ID int `json:"id" structs:"id,omitempty"` + Name string `json:"name" structs:"name,omitempty"` + Description string `json:"description" structs:"description,omitempty"` +} + // GetList gets all projects form JIRA // // JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/project-getAllProjects func (s *ProjectService) GetList() (*ProjectList, *Response, error) { + return s.ListWithOptions(&GetQueryOptions{}) +} + +// ListWithOptions gets all projects form JIRA with optional query params, like &GetQueryOptions{Expand: "issueTypes"} to get +// a list of all projects and their supported issuetypes +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/project-getAllProjects +func (s *ProjectService) ListWithOptions(options *GetQueryOptions) (*ProjectList, *Response, error) { apiEndpoint := "rest/api/2/project" req, err := s.client.NewRequest("GET", apiEndpoint, nil) if err != nil { return nil, nil, err } + if options != nil { + q, err := query.Values(options) + if err != nil { + return nil, nil, err + } + req.URL.RawQuery = q.Encode() + } + projectList := new(ProjectList) resp, err := s.client.Do(req, projectList) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } + return projectList, resp, nil } @@ -114,7 +132,31 @@ func (s *ProjectService) Get(projectID string) (*Project, *Response, error) { project := new(Project) resp, err := s.client.Do(req, project) if err != nil { - return nil, resp, err + jerr := NewJiraError(resp, err) + return nil, resp, jerr } + return project, resp, nil } + +// GetPermissionScheme returns a full representation of the permission scheme for the project +// JIRA will attempt to identify the project by the projectIdOrKey path parameter. +// This can be an project id, or an project key. 
+// +// JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/project-getProject +func (s *ProjectService) GetPermissionScheme(projectID string) (*PermissionScheme, *Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/project/%s/permissionscheme", projectID) + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + ps := new(PermissionScheme) + resp, err := s.client.Do(req, ps) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return ps, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/project_test.go b/vendor/github.com/andygrunwald/go-jira/project_test.go index f2d53d0..a120c9c 100644 --- a/vendor/github.com/andygrunwald/go-jira/project_test.go +++ b/vendor/github.com/andygrunwald/go-jira/project_test.go @@ -31,6 +31,30 @@ func TestProjectService_GetList(t *testing.T) { } } +func TestProjectService_ListWithOptions(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/project" + + raw, err := ioutil.ReadFile("./mocks/all_projects.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/project?expand=issueTypes") + fmt.Fprint(w, string(raw)) + }) + + projects, _, err := testClient.Project.ListWithOptions(&GetQueryOptions{Expand: "issueTypes"}) + if projects == nil { + t.Error("Expected project list. Project list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + func TestProjectService_Get(t *testing.T) { setup() defer teardown() @@ -78,3 +102,57 @@ func TestProjectService_Get_NoProject(t *testing.T) { t.Errorf("Error given: %s", err) } } + +func TestProjectService_GetPermissionScheme_Failure(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/project/99999999/permissionscheme" + + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, nil) + }) + + permissionScheme, resp, err := testClient.Project.GetPermissionScheme("99999999") + if permissionScheme != nil { + t.Errorf("Expected nil. Got %+v", permissionScheme) + } + + if resp.Status == "404" { + t.Errorf("Expected status 404. Got %s", resp.Status) + } + if err == nil { + t.Errorf("Error given: %s", err) + } +} + +func TestProjectService_GetPermissionScheme_Success(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/project/99999999/permissionscheme" + + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, `{ + "expand": "permissions,user,group,projectRole,field,all", + "id": 10201, + "self": "https://www.example.com/rest/api/2/permissionscheme/10201", + "name": "Project for specific-users", + "description": "Projects that can only see for people belonging to specific-users group" + }`) + }) + + permissionScheme, resp, err := testClient.Project.GetPermissionScheme("99999999") + if permissionScheme.ID != 10201 { + t.Errorf("Expected Permission Scheme ID. Got %+v", permissionScheme) + } + + if resp.Status == "404" { + t.Errorf("Expected status 404. 
Got %s", resp.Status) + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/resolution.go b/vendor/github.com/andygrunwald/go-jira/resolution.go new file mode 100644 index 0000000..36a651f --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/resolution.go @@ -0,0 +1,35 @@ +package jira + +// ResolutionService handles resolutions for the JIRA instance / API. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-Resolution +type ResolutionService struct { + client *Client +} + +// Resolution represents a resolution of a JIRA issue. +// Typical types are "Fixed", "Suspended", "Won't Fix", ... +type Resolution struct { + Self string `json:"self" structs:"self"` + ID string `json:"id" structs:"id"` + Description string `json:"description" structs:"description"` + Name string `json:"name" structs:"name"` +} + +// GetList gets all resolutions from JIRA +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-resolution-get +func (s *ResolutionService) GetList() ([]Resolution, *Response, error) { + apiEndpoint := "rest/api/2/resolution" + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + resolutionList := []Resolution{} + resp, err := s.client.Do(req, &resolutionList) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return resolutionList, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/resolution_test.go b/vendor/github.com/andygrunwald/go-jira/resolution_test.go new file mode 100644 index 0000000..7c01e75 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/resolution_test.go @@ -0,0 +1,32 @@ +package jira + +import ( + "fmt" + "io/ioutil" + "net/http" + "testing" +) + +func TestResolutionService_GetList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/resolution" + + raw, err := ioutil.ReadFile("./mocks/all_resolutions.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, string(raw)) + }) + + resolution, _, err := testClient.Resolution.GetList() + if resolution == nil { + t.Error("Expected resolution list. Resolution list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/sprint.go b/vendor/github.com/andygrunwald/go-jira/sprint.go index a101eb9..7e8e697 100644 --- a/vendor/github.com/andygrunwald/go-jira/sprint.go +++ b/vendor/github.com/andygrunwald/go-jira/sprint.go @@ -2,6 +2,8 @@ package jira import ( "fmt" + + "github.com/google/go-querystring/query" ) // SprintService handles sprints in JIRA Agile API. @@ -37,6 +39,9 @@ func (s *SprintService) MoveIssuesToSprint(sprintID int, issueIDs []string) (*Re } resp, err := s.client.Do(req, nil) + if err != nil { + err = NewJiraError(resp, err) + } return resp, err } @@ -56,5 +61,47 @@ func (s *SprintService) GetIssuesForSprint(sprintID int) ([]Issue, *Response, er result := new(IssuesInSprintResult) resp, err := s.client.Do(req, result) + if err != nil { + err = NewJiraError(resp, err) + } + return result.Issues, resp, err } + +// GetIssue returns a full representation of the issue for the given issue key. +// JIRA will attempt to identify the issue by the issueIdOrKey path parameter. +// This can be an issue id, or an issue key. 
+// If the issue cannot be found via an exact match, JIRA will also look for the issue in a case-insensitive way, or by looking to see if the issue was moved. +// +// The given options will be appended to the query string +// +// JIRA API docs: https://docs.atlassian.com/jira-software/REST/7.3.1/#agile/1.0/issue-getIssue +// +// TODO: create agile service for holding all agile apis' implementation +func (s *SprintService) GetIssue(issueID string, options *GetQueryOptions) (*Issue, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/agile/1.0/issue/%s", issueID) + + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + + if err != nil { + return nil, nil, err + } + + if options != nil { + q, err := query.Values(options) + if err != nil { + return nil, nil, err + } + req.URL.RawQuery = q.Encode() + } + + issue := new(Issue) + resp, err := s.client.Do(req, issue) + + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + return issue, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/sprint_test.go b/vendor/github.com/andygrunwald/go-jira/sprint_test.go index 40f0546..6b5c5bd 100644 --- a/vendor/github.com/andygrunwald/go-jira/sprint_test.go +++ b/vendor/github.com/andygrunwald/go-jira/sprint_test.go @@ -5,6 +5,7 @@ import ( "fmt" "io/ioutil" "net/http" + "reflect" "testing" ) @@ -65,3 +66,31 @@ func TestSprintService_GetIssuesForSprint(t *testing.T) { } } + +func TestSprintService_GetIssue(t *testing.T) { + setup() + defer teardown() + + testAPIEndpoint := "/rest/agile/1.0/issue/10002" + + testMux.HandleFunc(testAPIEndpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEndpoint) + fmt.Fprint(w, `{"expand":"renderedFields,names,schema,transitions,operations,editmeta,changelog,versionedRepresentations","id":"10002","self":"http://www.example.com/jira/rest/api/2/issue/10002","key":"EX-1","fields":{"labels":["test"],"watcher":{"self":"http://www.example.com/jira/rest/api/2/issue/EX-1/watchers","isWatching":false,"watchCount":1,"watchers":[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false}]},"sprint": {"id": 37,"self": "http://www.example.com/jira/rest/agile/1.0/sprint/13", "state": "future", "name": "sprint 2"}, "epic": {"id": 19415,"key": "EPIC-77","self": "https://example.atlassian.net/rest/agile/1.0/epic/19415","name": "Epic Name","summary": "Do it","color": {"key": "color_11"},"done": false},"attachment":[{"self":"http://www.example.com/jira/rest/api/2.0/attachments/10000","filename":"picture.jpg","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred","24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred","32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. 
User","active":false},"created":"2016-03-16T04:22:37.461+0000","size":23123,"mimeType":"image/jpeg","content":"http://www.example.com/jira/attachments/10000","thumbnail":"http://www.example.com/jira/secure/thumbnail/10000"}],"sub-tasks":[{"id":"10000","type":{"id":"10000","name":"","inward":"Parent","outward":"Sub-task"},"outwardIssue":{"id":"10003","key":"EX-2","self":"http://www.example.com/jira/rest/api/2/issue/EX-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"description":"example bug report","project":{"self":"http://www.example.com/jira/rest/api/2/project/EX","id":"10000","key":"EX","name":"Example","avatarUrls":{"48x48":"http://www.example.com/jira/secure/projectavatar?size=large&pid=10000","24x24":"http://www.example.com/jira/secure/projectavatar?size=small&pid=10000","16x16":"http://www.example.com/jira/secure/projectavatar?size=xsmall&pid=10000","32x32":"http://www.example.com/jira/secure/projectavatar?size=medium&pid=10000"},"projectCategory":{"self":"http://www.example.com/jira/rest/api/2/projectCategory/10000","id":"10000","name":"FIRST","description":"First Project Category"}},"comment":{"comments":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/comment/10000","id":"10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"body":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.","updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"created":"2016-03-16T04:22:37.356+0000","updated":"2016-03-16T04:22:37.356+0000","visibility":{"type":"role","value":"Administrators"}}]},"issuelinks":[{"id":"10001","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"outwardIssue":{"id":"10004L","key":"PRJ-2","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-2","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}},{"id":"10002","type":{"id":"10000","name":"Dependent","inward":"depends on","outward":"is depended by"},"inwardIssue":{"id":"10004","key":"PRJ-3","self":"http://www.example.com/jira/rest/api/2/issue/PRJ-3","fields":{"status":{"iconUrl":"http://www.example.com/jira//images/icons/statuses/open.png","name":"Open"}}}}],"worklog":{"worklogs":[{"self":"http://www.example.com/jira/rest/api/2/issue/10010/worklog/10000","author":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. User","active":false},"updateAuthor":{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","name":"fred","displayName":"Fred F. 
User","active":false},"comment":"I did some work here.","updated":"2016-03-16T04:22:37.471+0000","visibility":{"type":"group","value":"jira-developers"},"started":"2016-03-16T04:22:37.471+0000","timeSpent":"3h 20m","timeSpentSeconds":12000,"id":"100028","issueId":"10002"}]},"updated":"2016-04-06T02:36:53.594-0700","duedate":"2018-01-19","timetracking":{"originalEstimate":"10m","remainingEstimate":"3m","timeSpent":"6m","originalEstimateSeconds":600,"remainingEstimateSeconds":200,"timeSpentSeconds":400}},"names":{"watcher":"watcher","attachment":"attachment","sub-tasks":"sub-tasks","description":"description","project":"project","comment":"comment","issuelinks":"issuelinks","worklog":"worklog","updated":"updated","timetracking":"timetracking"},"schema":{}}`) + }) + + issue, _, err := testClient.Sprint.GetIssue("10002", nil) + if issue == nil { + t.Errorf("Expected issue. Issue is nil %v", err) + } + if !reflect.DeepEqual(issue.Fields.Labels, []string{"test"}) { + t.Error("Expected labels for the returned issue") + } + if len(issue.Fields.Comments.Comments) != 1 { + t.Errorf("Expected one comment, %v found", len(issue.Fields.Comments.Comments)) + } + if err != nil { + t.Errorf("Error given: %s", err) + } + +} diff --git a/vendor/github.com/andygrunwald/go-jira/statuscategory.go b/vendor/github.com/andygrunwald/go-jira/statuscategory.go new file mode 100644 index 0000000..05db420 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/statuscategory.go @@ -0,0 +1,44 @@ +package jira + +// StatusCategoryService handles status categories for the JIRA instance / API. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-Statuscategory +type StatusCategoryService struct { + client *Client +} + +// StatusCategory represents the category a status belongs to. +// Those categories can be user defined in every JIRA instance. 
+type StatusCategory struct { + Self string `json:"self" structs:"self"` + ID int `json:"id" structs:"id"` + Name string `json:"name" structs:"name"` + Key string `json:"key" structs:"key"` + ColorName string `json:"colorName" structs:"colorName"` +} + +// These constants are the keys of the default JIRA status categories +const ( + StatusCategoryComplete = "done" + StatusCategoryInProgress = "indeterminate" + StatusCategoryToDo = "new" + StatusCategoryUndefined = "undefined" +) + +// GetList gets all status categories from JIRA +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-statuscategory-get +func (s *StatusCategoryService) GetList() ([]StatusCategory, *Response, error) { + apiEndpoint := "rest/api/2/statuscategory" + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + statusCategoryList := []StatusCategory{} + resp, err := s.client.Do(req, &statusCategoryList) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return statusCategoryList, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/statuscategory_test.go b/vendor/github.com/andygrunwald/go-jira/statuscategory_test.go new file mode 100644 index 0000000..3f8b7ec --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/statuscategory_test.go @@ -0,0 +1,32 @@ +package jira + +import ( + "fmt" + "io/ioutil" + "net/http" + "testing" +) + +func TestStatusCategoryService_GetList(t *testing.T) { + setup() + defer teardown() + testAPIEdpoint := "/rest/api/2/statuscategory" + + raw, err := ioutil.ReadFile("./mocks/all_statuscategories.json") + if err != nil { + t.Error(err.Error()) + } + testMux.HandleFunc(testAPIEdpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, testAPIEdpoint) + fmt.Fprint(w, string(raw)) + }) + + statusCategory, _, err := testClient.StatusCategory.GetList() + if statusCategory == nil { + t.Error("Expected statusCategory list. 
StatusCategory list is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/user.go b/vendor/github.com/andygrunwald/go-jira/user.go index 7a9762c..2a61c74 100644 --- a/vendor/github.com/andygrunwald/go-jira/user.go +++ b/vendor/github.com/andygrunwald/go-jira/user.go @@ -27,6 +27,21 @@ type User struct { ApplicationKeys []string `json:"applicationKeys,omitempty" structs:"applicationKeys,omitempty"` } +// UserGroup represents the group list +type UserGroup struct { + Self string `json:"self,omitempty" structs:"self,omitempty"` + Name string `json:"name,omitempty" structs:"name,omitempty"` +} + +type userSearchParam struct { + name string + value string +} + +type userSearch []userSearchParam + +type userSearchF func(userSearch) userSearch + // Get gets user info from JIRA // // JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/user-getUser @@ -40,7 +55,7 @@ func (s *UserService) Get(username string) (*User, *Response, error) { user := new(User) resp, err := s.client.Do(req, user) if err != nil { - return nil, resp, err + return nil, resp, NewJiraError(resp, err) } return user, resp, nil } @@ -64,11 +79,132 @@ func (s *UserService) Create(user *User) (*User, *Response, error) { defer resp.Body.Close() data, err := ioutil.ReadAll(resp.Body) if err != nil { - return nil, resp, fmt.Errorf("Could not read the returned data") + e := fmt.Errorf("Could not read the returned data") + return nil, resp, NewJiraError(resp, e) } err = json.Unmarshal(data, responseUser) if err != nil { - return nil, resp, fmt.Errorf("Could not unmarshall the data into struct") + e := fmt.Errorf("Could not unmarshall the data into struct") + return nil, resp, NewJiraError(resp, e) } return responseUser, resp, nil } + +// Delete deletes an user from JIRA. +// Returns http.StatusNoContent on success. 
+// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-user-delete +func (s *UserService) Delete(username string) (*Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/user?username=%s", username) + req, err := s.client.NewRequest("DELETE", apiEndpoint, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + return resp, NewJiraError(resp, err) + } + return resp, nil +} + +// GetGroups returns the groups which the user belongs to +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/user-getUserGroups +func (s *UserService) GetGroups(username string) (*[]UserGroup, *Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/user/groups?username=%s", username) + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + userGroups := new([]UserGroup) + resp, err := s.client.Do(req, userGroups) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return userGroups, resp, nil +} + +// Get information about the current logged-in user +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-myself-get +func (s *UserService) GetSelf() (*User, *Response, error) { + const apiEndpoint = "rest/api/2/myself" + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + var user User + resp, err := s.client.Do(req, &user) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return &user, resp, nil +} + +// WithMaxResults sets the max results to return +func WithMaxResults(maxResults int) userSearchF { + return func(s userSearch) userSearch { + s = append(s, userSearchParam{name: "maxResults", value: fmt.Sprintf("%d", maxResults)}) + return s + } +} + +// WithStartAt set the start pager +func WithStartAt(startAt int) userSearchF { + return func(s userSearch) userSearch { + s = append(s, userSearchParam{name: "startAt", value: fmt.Sprintf("%d", startAt)}) + return s + } +} + +// WithActive sets the active users lookup +func WithActive(active bool) userSearchF { + return func(s userSearch) userSearch { + s = append(s, userSearchParam{name: "includeActive", value: fmt.Sprintf("%t", active)}) + return s + } +} + +// WithInactive sets the inactive users lookup +func WithInactive(inactive bool) userSearchF { + return func(s userSearch) userSearch { + s = append(s, userSearchParam{name: "includeInactive", value: fmt.Sprintf("%t", inactive)}) + return s + } +} + +// Find searches for user info from JIRA: +// It can find users by email, username or name +// +// JIRA API docs: https://docs.atlassian.com/jira/REST/cloud/#api/2/user-findUsers +func (s *UserService) Find(property string, tweaks ...userSearchF) ([]User, *Response, error) { + search := []userSearchParam{ + { + name: "username", + value: property, + }, + } + for _, f := range tweaks { + search = f(search) + } + + var queryString = "" + for _, param := range search { + queryString += param.name + "=" + param.value + "&" + } + + apiEndpoint := fmt.Sprintf("/rest/api/2/user/search?%s", queryString[:len(queryString)-1]) + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + users := []User{} + resp, err := s.client.Do(req, &users) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return users, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/user_test.go 
b/vendor/github.com/andygrunwald/go-jira/user_test.go index 24b086b..2e1c1ea 100644 --- a/vendor/github.com/andygrunwald/go-jira/user_test.go +++ b/vendor/github.com/andygrunwald/go-jira/user_test.go @@ -55,3 +55,115 @@ func TestUserService_Create(t *testing.T) { t.Error("Expected user. User is nil") } } + +func TestUserService_Delete(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/user", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testRequestURL(t, r, "/rest/api/2/user?username=fred") + + w.WriteHeader(http.StatusNoContent) + }) + + resp, err := testClient.User.Delete("fred") + if err != nil { + t.Errorf("Error given: %s", err) + } + + if resp.StatusCode != http.StatusNoContent { + t.Errorf("Wrong status code: %d. Expected %d", resp.StatusCode, http.StatusNoContent) + } +} + +func TestUserService_GetGroups(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/user/groups", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/user/groups?username=fred") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `[{"name":"jira-software-users","self":"http://www.example.com/jira/rest/api/2/user?username=fred"}]`) + }) + + if groups, _, err := testClient.User.GetGroups("fred"); err != nil { + t.Errorf("Error given: %s", err) + } else if groups == nil { + t.Error("Expected user groups. []UserGroup is nil") + } +} + +func TestUserService_GetSelf(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/myself", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/myself") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","key":"fred", + "name":"fred","emailAddress":"fred@example.com","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", + "24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", + "32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":true,"timeZone":"Australia/Sydney","groups":{"size":3,"items":[ + {"name":"jira-user","self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-user"},{"name":"jira-admin", + "self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-admin"},{"name":"important","self":"http://www.example.com/jira/rest/api/2/group?groupname=important" + }]},"applicationRoles":{"size":1,"items":[]},"expand":"groups,applicationRoles"}`) + }) + + if user, _, err := testClient.User.GetSelf(); err != nil { + t.Errorf("Error given: %s", err) + } else if user == nil { + t.Error("Expected user groups. []UserGroup is nil") + } else if user.Name != "fred" || + !user.Active || + user.DisplayName != "Fred F. 
User" { + t.Errorf("User JSON deserialized incorrectly") + } +} + +func TestUserService_Find_Success(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/user/search", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/user/search?username=fred@example.com") + + fmt.Fprint(w, `[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","key":"fred", + "name":"fred","emailAddress":"fred@example.com","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", + "24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", + "32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":true,"timeZone":"Australia/Sydney","groups":{"size":3,"items":[ + {"name":"jira-user","self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-user"},{"name":"jira-admin", + "self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-admin"},{"name":"important","self":"http://www.example.com/jira/rest/api/2/group?groupname=important" + }]},"applicationRoles":{"size":1,"items":[]},"expand":"groups,applicationRoles"}]`) + }) + + if user, _, err := testClient.User.Find("fred@example.com"); err != nil { + t.Errorf("Error given: %s", err) + } else if user == nil { + t.Error("Expected user. User is nil") + } +} + +func TestUserService_Find_SuccessParams(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/user/search", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/user/search?username=fred@example.com&startAt=100&maxResults=1000") + + fmt.Fprint(w, `[{"self":"http://www.example.com/jira/rest/api/2/user?username=fred","key":"fred", + "name":"fred","emailAddress":"fred@example.com","avatarUrls":{"48x48":"http://www.example.com/jira/secure/useravatar?size=large&ownerId=fred", + "24x24":"http://www.example.com/jira/secure/useravatar?size=small&ownerId=fred","16x16":"http://www.example.com/jira/secure/useravatar?size=xsmall&ownerId=fred", + "32x32":"http://www.example.com/jira/secure/useravatar?size=medium&ownerId=fred"},"displayName":"Fred F. User","active":true,"timeZone":"Australia/Sydney","groups":{"size":3,"items":[ + {"name":"jira-user","self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-user"},{"name":"jira-admin", + "self":"http://www.example.com/jira/rest/api/2/group?groupname=jira-admin"},{"name":"important","self":"http://www.example.com/jira/rest/api/2/group?groupname=important" + }]},"applicationRoles":{"size":1,"items":[]},"expand":"groups,applicationRoles"}]`) + }) + + if user, _, err := testClient.User.Find("fred@example.com", WithStartAt(100), WithMaxResults(1000)); err != nil { + t.Errorf("Error given: %s", err) + } else if user == nil { + t.Error("Expected user. User is nil") + } +} diff --git a/vendor/github.com/andygrunwald/go-jira/version.go b/vendor/github.com/andygrunwald/go-jira/version.go new file mode 100644 index 0000000..87bcde7 --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/version.go @@ -0,0 +1,97 @@ +package jira + +import ( + "encoding/json" + "fmt" + "io/ioutil" +) + +// VersionService handles Versions for the JIRA instance / API. 
+// +// JIRA API docs: https://docs.atlassian.com/jira/REST/latest/#api/2/version +type VersionService struct { + client *Client +} + +// Version represents a single release version of a project +type Version struct { + Self string `json:"self,omitempty" structs:"self,omitempty"` + ID string `json:"id,omitempty" structs:"id,omitempty"` + Name string `json:"name,omitempty" structs:"name,omitempty"` + Description string `json:"description,omitempty" structs:"name,omitempty"` + Archived bool `json:"archived,omitempty" structs:"archived,omitempty"` + Released bool `json:"released,omitempty" structs:"released,omitempty"` + ReleaseDate string `json:"releaseDate,omitempty" structs:"releaseDate,omitempty"` + UserReleaseDate string `json:"userReleaseDate,omitempty" structs:"userReleaseDate,omitempty"` + ProjectID int `json:"projectId,omitempty" structs:"projectId,omitempty"` // Unlike other IDs, this is returned as a number + StartDate string `json:"startDate,omitempty" structs:"startDate,omitempty"` +} + +// Get gets version info from JIRA +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-version-id-get +func (s *VersionService) Get(versionID int) (*Version, *Response, error) { + apiEndpoint := fmt.Sprintf("/rest/api/2/version/%v", versionID) + req, err := s.client.NewRequest("GET", apiEndpoint, nil) + if err != nil { + return nil, nil, err + } + + version := new(Version) + resp, err := s.client.Do(req, version) + if err != nil { + return nil, resp, NewJiraError(resp, err) + } + return version, resp, nil +} + +// Create creates a version in JIRA. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-version-post +func (s *VersionService) Create(version *Version) (*Version, *Response, error) { + apiEndpoint := "/rest/api/2/version" + req, err := s.client.NewRequest("POST", apiEndpoint, version) + if err != nil { + return nil, nil, err + } + + resp, err := s.client.Do(req, nil) + if err != nil { + return nil, resp, err + } + + responseVersion := new(Version) + defer resp.Body.Close() + data, err := ioutil.ReadAll(resp.Body) + if err != nil { + e := fmt.Errorf("Could not read the returned data") + return nil, resp, NewJiraError(resp, e) + } + err = json.Unmarshal(data, responseVersion) + if err != nil { + e := fmt.Errorf("Could not unmarshall the data into struct") + return nil, resp, NewJiraError(resp, e) + } + return responseVersion, resp, nil +} + +// Update updates a version from a JSON representation. +// +// JIRA API docs: https://developer.atlassian.com/cloud/jira/platform/rest/#api-api-2-version-id-put +func (s *VersionService) Update(version *Version) (*Version, *Response, error) { + apiEndpoint := fmt.Sprintf("rest/api/2/version/%v", version.ID) + req, err := s.client.NewRequest("PUT", apiEndpoint, version) + if err != nil { + return nil, nil, err + } + resp, err := s.client.Do(req, nil) + if err != nil { + jerr := NewJiraError(resp, err) + return nil, resp, jerr + } + + // This is just to follow the rest of the API's convention of returning a version. + // Returning the same pointer here is pointless, so we return a copy instead. 
+ ret := *version + return &ret, resp, nil +} diff --git a/vendor/github.com/andygrunwald/go-jira/version_test.go b/vendor/github.com/andygrunwald/go-jira/version_test.go new file mode 100644 index 0000000..c77097d --- /dev/null +++ b/vendor/github.com/andygrunwald/go-jira/version_test.go @@ -0,0 +1,111 @@ +package jira + +import ( + "fmt" + "net/http" + "testing" +) + +func TestVersionService_Get_Success(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/version/10002", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testRequestURL(t, r, "/rest/api/2/version/10002") + + fmt.Fprint(w, `{ + "self": "http://www.example.com/jira/rest/api/2/version/10002", + "id": "10002", + "description": "An excellent version", + "name": "New Version 1", + "archived": false, + "released": true, + "releaseDate": "2010-07-06", + "overdue": true, + "userReleaseDate": "6/Jul/2010", + "startDate" : "2010-07-01", + "projectId": 10000 + }`) + }) + + version, _, err := testClient.Version.Get(10002) + if version == nil { + t.Error("Expected version. Issue is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestVersionService_Create(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/version", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testRequestURL(t, r, "/rest/api/2/version") + + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{ + "description": "An excellent version", + "name": "New Version 1", + "archived": false, + "released": true, + "releaseDate": "2010-07-06", + "userReleaseDate": "6/Jul/2010", + "project": "PXA", + "projectId": 10000 + }`) + }) + + v := &Version{ + Name: "New Version 1", + Description: "An excellent version", + ProjectID: 10000, + Released: true, + ReleaseDate: "2010-07-06", + UserReleaseDate: "6/Jul/2010", + StartDate: "2018-07-01", + } + + version, _, err := testClient.Version.Create(v) + if version == nil { + t.Error("Expected version. Version is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} + +func TestServiceService_Update(t *testing.T) { + setup() + defer teardown() + testMux.HandleFunc("/rest/api/2/version/10002", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testRequestURL(t, r, "/rest/api/2/version/10002") + fmt.Fprint(w, `{ + "description": "An excellent updated version", + "name": "New Updated Version 1", + "archived": false, + "released": true, + "releaseDate": "2010-07-06", + "userReleaseDate": "6/Jul/2010", + "startDate" : "2010-07-01", + "project": "PXA", + "projectId": 10000 + }`) + }) + + v := &Version{ + ID: "10002", + Name: "New Updated Version 1", + Description: "An excellent updated version", + } + + version, _, err := testClient.Version.Update(v) + if version == nil { + t.Error("Expected version. Version is nil") + } + if err != nil { + t.Errorf("Error given: %s", err) + } +} diff --git a/vendor/github.com/fatih/structs/.travis.yml b/vendor/github.com/fatih/structs/.travis.yml index b53dd55..a08df79 100644 --- a/vendor/github.com/fatih/structs/.travis.yml +++ b/vendor/github.com/fatih/structs/.travis.yml @@ -2,6 +2,7 @@ language: go go: - 1.7.x - 1.8.x + - 1.9.x - tip sudo: false before_install: diff --git a/vendor/github.com/fatih/structs/README.md b/vendor/github.com/fatih/structs/README.md index 44e0100..1a4a2fd 100644 --- a/vendor/github.com/fatih/structs/README.md +++ b/vendor/github.com/fatih/structs/README.md @@ -1,3 +1,10 @@ +# Archived project. No maintenance. 
+ +This project is not maintained anymore and is archived. Feel free to fork and +make your own changes if needed. For more detail read my blog post: [Taking an indefinite sabbatical from my projects](https://arslan.io/2018/10/09/taking-an-indefinite-sabbatical-from-my-projects/) + +Thanks to everyone for their valuable feedback and contributions. + # Structs [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/structs) [![Build Status](http://img.shields.io/travis/fatih/structs.svg?style=flat-square)](https://travis-ci.org/fatih/structs) [![Coverage Status](http://img.shields.io/coveralls/fatih/structs.svg?style=flat-square)](https://coveralls.io/r/fatih/structs) Structs contains various utilities to work with Go (Golang) structs. It was @@ -81,8 +88,8 @@ n := s.Names() // Get a []string f := s.Field(name) // Get a *Field based on the given field name f, ok := s.FieldOk(name) // Get a *Field based on the given field name n := s.Name() // Get the struct name -h := s.HasZero() // Check if any field is initialized -z := s.IsZero() // Check if all fields are initialized +h := s.HasZero() // Check if any field is uninitialized +z := s.IsZero() // Check if all fields are uninitialized ``` ### Field methods diff --git a/vendor/github.com/fatih/structs/field_test.go b/vendor/github.com/fatih/structs/field_test.go index de9dc3b..ed14dcc 100644 --- a/vendor/github.com/fatih/structs/field_test.go +++ b/vendor/github.com/fatih/structs/field_test.go @@ -135,7 +135,7 @@ func TestField_Set(t *testing.T) { func TestField_NotSettable(t *testing.T) { a := map[int]Baz{ - 4: Baz{ + 4: { A: "value", }, } diff --git a/vendor/github.com/fatih/structs/structs.go b/vendor/github.com/fatih/structs/structs.go index be3816a..3a87706 100644 --- a/vendor/github.com/fatih/structs/structs.go +++ b/vendor/github.com/fatih/structs/structs.go @@ -203,9 +203,7 @@ func (s *Struct) Values() []interface{} { if IsStruct(val.Interface()) && !tagOpts.Has("omitnested") { // look out for embedded structs, and convert them to a // []interface{} to be added to the final values slice - for _, embeddedVal := range Values(val.Interface()) { - t = append(t, embeddedVal) - } + t = append(t, Values(val.Interface())...) 
} else { t = append(t, val.Interface()) } @@ -573,7 +571,7 @@ func (s *Struct) nested(val reflect.Value) interface{} { break } - slices := make([]interface{}, val.Len(), val.Len()) + slices := make([]interface{}, val.Len()) for x := 0; x < val.Len(); x++ { slices[x] = s.nested(val.Index(x)) } diff --git a/vendor/github.com/fatih/structs/structs_example_test.go b/vendor/github.com/fatih/structs/structs_example_test.go index 329c130..e74099d 100644 --- a/vendor/github.com/fatih/structs/structs_example_test.go +++ b/vendor/github.com/fatih/structs/structs_example_test.go @@ -301,7 +301,7 @@ func ExampleIsZero() { Enabled bool } - // Nothing is initalized + // Nothing is initialized a := &Server{} isZeroA := IsZero(a) diff --git a/vendor/github.com/fatih/structs/structs_test.go b/vendor/github.com/fatih/structs/structs_test.go index 96a5284..073882e 100644 --- a/vendor/github.com/fatih/structs/structs_test.go +++ b/vendor/github.com/fatih/structs/structs_test.go @@ -380,7 +380,7 @@ func TestMap_NestedMapWithSliceIntValues(t *testing.T) { b := &B{ Foo: map[string][]int{ - "example_key": []int{80}, + "example_key": {80}, }, } @@ -418,7 +418,7 @@ func TestMap_NestedMapWithSliceStructValues(t *testing.T) { b := &B{ Foo: map[string][]address{ - "example_key": []address{ + "example_key": { {Country: "Turkey"}, }, }, @@ -463,8 +463,8 @@ func TestMap_NestedSliceWithStructValues(t *testing.T) { p := person{ Name: "test", Addresses: []address{ - address{Country: "England"}, - address{Country: "Italy"}, + {Country: "England"}, + {Country: "Italy"}, }, } mp := Map(p) @@ -492,8 +492,8 @@ func TestMap_NestedSliceWithPointerOfStructValues(t *testing.T) { p := person{ Name: "test", Addresses: []*address{ - &address{Country: "England"}, - &address{Country: "Italy"}, + {Country: "England"}, + {Country: "Italy"}, }, } mp := Map(p) @@ -1430,14 +1430,14 @@ func TestPointer2Pointer(t *testing.T) { func TestMap_InterfaceTypeWithMapValue(t *testing.T) { type A struct { Name string `structs:"name"` - Ip string `structs:"ip"` + IP string `structs:"ip"` Query string `structs:"query"` Payload interface{} `structs:"payload"` } a := A{ Name: "test", - Ip: "127.0.0.1", + IP: "127.0.0.1", Query: "", Payload: map[string]string{"test_param": "test_param"}, } diff --git a/vendor/github.com/fatih/structs/tags.go b/vendor/github.com/fatih/structs/tags.go index 8859341..136a31e 100644 --- a/vendor/github.com/fatih/structs/tags.go +++ b/vendor/github.com/fatih/structs/tags.go @@ -5,7 +5,7 @@ import "strings" // tagOptions contains a slice of tag options type tagOptions []string -// Has returns true if the given optiton is available in tagOptions +// Has returns true if the given option is available in tagOptions func (t tagOptions) Has(opt string) bool { for _, tagOpt := range t { if tagOpt == opt { diff --git a/vendor/github.com/fsnotify/fsnotify/.travis.yml b/vendor/github.com/fsnotify/fsnotify/.travis.yml index 22e5fa4..9071560 100644 --- a/vendor/github.com/fsnotify/fsnotify/.travis.yml +++ b/vendor/github.com/fsnotify/fsnotify/.travis.yml @@ -2,29 +2,36 @@ sudo: false language: go go: - - 1.8 - - 1.7.x - - tip + - "stable" + - "1.11.x" + - "1.10.x" + - "1.9.x" + - "1.8.x" matrix: + include: + - go: "stable" + env: GOLINT=true allow_failures: - go: tip fast_finish: true -before_script: - - go get -u github.com/golang/lint/golint + +before_install: + - if [ ! -z "${GOLINT}" ]; then go get -u golang.org/x/lint/golint; fi script: - - go test -v --race ./... + - go test --race ./... after_script: - test -z "$(gofmt -s -l -w . 
| tee /dev/stderr)" - - test -z "$(golint ./... | tee /dev/stderr)" + - if [ ! -z "${GOLINT}" ]; then echo running golint; golint --set_exit_status ./...; else echo skipping golint; fi - go vet ./... os: - linux - osx + - windows notifications: email: false diff --git a/vendor/github.com/fsnotify/fsnotify/AUTHORS b/vendor/github.com/fsnotify/fsnotify/AUTHORS index 0a5bf8f..5ab5d41 100644 --- a/vendor/github.com/fsnotify/fsnotify/AUTHORS +++ b/vendor/github.com/fsnotify/fsnotify/AUTHORS @@ -8,8 +8,10 @@ # Please keep the list sorted. +Aaron L Adrien Bustany Amit Krishnan +Anmol Sethi Bjørn Erik Pedersen Bruno Bigras Caleb Spare @@ -26,6 +28,7 @@ Kelvin Fo Ken-ichirou MATSUZAWA Matt Layher Nathan Youngman +Nickolai Zeldovich Patrick Paul Hammond Pawel Knap @@ -33,12 +36,15 @@ Pieter Droogendijk Pursuit92 Riku Voipio Rob Figueiredo +Rodrigo Chiossi Slawek Ligus Soge Zhang Tiffany Jernigan Tilak Sharma +Tom Payne Travis Cline Tudor Golubenco +Vahe Khachikyan Yukang bronze1man debrando diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md index 8c732c1..be4d7ea 100644 --- a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md +++ b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## v1.4.7 / 2018-01-09 + +* BSD/macOS: Fix possible deadlock on closing the watcher on kqueue (thanks @nhooyr and @glycerine) +* Tests: Fix missing verb on format string (thanks @rchiossi) +* Linux: Fix deadlock in Remove (thanks @aarondl) +* Linux: Watch.Add improvements (avoid race, fix consistency, reduce garbage) (thanks @twpayne) +* Docs: Moved FAQ into the README (thanks @vahe) +* Linux: Properly handle inotify's IN_Q_OVERFLOW event (thanks @zeldovich) +* Docs: replace references to OS X with macOS + ## v1.4.2 / 2016-10-10 * Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack) diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md index 3993207..f7e67db 100644 --- a/vendor/github.com/fsnotify/fsnotify/README.md +++ b/vendor/github.com/fsnotify/fsnotify/README.md @@ -14,7 +14,7 @@ Cross platform: Windows, Linux, BSD and macOS. 
|----------|----------|----------| |inotify |Linux 2.6.27 or later, Android\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| |kqueue |BSD, macOS, iOS\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| -|ReadDirectoryChangesW|Windows|Supported [![Build status](https://ci.appveyor.com/api/projects/status/ivwjubaih4r0udeh/branch/master?svg=true)](https://ci.appveyor.com/project/NathanYoungman/fsnotify/branch/master)| +|ReadDirectoryChangesW|Windows|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| |FSEvents |macOS |[Planned](https://github.com/fsnotify/fsnotify/issues/11)| |FEN |Solaris 11 |[In Progress](https://github.com/fsnotify/fsnotify/issues/12)| |fanotify |Linux 2.6.37+ | | diff --git a/vendor/github.com/fsnotify/fsnotify/example_test.go b/vendor/github.com/fsnotify/fsnotify/example_test.go index 700502c..b4f9f95 100644 --- a/vendor/github.com/fsnotify/fsnotify/example_test.go +++ b/vendor/github.com/fsnotify/fsnotify/example_test.go @@ -23,12 +23,18 @@ func ExampleNewWatcher() { go func() { for { select { - case event := <-watcher.Events: + case event, ok := <-watcher.Events: + if !ok { + return + } log.Println("event:", event) if event.Op&fsnotify.Write == fsnotify.Write { log.Println("modified file:", event.Name) } - case err := <-watcher.Errors: + case err, ok := <-watcher.Errors: + if !ok { + return + } log.Println("error:", err) } } diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go index 190bf0d..89cab04 100644 --- a/vendor/github.com/fsnotify/fsnotify/fsnotify.go +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify.go @@ -63,4 +63,6 @@ func (e Event) String() string { } // Common errors that can be reported by a watcher -var ErrEventOverflow = errors.New("fsnotify queue overflow") +var ( + ErrEventOverflow = errors.New("fsnotify queue overflow") +) diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go b/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go index 9d6d72a..f9771d9 100644 --- a/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go @@ -6,7 +6,11 @@ package fsnotify -import "testing" +import ( + "os" + "testing" + "time" +) func TestEventStringWithValue(t *testing.T) { for opMask, expectedString := range map[Op]string{ @@ -38,3 +42,29 @@ func TestEventOpStringWithNoValue(t *testing.T) { t.Fatalf("Expected %s, got: %v", expectedOpString, event.Op.String()) } } + +// TestWatcherClose tests that the goroutine started by creating the watcher can be +// signalled to return at any time, even if there is no goroutine listening on the events +// or errors channels. +func TestWatcherClose(t *testing.T) { + t.Parallel() + + name := tempMkFile(t, "") + w := newWatcher(t) + err := w.Add(name) + if err != nil { + t.Fatal(err) + } + + err = os.Remove(name) + if err != nil { + t.Fatal(err) + } + // Allow the watcher to receive the event. 
+ time.Sleep(time.Millisecond * 100) + + err = w.Close() + if err != nil { + t.Fatal(err) + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go index cc7db4b..b33f2b4 100644 --- a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go +++ b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go @@ -40,12 +40,12 @@ func newFdPoller(fd int) (*fdPoller, error) { poller.fd = fd // Create epoll fd - poller.epfd, errno = unix.EpollCreate1(0) + poller.epfd, errno = unix.EpollCreate1(unix.EPOLL_CLOEXEC) if poller.epfd == -1 { return nil, errno } // Create pipe; pipe[0] is the read end, pipe[1] the write end. - errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK) + errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK|unix.O_CLOEXEC) if errno != nil { return nil, errno } diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_test.go b/vendor/github.com/fsnotify/fsnotify/inotify_test.go index 2cc6d93..2c11d2e 100644 --- a/vendor/github.com/fsnotify/fsnotify/inotify_test.go +++ b/vendor/github.com/fsnotify/fsnotify/inotify_test.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" "strings" + "sync" "testing" "time" ) @@ -295,7 +296,7 @@ func TestInotifyRemoveTwice(t *testing.T) { err = w.Remove(testFile) if err != nil { - t.Fatalf("wanted successful remove but got:", err) + t.Fatalf("wanted successful remove but got: %v", err) } err = w.Remove(testFile) @@ -390,9 +391,12 @@ func TestInotifyOverflow(t *testing.T) { errChan := make(chan error, numDirs*numFiles) + // All events need to be in the inotify queue before pulling events off it to trigger this error. + wg := sync.WaitGroup{} for dn := 0; dn < numDirs; dn++ { testSubdir := fmt.Sprintf("%s/%d", testDir, dn) + wg.Add(1) go func() { for fn := 0; fn < numFiles; fn++ { testFile := fmt.Sprintf("%s/%d", testSubdir, fn) @@ -409,8 +413,10 @@ func TestInotifyOverflow(t *testing.T) { continue } } + wg.Done() }() } + wg.Wait() creates := 0 overflows := 0 diff --git a/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go b/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go index cd6adc2..c43d00c 100644 --- a/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go +++ b/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go @@ -7,12 +7,24 @@ package fsnotify import ( "os" "path/filepath" + "strconv" + "strings" "testing" "time" "golang.org/x/sys/unix" ) +// darwinVersion returns version os Darwin (17 is macOS 10.13). +func darwinVersion() (int, error) { + s, err := unix.Sysctl("kern.osrelease") + if err != nil { + return 0, err + } + s = strings.Split(s, ".")[0] + return strconv.Atoi(s) +} + // testExchangedataForWatcher tests the watcher with the exchangedata operation on macOS. // // This is widely used for atomic saves on macOS, e.g. TextMate and in Apple's NSDocument. 
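Note for reviewers: the fsnotify hunks below also replace t.Fatalf with t.Errorf inside the goroutines that drain watcher.Errors. That matches the testing package's rule that Fatal/FailNow may only be called from the goroutine running the test, while Errorf is safe to call from other goroutines. A minimal sketch of the safe pattern (the test, channel, and package names are illustrative, not fsnotify code):

```go
package example

import "testing"

// TestErrorsFromGoroutine reports failures from a spawned goroutine with
// Errorf; Fatalf would be wrong here because it calls FailNow, which must
// run on the goroutine executing the test function.
func TestErrorsFromGoroutine(t *testing.T) {
	errs := make(chan error, 1)
	done := make(chan struct{})
	go func() {
		defer close(done)
		for err := range errs {
			t.Errorf("error received: %s", err) // safe from another goroutine
		}
	}()
	close(errs) // no errors produced in this sketch
	<-done      // ensure the goroutine finishes before the test returns
}
```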
@@ -20,6 +32,14 @@ import ( // See https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/exchangedata.2.html // Also see: https://github.com/textmate/textmate/blob/cd016be29489eba5f3c09b7b70b06da134dda550/Frameworks/io/src/swap_file_data.cc#L20 func testExchangedataForWatcher(t *testing.T, watchDir bool) { + osVersion, err := darwinVersion() + if err != nil { + t.Fatal("unable to get Darwin version:", err) + } + if osVersion >= 17 { + t.Skip("Exchangedata is deprecated in macOS 10.13") + } + // Create directory to watch testDir1 := tempMkdir(t) @@ -55,7 +75,7 @@ func testExchangedataForWatcher(t *testing.T, watchDir bool) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() diff --git a/vendor/github.com/fsnotify/fsnotify/integration_test.go b/vendor/github.com/fsnotify/fsnotify/integration_test.go index 8b7e9d3..7096344 100644 --- a/vendor/github.com/fsnotify/fsnotify/integration_test.go +++ b/vendor/github.com/fsnotify/fsnotify/integration_test.go @@ -194,7 +194,7 @@ func TestFsnotifyMultipleCreates(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -339,7 +339,7 @@ func TestFsnotifyDirOnly(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -444,7 +444,7 @@ func TestFsnotifyDeleteWatchedDir(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -489,7 +489,7 @@ func TestFsnotifySubDir(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -581,7 +581,7 @@ func TestFsnotifyRename(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -663,7 +663,7 @@ func TestFsnotifyRenameToCreate(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -756,7 +756,7 @@ func TestFsnotifyRenameToOverwrite(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() @@ -874,7 +874,7 @@ func TestFsnotifyAttrib(t *testing.T) { // Receive errors on the error channel on a separate goroutine go func() { for err := range watcher.Errors { - t.Fatalf("error received: %s", err) + t.Errorf("error received: %s", err) } }() diff --git a/vendor/github.com/fsnotify/fsnotify/kqueue.go b/vendor/github.com/fsnotify/fsnotify/kqueue.go index c2b4acb..86e76a3 100644 --- a/vendor/github.com/fsnotify/fsnotify/kqueue.go +++ b/vendor/github.com/fsnotify/fsnotify/kqueue.go @@ -22,7 +22,7 @@ import ( type Watcher struct { Events chan Event Errors chan error - done chan bool // Channel for sending a 
"quit message" to the reader goroutine + done chan struct{} // Channel for sending a "quit message" to the reader goroutine kq int // File descriptor (as returned by the kqueue() syscall). @@ -56,7 +56,7 @@ func NewWatcher() (*Watcher, error) { externalWatches: make(map[string]bool), Events: make(chan Event), Errors: make(chan error), - done: make(chan bool), + done: make(chan struct{}), } go w.readEvents() @@ -71,10 +71,8 @@ func (w *Watcher) Close() error { return nil } w.isClosed = true - w.mu.Unlock() // copy paths to remove while locked - w.mu.Lock() var pathsToRemove = make([]string, 0, len(w.watches)) for name := range w.watches { pathsToRemove = append(pathsToRemove, name) @@ -82,15 +80,12 @@ func (w *Watcher) Close() error { w.mu.Unlock() // unlock before calling Remove, which also locks - var err error for _, name := range pathsToRemove { - if e := w.Remove(name); e != nil && err == nil { - err = e - } + w.Remove(name) } - // Send "quit" message to the reader goroutine: - w.done <- true + // send a "quit" message to the reader goroutine + close(w.done) return nil } @@ -266,17 +261,12 @@ func (w *Watcher) addWatch(name string, flags uint32) (string, error) { func (w *Watcher) readEvents() { eventBuffer := make([]unix.Kevent_t, 10) +loop: for { // See if there is a message on the "done" channel select { case <-w.done: - err := unix.Close(w.kq) - if err != nil { - w.Errors <- err - } - close(w.Events) - close(w.Errors) - return + break loop default: } @@ -284,7 +274,11 @@ func (w *Watcher) readEvents() { kevents, err := read(w.kq, eventBuffer, &keventWaitTime) // EINTR is okay, the syscall was interrupted before timeout expired. if err != nil && err != unix.EINTR { - w.Errors <- err + select { + case w.Errors <- err: + case <-w.done: + break loop + } continue } @@ -319,8 +313,12 @@ func (w *Watcher) readEvents() { if path.isDir && event.Op&Write == Write && !(event.Op&Remove == Remove) { w.sendDirectoryChangeEvents(event.Name) } else { - // Send the event on the Events channel - w.Events <- event + // Send the event on the Events channel. + select { + case w.Events <- event: + case <-w.done: + break loop + } } if event.Op&Remove == Remove { @@ -352,6 +350,18 @@ func (w *Watcher) readEvents() { kevents = kevents[1:] } } + + // cleanup + err := unix.Close(w.kq) + if err != nil { + // only way the previous loop breaks is if w.done was closed so we need to async send to w.Errors. + select { + case w.Errors <- err: + default: + } + } + close(w.Events) + close(w.Errors) } // newEvent returns an platform-independent Event based on kqueue Fflags. 
@@ -407,7 +417,11 @@ func (w *Watcher) sendDirectoryChangeEvents(dirPath string) { // Get all files files, err := ioutil.ReadDir(dirPath) if err != nil { - w.Errors <- err + select { + case w.Errors <- err: + case <-w.done: + return + } } // Search for new files @@ -428,7 +442,11 @@ func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fileInfo os.FileInf w.mu.Unlock() if !doesExist { // Send create event - w.Events <- newCreateEvent(filePath) + select { + case w.Events <- newCreateEvent(filePath): + case <-w.done: + return + } } // like watchDirectoryFiles (but without doing another ReadDir) diff --git a/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/bug_report.md b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..cb9fc37 --- /dev/null +++ b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,20 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +**What version of protobuf and what language are you using?** +Version: (e.g., `v1.1.0`, `89a0c16f`, etc) + +**What did you do?** +If possible, provide a recipe for reproducing the error. +A complete runnable program is good with `.proto` and `.go` source code. + +**What did you expect to see?** + +**What did you see instead?** + +Make sure you include information that can help us debug (full error message, exception listing, stack trace, logs). + +**Anything else we should know about your project / environment?** diff --git a/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/feature_request.md b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..b904f1f --- /dev/null +++ b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: Feature request +about: Suggest an idea for this project + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/question.md b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000..bfa6dde --- /dev/null +++ b/vendor/github.com/golang/protobuf/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,7 @@ +--- +name: Question +about: Questions and troubleshooting + +--- + + diff --git a/vendor/github.com/golang/protobuf/.gitignore b/vendor/github.com/golang/protobuf/.gitignore index 8f5b596..c7dd405 100644 --- a/vendor/github.com/golang/protobuf/.gitignore +++ b/vendor/github.com/golang/protobuf/.gitignore @@ -12,5 +12,6 @@ core _obj _test _testmain.go -protoc-gen-go/testdata/multi/*.pb.go -_conformance/_conformance + +# Conformance test output and transient files. 
+conformance/failing_tests.txt diff --git a/vendor/github.com/golang/protobuf/.travis.yml b/vendor/github.com/golang/protobuf/.travis.yml index 24e22f8..fd650b2 100644 --- a/vendor/github.com/golang/protobuf/.travis.yml +++ b/vendor/github.com/golang/protobuf/.travis.yml @@ -1,17 +1,32 @@ sudo: false language: go go: -- 1.6.x -- 1.7.x -- 1.8.x +- 1.9.x +- 1.10.x +- 1.11.x +- 1.x install: + - go get -v -d google.golang.org/grpc - go get -v -d -t github.com/golang/protobuf/... - - curl -L https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip -o /tmp/protoc.zip - - unzip /tmp/protoc.zip -d $HOME/protoc + - curl -L https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip -o /tmp/protoc.zip + - unzip /tmp/protoc.zip -d "$HOME"/protoc + - mkdir -p "$HOME"/src && ln -s "$HOME"/protoc "$HOME"/src/protobuf env: - PATH=$HOME/protoc/bin:$PATH script: - - make all test + - make all + - make regenerate + # TODO(tamird): When https://github.com/travis-ci/gimme/pull/130 is + # released, make this look for "1.x". + - if [[ "$TRAVIS_GO_VERSION" == 1.10* ]]; then + if [[ "$(git status --porcelain 2>&1)" != "" ]]; then + git status >&2; + git diff -a >&2; + exit 1; + fi; + echo "git status is clean."; + fi; + - make test diff --git a/vendor/github.com/golang/protobuf/LICENSE b/vendor/github.com/golang/protobuf/LICENSE index 1b1b192..0f64693 100644 --- a/vendor/github.com/golang/protobuf/LICENSE +++ b/vendor/github.com/golang/protobuf/LICENSE @@ -1,7 +1,4 @@ -Go support for Protocol Buffers - Google's data interchange format - Copyright 2010 The Go Authors. All rights reserved. -https://github.com/golang/protobuf Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/vendor/github.com/golang/protobuf/Make.protobuf b/vendor/github.com/golang/protobuf/Make.protobuf deleted file mode 100644 index 15071de..0000000 --- a/vendor/github.com/golang/protobuf/Make.protobuf +++ /dev/null @@ -1,40 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Includable Makefile to add a rule for generating .pb.go files from .proto files -# (Google protocol buffer descriptions). -# Typical use if myproto.proto is a file in package mypackage in this directory: -# -# include $(GOROOT)/src/pkg/github.com/golang/protobuf/Make.protobuf - -%.pb.go: %.proto - protoc --go_out=. $< - diff --git a/vendor/github.com/golang/protobuf/Makefile b/vendor/github.com/golang/protobuf/Makefile index a1421d8..109f1cb 100644 --- a/vendor/github.com/golang/protobuf/Makefile +++ b/vendor/github.com/golang/protobuf/Makefile @@ -29,16 +29,15 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - all: install install: - go install ./proto ./jsonpb ./ptypes - go install ./protoc-gen-go + go install ./proto ./jsonpb ./ptypes ./protoc-gen-go test: - go test ./proto ./jsonpb ./ptypes - make -C protoc-gen-go/testdata test + go test ./... ./protoc-gen-go/testdata + go test -tags purego ./... ./protoc-gen-go/testdata + go build ./protoc-gen-go/testdata/grpc/grpc.pb.go clean: go clean ./... @@ -47,9 +46,4 @@ nuke: go clean -i ./... regenerate: - make -C protoc-gen-go/descriptor regenerate - make -C protoc-gen-go/plugin regenerate - make -C protoc-gen-go/testdata regenerate - make -C proto/testdata regenerate - make -C jsonpb/jsonpb_test_proto regenerate - make -C _conformance regenerate + ./regenerate.sh diff --git a/vendor/github.com/golang/protobuf/README.md b/vendor/github.com/golang/protobuf/README.md index 795f53f..04a52df 100644 --- a/vendor/github.com/golang/protobuf/README.md +++ b/vendor/github.com/golang/protobuf/README.md @@ -1,12 +1,13 @@ -# Go support for Protocol Buffers +# Go support for Protocol Buffers - Google's data interchange format [![Build Status](https://travis-ci.org/golang/protobuf.svg?branch=master)](https://travis-ci.org/golang/protobuf) +[![GoDoc](https://godoc.org/github.com/golang/protobuf?status.svg)](https://godoc.org/github.com/golang/protobuf) Google's data interchange format. Copyright 2010 The Go Authors. https://github.com/golang/protobuf -This package and the code it generates requires at least Go 1.4. +This package and the code it generates requires at least Go 1.9. This software implements Go bindings for protocol buffers. For information about protocol buffers themselves, see @@ -23,11 +24,19 @@ To use this software, you must: https://golang.org/doc/install for details or, if you are using gccgo, follow the instructions at https://golang.org/doc/install/gccgo -- Grab the code from the repository and install the proto package. +- Grab the code from the repository and install the `proto` package. The simplest way is to run `go get -u github.com/golang/protobuf/protoc-gen-go`. - The compiler plugin, protoc-gen-go, will be installed in $GOBIN, - defaulting to $GOPATH/bin. It must be in your $PATH for the protocol - compiler, protoc, to find it. 
+ The compiler plugin, `protoc-gen-go`, will be installed in `$GOPATH/bin` + unless `$GOBIN` is set. It must be in your `$PATH` for the protocol + compiler, `protoc`, to find it. +- If you need a particular version of `protoc-gen-go` (e.g., to match your + `proto` package version), one option is + ```shell + GIT_TAG="v1.2.0" # change as needed + go get -d -u github.com/golang/protobuf/protoc-gen-go + git -C "$(go env GOPATH)"/src/github.com/golang/protobuf checkout $GIT_TAG + go install github.com/golang/protobuf/protoc-gen-go + ``` This software has two parts: a 'protocol compiler plugin' that generates Go source files that, once compiled, can access and manage @@ -55,13 +64,53 @@ parameter set to the directory you want to output the Go code to. The generated files will be suffixed .pb.go. See the Test code below for an example using such a file. +## Packages and input paths ## + +The protocol buffer language has a concept of "packages" which does not +correspond well to the Go notion of packages. In generated Go code, +each source `.proto` file is associated with a single Go package. The +name and import path for this package is specified with the `go_package` +proto option: + + option go_package = "github.com/golang/protobuf/ptypes/any"; + +The protocol buffer compiler will attempt to derive a package name and +import path if a `go_package` option is not present, but it is +best to always specify one explicitly. + +There is a one-to-one relationship between source `.proto` files and +generated `.pb.go` files, but any number of `.pb.go` files may be +contained in the same Go package. + +The output name of a generated file is produced by replacing the +`.proto` suffix with `.pb.go` (e.g., `foo.proto` produces `foo.pb.go`). +However, the output directory is selected in one of two ways. Let +us say we have `inputs/x.proto` with a `go_package` option of +`github.com/golang/protobuf/p`. The corresponding output file may +be: + +- Relative to the import path: + +```shell + protoc --go_out=. inputs/x.proto + # writes ./github.com/golang/protobuf/p/x.pb.go +``` + + (This can work well with `--go_out=$GOPATH`.) + +- Relative to the input file: + +```shell +protoc --go_out=paths=source_relative:. inputs/x.proto +# generate ./inputs/x.pb.go +``` + +## Generated code ## The package comment for the proto library contains text describing the interface provided in Go for protocol buffers. Here is an edited version. -========== - The proto package converts data structures to and from the wire format of protocol buffers. It works in concert with the Go source code generated for .proto files by the protocol compiler. 
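The new "Packages and input paths" section above spells out the protoc invocations. One common way to keep such an invocation next to the Go package it belongs to is a go:generate directive; this is only a sketch reusing the README's own inputs/x.proto example, not something the protobuf repo itself prescribes:

```go
// Package protos anchors the protoc invocation for a repository; the
// package name and file layout here are illustrative.
package protos

// `go generate ./...` re-runs protoc with the source_relative layout
// described above, so inputs/x.pb.go is written next to inputs/x.proto.
//go:generate protoc --go_out=paths=source_relative:. inputs/x.proto
```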
@@ -111,17 +160,15 @@ When the .proto file specifies `syntax="proto3"`, there are some differences: Consider file test.proto, containing ```proto + syntax = "proto2"; package example; - + enum FOO { X = 17; }; - + message Test { required string label = 1; optional int32 type = 2 [default=77]; repeated int64 reps = 3; - optional group OptionalGroup = 4 { - required string RequiredField = 5; - } } ``` @@ -138,13 +185,10 @@ To create and play with a Test object from the example package, ) func main() { - test := &example.Test { + test := &example.Test{ Label: proto.String("hello"), Type: proto.Int32(17), Reps: []int64{1, 2, 3}, - Optionalgroup: &example.Test_OptionalGroup { - RequiredField: proto.String("good bye"), - }, } data, err := proto.Marshal(test) if err != nil { @@ -168,22 +212,25 @@ To create and play with a Test object from the example package, To pass extra parameters to the plugin, use a comma-separated parameter list separated from the output directory by a colon: - protoc --go_out=plugins=grpc,import_path=mypackage:. *.proto - -- `import_prefix=xxx` - a prefix that is added onto the beginning of - all imports. Useful for things like generating protos in a - subdirectory, or regenerating vendored protobufs in-place. -- `import_path=foo/bar` - used as the package if no input files - declare `go_package`. If it contains slashes, everything up to the - rightmost slash is ignored. +- `paths=(import | source_relative)` - specifies how the paths of + generated files are structured. See the "Packages and imports paths" + section above. The default is `import`. - `plugins=plugin1+plugin2` - specifies the list of sub-plugins to load. The only plugin in this repo is `grpc`. - `Mfoo/bar.proto=quux/shme` - declares that foo/bar.proto is associated with Go package quux/shme. This is subject to the import_prefix parameter. +The following parameters are deprecated and should not be used: + +- `import_prefix=xxx` - a prefix that is added onto the beginning of + all imports. +- `import_path=foo/bar` - used as the package if no input files + declare `go_package`. If it contains slashes, everything up to the + rightmost slash is ignored. + ## gRPC Support ## If a proto file specifies RPC services, protoc-gen-go can be instructed to diff --git a/vendor/github.com/golang/protobuf/_conformance/Makefile b/vendor/github.com/golang/protobuf/_conformance/Makefile deleted file mode 100644 index 89800e2..0000000 --- a/vendor/github.com/golang/protobuf/_conformance/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2016 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -regenerate: - protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers,Mgoogle/protobuf/field_mask.proto=google.golang.org/genproto/protobuf:. conformance_proto/conformance.proto diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance.go b/vendor/github.com/golang/protobuf/_conformance/conformance.go deleted file mode 100644 index c54212c..0000000 --- a/vendor/github.com/golang/protobuf/_conformance/conformance.go +++ /dev/null @@ -1,161 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// conformance implements the conformance test subprocess protocol as -// documented in conformance.proto. 
-package main - -import ( - "encoding/binary" - "fmt" - "io" - "os" - - pb "github.com/golang/protobuf/_conformance/conformance_proto" - "github.com/golang/protobuf/jsonpb" - "github.com/golang/protobuf/proto" -) - -func main() { - var sizeBuf [4]byte - inbuf := make([]byte, 0, 4096) - outbuf := proto.NewBuffer(nil) - for { - if _, err := io.ReadFull(os.Stdin, sizeBuf[:]); err == io.EOF { - break - } else if err != nil { - fmt.Fprintln(os.Stderr, "go conformance: read request:", err) - os.Exit(1) - } - size := binary.LittleEndian.Uint32(sizeBuf[:]) - if int(size) > cap(inbuf) { - inbuf = make([]byte, size) - } - inbuf = inbuf[:size] - if _, err := io.ReadFull(os.Stdin, inbuf); err != nil { - fmt.Fprintln(os.Stderr, "go conformance: read request:", err) - os.Exit(1) - } - - req := new(pb.ConformanceRequest) - if err := proto.Unmarshal(inbuf, req); err != nil { - fmt.Fprintln(os.Stderr, "go conformance: parse request:", err) - os.Exit(1) - } - res := handle(req) - - if err := outbuf.Marshal(res); err != nil { - fmt.Fprintln(os.Stderr, "go conformance: marshal response:", err) - os.Exit(1) - } - binary.LittleEndian.PutUint32(sizeBuf[:], uint32(len(outbuf.Bytes()))) - if _, err := os.Stdout.Write(sizeBuf[:]); err != nil { - fmt.Fprintln(os.Stderr, "go conformance: write response:", err) - os.Exit(1) - } - if _, err := os.Stdout.Write(outbuf.Bytes()); err != nil { - fmt.Fprintln(os.Stderr, "go conformance: write response:", err) - os.Exit(1) - } - outbuf.Reset() - } -} - -var jsonMarshaler = jsonpb.Marshaler{ - OrigName: true, -} - -func handle(req *pb.ConformanceRequest) *pb.ConformanceResponse { - var err error - var msg pb.TestAllTypes - switch p := req.Payload.(type) { - case *pb.ConformanceRequest_ProtobufPayload: - err = proto.Unmarshal(p.ProtobufPayload, &msg) - case *pb.ConformanceRequest_JsonPayload: - err = jsonpb.UnmarshalString(p.JsonPayload, &msg) - if err != nil && err.Error() == "unmarshaling Any not supported yet" { - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_Skipped{ - Skipped: err.Error(), - }, - } - } - default: - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_RuntimeError{ - RuntimeError: "unknown request payload type", - }, - } - } - if err != nil { - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_ParseError{ - ParseError: err.Error(), - }, - } - } - switch req.RequestedOutputFormat { - case pb.WireFormat_PROTOBUF: - p, err := proto.Marshal(&msg) - if err != nil { - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_SerializeError{ - SerializeError: err.Error(), - }, - } - } - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_ProtobufPayload{ - ProtobufPayload: p, - }, - } - case pb.WireFormat_JSON: - p, err := jsonMarshaler.MarshalToString(&msg) - if err != nil { - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_SerializeError{ - SerializeError: err.Error(), - }, - } - } - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_JsonPayload{ - JsonPayload: p, - }, - } - default: - return &pb.ConformanceResponse{ - Result: &pb.ConformanceResponse_RuntimeError{ - RuntimeError: "unknown output format", - }, - } - } -} diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go deleted file mode 100644 index ec354ea..0000000 --- a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go +++ /dev/null @@ -1,1885 
+0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: conformance_proto/conformance.proto - -/* -Package conformance is a generated protocol buffer package. - -It is generated from these files: - conformance_proto/conformance.proto - -It has these top-level messages: - ConformanceRequest - ConformanceResponse - TestAllTypes - ForeignMessage -*/ -package conformance - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/any" -import google_protobuf1 "github.com/golang/protobuf/ptypes/duration" -import google_protobuf2 "google.golang.org/genproto/protobuf" -import google_protobuf3 "github.com/golang/protobuf/ptypes/struct" -import google_protobuf4 "github.com/golang/protobuf/ptypes/timestamp" -import google_protobuf5 "github.com/golang/protobuf/ptypes/wrappers" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type WireFormat int32 - -const ( - WireFormat_UNSPECIFIED WireFormat = 0 - WireFormat_PROTOBUF WireFormat = 1 - WireFormat_JSON WireFormat = 2 -) - -var WireFormat_name = map[int32]string{ - 0: "UNSPECIFIED", - 1: "PROTOBUF", - 2: "JSON", -} -var WireFormat_value = map[string]int32{ - "UNSPECIFIED": 0, - "PROTOBUF": 1, - "JSON": 2, -} - -func (x WireFormat) String() string { - return proto.EnumName(WireFormat_name, int32(x)) -} -func (WireFormat) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -type ForeignEnum int32 - -const ( - ForeignEnum_FOREIGN_FOO ForeignEnum = 0 - ForeignEnum_FOREIGN_BAR ForeignEnum = 1 - ForeignEnum_FOREIGN_BAZ ForeignEnum = 2 -) - -var ForeignEnum_name = map[int32]string{ - 0: "FOREIGN_FOO", - 1: "FOREIGN_BAR", - 2: "FOREIGN_BAZ", -} -var ForeignEnum_value = map[string]int32{ - "FOREIGN_FOO": 0, - "FOREIGN_BAR": 1, - "FOREIGN_BAZ": 2, -} - -func (x ForeignEnum) String() string { - return proto.EnumName(ForeignEnum_name, int32(x)) -} -func (ForeignEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -type TestAllTypes_NestedEnum int32 - -const ( - TestAllTypes_FOO TestAllTypes_NestedEnum = 0 - TestAllTypes_BAR TestAllTypes_NestedEnum = 1 - TestAllTypes_BAZ TestAllTypes_NestedEnum = 2 - TestAllTypes_NEG TestAllTypes_NestedEnum = -1 -) - -var TestAllTypes_NestedEnum_name = map[int32]string{ - 0: "FOO", - 1: "BAR", - 2: "BAZ", - -1: "NEG", -} -var TestAllTypes_NestedEnum_value = map[string]int32{ - "FOO": 0, - "BAR": 1, - "BAZ": 2, - "NEG": -1, -} - -func (x TestAllTypes_NestedEnum) String() string { - return proto.EnumName(TestAllTypes_NestedEnum_name, int32(x)) -} -func (TestAllTypes_NestedEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } - -// Represents a single test case's input. The testee should: -// -// 1. parse this proto (which should always succeed) -// 2. parse the protobuf or JSON payload in "payload" (which may fail) -// 3. if the parse succeeded, serialize the message in the requested format. -type ConformanceRequest struct { - // The payload (whether protobuf of JSON) is always for a TestAllTypes proto - // (see below). 
- // - // Types that are valid to be assigned to Payload: - // *ConformanceRequest_ProtobufPayload - // *ConformanceRequest_JsonPayload - Payload isConformanceRequest_Payload `protobuf_oneof:"payload"` - // Which format should the testee serialize its message to? - RequestedOutputFormat WireFormat `protobuf:"varint,3,opt,name=requested_output_format,json=requestedOutputFormat,enum=conformance.WireFormat" json:"requested_output_format,omitempty"` -} - -func (m *ConformanceRequest) Reset() { *m = ConformanceRequest{} } -func (m *ConformanceRequest) String() string { return proto.CompactTextString(m) } -func (*ConformanceRequest) ProtoMessage() {} -func (*ConformanceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -type isConformanceRequest_Payload interface { - isConformanceRequest_Payload() -} - -type ConformanceRequest_ProtobufPayload struct { - ProtobufPayload []byte `protobuf:"bytes,1,opt,name=protobuf_payload,json=protobufPayload,proto3,oneof"` -} -type ConformanceRequest_JsonPayload struct { - JsonPayload string `protobuf:"bytes,2,opt,name=json_payload,json=jsonPayload,oneof"` -} - -func (*ConformanceRequest_ProtobufPayload) isConformanceRequest_Payload() {} -func (*ConformanceRequest_JsonPayload) isConformanceRequest_Payload() {} - -func (m *ConformanceRequest) GetPayload() isConformanceRequest_Payload { - if m != nil { - return m.Payload - } - return nil -} - -func (m *ConformanceRequest) GetProtobufPayload() []byte { - if x, ok := m.GetPayload().(*ConformanceRequest_ProtobufPayload); ok { - return x.ProtobufPayload - } - return nil -} - -func (m *ConformanceRequest) GetJsonPayload() string { - if x, ok := m.GetPayload().(*ConformanceRequest_JsonPayload); ok { - return x.JsonPayload - } - return "" -} - -func (m *ConformanceRequest) GetRequestedOutputFormat() WireFormat { - if m != nil { - return m.RequestedOutputFormat - } - return WireFormat_UNSPECIFIED -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
[... deletion of the generated conformance protobuf Go code continues (lines collapsed here): the XXX_OneofFuncs marshal/unmarshal/size helpers for ConformanceRequest and ConformanceResponse, the ConformanceResponse oneof result types and getters, the TestAllTypes message with its singular, repeated, map, oneof, well-known-type wrapper, and field-name-convention fields plus their accessors and oneof helpers, the TestAllTypes_NestedMessage and ForeignMessage types, the proto type and enum registrations in init(), and the gzipped fileDescriptor0 byte array ...]
diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto 
b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto deleted file mode 100644 index 95a8fd1..0000000 --- a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto +++ /dev/null @@ -1,285 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto3"; -package conformance; -option java_package = "com.google.protobuf.conformance"; - -import "google/protobuf/any.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -// This defines the conformance testing protocol. This protocol exists between -// the conformance test suite itself and the code being tested. For each test, -// the suite will send a ConformanceRequest message and expect a -// ConformanceResponse message. -// -// You can either run the tests in two different ways: -// -// 1. in-process (using the interface in conformance_test.h). -// -// 2. as a sub-process communicating over a pipe. Information about how to -// do this is in conformance_test_runner.cc. -// -// Pros/cons of the two approaches: -// -// - running as a sub-process is much simpler for languages other than C/C++. -// -// - running as a sub-process may be more tricky in unusual environments like -// iOS apps, where fork/stdin/stdout are not available. - -enum WireFormat { - UNSPECIFIED = 0; - PROTOBUF = 1; - JSON = 2; -} - -// Represents a single test case's input. The testee should: -// -// 1. parse this proto (which should always succeed) -// 2. parse the protobuf or JSON payload in "payload" (which may fail) -// 3. if the parse succeeded, serialize the message in the requested format. -message ConformanceRequest { - // The payload (whether protobuf of JSON) is always for a TestAllTypes proto - // (see below). 
- oneof payload { - bytes protobuf_payload = 1; - string json_payload = 2; - } - - // Which format should the testee serialize its message to? - WireFormat requested_output_format = 3; -} - -// Represents a single test case's output. -message ConformanceResponse { - oneof result { - // This string should be set to indicate parsing failed. The string can - // provide more information about the parse error if it is available. - // - // Setting this string does not necessarily mean the testee failed the - // test. Some of the test cases are intentionally invalid input. - string parse_error = 1; - - // If the input was successfully parsed but errors occurred when - // serializing it to the requested output format, set the error message in - // this field. - string serialize_error = 6; - - // This should be set if some other error occurred. This will always - // indicate that the test failed. The string can provide more information - // about the failure. - string runtime_error = 2; - - // If the input was successfully parsed and the requested output was - // protobuf, serialize it to protobuf and set it in this field. - bytes protobuf_payload = 3; - - // If the input was successfully parsed and the requested output was JSON, - // serialize to JSON and set it in this field. - string json_payload = 4; - - // For when the testee skipped the test, likely because a certain feature - // wasn't supported, like JSON input/output. - string skipped = 5; - } -} - -// This proto includes every type of field in both singular and repeated -// forms. -message TestAllTypes { - message NestedMessage { - int32 a = 1; - TestAllTypes corecursive = 2; - } - - enum NestedEnum { - FOO = 0; - BAR = 1; - BAZ = 2; - NEG = -1; // Intentionally negative. - } - - // Singular - int32 optional_int32 = 1; - int64 optional_int64 = 2; - uint32 optional_uint32 = 3; - uint64 optional_uint64 = 4; - sint32 optional_sint32 = 5; - sint64 optional_sint64 = 6; - fixed32 optional_fixed32 = 7; - fixed64 optional_fixed64 = 8; - sfixed32 optional_sfixed32 = 9; - sfixed64 optional_sfixed64 = 10; - float optional_float = 11; - double optional_double = 12; - bool optional_bool = 13; - string optional_string = 14; - bytes optional_bytes = 15; - - NestedMessage optional_nested_message = 18; - ForeignMessage optional_foreign_message = 19; - - NestedEnum optional_nested_enum = 21; - ForeignEnum optional_foreign_enum = 22; - - string optional_string_piece = 24 [ctype=STRING_PIECE]; - string optional_cord = 25 [ctype=CORD]; - - TestAllTypes recursive_message = 27; - - // Repeated - repeated int32 repeated_int32 = 31; - repeated int64 repeated_int64 = 32; - repeated uint32 repeated_uint32 = 33; - repeated uint64 repeated_uint64 = 34; - repeated sint32 repeated_sint32 = 35; - repeated sint64 repeated_sint64 = 36; - repeated fixed32 repeated_fixed32 = 37; - repeated fixed64 repeated_fixed64 = 38; - repeated sfixed32 repeated_sfixed32 = 39; - repeated sfixed64 repeated_sfixed64 = 40; - repeated float repeated_float = 41; - repeated double repeated_double = 42; - repeated bool repeated_bool = 43; - repeated string repeated_string = 44; - repeated bytes repeated_bytes = 45; - - repeated NestedMessage repeated_nested_message = 48; - repeated ForeignMessage repeated_foreign_message = 49; - - repeated NestedEnum repeated_nested_enum = 51; - repeated ForeignEnum repeated_foreign_enum = 52; - - repeated string repeated_string_piece = 54 [ctype=STRING_PIECE]; - repeated string repeated_cord = 55 [ctype=CORD]; - - // Map - map < int32, int32> map_int32_int32 = 
56; - map < int64, int64> map_int64_int64 = 57; - map < uint32, uint32> map_uint32_uint32 = 58; - map < uint64, uint64> map_uint64_uint64 = 59; - map < sint32, sint32> map_sint32_sint32 = 60; - map < sint64, sint64> map_sint64_sint64 = 61; - map < fixed32, fixed32> map_fixed32_fixed32 = 62; - map < fixed64, fixed64> map_fixed64_fixed64 = 63; - map < sfixed32, sfixed32> map_sfixed32_sfixed32 = 64; - map < sfixed64, sfixed64> map_sfixed64_sfixed64 = 65; - map < int32, float> map_int32_float = 66; - map < int32, double> map_int32_double = 67; - map < bool, bool> map_bool_bool = 68; - map < string, string> map_string_string = 69; - map < string, bytes> map_string_bytes = 70; - map < string, NestedMessage> map_string_nested_message = 71; - map < string, ForeignMessage> map_string_foreign_message = 72; - map < string, NestedEnum> map_string_nested_enum = 73; - map < string, ForeignEnum> map_string_foreign_enum = 74; - - oneof oneof_field { - uint32 oneof_uint32 = 111; - NestedMessage oneof_nested_message = 112; - string oneof_string = 113; - bytes oneof_bytes = 114; - bool oneof_bool = 115; - uint64 oneof_uint64 = 116; - float oneof_float = 117; - double oneof_double = 118; - NestedEnum oneof_enum = 119; - } - - // Well-known types - google.protobuf.BoolValue optional_bool_wrapper = 201; - google.protobuf.Int32Value optional_int32_wrapper = 202; - google.protobuf.Int64Value optional_int64_wrapper = 203; - google.protobuf.UInt32Value optional_uint32_wrapper = 204; - google.protobuf.UInt64Value optional_uint64_wrapper = 205; - google.protobuf.FloatValue optional_float_wrapper = 206; - google.protobuf.DoubleValue optional_double_wrapper = 207; - google.protobuf.StringValue optional_string_wrapper = 208; - google.protobuf.BytesValue optional_bytes_wrapper = 209; - - repeated google.protobuf.BoolValue repeated_bool_wrapper = 211; - repeated google.protobuf.Int32Value repeated_int32_wrapper = 212; - repeated google.protobuf.Int64Value repeated_int64_wrapper = 213; - repeated google.protobuf.UInt32Value repeated_uint32_wrapper = 214; - repeated google.protobuf.UInt64Value repeated_uint64_wrapper = 215; - repeated google.protobuf.FloatValue repeated_float_wrapper = 216; - repeated google.protobuf.DoubleValue repeated_double_wrapper = 217; - repeated google.protobuf.StringValue repeated_string_wrapper = 218; - repeated google.protobuf.BytesValue repeated_bytes_wrapper = 219; - - google.protobuf.Duration optional_duration = 301; - google.protobuf.Timestamp optional_timestamp = 302; - google.protobuf.FieldMask optional_field_mask = 303; - google.protobuf.Struct optional_struct = 304; - google.protobuf.Any optional_any = 305; - google.protobuf.Value optional_value = 306; - - repeated google.protobuf.Duration repeated_duration = 311; - repeated google.protobuf.Timestamp repeated_timestamp = 312; - repeated google.protobuf.FieldMask repeated_fieldmask = 313; - repeated google.protobuf.Struct repeated_struct = 324; - repeated google.protobuf.Any repeated_any = 315; - repeated google.protobuf.Value repeated_value = 316; - - // Test field-name-to-JSON-name convention. - // (protobuf says names can be any valid C/C++ identifier.)
- int32 fieldname1 = 401; - int32 field_name2 = 402; - int32 _field_name3 = 403; - int32 field__name4_ = 404; - int32 field0name5 = 405; - int32 field_0_name6 = 406; - int32 fieldName7 = 407; - int32 FieldName8 = 408; - int32 field_Name9 = 409; - int32 Field_Name10 = 410; - int32 FIELD_NAME11 = 411; - int32 FIELD_name12 = 412; - int32 __field_name13 = 413; - int32 __Field_name14 = 414; - int32 field__name15 = 415; - int32 field__Name16 = 416; - int32 field_name17__ = 417; - int32 Field_name18__ = 418; -} - -message ForeignMessage { - int32 c = 1; -} - -enum ForeignEnum { - FOREIGN_FOO = 0; - FOREIGN_BAR = 1; - FOREIGN_BAZ = 2; -} diff --git a/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go index 27b0729..bf5174d 100644 --- a/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go +++ b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/golang/protobuf/descriptor" - tpb "github.com/golang/protobuf/proto/testdata" + tpb "github.com/golang/protobuf/proto/test_proto" protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" ) @@ -20,7 +20,7 @@ func TestMessage(t *testing.T) { } } -func Example_Options() { +func Example_options() { var msg *tpb.MyMessageSet _, md := descriptor.ForMessage(msg) if md.GetOptions().GetMessageSetWireFormat() { diff --git a/vendor/github.com/golang/protobuf/go.mod b/vendor/github.com/golang/protobuf/go.mod new file mode 100644 index 0000000..eccf7fd --- /dev/null +++ b/vendor/github.com/golang/protobuf/go.mod @@ -0,0 +1 @@ +module github.com/golang/protobuf diff --git a/vendor/github.com/golang/protobuf/go.sum b/vendor/github.com/golang/protobuf/go.sum new file mode 100644 index 0000000..e69de29 diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go index 412ba1c..ada2b78 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go @@ -56,6 +56,8 @@ import ( stpb "github.com/golang/protobuf/ptypes/struct" ) +const secondInNanos = int64(time.Second / time.Nanosecond) + // Marshaler is a configurable object for converting between // protocol buffer objects and a JSON representation for them. type Marshaler struct { @@ -73,12 +75,40 @@ type Marshaler struct { // Whether to use the original (.proto) name for fields. OrigName bool + + // A custom URL resolver to use when marshaling Any messages to JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver +} + +// AnyResolver takes a type URL, present in an Any message, and resolves it into +// an instance of the associated message. +type AnyResolver interface { + Resolve(typeUrl string) (proto.Message, error) +} + +func defaultResolveAny(typeUrl string) (proto.Message, error) { + // Only the part of typeUrl after the last slash is relevant. + mname := typeUrl + if slash := strings.LastIndex(mname, "/"); slash >= 0 { + mname = mname[slash+1:] + } + mt := proto.MessageType(mname) + if mt == nil { + return nil, fmt.Errorf("unknown message type %q", mname) + } + return reflect.New(mt.Elem()).Interface().(proto.Message), nil } // JSONPBMarshaler is implemented by protobuf messages that customize the // way they are marshaled to JSON. 
Messages that implement this should // also implement JSONPBUnmarshaler so that the custom format can be // parsed. +// +// The JSON marshaling must follow the proto to JSON specification: +// https://developers.google.com/protocol-buffers/docs/proto3#json type JSONPBMarshaler interface { MarshalJSONPB(*Marshaler) ([]byte, error) } @@ -87,12 +117,23 @@ type JSONPBMarshaler interface { // the way they are unmarshaled from JSON. Messages that implement this // should also implement JSONPBMarshaler so that the custom format can be // produced. +// +// The JSON unmarshaling must follow the JSON to proto specification: +// https://developers.google.com/protocol-buffers/docs/proto3#json type JSONPBUnmarshaler interface { UnmarshalJSONPB(*Unmarshaler, []byte) error } // Marshal marshals a protocol buffer into JSON. func (m *Marshaler) Marshal(out io.Writer, pb proto.Message) error { + v := reflect.ValueOf(pb) + if pb == nil || (v.Kind() == reflect.Ptr && v.IsNil()) { + return errors.New("Marshal called with nil") + } + // Check for unset required fields first. + if err := checkRequiredFields(pb); err != nil { + return err + } writer := &errWriter{writer: out} return m.marshalObject(writer, pb, "", "") } @@ -165,13 +206,22 @@ func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeU // Any is a bit more involved. return m.marshalAny(out, v, indent) case "Duration": - // "Generated output always contains 3, 6, or 9 fractional digits, + // "Generated output always contains 0, 3, 6, or 9 fractional digits, // depending on required precision." s, ns := s.Field(0).Int(), s.Field(1).Int() - d := time.Duration(s)*time.Second + time.Duration(ns)*time.Nanosecond - x := fmt.Sprintf("%.9f", d.Seconds()) + if ns <= -secondInNanos || ns >= secondInNanos { + return fmt.Errorf("ns out of range (%v, %v)", -secondInNanos, secondInNanos) + } + if (s > 0 && ns < 0) || (s < 0 && ns > 0) { + return errors.New("signs of seconds and nanos do not match") + } + if s < 0 { + ns = -ns + } + x := fmt.Sprintf("%d.%09d", s, ns) x = strings.TrimSuffix(x, "000") x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, ".000") out.write(`"`) out.write(x) out.write(`s"`) @@ -182,13 +232,17 @@ func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeU return m.marshalValue(out, &proto.Properties{}, s.Field(0), indent) case "Timestamp": // "RFC 3339, where generated output will always be Z-normalized - // and uses 3, 6 or 9 fractional digits." + // and uses 0, 3, 6 or 9 fractional digits." s, ns := s.Field(0).Int(), s.Field(1).Int() + if ns < 0 || ns >= secondInNanos { + return fmt.Errorf("ns out of range [0, %v)", secondInNanos) + } t := time.Unix(s, ns).UTC() // time.RFC3339Nano isn't exactly right (we need to get 3/6/9 fractional digits). x := t.Format("2006-01-02T15:04:05.000000000") x = strings.TrimSuffix(x, "000") x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, ".000") out.write(`"`) out.write(x) out.write(`Z"`) @@ -344,16 +398,17 @@ func (m *Marshaler) marshalAny(out *errWriter, any proto.Message, indent string) turl := v.Field(0).String() val := v.Field(1).Bytes() - // Only the part of type_url after the last slash is relevant. 
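The Duration and Timestamp branches above switch from float formatting to integer formatting plus suffix trimming, so the output carries 0, 3, 6, or 9 fractional digits as the proto3 JSON mapping expects, and out-of-range or sign-mismatched nanos now return an error. A standalone sketch of just the trimming step (illustrative only, not part of the patch):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Same trimming as marshalObject above: format at nanosecond precision,
	// then strip up to two trailing "000" groups and finally a bare ".000".
	cases := []struct{ s, ns int64 }{
		{3, 0},         // "3s"
		{3, 1000000},   // "3.001s"
		{100000000, 1}, // "100000000.000000001s"
	}
	for _, c := range cases {
		x := fmt.Sprintf("%d.%09d", c.s, c.ns)
		x = strings.TrimSuffix(x, "000")
		x = strings.TrimSuffix(x, "000")
		x = strings.TrimSuffix(x, ".000")
		fmt.Println(x + "s")
	}
}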
- mname := turl - if slash := strings.LastIndex(mname, "/"); slash >= 0 { - mname = mname[slash+1:] + var msg proto.Message + var err error + if m.AnyResolver != nil { + msg, err = m.AnyResolver.Resolve(turl) + } else { + msg, err = defaultResolveAny(turl) } - mt := proto.MessageType(mname) - if mt == nil { - return fmt.Errorf("unknown message type %q", mname) + if err != nil { + return err } - msg := reflect.New(mt.Elem()).Interface().(proto.Message) + if err := proto.Unmarshal(val, msg); err != nil { return err } @@ -516,6 +571,7 @@ func (m *Marshaler) marshalValue(out *errWriter, prop *proto.Properties, v refle out.write(m.Indent) } + // TODO handle map key prop properly b, err := json.Marshal(k.Interface()) if err != nil { return err @@ -537,7 +593,11 @@ func (m *Marshaler) marshalValue(out *errWriter, prop *proto.Properties, v refle out.write(` `) } - if err := m.marshalValue(out, prop, v.MapIndex(k), indent+m.Indent); err != nil { + vprop := prop + if prop != nil && prop.MapValProp != nil { + vprop = prop.MapValProp + } + if err := m.marshalValue(out, vprop, v.MapIndex(k), indent+m.Indent); err != nil { return err } } @@ -590,6 +650,12 @@ type Unmarshaler struct { // Whether to allow messages to contain unknown fields, as opposed to // failing to unmarshal. AllowUnknownFields bool + + // A custom URL resolver to use when unmarshaling Any messages from JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver } // UnmarshalNext unmarshals the next protocol buffer from a JSON object stream. @@ -600,7 +666,10 @@ func (u *Unmarshaler) UnmarshalNext(dec *json.Decoder, pb proto.Message) error { if err := dec.Decode(&inputValue); err != nil { return err } - return u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil) + if err := u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil); err != nil { + return err + } + return checkRequiredFields(pb) } // Unmarshal unmarshals a JSON object stream into a protocol @@ -639,7 +708,14 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe // Allocate memory for pointer fields. if targetType.Kind() == reflect.Ptr { + // If input value is "null" and target is a pointer type, then the field should be treated as not set + // UNLESS the target is structpb.Value, in which case it should be set to structpb.NullValue. + _, isJSONPBUnmarshaler := target.Interface().(JSONPBUnmarshaler) + if string(inputValue) == "null" && targetType != reflect.TypeOf(&stpb.Value{}) && !isJSONPBUnmarshaler { + return nil + } target.Set(reflect.New(targetType.Elem())) + return u.unmarshalValue(target.Elem(), inputValue, prop) } @@ -647,15 +723,11 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe return jsu.UnmarshalJSONPB(u, []byte(inputValue)) } - // Handle well-known types. + // Handle well-known types that are not pointers. if w, ok := target.Addr().Interface().(wkt); ok { switch w.XXX_WellKnownType() { case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value", "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue": - // "Wrappers use the same representation in JSON - // as the wrapped primitive type, except that null is allowed." - // encoding/json will turn JSON `null` into Go `nil`, - // so we don't have to do any extra work. 
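marshalAny now delegates type-URL resolution to the AnyResolver hook added earlier in this file, falling back to defaultResolveAny (a proto.MessageType lookup) when the hook is unset; the Unmarshaler gains the same field. A hedged usage sketch: the type URL below is invented for illustration, and the funcResolver helper mirrors the one the tests add further down:

package main

import (
	"fmt"

	"github.com/golang/protobuf/jsonpb"
	"github.com/golang/protobuf/proto"
	anypb "github.com/golang/protobuf/ptypes/any"
	wpb "github.com/golang/protobuf/ptypes/wrappers"
)

// funcResolver adapts a plain function to jsonpb.AnyResolver.
type funcResolver func(turl string) (proto.Message, error)

func (fn funcResolver) Resolve(turl string) (proto.Message, error) { return fn(turl) }

func main() {
	// Resolve every type URL to a StringValue; real code would switch on turl.
	r := funcResolver(func(turl string) (proto.Message, error) {
		return &wpb.StringValue{}, nil
	})

	payload, _ := proto.Marshal(&wpb.StringValue{Value: "hi"})
	msg := &anypb.Any{TypeUrl: "example.com/acme.StringValue", Value: payload} // hypothetical URL

	m := jsonpb.Marshaler{AnyResolver: r}
	js, err := m.MarshalToString(msg)
	fmt.Println(js, err) // {"@type":"example.com/acme.StringValue","value":"hi"} <nil>
}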
return u.unmarshalValue(target.Field(0), inputValue, prop) case "Any": // Use json.RawMessage pointer type instead of value to support pre-1.8 version. @@ -677,16 +749,17 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe } target.Field(0).SetString(turl) - mname := turl - if slash := strings.LastIndex(mname, "/"); slash >= 0 { - mname = mname[slash+1:] + var m proto.Message + var err error + if u.AnyResolver != nil { + m, err = u.AnyResolver.Resolve(turl) + } else { + m, err = defaultResolveAny(turl) } - mt := proto.MessageType(mname) - if mt == nil { - return fmt.Errorf("unknown message type %q", mname) + if err != nil { + return err } - m := reflect.New(mt.Elem()).Interface().(proto.Message) if _, ok := m.(wkt); ok { val, ok := jsonFields["value"] if !ok { @@ -716,21 +789,16 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe return nil case "Duration": - ivStr := string(inputValue) - if ivStr == "null" { - target.Field(0).SetInt(0) - target.Field(1).SetInt(0) - return nil - } - - unq, err := strconv.Unquote(ivStr) + unq, err := unquote(string(inputValue)) if err != nil { return err } + d, err := time.ParseDuration(unq) if err != nil { return fmt.Errorf("bad Duration: %v", err) } + ns := d.Nanoseconds() s := ns / 1e9 ns %= 1e9 @@ -738,33 +806,25 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe target.Field(1).SetInt(ns) return nil case "Timestamp": - ivStr := string(inputValue) - if ivStr == "null" { - target.Field(0).SetInt(0) - target.Field(1).SetInt(0) - return nil - } - - unq, err := strconv.Unquote(ivStr) + unq, err := unquote(string(inputValue)) if err != nil { return err } + t, err := time.Parse(time.RFC3339Nano, unq) if err != nil { return fmt.Errorf("bad Timestamp: %v", err) } + target.Field(0).SetInt(t.Unix()) target.Field(1).SetInt(int64(t.Nanosecond())) return nil case "Struct": - if string(inputValue) == "null" { - // Interpret a null struct as empty. - return nil - } var m map[string]json.RawMessage if err := json.Unmarshal(inputValue, &m); err != nil { return fmt.Errorf("bad StructValue: %v", err) } + target.Field(0).Set(reflect.ValueOf(map[string]*stpb.Value{})) for k, jv := range m { pv := &stpb.Value{} @@ -775,15 +835,12 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe } return nil case "ListValue": - if string(inputValue) == "null" { - // Interpret a null ListValue as empty. 
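The Duration and Timestamp cases above now decode their quoted tokens through the new unquote helper (defined near the end of this file), which parses the token as a JSON string via encoding/json rather than as a Go string literal via strconv.Unquote, so escapes that are valid JSON but invalid Go, such as \/, are accepted. A small standard-library-only sketch of the difference:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	// `\/` is a legal JSON escape but not a legal Go string-literal escape.
	in := `"a\/b"`

	var viaJSON string
	jsonErr := json.Unmarshal([]byte(in), &viaJSON)
	viaStrconv, strconvErr := strconv.Unquote(in)

	fmt.Printf("json.Unmarshal:  %q, err=%v\n", viaJSON, jsonErr)       // "a/b", err=<nil>
	fmt.Printf("strconv.Unquote: %q, err=%v\n", viaStrconv, strconvErr) // "", err=invalid syntax
}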
- return nil - } var s []json.RawMessage if err := json.Unmarshal(inputValue, &s); err != nil { return fmt.Errorf("bad ListValue: %v", err) } - target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s), len(s)))) + + target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s)))) for i, sv := range s { if err := u.unmarshalValue(target.Field(0).Index(i), sv, prop); err != nil { return err @@ -796,7 +853,7 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe target.Field(0).Set(reflect.ValueOf(&stpb.Value_NullValue{})) } else if v, err := strconv.ParseFloat(ivStr, 0); err == nil { target.Field(0).Set(reflect.ValueOf(&stpb.Value_NumberValue{v})) - } else if v, err := strconv.Unquote(ivStr); err == nil { + } else if v, err := unquote(ivStr); err == nil { target.Field(0).Set(reflect.ValueOf(&stpb.Value_StringValue{v})) } else if v, err := strconv.ParseBool(ivStr); err == nil { target.Field(0).Set(reflect.ValueOf(&stpb.Value_BoolValue{v})) @@ -832,6 +889,9 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe target.Set(reflect.New(targetType.Elem())) target = target.Elem() } + if targetType.Kind() != reflect.Int32 { + return fmt.Errorf("invalid target %q for enum %s", targetType.Kind(), prop.Enum) + } target.SetInt(int64(n)) return nil } @@ -933,11 +993,13 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe if err := json.Unmarshal(inputValue, &slc); err != nil { return err } - len := len(slc) - target.Set(reflect.MakeSlice(targetType, len, len)) - for i := 0; i < len; i++ { - if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil { - return err + if slc != nil { + l := len(slc) + target.Set(reflect.MakeSlice(targetType, l, l)) + for i := 0; i < l; i++ { + if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil { + return err + } } } return nil @@ -949,44 +1011,40 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe if err := json.Unmarshal(inputValue, &mp); err != nil { return err } - target.Set(reflect.MakeMap(targetType)) - var keyprop, valprop *proto.Properties - if prop != nil { - // These could still be nil if the protobuf metadata is broken somehow. - // TODO: This won't work because the fields are unexported. - // We should probably just reparse them. - //keyprop, valprop = prop.mkeyprop, prop.mvalprop - } - for ks, raw := range mp { - // Unmarshal map key. The core json library already decoded the key into a - // string, so we handle that specially. Other types were quoted post-serialization. - var k reflect.Value - if targetType.Key().Kind() == reflect.String { - k = reflect.ValueOf(ks) - } else { - k = reflect.New(targetType.Key()).Elem() - if err := u.unmarshalValue(k, json.RawMessage(ks), keyprop); err != nil { - return err + if mp != nil { + target.Set(reflect.MakeMap(targetType)) + for ks, raw := range mp { + // Unmarshal map key. The core json library already decoded the key into a + // string, so we handle that specially. Other types were quoted post-serialization. + var k reflect.Value + if targetType.Key().Kind() == reflect.String { + k = reflect.ValueOf(ks) + } else { + k = reflect.New(targetType.Key()).Elem() + var kprop *proto.Properties + if prop != nil && prop.MapKeyProp != nil { + kprop = prop.MapKeyProp + } + if err := u.unmarshalValue(k, json.RawMessage(ks), kprop); err != nil { + return err + } } - } - // Unmarshal map value. 
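One consequence of the null handling in this hunk (together with the pointer-field check added earlier) is that JSON null now leaves message pointers, repeated fields, and maps unset rather than materializing empty values, while null aimed at a google.protobuf.Value still becomes an explicit NullValue. A minimal sketch using only the public jsonpb and structpb packages:

package main

import (
	"fmt"

	"github.com/golang/protobuf/jsonpb"
	stpb "github.com/golang/protobuf/ptypes/struct"
)

func main() {
	// JSON null aimed at a structpb.Value becomes an explicit NullValue...
	var v stpb.Value
	if err := jsonpb.UnmarshalString(`null`, &v); err != nil {
		panic(err)
	}
	fmt.Println(v.GetNullValue()) // NULL_VALUE

	// ...while an ordinary JSON object still decodes into Struct fields as before.
	var st stpb.Struct
	if err := jsonpb.UnmarshalString(`{"a":"x","c":3,"d":true}`, &st); err != nil {
		panic(err)
	}
	fmt.Println(len(st.Fields)) // 3
}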
- v := reflect.New(targetType.Elem()).Elem() - if err := u.unmarshalValue(v, raw, valprop); err != nil { - return err + // Unmarshal map value. + v := reflect.New(targetType.Elem()).Elem() + var vprop *proto.Properties + if prop != nil && prop.MapValProp != nil { + vprop = prop.MapValProp + } + if err := u.unmarshalValue(v, raw, vprop); err != nil { + return err + } + target.SetMapIndex(k, v) } - target.SetMapIndex(k, v) } return nil } - // 64-bit integers can be encoded as strings. In this case we drop - // the quotes and proceed as normal. - isNum := targetType.Kind() == reflect.Int64 || targetType.Kind() == reflect.Uint64 - if isNum && strings.HasPrefix(string(inputValue), `"`) { - inputValue = inputValue[1 : len(inputValue)-1] - } - // Non-finite numbers can be encoded as strings. isFloat := targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64 if isFloat { @@ -996,10 +1054,25 @@ func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMe } } + // integers & floats can be encoded as strings. In this case we drop + // the quotes and proceed as normal. + isNum := targetType.Kind() == reflect.Int64 || targetType.Kind() == reflect.Uint64 || + targetType.Kind() == reflect.Int32 || targetType.Kind() == reflect.Uint32 || + targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64 + if isNum && strings.HasPrefix(string(inputValue), `"`) { + inputValue = inputValue[1 : len(inputValue)-1] + } + // Use the encoding/json for parsing other value types. return json.Unmarshal(inputValue, target.Addr().Interface()) } +func unquote(s string) (string, error) { + var ret string + err := json.Unmarshal([]byte(s), &ret) + return ret, err +} + // jsonProperties returns parsed proto.Properties for the field and corrects JSONName attribute. func jsonProperties(f reflect.StructField, origName bool) *proto.Properties { var prop proto.Properties @@ -1049,6 +1122,8 @@ func (s mapKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s mapKeys) Less(i, j int) bool { if k := s[i].Kind(); k == s[j].Kind() { switch k { + case reflect.String: + return s[i].String() < s[j].String() case reflect.Int32, reflect.Int64: return s[i].Int() < s[j].Int() case reflect.Uint32, reflect.Uint64: @@ -1057,3 +1132,140 @@ func (s mapKeys) Less(i, j int) bool { } return fmt.Sprint(s[i].Interface()) < fmt.Sprint(s[j].Interface()) } + +// checkRequiredFields returns an error if any required field in the given proto message is not set. +// This function is used by both Marshal and Unmarshal. While required fields only exist in a +// proto2 message, a proto3 message can contain proto2 message(s). +func checkRequiredFields(pb proto.Message) error { + // Most well-known type messages do not contain required fields. The "Any" type may contain + // a message that has required fields. + // + // When an Any message is being marshaled, the code will invoked proto.Unmarshal on Any.Value + // field in order to transform that into JSON, and that should have returned an error if a + // required field is not set in the embedded message. + // + // When an Any message is being unmarshaled, the code will have invoked proto.Marshal on the + // embedded message to store the serialized message in Any.Value field, and that should have + // returned an error if a required field is not set. + if _, ok := pb.(wkt); ok { + return nil + } + + v := reflect.ValueOf(pb) + // Skip message if it is not a struct pointer. 
+ if v.Kind() != reflect.Ptr { + return nil + } + v = v.Elem() + if v.Kind() != reflect.Struct { + return nil + } + + for i := 0; i < v.NumField(); i++ { + field := v.Field(i) + sfield := v.Type().Field(i) + + if sfield.PkgPath != "" { + // blank PkgPath means the field is exported; skip if not exported + continue + } + + if strings.HasPrefix(sfield.Name, "XXX_") { + continue + } + + // Oneof field is an interface implemented by wrapper structs containing the actual oneof + // field, i.e. an interface containing &T{real_value}. + if sfield.Tag.Get("protobuf_oneof") != "" { + if field.Kind() != reflect.Interface { + continue + } + v := field.Elem() + if v.Kind() != reflect.Ptr || v.IsNil() { + continue + } + v = v.Elem() + if v.Kind() != reflect.Struct || v.NumField() < 1 { + continue + } + field = v.Field(0) + sfield = v.Type().Field(0) + } + + protoTag := sfield.Tag.Get("protobuf") + if protoTag == "" { + continue + } + var prop proto.Properties + prop.Init(sfield.Type, sfield.Name, protoTag, &sfield) + + switch field.Kind() { + case reflect.Map: + if field.IsNil() { + continue + } + // Check each map value. + keys := field.MapKeys() + for _, k := range keys { + v := field.MapIndex(k) + if err := checkRequiredFieldsInValue(v); err != nil { + return err + } + } + case reflect.Slice: + // Handle non-repeated type, e.g. bytes. + if !prop.Repeated { + if prop.Required && field.IsNil() { + return fmt.Errorf("required field %q is not set", prop.Name) + } + continue + } + + // Handle repeated type. + if field.IsNil() { + continue + } + // Check each slice item. + for i := 0; i < field.Len(); i++ { + v := field.Index(i) + if err := checkRequiredFieldsInValue(v); err != nil { + return err + } + } + case reflect.Ptr: + if field.IsNil() { + if prop.Required { + return fmt.Errorf("required field %q is not set", prop.Name) + } + continue + } + if err := checkRequiredFieldsInValue(field); err != nil { + return err + } + } + } + + // Handle proto2 extensions. 
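Taken as a whole, checkRequiredFields (started above and finished just below with extension handling) is what lets Marshal and UnmarshalNext fail fast when a proto2 required field is unset rather than emitting or accepting incomplete JSON. A hedged fragment of the observable behaviour; pb stands for the jsonpb test proto package used by the tests later in this diff, so treat the snippet as illustrative rather than copy-paste ready:

// MsgWithRequired is generated from a proto2 message declaring `required string str = 1`.
m := &jsonpb.Marshaler{}
if _, err := m.MarshalToString(&pb.MsgWithRequired{}); err != nil {
	fmt.Println("rejected:", err) // the unset required field is reported
}
js, _ := m.MarshalToString(&pb.MsgWithRequired{Str: proto.String("hello")})
fmt.Println(js) // {"str":"hello"}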
+ for _, ext := range proto.RegisteredExtensions(pb) { + if !proto.HasExtension(pb, ext) { + continue + } + ep, err := proto.GetExtension(pb, ext) + if err != nil { + return err + } + err = checkRequiredFieldsInValue(reflect.ValueOf(ep)) + if err != nil { + return err + } + } + + return nil +} + +func checkRequiredFieldsInValue(v reflect.Value) error { + if pm, ok := v.Interface().(proto.Message); ok { + return checkRequiredFields(pm) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go index da93163..45a13d4 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go @@ -60,43 +60,111 @@ var ( } simpleObject = &pb.Simple{ - OInt32: proto.Int32(-32), - OInt64: proto.Int64(-6400000000), - OUint32: proto.Uint32(32), - OUint64: proto.Uint64(6400000000), - OSint32: proto.Int32(-13), - OSint64: proto.Int64(-2600000000), - OFloat: proto.Float32(3.14), - ODouble: proto.Float64(6.02214179e23), - OBool: proto.Bool(true), - OString: proto.String("hello \"there\""), - OBytes: []byte("beep boop"), + OInt32: proto.Int32(-32), + OInt32Str: proto.Int32(-32), + OInt64: proto.Int64(-6400000000), + OInt64Str: proto.Int64(-6400000000), + OUint32: proto.Uint32(32), + OUint32Str: proto.Uint32(32), + OUint64: proto.Uint64(6400000000), + OUint64Str: proto.Uint64(6400000000), + OSint32: proto.Int32(-13), + OSint32Str: proto.Int32(-13), + OSint64: proto.Int64(-2600000000), + OSint64Str: proto.Int64(-2600000000), + OFloat: proto.Float32(3.14), + OFloatStr: proto.Float32(3.14), + ODouble: proto.Float64(6.02214179e23), + ODoubleStr: proto.Float64(6.02214179e23), + OBool: proto.Bool(true), + OString: proto.String("hello \"there\""), + OBytes: []byte("beep boop"), } - simpleObjectJSON = `{` + + simpleObjectInputJSON = `{` + `"oBool":true,` + `"oInt32":-32,` + + `"oInt32Str":"-32",` + + `"oInt64":-6400000000,` + + `"oInt64Str":"-6400000000",` + + `"oUint32":32,` + + `"oUint32Str":"32",` + + `"oUint64":6400000000,` + + `"oUint64Str":"6400000000",` + + `"oSint32":-13,` + + `"oSint32Str":"-13",` + + `"oSint64":-2600000000,` + + `"oSint64Str":"-2600000000",` + + `"oFloat":3.14,` + + `"oFloatStr":"3.14",` + + `"oDouble":6.02214179e+23,` + + `"oDoubleStr":"6.02214179e+23",` + + `"oString":"hello \"there\"",` + + `"oBytes":"YmVlcCBib29w"` + + `}` + + simpleObjectOutputJSON = `{` + + `"oBool":true,` + + `"oInt32":-32,` + + `"oInt32Str":-32,` + `"oInt64":"-6400000000",` + + `"oInt64Str":"-6400000000",` + `"oUint32":32,` + + `"oUint32Str":32,` + `"oUint64":"6400000000",` + + `"oUint64Str":"6400000000",` + `"oSint32":-13,` + + `"oSint32Str":-13,` + `"oSint64":"-2600000000",` + + `"oSint64Str":"-2600000000",` + `"oFloat":3.14,` + + `"oFloatStr":3.14,` + `"oDouble":6.02214179e+23,` + + `"oDoubleStr":6.02214179e+23,` + `"oString":"hello \"there\"",` + `"oBytes":"YmVlcCBib29w"` + `}` - simpleObjectPrettyJSON = `{ + simpleObjectInputPrettyJSON = `{ "oBool": true, "oInt32": -32, + "oInt32Str": "-32", + "oInt64": -6400000000, + "oInt64Str": "-6400000000", + "oUint32": 32, + "oUint32Str": "32", + "oUint64": 6400000000, + "oUint64Str": "6400000000", + "oSint32": -13, + "oSint32Str": "-13", + "oSint64": -2600000000, + "oSint64Str": "-2600000000", + "oFloat": 3.14, + "oFloatStr": "3.14", + "oDouble": 6.02214179e+23, + "oDoubleStr": "6.02214179e+23", + "oString": "hello \"there\"", + "oBytes": "YmVlcCBib29w" +}` + + simpleObjectOutputPrettyJSON = `{ + "oBool": true, + "oInt32": 
-32, + "oInt32Str": -32, "oInt64": "-6400000000", + "oInt64Str": "-6400000000", "oUint32": 32, + "oUint32Str": 32, "oUint64": "6400000000", + "oUint64Str": "6400000000", "oSint32": -13, + "oSint32Str": -13, "oSint64": "-2600000000", + "oSint64Str": "-2600000000", "oFloat": 3.14, + "oFloatStr": 3.14, "oDouble": 6.02214179e+23, + "oDoubleStr": 6.02214179e+23, "oString": "hello \"there\"", "oBytes": "YmVlcCBib29w" }` @@ -343,8 +411,8 @@ var marshalingTests = []struct { pb proto.Message json string }{ - {"simple flat object", marshaler, simpleObject, simpleObjectJSON}, - {"simple pretty object", marshalerAllOptions, simpleObject, simpleObjectPrettyJSON}, + {"simple flat object", marshaler, simpleObject, simpleObjectOutputJSON}, + {"simple pretty object", marshalerAllOptions, simpleObject, simpleObjectOutputPrettyJSON}, {"non-finite floats fields object", marshaler, nonFinites, nonFinitesJSON}, {"repeated fields flat object", marshaler, repeatsObject, repeatsObjectJSON}, {"repeated fields pretty object", marshalerAllOptions, repeatsObject, repeatsObjectPrettyJSON}, @@ -379,14 +447,13 @@ var marshalingTests = []struct { &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}, `{"strry":{"\"one\"":"two","three":"four"}}`}, {"map", marshaler, - &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}, `{"objjy":{"1":{"dub":1}}}`}, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, `{"objjy":{"1":{"dub":1}}}`}, {"map", marshalerAllOptions, - &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}, objjyPrettyJSON}, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, objjyPrettyJSON}, {"map", marshaler, &pb.Mappy{Buggy: map[int64]string{1234: "yup"}}, `{"buggy":{"1234":"yup"}}`}, {"map", marshaler, &pb.Mappy{Booly: map[bool]bool{false: true}}, `{"booly":{"false":true}}`}, - // TODO: This is broken. 
- //{"map", marshaler, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}, `{"enumy":{"XIV":"ROMAN"}`}, + {"map", marshaler, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}, `{"enumy":{"XIV":"ROMAN"}}`}, {"map", Marshaler{EnumsAsInts: true}, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}, `{"enumy":{"XIV":2}}`}, {"map", marshaler, &pb.Mappy{S32Booly: map[int32]bool{1: true, 3: false, 10: true, 12: false}}, `{"s32booly":{"1":true,"3":false,"10":true,"12":false}}`}, {"map", marshaler, &pb.Mappy{S64Booly: map[int64]bool{1: true, 3: false, 10: true, 12: false}}, `{"s64booly":{"1":true,"3":false,"10":true,"12":false}}`}, @@ -395,7 +462,7 @@ var marshalingTests = []struct { {"proto2 map", marshaler, &pb.Maps{MInt64Str: map[int64]string{213: "cat"}}, `{"mInt64Str":{"213":"cat"}}`}, {"proto2 map", marshaler, - &pb.Maps{MBoolSimple: map[bool]*pb.Simple{true: &pb.Simple{OInt32: proto.Int32(1)}}}, + &pb.Maps{MBoolSimple: map[bool]*pb.Simple{true: {OInt32: proto.Int32(1)}}}, `{"mBoolSimple":{"true":{"oInt32":1}}}`}, {"oneof, not set", marshaler, &pb.MsgWithOneof{}, `{}`}, {"oneof, set", marshaler, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Title{"Grand Poobah"}}, `{"title":"Grand Poobah"}`}, @@ -406,7 +473,10 @@ var marshalingTests = []struct { {"Any with message and indent", marshalerAllOptions, anySimple, anySimplePrettyJSON}, {"Any with WKT", marshaler, anyWellKnown, anyWellKnownJSON}, {"Any with WKT and indent", marshalerAllOptions, anyWellKnown, anyWellKnownPrettyJSON}, - {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}, `{"dur":"3.000s"}`}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}, `{"dur":"3s"}`}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3, Nanos: 1e6}}, `{"dur":"3.001s"}`}, + {"Duration beyond float64 precision", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 100000000, Nanos: 1}}, `{"dur":"100000000.000000001s"}`}, + {"negative Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: -123, Nanos: -456}}, `{"dur":"-123.000000456s"}`}, {"Struct", marshaler, &pb.KnownTypes{St: &stpb.Struct{ Fields: map[string]*stpb.Value{ "one": {Kind: &stpb.Value_StringValue{"loneliest number"}}, @@ -421,6 +491,7 @@ var marshalingTests = []struct { {Kind: &stpb.Value_BoolValue{true}}, }}}, `{"lv":["x",null,3,true]}`}, {"Timestamp", marshaler, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}, `{"ts":"2014-05-13T16:53:20.021Z"}`}, + {"Timestamp", marshaler, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 0}}, `{"ts":"2014-05-13T16:53:20Z"}`}, {"number Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NumberValue{1}}}, `{"val":1}`}, {"null Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NullValue{stpb.NullValue_NULL_VALUE}}}, `{"val":null}`}, {"string number value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_StringValue{"9223372036854775807"}}}, `{"val":"9223372036854775807"}`}, @@ -449,6 +520,9 @@ var marshalingTests = []struct { {"BoolValue", marshaler, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}, `{"bool":true}`}, {"StringValue", marshaler, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}, `{"str":"plush"}`}, {"BytesValue", marshaler, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}, `{"bytes":"d293"}`}, + + {"required", marshaler, &pb.MsgWithRequired{Str: proto.String("hello")}, `{"str":"hello"}`}, + {"required bytes", marshaler, 
&pb.MsgWithRequiredBytes{Byts: []byte{}}, `{"byts":""}`}, } func TestMarshaling(t *testing.T) { @@ -462,9 +536,43 @@ func TestMarshaling(t *testing.T) { } } +func TestMarshalingNil(t *testing.T) { + var msg *pb.Simple + m := &Marshaler{} + if _, err := m.MarshalToString(msg); err == nil { + t.Errorf("mashaling nil returned no error") + } +} + +func TestMarshalIllegalTime(t *testing.T) { + tests := []struct { + pb proto.Message + fail bool + }{ + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: 0}}, false}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: 0}}, false}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: -1}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: 1}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: 1, Nanos: 1000000000}}, true}, + {&pb.KnownTypes{Dur: &durpb.Duration{Seconds: -1, Nanos: -1000000000}}, true}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: 1}}, false}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: -1}}, true}, + {&pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 1, Nanos: 1000000000}}, true}, + } + for _, tt := range tests { + _, err := marshaler.MarshalToString(tt.pb) + if err == nil && tt.fail { + t.Errorf("marshaler.MarshalToString(%v) = _, ; want _, ", tt.pb) + } + if err != nil && !tt.fail { + t.Errorf("marshaler.MarshalToString(%v) = _, %v; want _, ", tt.pb, err) + } + } +} + func TestMarshalJSONPBMarshaler(t *testing.T) { rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` - msg := dynamicMessage{rawJson: rawJson} + msg := dynamicMessage{RawJson: rawJson} str, err := new(Marshaler).MarshalToString(&msg) if err != nil { t.Errorf("an unexpected error occurred when marshalling JSONPBMarshaler: %v", err) @@ -475,7 +583,7 @@ func TestMarshalJSONPBMarshaler(t *testing.T) { } func TestMarshalAnyJSONPBMarshaler(t *testing.T) { - msg := dynamicMessage{rawJson: `{ "foo": "bar", "baz": [0, 1, 2, 3] }`} + msg := dynamicMessage{RawJson: `{ "foo": "bar", "baz": [0, 1, 2, 3] }`} a, err := ptypes.MarshalAny(&msg) if err != nil { t.Errorf("an unexpected error occurred when marshalling to Any: %v", err) @@ -486,20 +594,118 @@ func TestMarshalAnyJSONPBMarshaler(t *testing.T) { } // after custom marshaling, it's round-tripped through JSON decoding/encoding already, // so the keys are sorted, whitespace is compacted, and "@type" key has been added - expected := `{"@type":"type.googleapis.com/` + dynamicMessageName +`","baz":[0,1,2,3],"foo":"bar"}` + expected := `{"@type":"type.googleapis.com/` + dynamicMessageName + `","baz":[0,1,2,3],"foo":"bar"}` if str != expected { t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", str, expected) } } +func TestMarshalWithCustomValidation(t *testing.T) { + msg := dynamicMessage{RawJson: `{ "foo": "bar", "baz": [0, 1, 2, 3] }`, Dummy: &dynamicMessage{}} + + js, err := new(Marshaler).MarshalToString(&msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshalling to json: %v", err) + } + err = Unmarshal(strings.NewReader(js), &msg) + if err != nil { + t.Errorf("an unexpected error occurred when unmarshalling from json: %v", err) + } +} + +// Test marshaling message containing unset required fields should produce error. 
+func TestMarshalUnsetRequiredFields(t *testing.T) { + msgExt := &pb.Real{} + proto.SetExtension(msgExt, pb.E_Extm, &pb.MsgWithRequired{}) + + tests := []struct { + desc string + marshaler *Marshaler + pb proto.Message + }{ + { + desc: "direct required field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequired{}, + }, + { + desc: "direct required field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithRequired{}, + }, + { + desc: "indirect required field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{Subm: &pb.MsgWithRequired{}}, + }, + { + desc: "indirect required field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithIndirectRequired{Subm: &pb.MsgWithRequired{}}, + }, + { + desc: "direct required wkt field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequiredWKT{}, + }, + { + desc: "direct required wkt field + emit defaults", + marshaler: &Marshaler{EmitDefaults: true}, + pb: &pb.MsgWithRequiredWKT{}, + }, + { + desc: "direct required bytes field", + marshaler: &Marshaler{}, + pb: &pb.MsgWithRequiredBytes{}, + }, + { + desc: "required in map value", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{ + MapField: map[string]*pb.MsgWithRequired{ + "key": {}, + }, + }, + }, + { + desc: "required in repeated item", + marshaler: &Marshaler{}, + pb: &pb.MsgWithIndirectRequired{ + SliceField: []*pb.MsgWithRequired{ + {Str: proto.String("hello")}, + {}, + }, + }, + }, + { + desc: "required inside oneof", + marshaler: &Marshaler{}, + pb: &pb.MsgWithOneof{ + Union: &pb.MsgWithOneof_MsgWithRequired{&pb.MsgWithRequired{}}, + }, + }, + { + desc: "required inside extension", + marshaler: &Marshaler{}, + pb: msgExt, + }, + } + + for _, tc := range tests { + if _, err := tc.marshaler.MarshalToString(tc.pb); err == nil { + t.Errorf("%s: expecting error in marshaling with unset required fields %+v", tc.desc, tc.pb) + } + } +} + var unmarshalingTests = []struct { desc string unmarshaler Unmarshaler json string pb proto.Message }{ - {"simple flat object", Unmarshaler{}, simpleObjectJSON, simpleObject}, - {"simple pretty object", Unmarshaler{}, simpleObjectPrettyJSON, simpleObject}, + {"simple flat object", Unmarshaler{}, simpleObjectInputJSON, simpleObject}, + {"simple pretty object", Unmarshaler{}, simpleObjectInputPrettyJSON, simpleObject}, {"repeated fields flat object", Unmarshaler{}, repeatsObjectJSON, repeatsObject}, {"repeated fields pretty object", Unmarshaler{}, repeatsObjectPrettyJSON, repeatsObject}, {"nested message/enum flat object", Unmarshaler{}, complexObjectJSON, complexObject}, @@ -535,14 +741,13 @@ var unmarshalingTests = []struct { {"-Inf", Unmarshaler{}, `{"oDouble":"-Infinity"}`, &pb.Simple{ODouble: proto.Float64(math.Inf(-1))}}, {"map", Unmarshaler{}, `{"nummy":{"1":2,"3":4}}`, &pb.Mappy{Nummy: map[int64]int32{1: 2, 3: 4}}}, {"map", Unmarshaler{}, `{"strry":{"\"one\"":"two","three":"four"}}`, &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}}, - {"map", Unmarshaler{}, `{"objjy":{"1":{"dub":1}}}`, &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: &pb.Simple3{Dub: 1}}}}, + {"map", Unmarshaler{}, `{"objjy":{"1":{"dub":1}}}`, &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}}, {"proto2 extension", Unmarshaler{}, realNumberJSON, realNumber}, {"Any with message", Unmarshaler{}, anySimpleJSON, anySimple}, {"Any with message and indent", Unmarshaler{}, anySimplePrettyJSON, anySimple}, {"Any with WKT", Unmarshaler{}, anyWellKnownJSON, anyWellKnown}, {"Any with WKT and indent", Unmarshaler{}, 
anyWellKnownPrettyJSON, anyWellKnown}, - // TODO: This is broken. - //{"map", Unmarshaler{}, `{"enumy":{"XIV":"ROMAN"}`, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}}, + {"map", Unmarshaler{}, `{"enumy":{"XIV":"ROMAN"}}`, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}}, {"map", Unmarshaler{}, `{"enumy":{"XIV":2}}`, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}}, {"oneof", Unmarshaler{}, `{"salary":31000}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Salary{31000}}}, {"oneof spec name", Unmarshaler{}, `{"Country":"Australia"}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Country{"Australia"}}}, @@ -553,12 +758,16 @@ var unmarshalingTests = []struct { {"camelName input", Unmarshaler{}, `{"oBool":true}`, &pb.Simple{OBool: proto.Bool(true)}}, {"Duration", Unmarshaler{}, `{"dur":"3.000s"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}}, - {"null Duration", Unmarshaler{}, `{"dur":null}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 0}}}, + {"Duration", Unmarshaler{}, `{"dur":"4s"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 4}}}, + {"Duration with unicode", Unmarshaler{}, `{"dur": "3\u0073"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}}, + {"null Duration", Unmarshaler{}, `{"dur":null}`, &pb.KnownTypes{Dur: nil}}, {"Timestamp", Unmarshaler{}, `{"ts":"2014-05-13T16:53:20.021Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}}, + {"Timestamp", Unmarshaler{}, `{"ts":"2014-05-13T16:53:20Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 0}}}, + {"Timestamp with unicode", Unmarshaler{}, `{"ts": "2014-05-13T16:53:20\u005a"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 0}}}, {"PreEpochTimestamp", Unmarshaler{}, `{"ts":"1969-12-31T23:59:58.999999995Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -2, Nanos: 999999995}}}, {"ZeroTimeTimestamp", Unmarshaler{}, `{"ts":"0001-01-01T00:00:00Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -62135596800, Nanos: 0}}}, - {"null Timestamp", Unmarshaler{}, `{"ts":null}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 0, Nanos: 0}}}, - {"null Struct", Unmarshaler{}, `{"st": null}`, &pb.KnownTypes{St: &stpb.Struct{}}}, + {"null Timestamp", Unmarshaler{}, `{"ts":null}`, &pb.KnownTypes{Ts: nil}}, + {"null Struct", Unmarshaler{}, `{"st": null}`, &pb.KnownTypes{St: nil}}, {"empty Struct", Unmarshaler{}, `{"st": {}}`, &pb.KnownTypes{St: &stpb.Struct{}}}, {"basic Struct", Unmarshaler{}, `{"st": {"a": "x", "b": null, "c": 3, "d": true}}`, &pb.KnownTypes{St: &stpb.Struct{Fields: map[string]*stpb.Value{ "a": {Kind: &stpb.Value_StringValue{"x"}}, @@ -575,7 +784,7 @@ var unmarshalingTests = []struct { }}}}, }}}}, }}}}, - {"null ListValue", Unmarshaler{}, `{"lv": null}`, &pb.KnownTypes{Lv: &stpb.ListValue{}}}, + {"null ListValue", Unmarshaler{}, `{"lv": null}`, &pb.KnownTypes{Lv: nil}}, {"empty ListValue", Unmarshaler{}, `{"lv": []}`, &pb.KnownTypes{Lv: &stpb.ListValue{}}}, {"basic ListValue", Unmarshaler{}, `{"lv": ["x", null, 3, true]}`, &pb.KnownTypes{Lv: &stpb.ListValue{Values: []*stpb.Value{ {Kind: &stpb.Value_StringValue{"x"}}, @@ -611,9 +820,29 @@ var unmarshalingTests = []struct { {"UInt32Value", Unmarshaler{}, `{"u32":4}`, &pb.KnownTypes{U32: &wpb.UInt32Value{Value: 4}}}, {"BoolValue", Unmarshaler{}, `{"bool":true}`, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}}, {"StringValue", Unmarshaler{}, `{"str":"plush"}`, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}}, + {"StringValue containing escaped character", Unmarshaler{}, `{"str":"a\/b"}`, 
&pb.KnownTypes{Str: &wpb.StringValue{Value: "a/b"}}}, + {"StructValue containing StringValue's", Unmarshaler{}, `{"escaped": "a\/b", "unicode": "\u00004E16\u0000754C"}`, + &stpb.Struct{ + Fields: map[string]*stpb.Value{ + "escaped": {Kind: &stpb.Value_StringValue{"a/b"}}, + "unicode": {Kind: &stpb.Value_StringValue{"\u00004E16\u0000754C"}}, + }, + }}, {"BytesValue", Unmarshaler{}, `{"bytes":"d293"}`, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}}, - // `null` is also a permissible value. Let's just test one. - {"null DoubleValue", Unmarshaler{}, `{"dbl":null}`, &pb.KnownTypes{Dbl: &wpb.DoubleValue{}}}, + + // Ensure that `null` as a value ends up with a nil pointer instead of a [type]Value struct. + {"null DoubleValue", Unmarshaler{}, `{"dbl":null}`, &pb.KnownTypes{Dbl: nil}}, + {"null FloatValue", Unmarshaler{}, `{"flt":null}`, &pb.KnownTypes{Flt: nil}}, + {"null Int64Value", Unmarshaler{}, `{"i64":null}`, &pb.KnownTypes{I64: nil}}, + {"null UInt64Value", Unmarshaler{}, `{"u64":null}`, &pb.KnownTypes{U64: nil}}, + {"null Int32Value", Unmarshaler{}, `{"i32":null}`, &pb.KnownTypes{I32: nil}}, + {"null UInt32Value", Unmarshaler{}, `{"u32":null}`, &pb.KnownTypes{U32: nil}}, + {"null BoolValue", Unmarshaler{}, `{"bool":null}`, &pb.KnownTypes{Bool: nil}}, + {"null StringValue", Unmarshaler{}, `{"str":null}`, &pb.KnownTypes{Str: nil}}, + {"null BytesValue", Unmarshaler{}, `{"bytes":null}`, &pb.KnownTypes{Bytes: nil}}, + + {"required", Unmarshaler{}, `{"str":"hello"}`, &pb.MsgWithRequired{Str: proto.String("hello")}}, + {"required bytes", Unmarshaler{}, `{"byts": []}`, &pb.MsgWithRequiredBytes{Byts: []byte{}}}, } func TestUnmarshaling(t *testing.T) { @@ -623,7 +852,7 @@ func TestUnmarshaling(t *testing.T) { err := tt.unmarshaler.Unmarshal(strings.NewReader(tt.json), p) if err != nil { - t.Errorf("%s: %v", tt.desc, err) + t.Errorf("unmarshalling %s: %v", tt.desc, err) continue } @@ -636,6 +865,26 @@ func TestUnmarshaling(t *testing.T) { } } +func TestUnmarshalNullArray(t *testing.T) { + var repeats pb.Repeats + if err := UnmarshalString(`{"rBool":null}`, &repeats); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(repeats, pb.Repeats{}) { + t.Errorf("got non-nil fields in [%#v]", repeats) + } +} + +func TestUnmarshalNullObject(t *testing.T) { + var maps pb.Maps + if err := UnmarshalString(`{"mInt64Str":null}`, &maps); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(maps, pb.Maps{}) { + t.Errorf("got non-nil fields in [%#v]", maps) + } +} + func TestUnmarshalNext(t *testing.T) { // We only need to check against a few, not all of them. 
tests := unmarshalingTests[:5] @@ -681,6 +930,11 @@ var unmarshalingShouldError = []struct { {"gibberish", "{adskja123;l23=-=", new(pb.Simple)}, {"unknown field", `{"unknown": "foo"}`, new(pb.Simple)}, {"unknown enum name", `{"hilarity":"DAVE"}`, new(proto3pb.Message)}, + {"Duration containing invalid character", `{"dur": "3\U0073"}`, &pb.KnownTypes{}}, + {"Timestamp containing invalid character", `{"ts": "2014-05-13T16:53:20\U005a"}`, &pb.KnownTypes{}}, + {"StringValue containing invalid character", `{"str": "\U00004E16\U0000754C"}`, &pb.KnownTypes{}}, + {"StructValue containing invalid character", `{"str": "\U00004E16\U0000754C"}`, &stpb.Struct{}}, + {"repeated proto3 enum with non array input", `{"rFunny":"PUNS"}`, &proto3pb.Message{RFunny: []proto3pb.Message_Humour{}}}, } func TestUnmarshalingBadInput(t *testing.T) { @@ -692,14 +946,86 @@ func TestUnmarshalingBadInput(t *testing.T) { } } +type funcResolver func(turl string) (proto.Message, error) + +func (fn funcResolver) Resolve(turl string) (proto.Message, error) { + return fn(turl) +} + +func TestAnyWithCustomResolver(t *testing.T) { + var resolvedTypeUrls []string + resolver := funcResolver(func(turl string) (proto.Message, error) { + resolvedTypeUrls = append(resolvedTypeUrls, turl) + return new(pb.Simple), nil + }) + msg := &pb.Simple{ + OBytes: []byte{1, 2, 3, 4}, + OBool: proto.Bool(true), + OString: proto.String("foobar"), + OInt64: proto.Int64(1020304), + } + msgBytes, err := proto.Marshal(msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling message: %v", err) + } + // make an Any with a type URL that won't resolve w/out custom resolver + any := &anypb.Any{ + TypeUrl: "https://foobar.com/some.random.MessageKind", + Value: msgBytes, + } + + m := Marshaler{AnyResolver: resolver} + js, err := m.MarshalToString(any) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling any to JSON: %v", err) + } + if len(resolvedTypeUrls) != 1 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[0] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[0], "https://foobar.com/some.random.MessageKind") + } + wanted := `{"@type":"https://foobar.com/some.random.MessageKind","oBool":true,"oInt64":"1020304","oString":"foobar","oBytes":"AQIDBA=="}` + if js != wanted { + t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", js, wanted) + } + + u := Unmarshaler{AnyResolver: resolver} + roundTrip := &anypb.Any{} + err = u.Unmarshal(bytes.NewReader([]byte(js)), roundTrip) + if err != nil { + t.Errorf("an unexpected error occurred when unmarshaling any from JSON: %v", err) + } + if len(resolvedTypeUrls) != 2 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[1] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[1], "https://foobar.com/some.random.MessageKind") + } + if !proto.Equal(any, roundTrip) { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", roundTrip, any) + } +} + func TestUnmarshalJSONPBUnmarshaler(t *testing.T) { rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` var msg dynamicMessage if err := Unmarshal(strings.NewReader(rawJson), &msg); err != nil { t.Errorf("an unexpected error occurred when parsing into JSONPBUnmarshaler: %v", err) } - if msg.rawJson != rawJson { - 
t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", msg.rawJson, rawJson) + if msg.RawJson != rawJson { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", msg.RawJson, rawJson) + } +} + +func TestUnmarshalNullWithJSONPBUnmarshaler(t *testing.T) { + rawJson := `{"stringField":null}` + var ptrFieldMsg ptrFieldMessage + if err := Unmarshal(strings.NewReader(rawJson), &ptrFieldMsg); err != nil { + t.Errorf("unmarshal error: %v", err) + } + + want := ptrFieldMessage{StringField: &stringField{IsSet: true, StringValue: "null"}} + if !proto.Equal(&ptrFieldMsg, &want) { + t.Errorf("unmarshal result StringField: got %v, want %v", ptrFieldMsg, want) } } @@ -710,7 +1036,7 @@ func TestUnmarshalAnyJSONPBUnmarshaler(t *testing.T) { t.Errorf("an unexpected error occurred when parsing into JSONPBUnmarshaler: %v", err) } - dm := &dynamicMessage{rawJson: `{"baz":[0,1,2,3],"foo":"bar"}`} + dm := &dynamicMessage{RawJson: `{"baz":[0,1,2,3],"foo":"bar"}`} var want anypb.Any if b, err := proto.Marshal(dm); err != nil { t.Errorf("an unexpected error occurred when marshaling message: %v", err) @@ -720,40 +1046,186 @@ func TestUnmarshalAnyJSONPBUnmarshaler(t *testing.T) { } if !proto.Equal(&got, &want) { - t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", got, want) + t.Errorf("message contents not set correctly after unmarshalling JSON: got %v, wanted %v", got, want) } } const ( dynamicMessageName = "google.protobuf.jsonpb.testing.dynamicMessage" ) + func init() { // we register the custom type below so that we can use it in Any types proto.RegisterType((*dynamicMessage)(nil), dynamicMessageName) } +type ptrFieldMessage struct { + StringField *stringField `protobuf:"bytes,1,opt,name=stringField"` +} + +func (m *ptrFieldMessage) Reset() { +} + +func (m *ptrFieldMessage) String() string { + return m.StringField.StringValue +} + +func (m *ptrFieldMessage) ProtoMessage() { +} + +type stringField struct { + IsSet bool `protobuf:"varint,1,opt,name=isSet"` + StringValue string `protobuf:"bytes,2,opt,name=stringValue"` +} + +func (s *stringField) Reset() { +} + +func (s *stringField) String() string { + return s.StringValue +} + +func (s *stringField) ProtoMessage() { +} + +func (s *stringField) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { + s.IsSet = true + s.StringValue = string(js) + return nil +} + // dynamicMessage implements protobuf.Message but is not a normal generated message type. // It provides implementations of JSONPBMarshaler and JSONPBUnmarshaler for JSON support. type dynamicMessage struct { - rawJson string `protobuf:"bytes,1,opt,name=rawJson"` + RawJson string `protobuf:"bytes,1,opt,name=rawJson"` + + // an unexported nested message is present just to ensure that it + // won't result in a panic (see issue #509) + Dummy *dynamicMessage `protobuf:"bytes,2,opt,name=dummy"` } func (m *dynamicMessage) Reset() { - m.rawJson = "{}" + m.RawJson = "{}" } func (m *dynamicMessage) String() string { - return m.rawJson + return m.RawJson } func (m *dynamicMessage) ProtoMessage() { } func (m *dynamicMessage) MarshalJSONPB(jm *Marshaler) ([]byte, error) { - return []byte(m.rawJson), nil + return []byte(m.RawJson), nil } func (m *dynamicMessage) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { - m.rawJson = string(js) + m.RawJson = string(js) return nil -} \ No newline at end of file +} + +// Test unmarshaling message containing unset required fields should produce error. 
+func TestUnmarshalUnsetRequiredFields(t *testing.T) { + tests := []struct { + desc string + pb proto.Message + json string + }{ + { + desc: "direct required field missing", + pb: &pb.MsgWithRequired{}, + json: `{}`, + }, + { + desc: "direct required field set to null", + pb: &pb.MsgWithRequired{}, + json: `{"str": null}`, + }, + { + desc: "indirect required field missing", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"subm": {}}`, + }, + { + desc: "indirect required field set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"subm": {"str": null}}`, + }, + { + desc: "direct required bytes field missing", + pb: &pb.MsgWithRequiredBytes{}, + json: `{}`, + }, + { + desc: "direct required bytes field set to null", + pb: &pb.MsgWithRequiredBytes{}, + json: `{"byts": null}`, + }, + { + desc: "direct required wkt field missing", + pb: &pb.MsgWithRequiredWKT{}, + json: `{}`, + }, + { + desc: "direct required wkt field set to null", + pb: &pb.MsgWithRequiredWKT{}, + json: `{"str": null}`, + }, + { + desc: "any containing message with required field set to null", + pb: &pb.KnownTypes{}, + json: `{"an": {"@type": "example.com/jsonpb.MsgWithRequired", "str": null}}`, + }, + { + desc: "any containing message with missing required field", + pb: &pb.KnownTypes{}, + json: `{"an": {"@type": "example.com/jsonpb.MsgWithRequired"}}`, + }, + { + desc: "missing required in map value", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"map_field": {"a": {}, "b": {"str": "hi"}}}`, + }, + { + desc: "required in map value set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"map_field": {"a": {"str": "hello"}, "b": {"str": null}}}`, + }, + { + desc: "missing required in slice item", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"slice_field": [{}, {"str": "hi"}]}`, + }, + { + desc: "required in slice item set to null", + pb: &pb.MsgWithIndirectRequired{}, + json: `{"slice_field": [{"str": "hello"}, {"str": null}]}`, + }, + { + desc: "required inside oneof missing", + pb: &pb.MsgWithOneof{}, + json: `{"msgWithRequired": {}}`, + }, + { + desc: "required inside oneof set to null", + pb: &pb.MsgWithOneof{}, + json: `{"msgWithRequired": {"str": null}}`, + }, + { + desc: "required field in extension missing", + pb: &pb.Real{}, + json: `{"[jsonpb.extm]":{}}`, + }, + { + desc: "required field in extension set to null", + pb: &pb.Real{}, + json: `{"[jsonpb.extm]":{"str": null}}`, + }, + } + + for _, tc := range tests { + if err := UnmarshalString(tc.json, tc.pb); err == nil { + t.Errorf("%s: expecting error in unmarshaling with unset required fields %s", tc.desc, tc.json) + } + } +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile deleted file mode 100644 index eeda8ae..0000000 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2015 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -regenerate: - protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers:. *.proto diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go index ebb180e..ef8bc34 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go @@ -1,34 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: more_test_objects.proto -/* -Package jsonpb is a generated protocol buffer package. - -It is generated from these files: - more_test_objects.proto - test_objects.proto - -It has these top-level messages: - Simple3 - SimpleSlice3 - SimpleMap3 - SimpleNull3 - Mappy - Simple - NonFinites - Repeats - Widget - Maps - MsgWithOneof - Real - Complex - KnownTypes -*/ package jsonpb -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -39,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type Numeral int32 @@ -54,6 +33,7 @@ var Numeral_name = map[int32]string{ 1: "ARABIC", 2: "ROMAN", } + var Numeral_value = map[string]int32{ "UNKNOWN": 0, "ARABIC": 1, @@ -63,16 +43,42 @@ var Numeral_value = map[string]int32{ func (x Numeral) String() string { return proto.EnumName(Numeral_name, int32(x)) } -func (Numeral) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (Numeral) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{0} +} type Simple3 struct { - Dub float64 `protobuf:"fixed64,1,opt,name=dub" json:"dub,omitempty"` + Dub float64 `protobuf:"fixed64,1,opt,name=dub,proto3" json:"dub,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Simple3) Reset() { *m = Simple3{} } +func (m *Simple3) String() string { return proto.CompactTextString(m) } +func (*Simple3) ProtoMessage() {} +func (*Simple3) Descriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{0} +} + +func (m *Simple3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Simple3.Unmarshal(m, b) +} +func (m *Simple3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Simple3.Marshal(b, m, deterministic) +} +func (m *Simple3) XXX_Merge(src proto.Message) { + xxx_messageInfo_Simple3.Merge(m, src) +} +func (m *Simple3) XXX_Size() int { + return xxx_messageInfo_Simple3.Size(m) +} +func (m *Simple3) XXX_DiscardUnknown() { + xxx_messageInfo_Simple3.DiscardUnknown(m) } -func (m *Simple3) Reset() { *m = Simple3{} } -func (m *Simple3) String() string { return proto.CompactTextString(m) } -func (*Simple3) ProtoMessage() {} -func (*Simple3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +var xxx_messageInfo_Simple3 proto.InternalMessageInfo func (m *Simple3) GetDub() float64 { if m != nil { @@ -82,13 +88,36 @@ func (m *Simple3) GetDub() float64 { } type SimpleSlice3 struct { - Slices []string `protobuf:"bytes,1,rep,name=slices" json:"slices,omitempty"` + Slices []string `protobuf:"bytes,1,rep,name=slices,proto3" json:"slices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *SimpleSlice3) Reset() { *m = SimpleSlice3{} } -func (m *SimpleSlice3) String() string { return proto.CompactTextString(m) } -func (*SimpleSlice3) ProtoMessage() {} -func (*SimpleSlice3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *SimpleSlice3) Reset() { *m = SimpleSlice3{} } +func (m *SimpleSlice3) String() string { return proto.CompactTextString(m) } +func (*SimpleSlice3) ProtoMessage() {} +func (*SimpleSlice3) Descriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{1} +} + +func (m *SimpleSlice3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleSlice3.Unmarshal(m, b) +} +func (m *SimpleSlice3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleSlice3.Marshal(b, m, deterministic) +} +func (m *SimpleSlice3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleSlice3.Merge(m, src) +} +func (m *SimpleSlice3) XXX_Size() int { + return xxx_messageInfo_SimpleSlice3.Size(m) +} +func (m *SimpleSlice3) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleSlice3.DiscardUnknown(m) +} + +var 
xxx_messageInfo_SimpleSlice3 proto.InternalMessageInfo func (m *SimpleSlice3) GetSlices() []string { if m != nil { @@ -98,13 +127,36 @@ func (m *SimpleSlice3) GetSlices() []string { } type SimpleMap3 struct { - Stringy map[string]string `protobuf:"bytes,1,rep,name=stringy" json:"stringy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Stringy map[string]string `protobuf:"bytes,1,rep,name=stringy,proto3" json:"stringy,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleMap3) Reset() { *m = SimpleMap3{} } +func (m *SimpleMap3) String() string { return proto.CompactTextString(m) } +func (*SimpleMap3) ProtoMessage() {} +func (*SimpleMap3) Descriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{2} +} + +func (m *SimpleMap3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleMap3.Unmarshal(m, b) +} +func (m *SimpleMap3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleMap3.Marshal(b, m, deterministic) +} +func (m *SimpleMap3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleMap3.Merge(m, src) +} +func (m *SimpleMap3) XXX_Size() int { + return xxx_messageInfo_SimpleMap3.Size(m) +} +func (m *SimpleMap3) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleMap3.DiscardUnknown(m) } -func (m *SimpleMap3) Reset() { *m = SimpleMap3{} } -func (m *SimpleMap3) String() string { return proto.CompactTextString(m) } -func (*SimpleMap3) ProtoMessage() {} -func (*SimpleMap3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +var xxx_messageInfo_SimpleMap3 proto.InternalMessageInfo func (m *SimpleMap3) GetStringy() map[string]string { if m != nil { @@ -114,13 +166,36 @@ func (m *SimpleMap3) GetStringy() map[string]string { } type SimpleNull3 struct { - Simple *Simple3 `protobuf:"bytes,1,opt,name=simple" json:"simple,omitempty"` + Simple *Simple3 `protobuf:"bytes,1,opt,name=simple,proto3" json:"simple,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleNull3) Reset() { *m = SimpleNull3{} } +func (m *SimpleNull3) String() string { return proto.CompactTextString(m) } +func (*SimpleNull3) ProtoMessage() {} +func (*SimpleNull3) Descriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{3} +} + +func (m *SimpleNull3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleNull3.Unmarshal(m, b) +} +func (m *SimpleNull3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleNull3.Marshal(b, m, deterministic) +} +func (m *SimpleNull3) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleNull3.Merge(m, src) +} +func (m *SimpleNull3) XXX_Size() int { + return xxx_messageInfo_SimpleNull3.Size(m) +} +func (m *SimpleNull3) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleNull3.DiscardUnknown(m) } -func (m *SimpleNull3) Reset() { *m = SimpleNull3{} } -func (m *SimpleNull3) String() string { return proto.CompactTextString(m) } -func (*SimpleNull3) ProtoMessage() {} -func (*SimpleNull3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +var xxx_messageInfo_SimpleNull3 proto.InternalMessageInfo func (m *SimpleNull3) GetSimple() *Simple3 { if m != nil { @@ -130,22 +205,45 @@ func (m *SimpleNull3) GetSimple() *Simple3 { } type Mappy struct { - Nummy 
map[int64]int32 `protobuf:"bytes,1,rep,name=nummy" json:"nummy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - Strry map[string]string `protobuf:"bytes,2,rep,name=strry" json:"strry,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Objjy map[int32]*Simple3 `protobuf:"bytes,3,rep,name=objjy" json:"objjy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Buggy map[int64]string `protobuf:"bytes,4,rep,name=buggy" json:"buggy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Booly map[bool]bool `protobuf:"bytes,5,rep,name=booly" json:"booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - Enumy map[string]Numeral `protobuf:"bytes,6,rep,name=enumy" json:"enumy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=jsonpb.Numeral"` - S32Booly map[int32]bool `protobuf:"bytes,7,rep,name=s32booly" json:"s32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - S64Booly map[int64]bool `protobuf:"bytes,8,rep,name=s64booly" json:"s64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - U32Booly map[uint32]bool `protobuf:"bytes,9,rep,name=u32booly" json:"u32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` - U64Booly map[uint64]bool `protobuf:"bytes,10,rep,name=u64booly" json:"u64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` -} - -func (m *Mappy) Reset() { *m = Mappy{} } -func (m *Mappy) String() string { return proto.CompactTextString(m) } -func (*Mappy) ProtoMessage() {} -func (*Mappy) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + Nummy map[int64]int32 `protobuf:"bytes,1,rep,name=nummy,proto3" json:"nummy,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + Strry map[string]string `protobuf:"bytes,2,rep,name=strry,proto3" json:"strry,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Objjy map[int32]*Simple3 `protobuf:"bytes,3,rep,name=objjy,proto3" json:"objjy,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Buggy map[int64]string `protobuf:"bytes,4,rep,name=buggy,proto3" json:"buggy,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Booly map[bool]bool `protobuf:"bytes,5,rep,name=booly,proto3" json:"booly,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + Enumy map[string]Numeral `protobuf:"bytes,6,rep,name=enumy,proto3" json:"enumy,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=jsonpb.Numeral"` + S32Booly map[int32]bool `protobuf:"bytes,7,rep,name=s32booly,proto3" json:"s32booly,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + S64Booly map[int64]bool `protobuf:"bytes,8,rep,name=s64booly,proto3" json:"s64booly,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + U32Booly map[uint32]bool `protobuf:"bytes,9,rep,name=u32booly,proto3" json:"u32booly,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + U64Booly 
map[uint64]bool `protobuf:"bytes,10,rep,name=u64booly,proto3" json:"u64booly,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mappy) Reset() { *m = Mappy{} } +func (m *Mappy) String() string { return proto.CompactTextString(m) } +func (*Mappy) ProtoMessage() {} +func (*Mappy) Descriptor() ([]byte, []int) { + return fileDescriptor_e6c135db3023e377, []int{4} +} + +func (m *Mappy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mappy.Unmarshal(m, b) +} +func (m *Mappy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mappy.Marshal(b, m, deterministic) +} +func (m *Mappy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mappy.Merge(m, src) +} +func (m *Mappy) XXX_Size() int { + return xxx_messageInfo_Mappy.Size(m) +} +func (m *Mappy) XXX_DiscardUnknown() { + xxx_messageInfo_Mappy.DiscardUnknown(m) +} + +var xxx_messageInfo_Mappy proto.InternalMessageInfo func (m *Mappy) GetNummy() map[int64]int32 { if m != nil { @@ -218,17 +316,28 @@ func (m *Mappy) GetU64Booly() map[uint64]bool { } func init() { + proto.RegisterEnum("jsonpb.Numeral", Numeral_name, Numeral_value) proto.RegisterType((*Simple3)(nil), "jsonpb.Simple3") proto.RegisterType((*SimpleSlice3)(nil), "jsonpb.SimpleSlice3") proto.RegisterType((*SimpleMap3)(nil), "jsonpb.SimpleMap3") + proto.RegisterMapType((map[string]string)(nil), "jsonpb.SimpleMap3.StringyEntry") proto.RegisterType((*SimpleNull3)(nil), "jsonpb.SimpleNull3") proto.RegisterType((*Mappy)(nil), "jsonpb.Mappy") - proto.RegisterEnum("jsonpb.Numeral", Numeral_name, Numeral_value) + proto.RegisterMapType((map[bool]bool)(nil), "jsonpb.Mappy.BoolyEntry") + proto.RegisterMapType((map[int64]string)(nil), "jsonpb.Mappy.BuggyEntry") + proto.RegisterMapType((map[string]Numeral)(nil), "jsonpb.Mappy.EnumyEntry") + proto.RegisterMapType((map[int64]int32)(nil), "jsonpb.Mappy.NummyEntry") + proto.RegisterMapType((map[int32]*Simple3)(nil), "jsonpb.Mappy.ObjjyEntry") + proto.RegisterMapType((map[int32]bool)(nil), "jsonpb.Mappy.S32boolyEntry") + proto.RegisterMapType((map[int64]bool)(nil), "jsonpb.Mappy.S64boolyEntry") + proto.RegisterMapType((map[string]string)(nil), "jsonpb.Mappy.StrryEntry") + proto.RegisterMapType((map[uint32]bool)(nil), "jsonpb.Mappy.U32boolyEntry") + proto.RegisterMapType((map[uint64]bool)(nil), "jsonpb.Mappy.U64boolyEntry") } -func init() { proto.RegisterFile("more_test_objects.proto", fileDescriptor0) } +func init() { proto.RegisterFile("more_test_objects.proto", fileDescriptor_e6c135db3023e377) } -var fileDescriptor0 = []byte{ +var fileDescriptor_e6c135db3023e377 = []byte{ // 526 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdd, 0x6b, 0xdb, 0x3c, 0x14, 0x87, 0x5f, 0x27, 0xf5, 0xd7, 0x49, 0xfb, 0x2e, 0x88, 0xb1, 0x99, 0xf4, 0x62, 0xc5, 0xb0, diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go index d413d74..9f4a97e 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go @@ -3,20 +3,28 @@ package jsonpb -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import google_protobuf 
"github.com/golang/protobuf/ptypes/any" -import google_protobuf1 "github.com/golang/protobuf/ptypes/duration" -import google_protobuf2 "github.com/golang/protobuf/ptypes/struct" -import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp" -import google_protobuf4 "github.com/golang/protobuf/ptypes/wrappers" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + any "github.com/golang/protobuf/ptypes/any" + duration "github.com/golang/protobuf/ptypes/duration" + _struct "github.com/golang/protobuf/ptypes/struct" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + wrappers "github.com/golang/protobuf/ptypes/wrappers" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + type Widget_Color int32 const ( @@ -30,6 +38,7 @@ var Widget_Color_name = map[int32]string{ 1: "GREEN", 2: "BLUE", } + var Widget_Color_value = map[string]int32{ "RED": 0, "GREEN": 1, @@ -41,9 +50,11 @@ func (x Widget_Color) Enum() *Widget_Color { *p = x return p } + func (x Widget_Color) String() string { return proto.EnumName(Widget_Color_name, int32(x)) } + func (x *Widget_Color) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(Widget_Color_value, data, "Widget_Color") if err != nil { @@ -52,28 +63,61 @@ func (x *Widget_Color) UnmarshalJSON(data []byte) error { *x = Widget_Color(value) return nil } -func (Widget_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{3, 0} } + +func (Widget_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{3, 0} +} // Test message for holding primitive types. 
type Simple struct { - OBool *bool `protobuf:"varint,1,opt,name=o_bool,json=oBool" json:"o_bool,omitempty"` - OInt32 *int32 `protobuf:"varint,2,opt,name=o_int32,json=oInt32" json:"o_int32,omitempty"` - OInt64 *int64 `protobuf:"varint,3,opt,name=o_int64,json=oInt64" json:"o_int64,omitempty"` - OUint32 *uint32 `protobuf:"varint,4,opt,name=o_uint32,json=oUint32" json:"o_uint32,omitempty"` - OUint64 *uint64 `protobuf:"varint,5,opt,name=o_uint64,json=oUint64" json:"o_uint64,omitempty"` - OSint32 *int32 `protobuf:"zigzag32,6,opt,name=o_sint32,json=oSint32" json:"o_sint32,omitempty"` - OSint64 *int64 `protobuf:"zigzag64,7,opt,name=o_sint64,json=oSint64" json:"o_sint64,omitempty"` - OFloat *float32 `protobuf:"fixed32,8,opt,name=o_float,json=oFloat" json:"o_float,omitempty"` - ODouble *float64 `protobuf:"fixed64,9,opt,name=o_double,json=oDouble" json:"o_double,omitempty"` - OString *string `protobuf:"bytes,10,opt,name=o_string,json=oString" json:"o_string,omitempty"` - OBytes []byte `protobuf:"bytes,11,opt,name=o_bytes,json=oBytes" json:"o_bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Simple) Reset() { *m = Simple{} } -func (m *Simple) String() string { return proto.CompactTextString(m) } -func (*Simple) ProtoMessage() {} -func (*Simple) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } + OBool *bool `protobuf:"varint,1,opt,name=o_bool,json=oBool" json:"o_bool,omitempty"` + OInt32 *int32 `protobuf:"varint,2,opt,name=o_int32,json=oInt32" json:"o_int32,omitempty"` + OInt32Str *int32 `protobuf:"varint,3,opt,name=o_int32_str,json=oInt32Str" json:"o_int32_str,omitempty"` + OInt64 *int64 `protobuf:"varint,4,opt,name=o_int64,json=oInt64" json:"o_int64,omitempty"` + OInt64Str *int64 `protobuf:"varint,5,opt,name=o_int64_str,json=oInt64Str" json:"o_int64_str,omitempty"` + OUint32 *uint32 `protobuf:"varint,6,opt,name=o_uint32,json=oUint32" json:"o_uint32,omitempty"` + OUint32Str *uint32 `protobuf:"varint,7,opt,name=o_uint32_str,json=oUint32Str" json:"o_uint32_str,omitempty"` + OUint64 *uint64 `protobuf:"varint,8,opt,name=o_uint64,json=oUint64" json:"o_uint64,omitempty"` + OUint64Str *uint64 `protobuf:"varint,9,opt,name=o_uint64_str,json=oUint64Str" json:"o_uint64_str,omitempty"` + OSint32 *int32 `protobuf:"zigzag32,10,opt,name=o_sint32,json=oSint32" json:"o_sint32,omitempty"` + OSint32Str *int32 `protobuf:"zigzag32,11,opt,name=o_sint32_str,json=oSint32Str" json:"o_sint32_str,omitempty"` + OSint64 *int64 `protobuf:"zigzag64,12,opt,name=o_sint64,json=oSint64" json:"o_sint64,omitempty"` + OSint64Str *int64 `protobuf:"zigzag64,13,opt,name=o_sint64_str,json=oSint64Str" json:"o_sint64_str,omitempty"` + OFloat *float32 `protobuf:"fixed32,14,opt,name=o_float,json=oFloat" json:"o_float,omitempty"` + OFloatStr *float32 `protobuf:"fixed32,15,opt,name=o_float_str,json=oFloatStr" json:"o_float_str,omitempty"` + ODouble *float64 `protobuf:"fixed64,16,opt,name=o_double,json=oDouble" json:"o_double,omitempty"` + ODoubleStr *float64 `protobuf:"fixed64,17,opt,name=o_double_str,json=oDoubleStr" json:"o_double_str,omitempty"` + OString *string `protobuf:"bytes,18,opt,name=o_string,json=oString" json:"o_string,omitempty"` + OBytes []byte `protobuf:"bytes,19,opt,name=o_bytes,json=oBytes" json:"o_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Simple) Reset() { *m = Simple{} } +func (m *Simple) String() string { return proto.CompactTextString(m) } +func (*Simple) ProtoMessage() {} +func (*Simple) 
Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{0} +} + +func (m *Simple) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Simple.Unmarshal(m, b) +} +func (m *Simple) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Simple.Marshal(b, m, deterministic) +} +func (m *Simple) XXX_Merge(src proto.Message) { + xxx_messageInfo_Simple.Merge(m, src) +} +func (m *Simple) XXX_Size() int { + return xxx_messageInfo_Simple.Size(m) +} +func (m *Simple) XXX_DiscardUnknown() { + xxx_messageInfo_Simple.DiscardUnknown(m) +} + +var xxx_messageInfo_Simple proto.InternalMessageInfo func (m *Simple) GetOBool() bool { if m != nil && m.OBool != nil { @@ -89,6 +133,13 @@ func (m *Simple) GetOInt32() int32 { return 0 } +func (m *Simple) GetOInt32Str() int32 { + if m != nil && m.OInt32Str != nil { + return *m.OInt32Str + } + return 0 +} + func (m *Simple) GetOInt64() int64 { if m != nil && m.OInt64 != nil { return *m.OInt64 @@ -96,6 +147,13 @@ func (m *Simple) GetOInt64() int64 { return 0 } +func (m *Simple) GetOInt64Str() int64 { + if m != nil && m.OInt64Str != nil { + return *m.OInt64Str + } + return 0 +} + func (m *Simple) GetOUint32() uint32 { if m != nil && m.OUint32 != nil { return *m.OUint32 @@ -103,6 +161,13 @@ func (m *Simple) GetOUint32() uint32 { return 0 } +func (m *Simple) GetOUint32Str() uint32 { + if m != nil && m.OUint32Str != nil { + return *m.OUint32Str + } + return 0 +} + func (m *Simple) GetOUint64() uint64 { if m != nil && m.OUint64 != nil { return *m.OUint64 @@ -110,6 +175,13 @@ func (m *Simple) GetOUint64() uint64 { return 0 } +func (m *Simple) GetOUint64Str() uint64 { + if m != nil && m.OUint64Str != nil { + return *m.OUint64Str + } + return 0 +} + func (m *Simple) GetOSint32() int32 { if m != nil && m.OSint32 != nil { return *m.OSint32 @@ -117,6 +189,13 @@ func (m *Simple) GetOSint32() int32 { return 0 } +func (m *Simple) GetOSint32Str() int32 { + if m != nil && m.OSint32Str != nil { + return *m.OSint32Str + } + return 0 +} + func (m *Simple) GetOSint64() int64 { if m != nil && m.OSint64 != nil { return *m.OSint64 @@ -124,6 +203,13 @@ func (m *Simple) GetOSint64() int64 { return 0 } +func (m *Simple) GetOSint64Str() int64 { + if m != nil && m.OSint64Str != nil { + return *m.OSint64Str + } + return 0 +} + func (m *Simple) GetOFloat() float32 { if m != nil && m.OFloat != nil { return *m.OFloat @@ -131,6 +217,13 @@ func (m *Simple) GetOFloat() float32 { return 0 } +func (m *Simple) GetOFloatStr() float32 { + if m != nil && m.OFloatStr != nil { + return *m.OFloatStr + } + return 0 +} + func (m *Simple) GetODouble() float64 { if m != nil && m.ODouble != nil { return *m.ODouble @@ -138,6 +231,13 @@ func (m *Simple) GetODouble() float64 { return 0 } +func (m *Simple) GetODoubleStr() float64 { + if m != nil && m.ODoubleStr != nil { + return *m.ODoubleStr + } + return 0 +} + func (m *Simple) GetOString() string { if m != nil && m.OString != nil { return *m.OString @@ -154,19 +254,41 @@ func (m *Simple) GetOBytes() []byte { // Test message for holding special non-finites primitives. 
type NonFinites struct { - FNan *float32 `protobuf:"fixed32,1,opt,name=f_nan,json=fNan" json:"f_nan,omitempty"` - FPinf *float32 `protobuf:"fixed32,2,opt,name=f_pinf,json=fPinf" json:"f_pinf,omitempty"` - FNinf *float32 `protobuf:"fixed32,3,opt,name=f_ninf,json=fNinf" json:"f_ninf,omitempty"` - DNan *float64 `protobuf:"fixed64,4,opt,name=d_nan,json=dNan" json:"d_nan,omitempty"` - DPinf *float64 `protobuf:"fixed64,5,opt,name=d_pinf,json=dPinf" json:"d_pinf,omitempty"` - DNinf *float64 `protobuf:"fixed64,6,opt,name=d_ninf,json=dNinf" json:"d_ninf,omitempty"` - XXX_unrecognized []byte `json:"-"` + FNan *float32 `protobuf:"fixed32,1,opt,name=f_nan,json=fNan" json:"f_nan,omitempty"` + FPinf *float32 `protobuf:"fixed32,2,opt,name=f_pinf,json=fPinf" json:"f_pinf,omitempty"` + FNinf *float32 `protobuf:"fixed32,3,opt,name=f_ninf,json=fNinf" json:"f_ninf,omitempty"` + DNan *float64 `protobuf:"fixed64,4,opt,name=d_nan,json=dNan" json:"d_nan,omitempty"` + DPinf *float64 `protobuf:"fixed64,5,opt,name=d_pinf,json=dPinf" json:"d_pinf,omitempty"` + DNinf *float64 `protobuf:"fixed64,6,opt,name=d_ninf,json=dNinf" json:"d_ninf,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *NonFinites) Reset() { *m = NonFinites{} } -func (m *NonFinites) String() string { return proto.CompactTextString(m) } -func (*NonFinites) ProtoMessage() {} -func (*NonFinites) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} } +func (m *NonFinites) Reset() { *m = NonFinites{} } +func (m *NonFinites) String() string { return proto.CompactTextString(m) } +func (*NonFinites) ProtoMessage() {} +func (*NonFinites) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{1} +} + +func (m *NonFinites) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NonFinites.Unmarshal(m, b) +} +func (m *NonFinites) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NonFinites.Marshal(b, m, deterministic) +} +func (m *NonFinites) XXX_Merge(src proto.Message) { + xxx_messageInfo_NonFinites.Merge(m, src) +} +func (m *NonFinites) XXX_Size() int { + return xxx_messageInfo_NonFinites.Size(m) +} +func (m *NonFinites) XXX_DiscardUnknown() { + xxx_messageInfo_NonFinites.DiscardUnknown(m) +} + +var xxx_messageInfo_NonFinites proto.InternalMessageInfo func (m *NonFinites) GetFNan() float32 { if m != nil && m.FNan != nil { @@ -212,24 +334,46 @@ func (m *NonFinites) GetDNinf() float64 { // Test message for holding repeated primitives. 
type Repeats struct { - RBool []bool `protobuf:"varint,1,rep,name=r_bool,json=rBool" json:"r_bool,omitempty"` - RInt32 []int32 `protobuf:"varint,2,rep,name=r_int32,json=rInt32" json:"r_int32,omitempty"` - RInt64 []int64 `protobuf:"varint,3,rep,name=r_int64,json=rInt64" json:"r_int64,omitempty"` - RUint32 []uint32 `protobuf:"varint,4,rep,name=r_uint32,json=rUint32" json:"r_uint32,omitempty"` - RUint64 []uint64 `protobuf:"varint,5,rep,name=r_uint64,json=rUint64" json:"r_uint64,omitempty"` - RSint32 []int32 `protobuf:"zigzag32,6,rep,name=r_sint32,json=rSint32" json:"r_sint32,omitempty"` - RSint64 []int64 `protobuf:"zigzag64,7,rep,name=r_sint64,json=rSint64" json:"r_sint64,omitempty"` - RFloat []float32 `protobuf:"fixed32,8,rep,name=r_float,json=rFloat" json:"r_float,omitempty"` - RDouble []float64 `protobuf:"fixed64,9,rep,name=r_double,json=rDouble" json:"r_double,omitempty"` - RString []string `protobuf:"bytes,10,rep,name=r_string,json=rString" json:"r_string,omitempty"` - RBytes [][]byte `protobuf:"bytes,11,rep,name=r_bytes,json=rBytes" json:"r_bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Repeats) Reset() { *m = Repeats{} } -func (m *Repeats) String() string { return proto.CompactTextString(m) } -func (*Repeats) ProtoMessage() {} -func (*Repeats) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} } + RBool []bool `protobuf:"varint,1,rep,name=r_bool,json=rBool" json:"r_bool,omitempty"` + RInt32 []int32 `protobuf:"varint,2,rep,name=r_int32,json=rInt32" json:"r_int32,omitempty"` + RInt64 []int64 `protobuf:"varint,3,rep,name=r_int64,json=rInt64" json:"r_int64,omitempty"` + RUint32 []uint32 `protobuf:"varint,4,rep,name=r_uint32,json=rUint32" json:"r_uint32,omitempty"` + RUint64 []uint64 `protobuf:"varint,5,rep,name=r_uint64,json=rUint64" json:"r_uint64,omitempty"` + RSint32 []int32 `protobuf:"zigzag32,6,rep,name=r_sint32,json=rSint32" json:"r_sint32,omitempty"` + RSint64 []int64 `protobuf:"zigzag64,7,rep,name=r_sint64,json=rSint64" json:"r_sint64,omitempty"` + RFloat []float32 `protobuf:"fixed32,8,rep,name=r_float,json=rFloat" json:"r_float,omitempty"` + RDouble []float64 `protobuf:"fixed64,9,rep,name=r_double,json=rDouble" json:"r_double,omitempty"` + RString []string `protobuf:"bytes,10,rep,name=r_string,json=rString" json:"r_string,omitempty"` + RBytes [][]byte `protobuf:"bytes,11,rep,name=r_bytes,json=rBytes" json:"r_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Repeats) Reset() { *m = Repeats{} } +func (m *Repeats) String() string { return proto.CompactTextString(m) } +func (*Repeats) ProtoMessage() {} +func (*Repeats) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{2} +} + +func (m *Repeats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Repeats.Unmarshal(m, b) +} +func (m *Repeats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Repeats.Marshal(b, m, deterministic) +} +func (m *Repeats) XXX_Merge(src proto.Message) { + xxx_messageInfo_Repeats.Merge(m, src) +} +func (m *Repeats) XXX_Size() int { + return xxx_messageInfo_Repeats.Size(m) +} +func (m *Repeats) XXX_DiscardUnknown() { + xxx_messageInfo_Repeats.DiscardUnknown(m) +} + +var xxx_messageInfo_Repeats proto.InternalMessageInfo func (m *Repeats) GetRBool() []bool { if m != nil { @@ -310,19 +454,41 @@ func (m *Repeats) GetRBytes() [][]byte { // Test message for holding enums and nested messages. 
type Widget struct { - Color *Widget_Color `protobuf:"varint,1,opt,name=color,enum=jsonpb.Widget_Color" json:"color,omitempty"` - RColor []Widget_Color `protobuf:"varint,2,rep,name=r_color,json=rColor,enum=jsonpb.Widget_Color" json:"r_color,omitempty"` - Simple *Simple `protobuf:"bytes,10,opt,name=simple" json:"simple,omitempty"` - RSimple []*Simple `protobuf:"bytes,11,rep,name=r_simple,json=rSimple" json:"r_simple,omitempty"` - Repeats *Repeats `protobuf:"bytes,20,opt,name=repeats" json:"repeats,omitempty"` - RRepeats []*Repeats `protobuf:"bytes,21,rep,name=r_repeats,json=rRepeats" json:"r_repeats,omitempty"` - XXX_unrecognized []byte `json:"-"` + Color *Widget_Color `protobuf:"varint,1,opt,name=color,enum=jsonpb.Widget_Color" json:"color,omitempty"` + RColor []Widget_Color `protobuf:"varint,2,rep,name=r_color,json=rColor,enum=jsonpb.Widget_Color" json:"r_color,omitempty"` + Simple *Simple `protobuf:"bytes,10,opt,name=simple" json:"simple,omitempty"` + RSimple []*Simple `protobuf:"bytes,11,rep,name=r_simple,json=rSimple" json:"r_simple,omitempty"` + Repeats *Repeats `protobuf:"bytes,20,opt,name=repeats" json:"repeats,omitempty"` + RRepeats []*Repeats `protobuf:"bytes,21,rep,name=r_repeats,json=rRepeats" json:"r_repeats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Widget) Reset() { *m = Widget{} } +func (m *Widget) String() string { return proto.CompactTextString(m) } +func (*Widget) ProtoMessage() {} +func (*Widget) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{3} +} + +func (m *Widget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Widget.Unmarshal(m, b) +} +func (m *Widget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Widget.Marshal(b, m, deterministic) +} +func (m *Widget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Widget.Merge(m, src) +} +func (m *Widget) XXX_Size() int { + return xxx_messageInfo_Widget.Size(m) +} +func (m *Widget) XXX_DiscardUnknown() { + xxx_messageInfo_Widget.DiscardUnknown(m) } -func (m *Widget) Reset() { *m = Widget{} } -func (m *Widget) String() string { return proto.CompactTextString(m) } -func (*Widget) ProtoMessage() {} -func (*Widget) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3} } +var xxx_messageInfo_Widget proto.InternalMessageInfo func (m *Widget) GetColor() Widget_Color { if m != nil && m.Color != nil { @@ -367,15 +533,37 @@ func (m *Widget) GetRRepeats() []*Repeats { } type Maps struct { - MInt64Str map[int64]string `protobuf:"bytes,1,rep,name=m_int64_str,json=mInt64Str" json:"m_int64_str,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - MBoolSimple map[bool]*Simple `protobuf:"bytes,2,rep,name=m_bool_simple,json=mBoolSimple" json:"m_bool_simple,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - XXX_unrecognized []byte `json:"-"` + MInt64Str map[int64]string `protobuf:"bytes,1,rep,name=m_int64_str,json=mInt64Str" json:"m_int64_str,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MBoolSimple map[bool]*Simple `protobuf:"bytes,2,rep,name=m_bool_simple,json=mBoolSimple" json:"m_bool_simple,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Maps) Reset() { *m = Maps{} } -func (m *Maps) 
String() string { return proto.CompactTextString(m) } -func (*Maps) ProtoMessage() {} -func (*Maps) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{4} } +func (m *Maps) Reset() { *m = Maps{} } +func (m *Maps) String() string { return proto.CompactTextString(m) } +func (*Maps) ProtoMessage() {} +func (*Maps) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{4} +} + +func (m *Maps) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Maps.Unmarshal(m, b) +} +func (m *Maps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Maps.Marshal(b, m, deterministic) +} +func (m *Maps) XXX_Merge(src proto.Message) { + xxx_messageInfo_Maps.Merge(m, src) +} +func (m *Maps) XXX_Size() int { + return xxx_messageInfo_Maps.Size(m) +} +func (m *Maps) XXX_DiscardUnknown() { + xxx_messageInfo_Maps.DiscardUnknown(m) +} + +var xxx_messageInfo_Maps proto.InternalMessageInfo func (m *Maps) GetMInt64Str() map[int64]string { if m != nil { @@ -397,14 +585,37 @@ type MsgWithOneof struct { // *MsgWithOneof_Salary // *MsgWithOneof_Country // *MsgWithOneof_HomeAddress - Union isMsgWithOneof_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` + // *MsgWithOneof_MsgWithRequired + Union isMsgWithOneof_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithOneof) Reset() { *m = MsgWithOneof{} } +func (m *MsgWithOneof) String() string { return proto.CompactTextString(m) } +func (*MsgWithOneof) ProtoMessage() {} +func (*MsgWithOneof) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{5} +} + +func (m *MsgWithOneof) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithOneof.Unmarshal(m, b) +} +func (m *MsgWithOneof) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithOneof.Marshal(b, m, deterministic) +} +func (m *MsgWithOneof) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithOneof.Merge(m, src) +} +func (m *MsgWithOneof) XXX_Size() int { + return xxx_messageInfo_MsgWithOneof.Size(m) +} +func (m *MsgWithOneof) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithOneof.DiscardUnknown(m) } -func (m *MsgWithOneof) Reset() { *m = MsgWithOneof{} } -func (m *MsgWithOneof) String() string { return proto.CompactTextString(m) } -func (*MsgWithOneof) ProtoMessage() {} -func (*MsgWithOneof) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{5} } +var xxx_messageInfo_MsgWithOneof proto.InternalMessageInfo type isMsgWithOneof_Union interface { isMsgWithOneof_Union() @@ -413,21 +624,33 @@ type isMsgWithOneof_Union interface { type MsgWithOneof_Title struct { Title string `protobuf:"bytes,1,opt,name=title,oneof"` } + type MsgWithOneof_Salary struct { Salary int64 `protobuf:"varint,2,opt,name=salary,oneof"` } + type MsgWithOneof_Country struct { Country string `protobuf:"bytes,3,opt,name=Country,oneof"` } + type MsgWithOneof_HomeAddress struct { HomeAddress string `protobuf:"bytes,4,opt,name=home_address,json=homeAddress,oneof"` } -func (*MsgWithOneof_Title) isMsgWithOneof_Union() {} -func (*MsgWithOneof_Salary) isMsgWithOneof_Union() {} -func (*MsgWithOneof_Country) isMsgWithOneof_Union() {} +type MsgWithOneof_MsgWithRequired struct { + MsgWithRequired *MsgWithRequired `protobuf:"bytes,5,opt,name=msg_with_required,json=msgWithRequired,oneof"` +} + +func (*MsgWithOneof_Title) isMsgWithOneof_Union() {} + +func (*MsgWithOneof_Salary) 
isMsgWithOneof_Union() {} + +func (*MsgWithOneof_Country) isMsgWithOneof_Union() {} + func (*MsgWithOneof_HomeAddress) isMsgWithOneof_Union() {} +func (*MsgWithOneof_MsgWithRequired) isMsgWithOneof_Union() {} + func (m *MsgWithOneof) GetUnion() isMsgWithOneof_Union { if m != nil { return m.Union @@ -463,120 +686,65 @@ func (m *MsgWithOneof) GetHomeAddress() string { return "" } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*MsgWithOneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _MsgWithOneof_OneofMarshaler, _MsgWithOneof_OneofUnmarshaler, _MsgWithOneof_OneofSizer, []interface{}{ +func (m *MsgWithOneof) GetMsgWithRequired() *MsgWithRequired { + if x, ok := m.GetUnion().(*MsgWithOneof_MsgWithRequired); ok { + return x.MsgWithRequired + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. +func (*MsgWithOneof) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*MsgWithOneof_Title)(nil), (*MsgWithOneof_Salary)(nil), (*MsgWithOneof_Country)(nil), (*MsgWithOneof_HomeAddress)(nil), + (*MsgWithOneof_MsgWithRequired)(nil), } } -func _MsgWithOneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*MsgWithOneof) - // union - switch x := m.Union.(type) { - case *MsgWithOneof_Title: - b.EncodeVarint(1<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Title) - case *MsgWithOneof_Salary: - b.EncodeVarint(2<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Salary)) - case *MsgWithOneof_Country: - b.EncodeVarint(3<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Country) - case *MsgWithOneof_HomeAddress: - b.EncodeVarint(4<<3 | proto.WireBytes) - b.EncodeStringBytes(x.HomeAddress) - case nil: - default: - return fmt.Errorf("MsgWithOneof.Union has unexpected type %T", x) - } - return nil -} - -func _MsgWithOneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*MsgWithOneof) - switch tag { - case 1: // union.title - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &MsgWithOneof_Title{x} - return true, err - case 2: // union.salary - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &MsgWithOneof_Salary{int64(x)} - return true, err - case 3: // union.Country - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &MsgWithOneof_Country{x} - return true, err - case 4: // union.home_address - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &MsgWithOneof_HomeAddress{x} - return true, err - default: - return false, nil - } -} - -func _MsgWithOneof_OneofSizer(msg proto.Message) (n int) { - m := msg.(*MsgWithOneof) - // union - switch x := m.Union.(type) { - case *MsgWithOneof_Title: - n += proto.SizeVarint(1<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Title))) - n += len(x.Title) - case *MsgWithOneof_Salary: - n += proto.SizeVarint(2<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Salary)) - case *MsgWithOneof_Country: - n += proto.SizeVarint(3<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Country))) - n += len(x.Country) - case *MsgWithOneof_HomeAddress: - n += proto.SizeVarint(4<<3 | proto.WireBytes) - n += 
proto.SizeVarint(uint64(len(x.HomeAddress))) - n += len(x.HomeAddress) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - type Real struct { Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Real) Reset() { *m = Real{} } -func (m *Real) String() string { return proto.CompactTextString(m) } -func (*Real) ProtoMessage() {} -func (*Real) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6} } +func (m *Real) Reset() { *m = Real{} } +func (m *Real) String() string { return proto.CompactTextString(m) } +func (*Real) ProtoMessage() {} +func (*Real) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{6} +} var extRange_Real = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*Real) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Real } +func (m *Real) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Real.Unmarshal(m, b) +} +func (m *Real) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Real.Marshal(b, m, deterministic) +} +func (m *Real) XXX_Merge(src proto.Message) { + xxx_messageInfo_Real.Merge(m, src) +} +func (m *Real) XXX_Size() int { + return xxx_messageInfo_Real.Size(m) +} +func (m *Real) XXX_DiscardUnknown() { + xxx_messageInfo_Real.DiscardUnknown(m) +} + +var xxx_messageInfo_Real proto.InternalMessageInfo + func (m *Real) GetValue() float64 { if m != nil && m.Value != nil { return *m.Value @@ -586,23 +754,45 @@ func (m *Real) GetValue() float64 { type Complex struct { Imaginary *float64 `protobuf:"fixed64,1,opt,name=imaginary" json:"imaginary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Complex) Reset() { *m = Complex{} } -func (m *Complex) String() string { return proto.CompactTextString(m) } -func (*Complex) ProtoMessage() {} -func (*Complex) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{7} } +func (m *Complex) Reset() { *m = Complex{} } +func (m *Complex) String() string { return proto.CompactTextString(m) } +func (*Complex) ProtoMessage() {} +func (*Complex) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{7} +} var extRange_Complex = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*Complex) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Complex } +func (m *Complex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Complex.Unmarshal(m, b) +} +func (m *Complex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Complex.Marshal(b, m, deterministic) +} +func (m *Complex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Complex.Merge(m, src) +} +func (m *Complex) XXX_Size() int { + return xxx_messageInfo_Complex.Size(m) +} +func (m *Complex) XXX_DiscardUnknown() { + xxx_messageInfo_Complex.DiscardUnknown(m) +} + +var xxx_messageInfo_Complex proto.InternalMessageInfo + func (m *Complex) GetImaginary() float64 { if m != nil && m.Imaginary != nil { return *m.Imaginary @@ -615,139 +805,334 @@ var E_Complex_RealExtension = &proto.ExtensionDesc{ ExtensionType: (*Complex)(nil), Field: 123, Name: "jsonpb.Complex.real_extension", - Tag: 
"bytes,123,opt,name=real_extension,json=realExtension", + Tag: "bytes,123,opt,name=real_extension", Filename: "test_objects.proto", } type KnownTypes struct { - An *google_protobuf.Any `protobuf:"bytes,14,opt,name=an" json:"an,omitempty"` - Dur *google_protobuf1.Duration `protobuf:"bytes,1,opt,name=dur" json:"dur,omitempty"` - St *google_protobuf2.Struct `protobuf:"bytes,12,opt,name=st" json:"st,omitempty"` - Ts *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=ts" json:"ts,omitempty"` - Lv *google_protobuf2.ListValue `protobuf:"bytes,15,opt,name=lv" json:"lv,omitempty"` - Val *google_protobuf2.Value `protobuf:"bytes,16,opt,name=val" json:"val,omitempty"` - Dbl *google_protobuf4.DoubleValue `protobuf:"bytes,3,opt,name=dbl" json:"dbl,omitempty"` - Flt *google_protobuf4.FloatValue `protobuf:"bytes,4,opt,name=flt" json:"flt,omitempty"` - I64 *google_protobuf4.Int64Value `protobuf:"bytes,5,opt,name=i64" json:"i64,omitempty"` - U64 *google_protobuf4.UInt64Value `protobuf:"bytes,6,opt,name=u64" json:"u64,omitempty"` - I32 *google_protobuf4.Int32Value `protobuf:"bytes,7,opt,name=i32" json:"i32,omitempty"` - U32 *google_protobuf4.UInt32Value `protobuf:"bytes,8,opt,name=u32" json:"u32,omitempty"` - Bool *google_protobuf4.BoolValue `protobuf:"bytes,9,opt,name=bool" json:"bool,omitempty"` - Str *google_protobuf4.StringValue `protobuf:"bytes,10,opt,name=str" json:"str,omitempty"` - Bytes *google_protobuf4.BytesValue `protobuf:"bytes,11,opt,name=bytes" json:"bytes,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *KnownTypes) Reset() { *m = KnownTypes{} } -func (m *KnownTypes) String() string { return proto.CompactTextString(m) } -func (*KnownTypes) ProtoMessage() {} -func (*KnownTypes) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{8} } - -func (m *KnownTypes) GetAn() *google_protobuf.Any { + An *any.Any `protobuf:"bytes,14,opt,name=an" json:"an,omitempty"` + Dur *duration.Duration `protobuf:"bytes,1,opt,name=dur" json:"dur,omitempty"` + St *_struct.Struct `protobuf:"bytes,12,opt,name=st" json:"st,omitempty"` + Ts *timestamp.Timestamp `protobuf:"bytes,2,opt,name=ts" json:"ts,omitempty"` + Lv *_struct.ListValue `protobuf:"bytes,15,opt,name=lv" json:"lv,omitempty"` + Val *_struct.Value `protobuf:"bytes,16,opt,name=val" json:"val,omitempty"` + Dbl *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=dbl" json:"dbl,omitempty"` + Flt *wrappers.FloatValue `protobuf:"bytes,4,opt,name=flt" json:"flt,omitempty"` + I64 *wrappers.Int64Value `protobuf:"bytes,5,opt,name=i64" json:"i64,omitempty"` + U64 *wrappers.UInt64Value `protobuf:"bytes,6,opt,name=u64" json:"u64,omitempty"` + I32 *wrappers.Int32Value `protobuf:"bytes,7,opt,name=i32" json:"i32,omitempty"` + U32 *wrappers.UInt32Value `protobuf:"bytes,8,opt,name=u32" json:"u32,omitempty"` + Bool *wrappers.BoolValue `protobuf:"bytes,9,opt,name=bool" json:"bool,omitempty"` + Str *wrappers.StringValue `protobuf:"bytes,10,opt,name=str" json:"str,omitempty"` + Bytes *wrappers.BytesValue `protobuf:"bytes,11,opt,name=bytes" json:"bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KnownTypes) Reset() { *m = KnownTypes{} } +func (m *KnownTypes) String() string { return proto.CompactTextString(m) } +func (*KnownTypes) ProtoMessage() {} +func (*KnownTypes) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{8} +} + +func (m *KnownTypes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnownTypes.Unmarshal(m, b) +} 
+func (m *KnownTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnownTypes.Marshal(b, m, deterministic) +} +func (m *KnownTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnownTypes.Merge(m, src) +} +func (m *KnownTypes) XXX_Size() int { + return xxx_messageInfo_KnownTypes.Size(m) +} +func (m *KnownTypes) XXX_DiscardUnknown() { + xxx_messageInfo_KnownTypes.DiscardUnknown(m) +} + +var xxx_messageInfo_KnownTypes proto.InternalMessageInfo + +func (m *KnownTypes) GetAn() *any.Any { if m != nil { return m.An } return nil } -func (m *KnownTypes) GetDur() *google_protobuf1.Duration { +func (m *KnownTypes) GetDur() *duration.Duration { if m != nil { return m.Dur } return nil } -func (m *KnownTypes) GetSt() *google_protobuf2.Struct { +func (m *KnownTypes) GetSt() *_struct.Struct { if m != nil { return m.St } return nil } -func (m *KnownTypes) GetTs() *google_protobuf3.Timestamp { +func (m *KnownTypes) GetTs() *timestamp.Timestamp { if m != nil { return m.Ts } return nil } -func (m *KnownTypes) GetLv() *google_protobuf2.ListValue { +func (m *KnownTypes) GetLv() *_struct.ListValue { if m != nil { return m.Lv } return nil } -func (m *KnownTypes) GetVal() *google_protobuf2.Value { +func (m *KnownTypes) GetVal() *_struct.Value { if m != nil { return m.Val } return nil } -func (m *KnownTypes) GetDbl() *google_protobuf4.DoubleValue { +func (m *KnownTypes) GetDbl() *wrappers.DoubleValue { if m != nil { return m.Dbl } return nil } -func (m *KnownTypes) GetFlt() *google_protobuf4.FloatValue { +func (m *KnownTypes) GetFlt() *wrappers.FloatValue { if m != nil { return m.Flt } return nil } -func (m *KnownTypes) GetI64() *google_protobuf4.Int64Value { +func (m *KnownTypes) GetI64() *wrappers.Int64Value { if m != nil { return m.I64 } return nil } -func (m *KnownTypes) GetU64() *google_protobuf4.UInt64Value { +func (m *KnownTypes) GetU64() *wrappers.UInt64Value { if m != nil { return m.U64 } return nil } -func (m *KnownTypes) GetI32() *google_protobuf4.Int32Value { +func (m *KnownTypes) GetI32() *wrappers.Int32Value { if m != nil { return m.I32 } return nil } -func (m *KnownTypes) GetU32() *google_protobuf4.UInt32Value { +func (m *KnownTypes) GetU32() *wrappers.UInt32Value { if m != nil { return m.U32 } return nil } -func (m *KnownTypes) GetBool() *google_protobuf4.BoolValue { +func (m *KnownTypes) GetBool() *wrappers.BoolValue { if m != nil { return m.Bool } return nil } -func (m *KnownTypes) GetStr() *google_protobuf4.StringValue { +func (m *KnownTypes) GetStr() *wrappers.StringValue { if m != nil { return m.Str } return nil } -func (m *KnownTypes) GetBytes() *google_protobuf4.BytesValue { +func (m *KnownTypes) GetBytes() *wrappers.BytesValue { if m != nil { return m.Bytes } return nil } +// Test messages for marshaling/unmarshaling required fields. 
+type MsgWithRequired struct { + Str *string `protobuf:"bytes,1,req,name=str" json:"str,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithRequired) Reset() { *m = MsgWithRequired{} } +func (m *MsgWithRequired) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequired) ProtoMessage() {} +func (*MsgWithRequired) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{9} +} + +func (m *MsgWithRequired) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequired.Unmarshal(m, b) +} +func (m *MsgWithRequired) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequired.Marshal(b, m, deterministic) +} +func (m *MsgWithRequired) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequired.Merge(m, src) +} +func (m *MsgWithRequired) XXX_Size() int { + return xxx_messageInfo_MsgWithRequired.Size(m) +} +func (m *MsgWithRequired) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequired.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequired proto.InternalMessageInfo + +func (m *MsgWithRequired) GetStr() string { + if m != nil && m.Str != nil { + return *m.Str + } + return "" +} + +type MsgWithIndirectRequired struct { + Subm *MsgWithRequired `protobuf:"bytes,1,opt,name=subm" json:"subm,omitempty"` + MapField map[string]*MsgWithRequired `protobuf:"bytes,2,rep,name=map_field,json=mapField" json:"map_field,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + SliceField []*MsgWithRequired `protobuf:"bytes,3,rep,name=slice_field,json=sliceField" json:"slice_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithIndirectRequired) Reset() { *m = MsgWithIndirectRequired{} } +func (m *MsgWithIndirectRequired) String() string { return proto.CompactTextString(m) } +func (*MsgWithIndirectRequired) ProtoMessage() {} +func (*MsgWithIndirectRequired) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{10} +} + +func (m *MsgWithIndirectRequired) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithIndirectRequired.Unmarshal(m, b) +} +func (m *MsgWithIndirectRequired) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithIndirectRequired.Marshal(b, m, deterministic) +} +func (m *MsgWithIndirectRequired) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithIndirectRequired.Merge(m, src) +} +func (m *MsgWithIndirectRequired) XXX_Size() int { + return xxx_messageInfo_MsgWithIndirectRequired.Size(m) +} +func (m *MsgWithIndirectRequired) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithIndirectRequired.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithIndirectRequired proto.InternalMessageInfo + +func (m *MsgWithIndirectRequired) GetSubm() *MsgWithRequired { + if m != nil { + return m.Subm + } + return nil +} + +func (m *MsgWithIndirectRequired) GetMapField() map[string]*MsgWithRequired { + if m != nil { + return m.MapField + } + return nil +} + +func (m *MsgWithIndirectRequired) GetSliceField() []*MsgWithRequired { + if m != nil { + return m.SliceField + } + return nil +} + +type MsgWithRequiredBytes struct { + Byts []byte `protobuf:"bytes,1,req,name=byts" json:"byts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m 
*MsgWithRequiredBytes) Reset() { *m = MsgWithRequiredBytes{} } +func (m *MsgWithRequiredBytes) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequiredBytes) ProtoMessage() {} +func (*MsgWithRequiredBytes) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{11} +} + +func (m *MsgWithRequiredBytes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequiredBytes.Unmarshal(m, b) +} +func (m *MsgWithRequiredBytes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequiredBytes.Marshal(b, m, deterministic) +} +func (m *MsgWithRequiredBytes) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequiredBytes.Merge(m, src) +} +func (m *MsgWithRequiredBytes) XXX_Size() int { + return xxx_messageInfo_MsgWithRequiredBytes.Size(m) +} +func (m *MsgWithRequiredBytes) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequiredBytes.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequiredBytes proto.InternalMessageInfo + +func (m *MsgWithRequiredBytes) GetByts() []byte { + if m != nil { + return m.Byts + } + return nil +} + +type MsgWithRequiredWKT struct { + Str *wrappers.StringValue `protobuf:"bytes,1,req,name=str" json:"str,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MsgWithRequiredWKT) Reset() { *m = MsgWithRequiredWKT{} } +func (m *MsgWithRequiredWKT) String() string { return proto.CompactTextString(m) } +func (*MsgWithRequiredWKT) ProtoMessage() {} +func (*MsgWithRequiredWKT) Descriptor() ([]byte, []int) { + return fileDescriptor_e97c739a0ce14cc6, []int{12} +} + +func (m *MsgWithRequiredWKT) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MsgWithRequiredWKT.Unmarshal(m, b) +} +func (m *MsgWithRequiredWKT) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MsgWithRequiredWKT.Marshal(b, m, deterministic) +} +func (m *MsgWithRequiredWKT) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgWithRequiredWKT.Merge(m, src) +} +func (m *MsgWithRequiredWKT) XXX_Size() int { + return xxx_messageInfo_MsgWithRequiredWKT.Size(m) +} +func (m *MsgWithRequiredWKT) XXX_DiscardUnknown() { + xxx_messageInfo_MsgWithRequiredWKT.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgWithRequiredWKT proto.InternalMessageInfo + +func (m *MsgWithRequiredWKT) GetStr() *wrappers.StringValue { + if m != nil { + return m.Str + } + return nil +} + var E_Name = &proto.ExtensionDesc{ ExtendedType: (*Real)(nil), ExtensionType: (*string)(nil), @@ -757,96 +1142,132 @@ var E_Name = &proto.ExtensionDesc{ Filename: "test_objects.proto", } +var E_Extm = &proto.ExtensionDesc{ + ExtendedType: (*Real)(nil), + ExtensionType: (*MsgWithRequired)(nil), + Field: 125, + Name: "jsonpb.extm", + Tag: "bytes,125,opt,name=extm", + Filename: "test_objects.proto", +} + func init() { + proto.RegisterEnum("jsonpb.Widget_Color", Widget_Color_name, Widget_Color_value) proto.RegisterType((*Simple)(nil), "jsonpb.Simple") proto.RegisterType((*NonFinites)(nil), "jsonpb.NonFinites") proto.RegisterType((*Repeats)(nil), "jsonpb.Repeats") proto.RegisterType((*Widget)(nil), "jsonpb.Widget") proto.RegisterType((*Maps)(nil), "jsonpb.Maps") + proto.RegisterMapType((map[bool]*Simple)(nil), "jsonpb.Maps.MBoolSimpleEntry") + proto.RegisterMapType((map[int64]string)(nil), "jsonpb.Maps.MInt64StrEntry") proto.RegisterType((*MsgWithOneof)(nil), "jsonpb.MsgWithOneof") proto.RegisterType((*Real)(nil), "jsonpb.Real") + 
proto.RegisterExtension(E_Complex_RealExtension) proto.RegisterType((*Complex)(nil), "jsonpb.Complex") proto.RegisterType((*KnownTypes)(nil), "jsonpb.KnownTypes") - proto.RegisterEnum("jsonpb.Widget_Color", Widget_Color_name, Widget_Color_value) - proto.RegisterExtension(E_Complex_RealExtension) + proto.RegisterType((*MsgWithRequired)(nil), "jsonpb.MsgWithRequired") + proto.RegisterType((*MsgWithIndirectRequired)(nil), "jsonpb.MsgWithIndirectRequired") + proto.RegisterMapType((map[string]*MsgWithRequired)(nil), "jsonpb.MsgWithIndirectRequired.MapFieldEntry") + proto.RegisterType((*MsgWithRequiredBytes)(nil), "jsonpb.MsgWithRequiredBytes") + proto.RegisterType((*MsgWithRequiredWKT)(nil), "jsonpb.MsgWithRequiredWKT") proto.RegisterExtension(E_Name) -} - -func init() { proto.RegisterFile("test_objects.proto", fileDescriptor1) } - -var fileDescriptor1 = []byte{ - // 1160 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x95, 0x41, 0x73, 0xdb, 0x44, - 0x14, 0xc7, 0x23, 0xc9, 0x92, 0xed, 0x75, 0x92, 0x9a, 0x6d, 0xda, 0x2a, 0x26, 0x80, 0xc6, 0x94, - 0x22, 0x0a, 0x75, 0x07, 0xc7, 0xe3, 0x61, 0x0a, 0x97, 0xa4, 0x71, 0x29, 0x43, 0x13, 0x98, 0x4d, - 0x43, 0x8f, 0x1e, 0x39, 0x5a, 0xbb, 0x2a, 0xf2, 0xae, 0x67, 0x77, 0x95, 0xd4, 0x03, 0x87, 0x9c, - 0x39, 0x32, 0x7c, 0x05, 0xf8, 0x08, 0x1c, 0xf8, 0x74, 0xcc, 0xdb, 0x95, 0xac, 0xc4, 0x8e, 0x4f, - 0xf1, 0x7b, 0xef, 0xff, 0xfe, 0x59, 0xed, 0x6f, 0x77, 0x1f, 0xc2, 0x8a, 0x4a, 0x35, 0xe4, 0xa3, - 0x77, 0xf4, 0x5c, 0xc9, 0xce, 0x4c, 0x70, 0xc5, 0xb1, 0xf7, 0x4e, 0x72, 0x36, 0x1b, 0xb5, 0x76, - 0x27, 0x9c, 0x4f, 0x52, 0xfa, 0x54, 0x67, 0x47, 0xd9, 0xf8, 0x69, 0xc4, 0xe6, 0x46, 0xd2, 0xfa, - 0x78, 0xb9, 0x14, 0x67, 0x22, 0x52, 0x09, 0x67, 0x79, 0x7d, 0x6f, 0xb9, 0x2e, 0x95, 0xc8, 0xce, - 0x55, 0x5e, 0xfd, 0x64, 0xb9, 0xaa, 0x92, 0x29, 0x95, 0x2a, 0x9a, 0xce, 0xd6, 0xd9, 0x5f, 0x8a, - 0x68, 0x36, 0xa3, 0x22, 0x5f, 0x61, 0xfb, 0x6f, 0x1b, 0x79, 0xa7, 0xc9, 0x74, 0x96, 0x52, 0x7c, - 0x0f, 0x79, 0x7c, 0x38, 0xe2, 0x3c, 0xf5, 0xad, 0xc0, 0x0a, 0x6b, 0xc4, 0xe5, 0x87, 0x9c, 0xa7, - 0xf8, 0x01, 0xaa, 0xf2, 0x61, 0xc2, 0xd4, 0x7e, 0xd7, 0xb7, 0x03, 0x2b, 0x74, 0x89, 0xc7, 0x7f, - 0x80, 0x68, 0x51, 0xe8, 0xf7, 0x7c, 0x27, 0xb0, 0x42, 0xc7, 0x14, 0xfa, 0x3d, 0xbc, 0x8b, 0x6a, - 0x7c, 0x98, 0x99, 0x96, 0x4a, 0x60, 0x85, 0x5b, 0xa4, 0xca, 0xcf, 0x74, 0x58, 0x96, 0xfa, 0x3d, - 0xdf, 0x0d, 0xac, 0xb0, 0x92, 0x97, 0x8a, 0x2e, 0x69, 0xba, 0xbc, 0xc0, 0x0a, 0x3f, 0x20, 0x55, - 0x7e, 0x7a, 0xad, 0x4b, 0x9a, 0xae, 0x6a, 0x60, 0x85, 0x38, 0x2f, 0xf5, 0x7b, 0x66, 0x11, 0xe3, - 0x94, 0x47, 0xca, 0xaf, 0x05, 0x56, 0x68, 0x13, 0x8f, 0xbf, 0x80, 0xc8, 0xf4, 0xc4, 0x3c, 0x1b, - 0xa5, 0xd4, 0xaf, 0x07, 0x56, 0x68, 0x91, 0x2a, 0x3f, 0xd2, 0x61, 0x6e, 0xa7, 0x44, 0xc2, 0x26, - 0x3e, 0x0a, 0xac, 0xb0, 0x0e, 0x76, 0x3a, 0x34, 0x76, 0xa3, 0xb9, 0xa2, 0xd2, 0x6f, 0x04, 0x56, - 0xb8, 0x49, 0x3c, 0x7e, 0x08, 0x51, 0xfb, 0x4f, 0x0b, 0xa1, 0x13, 0xce, 0x5e, 0x24, 0x2c, 0x51, - 0x54, 0xe2, 0xbb, 0xc8, 0x1d, 0x0f, 0x59, 0xc4, 0xf4, 0x56, 0xd9, 0xa4, 0x32, 0x3e, 0x89, 0x18, - 0x6c, 0xe0, 0x78, 0x38, 0x4b, 0xd8, 0x58, 0x6f, 0x94, 0x4d, 0xdc, 0xf1, 0xcf, 0x09, 0x1b, 0x9b, - 0x34, 0x83, 0xb4, 0x93, 0xa7, 0x4f, 0x20, 0x7d, 0x17, 0xb9, 0xb1, 0xb6, 0xa8, 0xe8, 0xd5, 0x55, - 0xe2, 0xdc, 0x22, 0x36, 0x16, 0xae, 0xce, 0xba, 0x71, 0x61, 0x11, 0x1b, 0x0b, 0x2f, 0x4f, 0x83, - 0x45, 0xfb, 0x1f, 0x1b, 0x55, 0x09, 0x9d, 0xd1, 0x48, 0x49, 0x90, 0x88, 0x82, 0x9e, 0x03, 0xf4, - 0x44, 0x41, 0x4f, 0x2c, 0xe8, 0x39, 0x40, 0x4f, 0x2c, 0xe8, 0x89, 0x05, 0x3d, 0x07, 
0xe8, 0x89, - 0x05, 0x3d, 0x51, 0xd2, 0x73, 0x80, 0x9e, 0x28, 0xe9, 0x89, 0x92, 0x9e, 0x03, 0xf4, 0x44, 0x49, - 0x4f, 0x94, 0xf4, 0x1c, 0xa0, 0x27, 0x4e, 0xaf, 0x75, 0x2d, 0xe8, 0x39, 0x40, 0x4f, 0x94, 0xf4, - 0xc4, 0x82, 0x9e, 0x03, 0xf4, 0xc4, 0x82, 0x9e, 0x28, 0xe9, 0x39, 0x40, 0x4f, 0x94, 0xf4, 0x44, - 0x49, 0xcf, 0x01, 0x7a, 0xa2, 0xa4, 0x27, 0x16, 0xf4, 0x1c, 0xa0, 0x27, 0x0c, 0xbd, 0x7f, 0x6d, - 0xe4, 0xbd, 0x49, 0xe2, 0x09, 0x55, 0xf8, 0x31, 0x72, 0xcf, 0x79, 0xca, 0x85, 0x26, 0xb7, 0xdd, - 0xdd, 0xe9, 0x98, 0x2b, 0xda, 0x31, 0xe5, 0xce, 0x73, 0xa8, 0x11, 0x23, 0xc1, 0x4f, 0xc0, 0xcf, - 0xa8, 0x61, 0xf3, 0xd6, 0xa9, 0x3d, 0xa1, 0xff, 0xe2, 0x47, 0xc8, 0x93, 0xfa, 0x2a, 0xe9, 0x53, - 0xd5, 0xe8, 0x6e, 0x17, 0x6a, 0x73, 0xc1, 0x48, 0x5e, 0xc5, 0x5f, 0x98, 0x0d, 0xd1, 0x4a, 0x58, - 0xe7, 0xaa, 0x12, 0x36, 0x28, 0x97, 0x56, 0x85, 0x01, 0xec, 0xef, 0x68, 0xcf, 0x3b, 0x85, 0x32, - 0xe7, 0x4e, 0x8a, 0x3a, 0xfe, 0x0a, 0xd5, 0xc5, 0xb0, 0x10, 0xdf, 0xd3, 0xb6, 0x2b, 0xe2, 0x9a, - 0xc8, 0x7f, 0xb5, 0x3f, 0x43, 0xae, 0x59, 0x74, 0x15, 0x39, 0x64, 0x70, 0xd4, 0xdc, 0xc0, 0x75, - 0xe4, 0x7e, 0x4f, 0x06, 0x83, 0x93, 0xa6, 0x85, 0x6b, 0xa8, 0x72, 0xf8, 0xea, 0x6c, 0xd0, 0xb4, - 0xdb, 0x7f, 0xd9, 0xa8, 0x72, 0x1c, 0xcd, 0x24, 0xfe, 0x16, 0x35, 0xa6, 0xe6, 0xb8, 0xc0, 0xde, - 0xeb, 0x33, 0xd6, 0xe8, 0x7e, 0x58, 0xf8, 0x83, 0xa4, 0x73, 0xac, 0xcf, 0xcf, 0xa9, 0x12, 0x03, - 0xa6, 0xc4, 0x9c, 0xd4, 0xa7, 0x45, 0x8c, 0x0f, 0xd0, 0xd6, 0x54, 0x9f, 0xcd, 0xe2, 0xab, 0x6d, - 0xdd, 0xfe, 0xd1, 0xcd, 0x76, 0x38, 0xaf, 0xe6, 0xb3, 0x8d, 0x41, 0x63, 0x5a, 0x66, 0x5a, 0xdf, - 0xa1, 0xed, 0x9b, 0xfe, 0xb8, 0x89, 0x9c, 0x5f, 0xe9, 0x5c, 0x63, 0x74, 0x08, 0xfc, 0xc4, 0x3b, - 0xc8, 0xbd, 0x88, 0xd2, 0x8c, 0xea, 0xeb, 0x57, 0x27, 0x26, 0x78, 0x66, 0x7f, 0x63, 0xb5, 0x4e, - 0x50, 0x73, 0xd9, 0xfe, 0x7a, 0x7f, 0xcd, 0xf4, 0x3f, 0xbc, 0xde, 0xbf, 0x0a, 0xa5, 0xf4, 0x6b, - 0xff, 0x61, 0xa1, 0xcd, 0x63, 0x39, 0x79, 0x93, 0xa8, 0xb7, 0x3f, 0x31, 0xca, 0xc7, 0xf8, 0x3e, - 0x72, 0x55, 0xa2, 0x52, 0xaa, 0xed, 0xea, 0x2f, 0x37, 0x88, 0x09, 0xb1, 0x8f, 0x3c, 0x19, 0xa5, - 0x91, 0x98, 0x6b, 0x4f, 0xe7, 0xe5, 0x06, 0xc9, 0x63, 0xdc, 0x42, 0xd5, 0xe7, 0x3c, 0x83, 0x95, - 0xe8, 0x67, 0x01, 0x7a, 0x8a, 0x04, 0xfe, 0x14, 0x6d, 0xbe, 0xe5, 0x53, 0x3a, 0x8c, 0xe2, 0x58, - 0x50, 0x29, 0xf5, 0x0b, 0x01, 0x82, 0x06, 0x64, 0x0f, 0x4c, 0xf2, 0xb0, 0x8a, 0xdc, 0x8c, 0x25, - 0x9c, 0xb5, 0x1f, 0xa1, 0x0a, 0xa1, 0x51, 0x5a, 0x7e, 0xbe, 0x65, 0xde, 0x08, 0x1d, 0x3c, 0xae, - 0xd5, 0xe2, 0xe6, 0xd5, 0xd5, 0xd5, 0x95, 0xdd, 0xbe, 0x84, 0xff, 0x08, 0x5f, 0xf2, 0x1e, 0xef, - 0xa1, 0x7a, 0x32, 0x8d, 0x26, 0x09, 0x83, 0x95, 0x19, 0x79, 0x99, 0x28, 0x5b, 0xba, 0x47, 0x68, - 0x5b, 0xd0, 0x28, 0x1d, 0xd2, 0xf7, 0x8a, 0x32, 0x99, 0x70, 0x86, 0x37, 0xcb, 0x23, 0x15, 0xa5, - 0xfe, 0x6f, 0x37, 0xcf, 0x64, 0x6e, 0x4f, 0xb6, 0xa0, 0x69, 0x50, 0xf4, 0xb4, 0xff, 0x73, 0x11, - 0xfa, 0x91, 0xf1, 0x4b, 0xf6, 0x7a, 0x3e, 0xa3, 0x12, 0x3f, 0x44, 0x76, 0xc4, 0xfc, 0x6d, 0xdd, - 0xba, 0xd3, 0x31, 0xf3, 0xa9, 0x53, 0xcc, 0xa7, 0xce, 0x01, 0x9b, 0x13, 0x3b, 0x62, 0xf8, 0x4b, - 0xe4, 0xc4, 0x99, 0xb9, 0xa5, 0x8d, 0xee, 0xee, 0x8a, 0xec, 0x28, 0x9f, 0x92, 0x04, 0x54, 0xf8, - 0x73, 0x64, 0x4b, 0xe5, 0x6f, 0x6a, 0xed, 0x83, 0x15, 0xed, 0xa9, 0x9e, 0x98, 0xc4, 0x96, 0x70, - 0xfb, 0x6d, 0x25, 0x73, 0xbe, 0xad, 0x15, 0xe1, 0xeb, 0x62, 0x78, 0x12, 0x5b, 0x49, 0xd0, 0xa6, - 0x17, 0xfe, 0x9d, 0x35, 0xda, 0x57, 0x89, 0x54, 0xbf, 0xc0, 0x0e, 0x13, 0x3b, 0xbd, 0xc0, 0x21, - 0x72, 0x2e, 0xa2, 0xd4, 0x6f, 0x6a, 0xf1, 0xfd, 0x15, 0xb1, 0x11, 0x82, 0x04, 0x77, 0x90, 0x13, - 0x8f, 0x52, 
0xcd, 0xbc, 0xd1, 0xdd, 0x5b, 0xfd, 0x2e, 0xfd, 0xc8, 0xe5, 0xfa, 0x78, 0x94, 0xe2, - 0x27, 0xc8, 0x19, 0xa7, 0x4a, 0x1f, 0x01, 0xb8, 0x70, 0xcb, 0x7a, 0xfd, 0x5c, 0xe6, 0xf2, 0x71, - 0xaa, 0x40, 0x9e, 0xe4, 0xb3, 0xf5, 0x36, 0xb9, 0xbe, 0x42, 0xb9, 0x3c, 0xe9, 0xf7, 0x60, 0x35, - 0x59, 0xbf, 0xa7, 0xa7, 0xca, 0x6d, 0xab, 0x39, 0xbb, 0xae, 0xcf, 0xfa, 0x3d, 0x6d, 0xbf, 0xdf, - 0xd5, 0x43, 0x78, 0x8d, 0xfd, 0x7e, 0xb7, 0xb0, 0xdf, 0xef, 0x6a, 0xfb, 0xfd, 0xae, 0x9e, 0xcc, - 0xeb, 0xec, 0x17, 0xfa, 0x4c, 0xeb, 0x2b, 0x7a, 0x84, 0xd5, 0xd7, 0x6c, 0x3a, 0xdc, 0x61, 0x23, - 0xd7, 0x3a, 0xf0, 0x87, 0xd7, 0x08, 0xad, 0xf1, 0x37, 0x63, 0x21, 0xf7, 0x97, 0x4a, 0xe0, 0xaf, - 0x91, 0x5b, 0x0e, 0xf7, 0xdb, 0x3e, 0x40, 0x8f, 0x0b, 0xd3, 0x60, 0x94, 0xcf, 0x02, 0x54, 0x61, - 0xd1, 0x94, 0x2e, 0x1d, 0xfc, 0xdf, 0xf5, 0x0b, 0xa3, 0x2b, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, - 0xd5, 0x39, 0x32, 0x09, 0xf9, 0x09, 0x00, 0x00, + proto.RegisterExtension(E_Extm) +} + +func init() { proto.RegisterFile("test_objects.proto", fileDescriptor_e97c739a0ce14cc6) } + +var fileDescriptor_e97c739a0ce14cc6 = []byte{ + // 1460 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xdd, 0x72, 0xdb, 0x44, + 0x14, 0x8e, 0x24, 0xcb, 0xb6, 0x8e, 0xf3, 0xd7, 0x6d, 0xda, 0x2a, 0xa1, 0x14, 0x8d, 0x5b, 0x8a, + 0x69, 0x89, 0x3b, 0x38, 0x1e, 0x4f, 0x29, 0xdc, 0x34, 0x4d, 0x4a, 0x4b, 0xdb, 0xc0, 0x6c, 0x52, + 0x7a, 0xe9, 0x91, 0x23, 0x39, 0x55, 0x91, 0xb4, 0x66, 0x77, 0x9d, 0xd4, 0x03, 0xcc, 0xe4, 0x19, + 0x18, 0x9e, 0x80, 0x0b, 0x6e, 0xb9, 0xe3, 0x82, 0xb7, 0xe0, 0x8d, 0x98, 0x3d, 0xbb, 0xf2, 0x5f, + 0xe2, 0x81, 0x2b, 0x7b, 0xf7, 0xfb, 0xd9, 0xd5, 0x9e, 0x4f, 0x67, 0x05, 0x44, 0xc6, 0x42, 0x76, + 0x59, 0xef, 0x5d, 0x7c, 0x2c, 0x45, 0x73, 0xc0, 0x99, 0x64, 0xa4, 0xfc, 0x4e, 0xb0, 0x7c, 0xd0, + 0xdb, 0xda, 0x3c, 0x61, 0xec, 0x24, 0x8d, 0x1f, 0xe0, 0x6c, 0x6f, 0xd8, 0x7f, 0x10, 0xe6, 0x23, + 0x4d, 0xd9, 0xba, 0x35, 0x0f, 0x45, 0x43, 0x1e, 0xca, 0x84, 0xe5, 0x06, 0xbf, 0x39, 0x8f, 0x0b, + 0xc9, 0x87, 0xc7, 0xd2, 0xa0, 0x1f, 0xcd, 0xa3, 0x32, 0xc9, 0x62, 0x21, 0xc3, 0x6c, 0xb0, 0xc8, + 0xfe, 0x8c, 0x87, 0x83, 0x41, 0xcc, 0xcd, 0x0e, 0xeb, 0x7f, 0x96, 0xa0, 0x7c, 0x98, 0x64, 0x83, + 0x34, 0x26, 0xd7, 0xa0, 0xcc, 0xba, 0x3d, 0xc6, 0x52, 0xdf, 0x0a, 0xac, 0x46, 0x95, 0xba, 0x6c, + 0x97, 0xb1, 0x94, 0xdc, 0x80, 0x0a, 0xeb, 0x26, 0xb9, 0xdc, 0x69, 0xf9, 0x76, 0x60, 0x35, 0x5c, + 0x5a, 0x66, 0xcf, 0xd5, 0x88, 0xdc, 0x82, 0x9a, 0x01, 0xba, 0x42, 0x72, 0xdf, 0x41, 0xd0, 0xd3, + 0xe0, 0xa1, 0xe4, 0x63, 0x61, 0xa7, 0xed, 0x97, 0x02, 0xab, 0xe1, 0x68, 0x61, 0xa7, 0x3d, 0x16, + 0x76, 0xda, 0x28, 0x74, 0x11, 0xf4, 0x34, 0xa8, 0x84, 0x9b, 0x50, 0x65, 0xdd, 0xa1, 0x5e, 0xb2, + 0x1c, 0x58, 0x8d, 0x15, 0x5a, 0x61, 0xaf, 0x71, 0x48, 0x02, 0x58, 0x2e, 0x20, 0xd4, 0x56, 0x10, + 0x06, 0x03, 0xcf, 0x88, 0x3b, 0x6d, 0xbf, 0x1a, 0x58, 0x8d, 0x92, 0x11, 0x77, 0xda, 0x13, 0xb1, + 0x59, 0xd8, 0x43, 0x18, 0x0c, 0x3c, 0x16, 0x0b, 0xbd, 0x32, 0x04, 0x56, 0xe3, 0x0a, 0xad, 0xb0, + 0xc3, 0xa9, 0x95, 0xc5, 0x64, 0xe5, 0x1a, 0xc2, 0x60, 0xe0, 0x19, 0x71, 0xa7, 0xed, 0x2f, 0x07, + 0x56, 0x83, 0x18, 0x71, 0xb1, 0xb2, 0x98, 0xac, 0xbc, 0x82, 0x30, 0x18, 0x78, 0x7c, 0x58, 0xfd, + 0x94, 0x85, 0xd2, 0x5f, 0x0d, 0xac, 0x86, 0x4d, 0xcb, 0xec, 0xa9, 0x1a, 0xe9, 0xc3, 0x42, 0x00, + 0x95, 0x6b, 0x08, 0x7a, 0x1a, 0x1c, 0xaf, 0x1a, 0xb1, 0x61, 0x2f, 0x8d, 0xfd, 0xf5, 0xc0, 0x6a, + 0x58, 0xb4, 0xc2, 0xf6, 0x70, 0xa8, 0x57, 0xd5, 0x10, 0x6a, 0xaf, 0x20, 0x0c, 0x06, 0x9e, 0x6c, + 0x59, 0xf2, 0x24, 0x3f, 0xf1, 0x49, 0x60, 0x35, 0x3c, 
0xb5, 0x65, 0x1c, 0xea, 0x0d, 0xf5, 0x46, + 0x32, 0x16, 0xfe, 0xd5, 0xc0, 0x6a, 0x2c, 0xd3, 0x32, 0xdb, 0x55, 0xa3, 0xfa, 0xaf, 0x16, 0xc0, + 0x01, 0xcb, 0x9f, 0x26, 0x79, 0x22, 0x63, 0x41, 0xae, 0x82, 0xdb, 0xef, 0xe6, 0x61, 0x8e, 0xa1, + 0xb1, 0x69, 0xa9, 0x7f, 0x10, 0xe6, 0x2a, 0x4a, 0xfd, 0xee, 0x20, 0xc9, 0xfb, 0x18, 0x19, 0x9b, + 0xba, 0xfd, 0xef, 0x92, 0xbc, 0xaf, 0xa7, 0x73, 0x35, 0xed, 0x98, 0xe9, 0x03, 0x35, 0x7d, 0x15, + 0xdc, 0x08, 0x2d, 0x4a, 0xb8, 0xc1, 0x52, 0x64, 0x2c, 0x22, 0x6d, 0xe1, 0xe2, 0xac, 0x1b, 0x15, + 0x16, 0x91, 0xb6, 0x28, 0x9b, 0x69, 0x65, 0x51, 0xff, 0xc3, 0x86, 0x0a, 0x8d, 0x07, 0x71, 0x28, + 0x85, 0xa2, 0xf0, 0x22, 0xc7, 0x8e, 0xca, 0x31, 0x2f, 0x72, 0xcc, 0xc7, 0x39, 0x76, 0x54, 0x8e, + 0xb9, 0xce, 0x71, 0x01, 0x74, 0xda, 0xbe, 0x13, 0x38, 0x2a, 0xa7, 0x5c, 0xe7, 0x74, 0x13, 0xaa, + 0xbc, 0xc8, 0x61, 0x29, 0x70, 0x54, 0x0e, 0xb9, 0xc9, 0xe1, 0x18, 0xea, 0xb4, 0x7d, 0x37, 0x70, + 0x54, 0xca, 0xb8, 0x49, 0x19, 0x42, 0xa2, 0x48, 0xaf, 0xa3, 0x32, 0xc4, 0x0f, 0xa7, 0x54, 0x26, + 0x21, 0x95, 0xc0, 0x51, 0x09, 0xe1, 0x26, 0x21, 0xb8, 0x09, 0x5d, 0xff, 0x6a, 0xe0, 0xa8, 0xfa, + 0x73, 0x5d, 0x7f, 0xd4, 0x98, 0xfa, 0x7a, 0x81, 0xa3, 0xea, 0xcb, 0x4d, 0x7d, 0xb5, 0x9d, 0xae, + 0x1e, 0x04, 0x8e, 0xaa, 0x1e, 0x9f, 0x54, 0x8f, 0x9b, 0xea, 0xd5, 0x02, 0x47, 0x55, 0x8f, 0xeb, + 0xea, 0xfd, 0x65, 0x43, 0xf9, 0x4d, 0x12, 0x9d, 0xc4, 0x92, 0xdc, 0x03, 0xf7, 0x98, 0xa5, 0x8c, + 0x63, 0xe5, 0x56, 0x5b, 0x1b, 0x4d, 0xdd, 0xac, 0x9a, 0x1a, 0x6e, 0x3e, 0x51, 0x18, 0xd5, 0x14, + 0xb2, 0xad, 0xfc, 0x34, 0x5b, 0x1d, 0xde, 0x22, 0x76, 0x99, 0xe3, 0x2f, 0xb9, 0x0b, 0x65, 0x81, + 0x4d, 0x05, 0xdf, 0xa2, 0x5a, 0x6b, 0xb5, 0x60, 0xeb, 0x56, 0x43, 0x0d, 0x4a, 0x3e, 0xd5, 0x07, + 0x82, 0x4c, 0xb5, 0xcf, 0x8b, 0x4c, 0x75, 0x40, 0x86, 0x5a, 0xe1, 0xba, 0xc0, 0xfe, 0x06, 0x7a, + 0xae, 0x15, 0x4c, 0x53, 0x77, 0x5a, 0xe0, 0xe4, 0x33, 0xf0, 0x78, 0xb7, 0x20, 0x5f, 0x43, 0xdb, + 0x0b, 0xe4, 0x2a, 0x37, 0xff, 0xea, 0x1f, 0x83, 0xab, 0x37, 0x5d, 0x01, 0x87, 0xee, 0xef, 0xad, + 0x2f, 0x11, 0x0f, 0xdc, 0xaf, 0xe9, 0xfe, 0xfe, 0xc1, 0xba, 0x45, 0xaa, 0x50, 0xda, 0x7d, 0xf9, + 0x7a, 0x7f, 0xdd, 0xae, 0xff, 0x66, 0x43, 0xe9, 0x55, 0x38, 0x10, 0xe4, 0x4b, 0xa8, 0x65, 0x53, + 0xdd, 0xcb, 0x42, 0xff, 0x0f, 0x0a, 0x7f, 0x45, 0x69, 0xbe, 0x2a, 0x5a, 0xd9, 0x7e, 0x2e, 0xf9, + 0x88, 0x7a, 0xd9, 0xb8, 0xb5, 0x3d, 0x86, 0x95, 0x0c, 0xb3, 0x59, 0x3c, 0xb5, 0x8d, 0xf2, 0x0f, + 0x67, 0xe5, 0x2a, 0xaf, 0xfa, 0xb1, 0xb5, 0x41, 0x2d, 0x9b, 0xcc, 0x6c, 0x7d, 0x05, 0xab, 0xb3, + 0xfe, 0x64, 0x1d, 0x9c, 0x1f, 0xe2, 0x11, 0x96, 0xd1, 0xa1, 0xea, 0x2f, 0xd9, 0x00, 0xf7, 0x34, + 0x4c, 0x87, 0x31, 0xbe, 0x7e, 0x1e, 0xd5, 0x83, 0x47, 0xf6, 0x43, 0x6b, 0xeb, 0x00, 0xd6, 0xe7, + 0xed, 0xa7, 0xf5, 0x55, 0xad, 0xbf, 0x33, 0xad, 0xbf, 0x58, 0x94, 0x89, 0x5f, 0xfd, 0x1f, 0x0b, + 0x96, 0x5f, 0x89, 0x93, 0x37, 0x89, 0x7c, 0xfb, 0x6d, 0x1e, 0xb3, 0x3e, 0xb9, 0x0e, 0xae, 0x4c, + 0x64, 0x1a, 0xa3, 0x9d, 0xf7, 0x6c, 0x89, 0xea, 0x21, 0xf1, 0xa1, 0x2c, 0xc2, 0x34, 0xe4, 0x23, + 0xf4, 0x74, 0x9e, 0x2d, 0x51, 0x33, 0x26, 0x5b, 0x50, 0x79, 0xc2, 0x86, 0x6a, 0x27, 0xd8, 0x16, + 0x94, 0xa6, 0x98, 0x20, 0xb7, 0x61, 0xf9, 0x2d, 0xcb, 0xe2, 0x6e, 0x18, 0x45, 0x3c, 0x16, 0x02, + 0x3b, 0x84, 0x22, 0xd4, 0xd4, 0xec, 0x63, 0x3d, 0x49, 0xf6, 0xe1, 0x4a, 0x26, 0x4e, 0xba, 0x67, + 0x89, 0x7c, 0xdb, 0xe5, 0xf1, 0x8f, 0xc3, 0x84, 0xc7, 0x11, 0x76, 0x8d, 0x5a, 0xeb, 0xc6, 0xf8, + 0x60, 0xf5, 0x1e, 0xa9, 0x81, 0x9f, 0x2d, 0xd1, 0xb5, 0x6c, 0x76, 0x6a, 0xb7, 0x02, 0xee, 0x30, + 0x4f, 0x58, 0x5e, 0xbf, 0x0b, 0x25, 0x1a, 0x87, 0xe9, 0xe4, 0x14, 0x2d, 0xdd, 
0x6a, 0x70, 0x70, + 0xaf, 0x5a, 0x8d, 0xd6, 0xcf, 0xcf, 0xcf, 0xcf, 0xed, 0xfa, 0x99, 0xda, 0xb8, 0x3a, 0x90, 0xf7, + 0xe4, 0x26, 0x78, 0x49, 0x16, 0x9e, 0x24, 0xb9, 0x7a, 0x40, 0x4d, 0x9f, 0x4c, 0x4c, 0x24, 0xad, + 0x3d, 0x58, 0xe5, 0x71, 0x98, 0x76, 0xe3, 0xf7, 0x32, 0xce, 0x45, 0xc2, 0x72, 0xb2, 0x3c, 0x49, + 0x66, 0x98, 0xfa, 0x3f, 0xcd, 0x46, 0xdb, 0xd8, 0xd3, 0x15, 0x25, 0xda, 0x2f, 0x34, 0xf5, 0xbf, + 0x5d, 0x80, 0x17, 0x39, 0x3b, 0xcb, 0x8f, 0x46, 0x83, 0x58, 0x90, 0x3b, 0x60, 0x87, 0x39, 0x5e, + 0x1b, 0xb5, 0xd6, 0x46, 0x53, 0x5f, 0xf8, 0xcd, 0xe2, 0xc2, 0x6f, 0x3e, 0xce, 0x47, 0xd4, 0x0e, + 0x73, 0x72, 0x1f, 0x9c, 0x68, 0xa8, 0x5f, 0xf6, 0x5a, 0x6b, 0xf3, 0x02, 0x6d, 0xcf, 0x7c, 0x76, + 0x50, 0xc5, 0x22, 0x9f, 0x80, 0x2d, 0x24, 0xde, 0x62, 0xea, 0x0c, 0xe7, 0xb9, 0x87, 0xf8, 0x09, + 0x42, 0x6d, 0xa1, 0x9a, 0x88, 0x2d, 0x85, 0x89, 0xc9, 0xd6, 0x05, 0xe2, 0x51, 0xf1, 0x35, 0x42, + 0x6d, 0x29, 0x14, 0x37, 0x3d, 0xc5, 0x1b, 0xec, 0x32, 0xee, 0xcb, 0x44, 0xc8, 0xef, 0xd5, 0x09, + 0x53, 0x3b, 0x3d, 0x25, 0x0d, 0x70, 0x4e, 0xc3, 0x14, 0x6f, 0xb4, 0x5a, 0xeb, 0xfa, 0x05, 0xb2, + 0x26, 0x2a, 0x0a, 0x69, 0x82, 0x13, 0xf5, 0x52, 0x8c, 0x4e, 0xad, 0x75, 0xf3, 0xe2, 0x73, 0x61, + 0xaf, 0x34, 0xfc, 0xa8, 0x97, 0x92, 0x6d, 0x70, 0xfa, 0xa9, 0xc4, 0x24, 0xa9, 0xf7, 0x76, 0x9e, + 0x8f, 0x5d, 0xd7, 0xd0, 0xfb, 0xa9, 0x54, 0xf4, 0x04, 0x9b, 0xfc, 0xe5, 0x74, 0x7c, 0x13, 0x0d, + 0x3d, 0xe9, 0xb4, 0xd5, 0x6e, 0x86, 0x9d, 0x36, 0x5e, 0x4e, 0x97, 0xed, 0xe6, 0xf5, 0x34, 0x7f, + 0xd8, 0x69, 0xa3, 0xfd, 0x4e, 0x0b, 0xbf, 0x63, 0x16, 0xd8, 0xef, 0xb4, 0x0a, 0xfb, 0x9d, 0x16, + 0xda, 0xef, 0xb4, 0xf0, 0xc3, 0x66, 0x91, 0xfd, 0x98, 0x3f, 0x44, 0x7e, 0x09, 0x6f, 0x42, 0x6f, + 0xc1, 0xa1, 0xab, 0x56, 0xa0, 0xe9, 0xc8, 0x53, 0xfe, 0xaa, 0xa9, 0xc1, 0x02, 0x7f, 0x7d, 0xbb, + 0x18, 0x7f, 0x21, 0x39, 0xf9, 0x1c, 0xdc, 0xe2, 0x96, 0xb9, 0xfc, 0x01, 0xf0, 0xd6, 0xd1, 0x02, + 0xcd, 0xac, 0xdf, 0x86, 0xb5, 0xb9, 0x97, 0x51, 0x35, 0x20, 0xdd, 0x4a, 0xed, 0x86, 0x87, 0xbe, + 0xf5, 0xdf, 0x6d, 0xb8, 0x61, 0x58, 0xcf, 0xf3, 0x28, 0xe1, 0xf1, 0xb1, 0x1c, 0xb3, 0xef, 0x43, + 0x49, 0x0c, 0x7b, 0x99, 0x49, 0xf2, 0xa2, 0x37, 0x9c, 0x22, 0x89, 0x7c, 0x03, 0x5e, 0x16, 0x0e, + 0xba, 0xfd, 0x24, 0x4e, 0x23, 0xd3, 0x6c, 0xb7, 0xe7, 0x14, 0xf3, 0x0b, 0xa8, 0x26, 0xfc, 0x54, + 0xf1, 0x75, 0xf3, 0xad, 0x66, 0x66, 0x48, 0x1e, 0x42, 0x4d, 0xa4, 0xc9, 0x71, 0x6c, 0xdc, 0x1c, + 0x74, 0x5b, 0xb8, 0x3e, 0x20, 0x17, 0x95, 0x5b, 0x47, 0xb0, 0x32, 0x63, 0x3a, 0xdd, 0x72, 0x3d, + 0xdd, 0x72, 0xb7, 0x67, 0x5b, 0xee, 0x42, 0xdb, 0xa9, 0xde, 0x7b, 0x0f, 0x36, 0xe6, 0x50, 0x3c, + 0x6d, 0x42, 0xa0, 0xd4, 0x1b, 0x49, 0x81, 0xe7, 0xb9, 0x4c, 0xf1, 0x7f, 0x7d, 0x0f, 0xc8, 0x1c, + 0xf7, 0xcd, 0x8b, 0xa3, 0xa2, 0xdc, 0x8a, 0xf8, 0x7f, 0xca, 0xfd, 0x28, 0x80, 0x52, 0x1e, 0x66, + 0xf1, 0x5c, 0xd3, 0xfa, 0x19, 0x9f, 0x02, 0x91, 0x47, 0x5f, 0x40, 0x29, 0x7e, 0x2f, 0xb3, 0x39, + 0xc6, 0x2f, 0xff, 0x51, 0x2a, 0x25, 0xf9, 0x37, 0x00, 0x00, 0xff, 0xff, 0xe9, 0xd4, 0xfd, 0x2f, + 0x41, 0x0d, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto index 0d2fc1f..e01386e 100644 --- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto @@ -43,15 +43,23 @@ package jsonpb; message Simple { optional bool o_bool = 1; optional int32 o_int32 = 2; - optional int64 o_int64 = 3; - optional uint32 o_uint32 = 4; - optional uint64 o_uint64 = 
5; - optional sint32 o_sint32 = 6; - optional sint64 o_sint64 = 7; - optional float o_float = 8; - optional double o_double = 9; - optional string o_string = 10; - optional bytes o_bytes = 11; + optional int32 o_int32_str = 3; + optional int64 o_int64 = 4; + optional int64 o_int64_str = 5; + optional uint32 o_uint32 = 6; + optional uint32 o_uint32_str = 7; + optional uint64 o_uint64 = 8; + optional uint64 o_uint64_str = 9; + optional sint32 o_sint32 = 10; + optional sint32 o_sint32_str = 11; + optional sint64 o_sint64 = 12; + optional sint64 o_sint64_str = 13; + optional float o_float = 14; + optional float o_float_str = 15; + optional double o_double = 16; + optional double o_double_str = 17; + optional string o_string = 18; + optional bytes o_bytes = 19; } // Test message for holding special non-finites primitives. @@ -107,6 +115,7 @@ message MsgWithOneof { int64 salary = 2; string Country = 3; string home_address = 4; + MsgWithRequired msg_with_required = 5; } } @@ -145,3 +154,26 @@ message KnownTypes { optional google.protobuf.StringValue str = 10; optional google.protobuf.BytesValue bytes = 11; } + +// Test messages for marshaling/unmarshaling required fields. +message MsgWithRequired { + required string str = 1; +} + +message MsgWithIndirectRequired { + optional MsgWithRequired subm = 1; + map<string, MsgWithRequired> map_field = 2; + repeated MsgWithRequired slice_field = 3; +} + +message MsgWithRequiredBytes { + required bytes byts = 1; +} + +message MsgWithRequiredWKT { + required google.protobuf.StringValue str = 1; +} + +extend Real { + optional MsgWithRequired extm = 125; +} diff --git a/vendor/github.com/golang/protobuf/proto/Makefile b/vendor/github.com/golang/protobuf/proto/Makefile deleted file mode 100644 index e2e0651..0000000 --- a/vendor/github.com/golang/protobuf/proto/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- -install: - go install - -test: install generate-test-pbs - go test - - -generate-test-pbs: - make install - make -C testdata - protoc --go_out=Mtestdata/test.proto=github.com/golang/protobuf/proto/testdata,Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. proto3_proto/proto3.proto - make diff --git a/vendor/github.com/golang/protobuf/proto/all_test.go b/vendor/github.com/golang/protobuf/proto/all_test.go index 41451a4..1bea4b6 100644 --- a/vendor/github.com/golang/protobuf/proto/all_test.go +++ b/vendor/github.com/golang/protobuf/proto/all_test.go @@ -41,11 +41,13 @@ import ( "reflect" "runtime/debug" "strings" + "sync" "testing" "time" . "github.com/golang/protobuf/proto" - . "github.com/golang/protobuf/proto/testdata" + pb3 "github.com/golang/protobuf/proto/proto3_proto" + . "github.com/golang/protobuf/proto/test_proto" ) var globalO *Buffer @@ -114,6 +116,8 @@ func initGoTest(setdefaults bool) *GoTest { pb.F_BytesDefaulted = Default_GoTest_F_BytesDefaulted pb.F_Sint32Defaulted = Int32(Default_GoTest_F_Sint32Defaulted) pb.F_Sint64Defaulted = Int64(Default_GoTest_F_Sint64Defaulted) + pb.F_Sfixed32Defaulted = Int32(Default_GoTest_F_Sfixed32Defaulted) + pb.F_Sfixed64Defaulted = Int64(Default_GoTest_F_Sfixed64Defaulted) } pb.Kind = GoTest_TIME.Enum() @@ -131,135 +135,13 @@ func initGoTest(setdefaults bool) *GoTest { pb.F_BytesRequired = []byte("bytes") pb.F_Sint32Required = Int32(-32) pb.F_Sint64Required = Int64(-64) + pb.F_Sfixed32Required = Int32(-32) + pb.F_Sfixed64Required = Int64(-64) pb.Requiredgroup = initGoTest_RequiredGroup() return pb } -func fail(msg string, b *bytes.Buffer, s string, t *testing.T) { - data := b.Bytes() - ld := len(data) - ls := len(s) / 2 - - fmt.Printf("fail %s ld=%d ls=%d\n", msg, ld, ls) - - // find the interesting spot - n - n := ls - if ld < ls { - n = ld - } - j := 0 - for i := 0; i < n; i++ { - bs := hex(s[j])*16 + hex(s[j+1]) - j += 2 - if data[i] == bs { - continue - } - n = i - break - } - l := n - 10 - if l < 0 { - l = 0 - } - h := n + 10 - - // find the interesting spot - n - fmt.Printf("is[%d]:", l) - for i := l; i < h; i++ { - if i >= ld { - fmt.Printf(" --") - continue - } - fmt.Printf(" %.2x", data[i]) - } - fmt.Printf("\n") - - fmt.Printf("sb[%d]:", l) - for i := l; i < h; i++ { - if i >= ls { - fmt.Printf(" --") - continue - } - bs := hex(s[j])*16 + hex(s[j+1]) - j += 2 - fmt.Printf(" %.2x", bs) - } - fmt.Printf("\n") - - t.Fail() - - // t.Errorf("%s: \ngood: %s\nbad: %x", msg, s, b.Bytes()) - // Print the output in a partially-decoded format; can - // be helpful when updating the test. It produces the output - // that is pasted, with minor edits, into the argument to verify(). 
- // data := b.Bytes() - // nesting := 0 - // for b.Len() > 0 { - // start := len(data) - b.Len() - // var u uint64 - // u, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on varint:", err) - // return - // } - // wire := u & 0x7 - // tag := u >> 3 - // switch wire { - // case WireVarint: - // v, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on varint:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireFixed32: - // v, err := DecodeFixed32(b) - // if err != nil { - // fmt.Printf("decode error on fixed32:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireFixed64: - // v, err := DecodeFixed64(b) - // if err != nil { - // fmt.Printf("decode error on fixed64:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", - // data[start:len(data)-b.Len()], tag, wire, v) - // case WireBytes: - // nb, err := DecodeVarint(b) - // if err != nil { - // fmt.Printf("decode error on bytes:", err) - // return - // } - // after_tag := len(data) - b.Len() - // str := make([]byte, nb) - // _, err = b.Read(str) - // if err != nil { - // fmt.Printf("decode error on bytes:", err) - // return - // } - // fmt.Printf("\t\t\"%x\" \"%x\" // field %d, encoding %d (FIELD)\n", - // data[start:after_tag], str, tag, wire) - // case WireStartGroup: - // nesting++ - // fmt.Printf("\t\t\"%x\"\t\t// start group field %d level %d\n", - // data[start:len(data)-b.Len()], tag, nesting) - // case WireEndGroup: - // fmt.Printf("\t\t\"%x\"\t\t// end group field %d level %d\n", - // data[start:len(data)-b.Len()], tag, nesting) - // nesting-- - // default: - // fmt.Printf("unrecognized wire type %d\n", wire) - // return - // } - // } -} - func hex(c uint8) uint8 { if '0' <= c && c <= '9' { return c - '0' @@ -482,6 +364,48 @@ func TestMarshalerEncoding(t *testing.T) { } } +// Ensure that Buffer.Marshal uses O(N) memory for N messages +func TestBufferMarshalAllocs(t *testing.T) { + value := &OtherMessage{Key: Int64(1)} + msg := &MyMessage{Count: Int32(1), Others: []*OtherMessage{value}} + + reallocSize := func(t *testing.T, items int, prealloc int) (int64, int64) { + var b Buffer + b.SetBuf(make([]byte, 0, prealloc)) + + var allocSpace int64 + prevCap := cap(b.Bytes()) + for i := 0; i < items; i++ { + err := b.Marshal(msg) + if err != nil { + t.Errorf("Marshal err = %q", err) + break + } + if c := cap(b.Bytes()); prevCap != c { + allocSpace += int64(c) + prevCap = c + } + } + needSpace := int64(len(b.Bytes())) + return allocSpace, needSpace + } + + for _, prealloc := range []int{0, 100, 10000} { + for _, items := range []int{1, 2, 5, 10, 20, 50, 100, 200, 500, 1000} { + runtimeSpace, need := reallocSize(t, items, prealloc) + totalSpace := int64(prealloc) + runtimeSpace + + runtimeRatio := float64(runtimeSpace) / float64(need) + totalRatio := float64(totalSpace) / float64(need) + + if totalRatio < 1 || runtimeRatio > 4 { + t.Errorf("needed %dB, allocated %dB total (ratio %.1f), allocated %dB at runtime (ratio %.1f)", + need, totalSpace, totalRatio, runtimeSpace, runtimeRatio) + } + } + } +} + // Simple tests for bytes func TestBytesPrimitives(t *testing.T) { o := old() @@ -519,7 +443,7 @@ func TestRequiredBit(t *testing.T) { err := o.Marshal(pb) if err == nil { t.Error("did not catch missing required fields") - } else if 
strings.Index(err.Error(), "Kind") < 0 { + } else if !strings.Contains(err.Error(), "Kind") { t.Error("wrong error type:", err) } } @@ -612,7 +536,9 @@ func TestEncodeDecode1(t *testing.T) { "b404"+ // field 70, encoding 4, end group "aa0605"+"6279746573"+ // field 101, encoding 2, string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 - "b8067f") // field 103, encoding 0, 0x7f zigzag64 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff") // field 105, encoding 1, -64 fixed64 } // All required fields set, defaults provided. @@ -647,9 +573,13 @@ func TestEncodeDecode2(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -669,6 +599,8 @@ func TestEncodeDecode3(t *testing.T) { pb.F_BytesDefaulted = []byte("Bignose") pb.F_Sint32Defaulted = Int32(-32) pb.F_Sint64Defaulted = Int64(-64) + pb.F_Sfixed32Defaulted = Int32(-32) + pb.F_Sfixed64Defaulted = Int64(-64) overify(t, pb, "0807"+ // field 1, encoding 0, value 7 @@ -699,9 +631,13 @@ func TestEncodeDecode3(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -724,6 +660,8 @@ func TestEncodeDecode4(t *testing.T) { pb.F_BytesOptional = []byte("Bignose") pb.F_Sint32Optional = Int32(-32) pb.F_Sint64Optional = Int64(-64) + pb.F_Sfixed32Optional = Int32(-32) + pb.F_Sfixed64Optional = Int64(-64) pb.Optionalgroup = initGoTest_OptionalGroup() overify(t, pb, @@ -771,12 +709,18 @@ func TestEncodeDecode4(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "ea1207"+"4269676e6f7365"+ // field 301, encoding 2, string "Bignose" "f0123f"+ // field 302, encoding 0, value 63 "f8127f"+ // field 303, encoding 0, value 127 + "8513e0ffffff"+ // field 304, encoding 5, -32 fixed32 + "8913c0ffffffffffffff"+ // field 305, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -797,6 +741,8 @@ func 
TestEncodeDecode5(t *testing.T) { pb.F_BytesRepeated = [][]byte{[]byte("big"), []byte("nose")} pb.F_Sint32Repeated = []int32{32, -32} pb.F_Sint64Repeated = []int64{64, -64} + pb.F_Sfixed32Repeated = []int32{32, -32} + pb.F_Sfixed64Repeated = []int64{64, -64} pb.Repeatedgroup = []*GoTest_RepeatedGroup{initGoTest_RepeatedGroup(), initGoTest_RepeatedGroup()} overify(t, pb, @@ -856,15 +802,23 @@ func TestEncodeDecode5(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "ca0c03"+"626967"+ // field 201, encoding 2, string "big" "ca0c04"+"6e6f7365"+ // field 201, encoding 2, string "nose" "d00c40"+ // field 202, encoding 0, value 32 "d00c3f"+ // field 202, encoding 0, value -32 "d80c8001"+ // field 203, encoding 0, value 64 "d80c7f"+ // field 203, encoding 0, value -64 + "e50c20000000"+ // field 204, encoding 5, 32 fixed32 + "e50ce0ffffff"+ // field 204, encoding 5, -32 fixed32 + "e90c4000000000000000"+ // field 205, encoding 1, 64 fixed64 + "e90cc0ffffffffffffff"+ // field 205, encoding 1, -64 fixed64 "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" "90193f"+ // field 402, encoding 0, value 63 - "98197f") // field 403, encoding 0, value 127 + "98197f"+ // field 403, encoding 0, value 127 + "a519e0ffffff"+ // field 404, encoding 5, -32 fixed32 + "a919c0ffffffffffffff") // field 405, encoding 1, -64 fixed64 } @@ -882,6 +836,8 @@ func TestEncodeDecode6(t *testing.T) { pb.F_DoubleRepeatedPacked = []float64{64., 65.} pb.F_Sint32RepeatedPacked = []int32{32, -32} pb.F_Sint64RepeatedPacked = []int64{64, -64} + pb.F_Sfixed32RepeatedPacked = []int32{32, -32} + pb.F_Sfixed64RepeatedPacked = []int64{64, -64} overify(t, pb, "0807"+ // field 1, encoding 0, value 7 @@ -917,10 +873,17 @@ func TestEncodeDecode6(t *testing.T) { "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff"+ // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff"+ // field 105, encoding 1, -64 fixed64 "b21f02"+ // field 502, encoding 2, 2 bytes "403f"+ // value 32, value -32 "ba1f03"+ // field 503, encoding 2, 3 bytes - "80017f") // value 64, value -64 + "80017f"+ // value 64, value -64 + "c21f08"+ // field 504, encoding 2, 8 bytes + "20000000e0ffffff"+ // value 32, value -32 + "ca1f10"+ // field 505, encoding 2, 16 bytes + "4000000000000000c0ffffffffffffff") // value 64, value -64 + } // Test that we can encode empty bytes fields. @@ -1167,13 +1130,10 @@ func TestBigRepeated(t *testing.T) { if pbd.Repeatedgroup[i] == nil { // TODO: more checking? 
t.Error("pbd.Repeatedgroup bad") } - var x uint64 - x = uint64(pbd.F_Sint64Repeated[i]) - if x != i { + if x := uint64(pbd.F_Sint64Repeated[i]); x != i { t.Error("pbd.F_Sint64Repeated bad", x, i) } - x = uint64(pbd.F_Sint32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Sint32Repeated[i]); x != i { t.Error("pbd.F_Sint32Repeated bad", x, i) } s := fmt.Sprint(i) @@ -1181,39 +1141,31 @@ func TestBigRepeated(t *testing.T) { if pbd.F_StringRepeated[i] != s { t.Error("pbd.F_Sint32Repeated bad", pbd.F_StringRepeated[i], i) } - x = uint64(pbd.F_DoubleRepeated[i]) - if x != i { + if x := uint64(pbd.F_DoubleRepeated[i]); x != i { t.Error("pbd.F_DoubleRepeated bad", x, i) } - x = uint64(pbd.F_FloatRepeated[i]) - if x != i { + if x := uint64(pbd.F_FloatRepeated[i]); x != i { t.Error("pbd.F_FloatRepeated bad", x, i) } - x = pbd.F_Uint64Repeated[i] - if x != i { + if x := pbd.F_Uint64Repeated[i]; x != i { t.Error("pbd.F_Uint64Repeated bad", x, i) } - x = uint64(pbd.F_Uint32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Uint32Repeated[i]); x != i { t.Error("pbd.F_Uint32Repeated bad", x, i) } - x = pbd.F_Fixed64Repeated[i] - if x != i { + if x := pbd.F_Fixed64Repeated[i]; x != i { t.Error("pbd.F_Fixed64Repeated bad", x, i) } - x = uint64(pbd.F_Fixed32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Fixed32Repeated[i]); x != i { t.Error("pbd.F_Fixed32Repeated bad", x, i) } - x = uint64(pbd.F_Int64Repeated[i]) - if x != i { + if x := uint64(pbd.F_Int64Repeated[i]); x != i { t.Error("pbd.F_Int64Repeated bad", x, i) } - x = uint64(pbd.F_Int32Repeated[i]) - if x != i { + if x := uint64(pbd.F_Int32Repeated[i]); x != i { t.Error("pbd.F_Int32Repeated bad", x, i) } - if pbd.F_BoolRepeated[i] != (i%2 == 0) { + if x := pbd.F_BoolRepeated[i]; x != (i%2 == 0) { t.Error("pbd.F_BoolRepeated bad", x, i) } if pbd.RepeatedField[i] == nil { // TODO: more checking? @@ -1222,21 +1174,25 @@ func TestBigRepeated(t *testing.T) { } } -// Verify we give a useful message when decoding to the wrong structure type. -func TestTypeMismatch(t *testing.T) { - pb1 := initGoTest(true) +func TestBadWireTypeUnknown(t *testing.T) { + var b []byte + fmt.Sscanf("0a01780d00000000080b101612036161611521000000202c220362626225370000002203636363214200000000000000584d5a036464645900000000000056405d63000000", "%x", &b) - // Marshal - o := old() - o.Marshal(pb1) + m := new(MyMessage) + if err := Unmarshal(b, m); err != nil { + t.Errorf("unexpected Unmarshal error: %v", err) + } - // Now Unmarshal it to the wrong type. 
- pb2 := initGoTestField() - err := o.Unmarshal(pb2) - if err == nil { - t.Error("expected error, got no error") - } else if !strings.Contains(err.Error(), "bad wiretype") { - t.Error("expected bad wiretype error, got", err) + var unknown []byte + fmt.Sscanf("0a01780d0000000010161521000000202c2537000000214200000000000000584d5a036464645d63000000", "%x", &unknown) + if !bytes.Equal(m.XXX_unrecognized, unknown) { + t.Errorf("unknown bytes mismatch:\ngot %x\nwant %x", m.XXX_unrecognized, unknown) + } + DiscardUnknown(m) + + want := &MyMessage{Count: Int32(11), Name: String("aaa"), Pet: []string{"bbb", "ccc"}, Bigfloat: Float64(88)} + if !Equal(m, want) { + t.Errorf("message mismatch:\ngot %v\nwant %v", m, want) } } @@ -1331,7 +1287,8 @@ func TestRequiredFieldEnforcement(t *testing.T) { err = Unmarshal(buf, pb) if err == nil { t.Error("unmarshal: expected error, got nil") - } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "{Unknown}") { + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Type") && !strings.Contains(err.Error(), "{Unknown}") { + // TODO: remove unknown cases once we commit to the new unmarshaler. t.Errorf("unmarshal: bad error type: %v", err) } } @@ -1348,7 +1305,7 @@ func TestRequiredFieldEnforcementGroups(t *testing.T) { buf := []byte{11, 12} if err := Unmarshal(buf, pb); err == nil { t.Error("unmarshal: expected error, got nil") - } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.{Unknown}") { + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.Field") && !strings.Contains(err.Error(), "Group.{Unknown}") { t.Errorf("unmarshal: bad error type: %v", err) } } @@ -1385,18 +1342,7 @@ func (*NNIMessage) Reset() {} func (*NNIMessage) String() string { return "" } func (*NNIMessage) ProtoMessage() {} -// A type that implements the Marshaler interface and is nillable. -type nillableMessage struct { - x uint64 -} - -func (nm *nillableMessage) Marshal() ([]byte, error) { - return EncodeVarint(nm.x), nil -} - -type NMMessage struct { - nm *nillableMessage -} +type NMMessage struct{} func (*NMMessage) Reset() {} func (*NMMessage) String() string { return "" } @@ -1595,6 +1541,14 @@ func TestVarintOverflow(t *testing.T) { } } +func TestBytesWithInvalidLengthInGroup(t *testing.T) { + // Overflowing a 64-bit length should not be allowed. 
+ b := []byte{0xbb, 0x30, 0xb2, 0x30, 0xb0, 0xb2, 0x83, 0xf1, 0xb0, 0xb2, 0xef, 0xbf, 0xbd, 0x01} + if err := Unmarshal(b, new(MyMessage)); err == nil { + t.Fatalf("Overflowed uint64 length without error") + } +} + func TestUnmarshalFuzz(t *testing.T) { const N = 1000 seed := time.Now().UnixNano() @@ -1668,6 +1622,28 @@ func TestExtensionMarshalOrder(t *testing.T) { } } +func TestExtensionMapFieldMarshalDeterministic(t *testing.T) { + m := &MyMessage{Count: Int(123)} + if err := SetExtension(m, E_Ext_More, &Ext{MapField: map[int32]int32{1: 1, 2: 2, 3: 3, 4: 4}}); err != nil { + t.Fatalf("SetExtension: %v", err) + } + marshal := func(m Message) []byte { + var b Buffer + b.SetDeterministic(true) + if err := b.Marshal(m); err != nil { + t.Fatalf("Marshal failed: %v", err) + } + return b.Bytes() + } + + want := marshal(m) + for i := 0; i < 100; i++ { + if got := marshal(m); !bytes.Equal(got, want) { + t.Errorf("Marshal produced inconsistent output with determinism enabled (pass %d).\n got %v\nwant %v", i, got, want) + } + } +} + // Many extensions, because small maps might not iterate differently on each iteration. var exts = []*ExtensionDesc{ E_X201, @@ -1802,6 +1778,43 @@ func TestUnmarshalMergesMessages(t *testing.T) { } } +func TestUnmarshalMergesGroups(t *testing.T) { + // If a nested group occurs twice in the input, + // the fields should be merged when decoding. + a := &GroupNew{ + G: &GroupNew_G{ + X: Int32(7), + Y: Int32(8), + }, + } + aData, err := Marshal(a) + if err != nil { + t.Fatalf("Marshal(a): %v", err) + } + b := &GroupNew{ + G: &GroupNew_G{ + X: Int32(9), + }, + } + bData, err := Marshal(b) + if err != nil { + t.Fatalf("Marshal(b): %v", err) + } + want := &GroupNew{ + G: &GroupNew_G{ + X: Int32(9), + Y: Int32(8), + }, + } + got := new(GroupNew) + if err := Unmarshal(append(aData, bData...), got); err != nil { + t.Fatalf("Unmarshal: %v", err) + } + if !Equal(got, want) { + t.Errorf("\n got %v\nwant %v", got, want) + } +} + func TestEncodingSizes(t *testing.T) { tests := []struct { m Message @@ -1845,7 +1858,9 @@ func TestRequiredNotSetError(t *testing.T) { "b404" + // field 70, encoding 4, end group "aa0605" + "6279746573" + // field 101, encoding 2, string "bytes" "b0063f" + // field 102, encoding 0, 0x3f zigzag32 - "b8067f" // field 103, encoding 0, 0x7f zigzag64 + "b8067f" + // field 103, encoding 0, 0x7f zigzag64 + "c506e0ffffff" + // field 104, encoding 5, -32 fixed32 + "c906c0ffffffffffffff" // field 105, encoding 1, -64 fixed64 o := old() bytes, err := Marshal(pb) @@ -1854,7 +1869,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("expected = %s", expected) } - if strings.Index(err.Error(), "RequiredField.Label") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") { t.Errorf("marshal-1 wrong err msg: %v", err) } if !equal(bytes, expected, t) { @@ -1870,7 +1885,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("string = %s", expected) } - if strings.Index(err.Error(), "RequiredField.{Unknown}") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") && !strings.Contains(err.Error(), "RequiredField.{Unknown}") { t.Errorf("unmarshal wrong err msg: %v", err) } bytes, err = Marshal(pbd) @@ -1879,7 +1894,7 @@ func TestRequiredNotSetError(t *testing.T) { o.DebugPrint("", bytes) t.Fatalf("string = %s", expected) } - if strings.Index(err.Error(), "RequiredField.Label") < 0 { + if !strings.Contains(err.Error(), "RequiredField.Label") { t.Errorf("marshal-2 wrong err msg: %v", err) } if 
!equal(bytes, expected, t) { @@ -1888,6 +1903,25 @@ func TestRequiredNotSetError(t *testing.T) { } } +func TestRequiredNotSetErrorWithBadWireTypes(t *testing.T) { + // Required field expects a varint, and properly found a varint. + if err := Unmarshal([]byte{0x08, 0x00}, new(GoEnum)); err != nil { + t.Errorf("Unmarshal = %v, want nil", err) + } + // Required field expects a varint, but found a fixed32 instead. + if err := Unmarshal([]byte{0x0d, 0x00, 0x00, 0x00, 0x00}, new(GoEnum)); err == nil { + t.Errorf("Unmarshal = nil, want RequiredNotSetError") + } + // Required field expects a varint, and found both a varint and fixed32 (ignored). + m := new(GoEnum) + if err := Unmarshal([]byte{0x08, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x00}, m); err != nil { + t.Errorf("Unmarshal = %v, want nil", err) + } + if !bytes.Equal(m.XXX_unrecognized, []byte{0x0d, 0x00, 0x00, 0x00, 0x00}) { + t.Errorf("expected fixed32 to appear as unknown bytes: %x", m.XXX_unrecognized) + } +} + func fuzzUnmarshal(t *testing.T, data []byte) { defer func() { if e := recover(); e != nil { @@ -1946,6 +1980,32 @@ func TestMapFieldMarshal(t *testing.T) { (new(Buffer)).DebugPrint("Dump of b", b) } +func TestMapFieldDeterministicMarshal(t *testing.T) { + m := &MessageWithMap{ + NameMapping: map[int32]string{ + 1: "Rob", + 4: "Ian", + 8: "Dave", + }, + } + + marshal := func(m Message) []byte { + var b Buffer + b.SetDeterministic(true) + if err := b.Marshal(m); err != nil { + t.Fatalf("Marshal failed: %v", err) + } + return b.Bytes() + } + + want := marshal(m) + for i := 0; i < 10; i++ { + if got := marshal(m); !bytes.Equal(got, want) { + t.Errorf("Marshal produced inconsistent output with determinism enabled (pass %d).\n got %v\nwant %v", i, got, want) + } + } +} + func TestMapFieldRoundTrips(t *testing.T) { m := &MessageWithMap{ NameMapping: map[int32]string{ @@ -1954,7 +2014,7 @@ func TestMapFieldRoundTrips(t *testing.T) { 8: "Dave", }, MsgMapping: map[int64]*FloatingPoint{ - 0x7001: &FloatingPoint{F: Float64(2.0)}, + 0x7001: {F: Float64(2.0)}, }, ByteMapping: map[bool][]byte{ false: []byte("that's not right!"), @@ -1970,14 +2030,8 @@ func TestMapFieldRoundTrips(t *testing.T) { if err := Unmarshal(b, m2); err != nil { t.Fatalf("Unmarshal: %v", err) } - for _, pair := range [][2]interface{}{ - {m.NameMapping, m2.NameMapping}, - {m.MsgMapping, m2.MsgMapping}, - {m.ByteMapping, m2.ByteMapping}, - } { - if !reflect.DeepEqual(pair[0], pair[1]) { - t.Errorf("Map did not survive a round trip.\ninitial: %v\n final: %v", pair[0], pair[1]) - } + if !Equal(m, m2) { + t.Errorf("Map did not survive a round trip.\ninitial: %v\n final: %v", m, m2) } } @@ -2005,7 +2059,7 @@ func TestMapFieldWithNil(t *testing.T) { func TestMapFieldWithNilBytes(t *testing.T) { m1 := &MessageWithMap{ ByteMapping: map[bool][]byte{ - false: []byte{}, + false: {}, true: nil, }, } @@ -2119,6 +2173,22 @@ func TestOneof(t *testing.T) { } } +func TestOneofNilBytes(t *testing.T) { + // A oneof with nil byte slice should marshal to tag + 0 (size), with no error. 
+ m := &Communique{Union: &Communique_Data{Data: nil}} + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal failed: %v", err) + } + want := []byte{ + 7<<3 | 2, // tag 7, wire type 2 + 0, // size + } + if !bytes.Equal(b, want) { + t.Errorf("Wrong result of Marshal: got %x, want %x", b, want) + } +} + func TestInefficientPackedBool(t *testing.T) { // https://github.com/golang/protobuf/issues/76 inp := []byte{ @@ -2132,6 +2202,150 @@ func TestInefficientPackedBool(t *testing.T) { } } +// Make sure pure-reflect-based implementation handles +// []int32-[]enum conversion correctly. +func TestRepeatedEnum2(t *testing.T) { + pb := &RepeatedEnum{ + Color: []RepeatedEnum_Color{RepeatedEnum_RED}, + } + b, err := Marshal(pb) + if err != nil { + t.Fatalf("Marshal failed: %v", err) + } + x := new(RepeatedEnum) + err = Unmarshal(b, x) + if err != nil { + t.Fatalf("Unmarshal failed: %v", err) + } + if !Equal(pb, x) { + t.Errorf("Incorrect result: want: %v got: %v", pb, x) + } +} + +// TestConcurrentMarshal makes sure that it is safe to marshal +// same message in multiple goroutines concurrently. +func TestConcurrentMarshal(t *testing.T) { + pb := initGoTest(true) + const N = 100 + b := make([][]byte, N) + + var wg sync.WaitGroup + for i := 0; i < N; i++ { + wg.Add(1) + go func(i int) { + defer wg.Done() + var err error + b[i], err = Marshal(pb) + if err != nil { + t.Errorf("marshal error: %v", err) + } + }(i) + } + + wg.Wait() + for i := 1; i < N; i++ { + if !bytes.Equal(b[0], b[i]) { + t.Errorf("concurrent marshal result not same: b[0] = %v, b[%d] = %v", b[0], i, b[i]) + } + } +} + +func TestInvalidUTF8(t *testing.T) { + const invalidUTF8 = "\xde\xad\xbe\xef\x80\x00\xff" + tests := []struct { + label string + proto2 Message + proto3 Message + want []byte + }{{ + label: "Scalar", + proto2: &TestUTF8{Scalar: String(invalidUTF8)}, + proto3: &pb3.TestUTF8{Scalar: invalidUTF8}, + want: []byte{0x0a, 0x07, 0xde, 0xad, 0xbe, 0xef, 0x80, 0x00, 0xff}, + }, { + label: "Vector", + proto2: &TestUTF8{Vector: []string{invalidUTF8}}, + proto3: &pb3.TestUTF8{Vector: []string{invalidUTF8}}, + want: []byte{0x12, 0x07, 0xde, 0xad, 0xbe, 0xef, 0x80, 0x00, 0xff}, + }, { + label: "Oneof", + proto2: &TestUTF8{Oneof: &TestUTF8_Field{invalidUTF8}}, + proto3: &pb3.TestUTF8{Oneof: &pb3.TestUTF8_Field{invalidUTF8}}, + want: []byte{0x1a, 0x07, 0xde, 0xad, 0xbe, 0xef, 0x80, 0x00, 0xff}, + }, { + label: "MapKey", + proto2: &TestUTF8{MapKey: map[string]int64{invalidUTF8: 0}}, + proto3: &pb3.TestUTF8{MapKey: map[string]int64{invalidUTF8: 0}}, + want: []byte{0x22, 0x0b, 0x0a, 0x07, 0xde, 0xad, 0xbe, 0xef, 0x80, 0x00, 0xff, 0x10, 0x00}, + }, { + label: "MapValue", + proto2: &TestUTF8{MapValue: map[int64]string{0: invalidUTF8}}, + proto3: &pb3.TestUTF8{MapValue: map[int64]string{0: invalidUTF8}}, + want: []byte{0x2a, 0x0b, 0x08, 0x00, 0x12, 0x07, 0xde, 0xad, 0xbe, 0xef, 0x80, 0x00, 0xff}, + }} + + for _, tt := range tests { + // Proto2 should not validate UTF-8. + b, err := Marshal(tt.proto2) + if err != nil { + t.Errorf("Marshal(proto2.%s) = %v, want nil", tt.label, err) + } + if !bytes.Equal(b, tt.want) { + t.Errorf("Marshal(proto2.%s) = %x, want %x", tt.label, b, tt.want) + } + + m := Clone(tt.proto2) + m.Reset() + if err = Unmarshal(tt.want, m); err != nil { + t.Errorf("Unmarshal(proto2.%s) = %v, want nil", tt.label, err) + } + if !Equal(m, tt.proto2) { + t.Errorf("proto2.%s: output mismatch:\ngot %v\nwant %v", tt.label, m, tt.proto2) + } + + // Proto3 should validate UTF-8. 
+ b, err = Marshal(tt.proto3) + if err == nil { + t.Errorf("Marshal(proto3.%s) = %v, want non-nil", tt.label, err) + } + if !bytes.Equal(b, tt.want) { + t.Errorf("Marshal(proto3.%s) = %x, want %x", tt.label, b, tt.want) + } + + m = Clone(tt.proto3) + m.Reset() + err = Unmarshal(tt.want, m) + if err == nil { + t.Errorf("Unmarshal(proto3.%s) = %v, want non-nil", tt.label, err) + } + if !Equal(m, tt.proto3) { + t.Errorf("proto3.%s: output mismatch:\ngot %v\nwant %v", tt.label, m, tt.proto2) + } + } +} + +func TestRequired(t *testing.T) { + // The F_BoolRequired field appears after all of the required fields. + // It should still be handled even after multiple required field violations. + m := &GoTest{F_BoolRequired: Bool(true)} + got, err := Marshal(m) + if _, ok := err.(*RequiredNotSetError); !ok { + t.Errorf("Marshal() = %v, want RequiredNotSetError error", err) + } + if want := []byte{0x50, 0x01}; !bytes.Equal(got, want) { + t.Errorf("Marshal() = %x, want %x", got, want) + } + + m = new(GoTest) + err = Unmarshal(got, m) + if _, ok := err.(*RequiredNotSetError); !ok { + t.Errorf("Marshal() = %v, want RequiredNotSetError error", err) + } + if !m.GetF_BoolRequired() { + t.Error("m.F_BoolRequired = false, want true") + } +} + // Benchmarks func testMsg() *GoTest { diff --git a/vendor/github.com/golang/protobuf/proto/any_test.go b/vendor/github.com/golang/protobuf/proto/any_test.go index 1a3c22e..56fc97c 100644 --- a/vendor/github.com/golang/protobuf/proto/any_test.go +++ b/vendor/github.com/golang/protobuf/proto/any_test.go @@ -38,7 +38,7 @@ import ( "github.com/golang/protobuf/proto" pb "github.com/golang/protobuf/proto/proto3_proto" - testpb "github.com/golang/protobuf/proto/testdata" + testpb "github.com/golang/protobuf/proto/test_proto" anypb "github.com/golang/protobuf/ptypes/any" ) @@ -166,33 +166,33 @@ anything: < name: "David" result_count: 47 anything: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 47 name: "David" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "foo" > - [testdata.Ext.text]: "bar" + [test_proto.Ext.text]: "bar" > > many_things: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 42 bikeshed: GREEN rep_bytes: "roboto" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "baz" > > > many_things: < - [type.googleapis.com/testdata.MyMessage]: < + [type.googleapis.com/test_proto.MyMessage]: < count: 47 name: "David" - [testdata.Ext.more]: < + [test_proto.Ext.more]: < data: "foo" > - [testdata.Ext.text]: "bar" + [test_proto.Ext.text]: "bar" > > ` diff --git a/vendor/github.com/golang/protobuf/proto/clone.go b/vendor/github.com/golang/protobuf/proto/clone.go index e392575..3cd3249 100644 --- a/vendor/github.com/golang/protobuf/proto/clone.go +++ b/vendor/github.com/golang/protobuf/proto/clone.go @@ -35,22 +35,39 @@ package proto import ( + "fmt" "log" "reflect" "strings" ) // Clone returns a deep copy of a protocol buffer. -func Clone(pb Message) Message { - in := reflect.ValueOf(pb) +func Clone(src Message) Message { + in := reflect.ValueOf(src) if in.IsNil() { - return pb + return src } - out := reflect.New(in.Type().Elem()) - // out is empty so a merge is a deep copy. - mergeStruct(out.Elem(), in.Elem()) - return out.Interface().(Message) + dst := out.Interface().(Message) + Merge(dst, src) + return dst +} + +// Merger is the interface representing objects that can merge messages of the same type. 
+type Merger interface { + // Merge merges src into this message. + // Required and optional fields that are set in src will be set to that value in dst. + // Elements of repeated fields will be appended. + // + // Merge may panic if called with a different argument type than the receiver. + Merge(src Message) +} + +// generatedMerger is the custom merge method that generated protos will have. +// We must add this method since a generate Merge method will conflict with +// many existing protos that have a Merge data field already defined. +type generatedMerger interface { + XXX_Merge(src Message) } // Merge merges src into dst. @@ -58,17 +75,24 @@ func Clone(pb Message) Message { // Elements of repeated fields will be appended. // Merge panics if src and dst are not the same type, or if dst is nil. func Merge(dst, src Message) { + if m, ok := dst.(Merger); ok { + m.Merge(src) + return + } + in := reflect.ValueOf(src) out := reflect.ValueOf(dst) if out.IsNil() { panic("proto: nil destination") } if in.Type() != out.Type() { - // Explicit test prior to mergeStruct so that mistyped nils will fail - panic("proto: type mismatch") + panic(fmt.Sprintf("proto.Merge(%T, %T) type mismatch", dst, src)) } if in.IsNil() { - // Merging nil into non-nil is a quiet no-op + return // Merge from nil src is a noop + } + if m, ok := dst.(generatedMerger); ok { + m.XXX_Merge(src) return } mergeStruct(out.Elem(), in.Elem()) @@ -84,7 +108,7 @@ func mergeStruct(out, in reflect.Value) { mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) } - if emIn, ok := extendable(in.Addr().Interface()); ok { + if emIn, err := extendable(in.Addr().Interface()); err == nil { emOut, _ := extendable(out.Addr().Interface()) mIn, muIn := emIn.extensionsRead() if mIn != nil { diff --git a/vendor/github.com/golang/protobuf/proto/clone_test.go b/vendor/github.com/golang/protobuf/proto/clone_test.go index f607ff4..b04989e 100644 --- a/vendor/github.com/golang/protobuf/proto/clone_test.go +++ b/vendor/github.com/golang/protobuf/proto/clone_test.go @@ -37,7 +37,7 @@ import ( "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) var cloneTestMessage = &pb.MyMessage{ @@ -67,34 +67,50 @@ func init() { if err := proto.SetExtension(cloneTestMessage, pb.E_Ext_More, ext); err != nil { panic("SetExtension: " + err.Error()) } + if err := proto.SetExtension(cloneTestMessage, pb.E_Ext_Text, proto.String("hello")); err != nil { + panic("SetExtension: " + err.Error()) + } + if err := proto.SetExtension(cloneTestMessage, pb.E_Greeting, []string{"one", "two"}); err != nil { + panic("SetExtension: " + err.Error()) + } } func TestClone(t *testing.T) { + // Create a clone using a marshal/unmarshal roundtrip. + vanilla := new(pb.MyMessage) + b, err := proto.Marshal(cloneTestMessage) + if err != nil { + t.Errorf("unexpected Marshal error: %v", err) + } + if err := proto.Unmarshal(b, vanilla); err != nil { + t.Errorf("unexpected Unarshal error: %v", err) + } + + // Create a clone using Clone and verify that it is equal to the original. m := proto.Clone(cloneTestMessage).(*pb.MyMessage) if !proto.Equal(m, cloneTestMessage) { - t.Errorf("Clone(%v) = %v", cloneTestMessage, m) + t.Fatalf("Clone(%v) = %v", cloneTestMessage, m) } - // Verify it was a deep copy. 
- *m.Inner.Port++ - if proto.Equal(m, cloneTestMessage) { - t.Error("Mutating clone changed the original") - } - // Byte fields and repeated fields should be copied. - if &m.Pet[0] == &cloneTestMessage.Pet[0] { - t.Error("Pet: repeated field not copied") + // Mutate the clone, which should not affect the original. + x1, err := proto.GetExtension(m, pb.E_Ext_More) + if err != nil { + t.Errorf("unexpected GetExtension(%v) error: %v", pb.E_Ext_More.Name, err) } - if &m.Others[0] == &cloneTestMessage.Others[0] { - t.Error("Others: repeated field not copied") + x2, err := proto.GetExtension(m, pb.E_Ext_Text) + if err != nil { + t.Errorf("unexpected GetExtension(%v) error: %v", pb.E_Ext_Text.Name, err) } - if &m.Others[0].Value[0] == &cloneTestMessage.Others[0].Value[0] { - t.Error("Others[0].Value: bytes field not copied") + x3, err := proto.GetExtension(m, pb.E_Greeting) + if err != nil { + t.Errorf("unexpected GetExtension(%v) error: %v", pb.E_Greeting.Name, err) } - if &m.RepBytes[0] == &cloneTestMessage.RepBytes[0] { - t.Error("RepBytes: repeated field not copied") - } - if &m.RepBytes[0][0] == &cloneTestMessage.RepBytes[0][0] { - t.Error("RepBytes[0]: bytes field not copied") + *m.Inner.Port++ + *(x1.(*pb.Ext)).Data = "blah blah" + *(x2.(*string)) = "goodbye" + x3.([]string)[0] = "zero" + if !proto.Equal(cloneTestMessage, vanilla) { + t.Fatalf("mutation on original detected:\ngot %v\nwant %v", cloneTestMessage, vanilla) } } @@ -244,27 +260,45 @@ var mergeTests = []struct { Data: []byte("texas!"), }, }, - // Oneof fields should merge by assignment. + { // Oneof fields should merge by assignment. + src: &pb.Communique{Union: &pb.Communique_Number{41}}, + dst: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, + want: &pb.Communique{Union: &pb.Communique_Number{41}}, + }, + { // Oneof nil is the same as not set. + src: &pb.Communique{}, + dst: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, + want: &pb.Communique{Union: &pb.Communique_Name{"Bobby Tables"}}, + }, { - src: &pb.Communique{ - Union: &pb.Communique_Number{41}, - }, - dst: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, - want: &pb.Communique{ - Union: &pb.Communique_Number{41}, - }, + src: &pb.Communique{Union: &pb.Communique_Number{1337}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Number{1337}}, }, - // Oneof nil is the same as not set. 
{ - src: &pb.Communique{}, - dst: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, - want: &pb.Communique{ - Union: &pb.Communique_Name{"Bobby Tables"}, - }, + src: &pb.Communique{Union: &pb.Communique_Col{pb.MyMessage_RED}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Col{pb.MyMessage_RED}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Data{[]byte("hello")}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Data{[]byte("hello")}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{}}, + dst: &pb.Communique{}, + want: &pb.Communique{Union: &pb.Communique_Msg{}}, + }, + { + src: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123")}}}, + dst: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{BytesField: []byte{1, 2, 3}}}}, + want: &pb.Communique{Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123"), BytesField: []byte{1, 2, 3}}}}, }, { src: &proto3pb.Message{ @@ -287,14 +321,86 @@ var mergeTests = []struct { }, }, }, + { + src: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + dst: &pb.GoTest{}, + want: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + }, + { + src: &pb.GoTest{}, + dst: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + want: &pb.GoTest{ + F_BoolRepeated: []bool{}, + F_Int32Repeated: []int32{}, + F_Int64Repeated: []int64{}, + F_Uint32Repeated: []uint32{}, + F_Uint64Repeated: []uint64{}, + F_FloatRepeated: []float32{}, + F_DoubleRepeated: []float64{}, + F_StringRepeated: []string{}, + F_BytesRepeated: [][]byte{}, + }, + }, + { + src: &pb.GoTest{ + F_BytesRepeated: [][]byte{nil, []byte{}, []byte{0}}, + }, + dst: &pb.GoTest{}, + want: &pb.GoTest{ + F_BytesRepeated: [][]byte{nil, []byte{}, []byte{0}}, + }, + }, + { + src: &pb.MyMessage{ + Others: []*pb.OtherMessage{}, + }, + dst: &pb.MyMessage{}, + want: &pb.MyMessage{ + Others: []*pb.OtherMessage{}, + }, + }, } func TestMerge(t *testing.T) { for _, m := range mergeTests { got := proto.Clone(m.dst) + if !proto.Equal(got, m.dst) { + t.Errorf("Clone()\ngot %v\nwant %v", got, m.dst) + continue + } proto.Merge(got, m.src) if !proto.Equal(got, m.want) { - t.Errorf("Merge(%v, %v)\n got %v\nwant %v\n", m.dst, m.src, got, m.want) + t.Errorf("Merge(%v, %v)\ngot %v\nwant %v", m.dst, m.src, got, m.want) } } } diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go index aa20729..63b0f08 100644 --- a/vendor/github.com/golang/protobuf/proto/decode.go +++ b/vendor/github.com/golang/protobuf/proto/decode.go @@ 
-39,8 +39,6 @@ import ( "errors" "fmt" "io" - "os" - "reflect" ) // errOverflow is returned when an integer is too large to be represented. @@ -50,10 +48,6 @@ var errOverflow = errors.New("proto: integer overflow") // wire type is encountered. It does not get returned to user code. var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") -// The fundamental decoders that interpret bytes on the wire. -// Those that take integer types all return uint64 and are -// therefore of type valueDecoder. - // DecodeVarint reads a varint-encoded integer from the slice. // It returns the integer and the number of bytes consumed, or // zero if there is not enough. @@ -192,7 +186,6 @@ func (p *Buffer) DecodeVarint() (x uint64, err error) { if b&0x80 == 0 { goto done } - // x -= 0x80 << 63 // Always zero. return 0, errOverflow @@ -267,9 +260,6 @@ func (p *Buffer) DecodeZigzag32() (x uint64, err error) { return } -// These are not ValueDecoders: they produce an array of bytes or a string. -// bytes, embedded messages - // DecodeRawBytes reads a count-delimited byte buffer from the Buffer. // This is the format used for the bytes protocol buffer // type and for embedded messages. @@ -311,81 +301,29 @@ func (p *Buffer) DecodeStringBytes() (s string, err error) { return string(buf), nil } -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. -// If the protocol buffer has extensions, and the field matches, add it as an extension. -// Otherwise, if the XXX_unrecognized field exists, append the skipped data there. -func (o *Buffer) skipAndSave(t reflect.Type, tag, wire int, base structPointer, unrecField field) error { - oi := o.index - - err := o.skip(t, tag, wire) - if err != nil { - return err - } - - if !unrecField.IsValid() { - return nil - } - - ptr := structPointer_Bytes(base, unrecField) - - // Add the skipped field to struct field - obuf := o.buf - - o.buf = *ptr - o.EncodeVarint(uint64(tag<<3 | wire)) - *ptr = append(o.buf, obuf[oi:o.index]...) - - o.buf = obuf - - return nil -} - -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. -func (o *Buffer) skip(t reflect.Type, tag, wire int) error { - - var u uint64 - var err error - - switch wire { - case WireVarint: - _, err = o.DecodeVarint() - case WireFixed64: - _, err = o.DecodeFixed64() - case WireBytes: - _, err = o.DecodeRawBytes(false) - case WireFixed32: - _, err = o.DecodeFixed32() - case WireStartGroup: - for { - u, err = o.DecodeVarint() - if err != nil { - break - } - fwire := int(u & 0x7) - if fwire == WireEndGroup { - break - } - ftag := int(u >> 3) - err = o.skip(t, ftag, fwire) - if err != nil { - break - } - } - default: - err = fmt.Errorf("proto: can't skip unknown wire type %d for %s", wire, t) - } - return err -} - // Unmarshaler is the interface representing objects that can -// unmarshal themselves. The method should reset the receiver before -// decoding starts. The argument points to data that may be +// unmarshal themselves. The argument points to data that may be // overwritten, so implementations should not keep references to the // buffer. +// Unmarshal implementations should not clear the receiver. +// Any unmarshaled data should be merged into the receiver. +// Callers of Unmarshal that do not want to retain existing data +// should Reset the receiver before calling Unmarshal. 
type Unmarshaler interface { Unmarshal([]byte) error } +// newUnmarshaler is the interface representing objects that can +// unmarshal themselves. The semantics are identical to Unmarshaler. +// +// This exists to support protoc-gen-go generated messages. +// The proto package will stop type-asserting to this interface in the future. +// +// DO NOT DEPEND ON THIS. +type newUnmarshaler interface { + XXX_Unmarshal([]byte) error +} + // Unmarshal parses the protocol buffer representation in buf and places the // decoded result in pb. If the struct underlying pb does not match // the data in buf, the results can be unpredictable. @@ -395,7 +333,13 @@ type Unmarshaler interface { // to preserve and append to existing data. func Unmarshal(buf []byte, pb Message) error { pb.Reset() - return UnmarshalMerge(buf, pb) + if u, ok := pb.(newUnmarshaler); ok { + return u.XXX_Unmarshal(buf) + } + if u, ok := pb.(Unmarshaler); ok { + return u.Unmarshal(buf) + } + return NewBuffer(buf).Unmarshal(pb) } // UnmarshalMerge parses the protocol buffer representation in buf and @@ -405,8 +349,16 @@ func Unmarshal(buf []byte, pb Message) error { // UnmarshalMerge merges into existing data in pb. // Most code should use Unmarshal instead. func UnmarshalMerge(buf []byte, pb Message) error { - // If the object can unmarshal itself, let it. + if u, ok := pb.(newUnmarshaler); ok { + return u.XXX_Unmarshal(buf) + } if u, ok := pb.(Unmarshaler); ok { + // NOTE: The history of proto have unfortunately been inconsistent + // whether Unmarshaler should or should not implicitly clear itself. + // Some implementations do, most do not. + // Thus, calling this here may or may not do what people want. + // + // See https://github.com/golang/protobuf/issues/424 return u.Unmarshal(buf) } return NewBuffer(buf).Unmarshal(pb) @@ -422,12 +374,17 @@ func (p *Buffer) DecodeMessage(pb Message) error { } // DecodeGroup reads a tag-delimited group from the Buffer. +// StartGroup tag is already consumed. This function consumes +// EndGroup tag. func (p *Buffer) DecodeGroup(pb Message) error { - typ, base, err := getbase(pb) - if err != nil { - return err + b := p.buf[p.index:] + x, y := findEndGroup(b) + if x < 0 { + return io.ErrUnexpectedEOF } - return p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), true, base) + err := Unmarshal(b[:x], pb) + p.index += y + return err } // Unmarshal parses the protocol buffer representation in the @@ -438,533 +395,33 @@ func (p *Buffer) DecodeGroup(pb Message) error { // Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. func (p *Buffer) Unmarshal(pb Message) error { // If the object can unmarshal itself, let it. - if u, ok := pb.(Unmarshaler); ok { - err := u.Unmarshal(p.buf[p.index:]) + if u, ok := pb.(newUnmarshaler); ok { + err := u.XXX_Unmarshal(p.buf[p.index:]) p.index = len(p.buf) return err } - - typ, base, err := getbase(pb) - if err != nil { - return err - } - - err = p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), false, base) - - if collectStats { - stats.Decode++ - } - - return err -} - -// unmarshalType does the work of unmarshaling a structure. 
-func (o *Buffer) unmarshalType(st reflect.Type, prop *StructProperties, is_group bool, base structPointer) error { - var state errorState - required, reqFields := prop.reqCount, uint64(0) - - var err error - for err == nil && o.index < len(o.buf) { - oi := o.index - var u uint64 - u, err = o.DecodeVarint() - if err != nil { - break - } - wire := int(u & 0x7) - if wire == WireEndGroup { - if is_group { - if required > 0 { - // Not enough information to determine the exact field. - // (See below.) - return &RequiredNotSetError{"{Unknown}"} - } - return nil // input is satisfied - } - return fmt.Errorf("proto: %s: wiretype end group for non-group", st) - } - tag := int(u >> 3) - if tag <= 0 { - return fmt.Errorf("proto: %s: illegal tag %d (wire type %d)", st, tag, wire) - } - fieldnum, ok := prop.decoderTags.get(tag) - if !ok { - // Maybe it's an extension? - if prop.extendable { - if e, _ := extendable(structPointer_Interface(base, st)); isExtensionField(e, int32(tag)) { - if err = o.skip(st, tag, wire); err == nil { - extmap := e.extensionsWrite() - ext := extmap[int32(tag)] // may be missing - ext.enc = append(ext.enc, o.buf[oi:o.index]...) - extmap[int32(tag)] = ext - } - continue - } - } - // Maybe it's a oneof? - if prop.oneofUnmarshaler != nil { - m := structPointer_Interface(base, st).(Message) - // First return value indicates whether tag is a oneof field. - ok, err = prop.oneofUnmarshaler(m, tag, wire, o) - if err == ErrInternalBadWireType { - // Map the error to something more descriptive. - // Do the formatting here to save generated code space. - err = fmt.Errorf("bad wiretype for oneof field in %T", m) - } - if ok { - continue - } - } - err = o.skipAndSave(st, tag, wire, base, prop.unrecField) - continue - } - p := prop.Prop[fieldnum] - - if p.dec == nil { - fmt.Fprintf(os.Stderr, "proto: no protobuf decoder for %s.%s\n", st, st.Field(fieldnum).Name) - continue - } - dec := p.dec - if wire != WireStartGroup && wire != p.WireType { - if wire == WireBytes && p.packedDec != nil { - // a packable field - dec = p.packedDec - } else { - err = fmt.Errorf("proto: bad wiretype for field %s.%s: got wiretype %d, want %d", st, st.Field(fieldnum).Name, wire, p.WireType) - continue - } - } - decErr := dec(o, p, base) - if decErr != nil && !state.shouldContinue(decErr, p) { - err = decErr - } - if err == nil && p.Required { - // Successfully decoded a required field. - if tag <= 64 { - // use bitmap for fields 1-64 to catch field reuse. - var mask uint64 = 1 << uint64(tag-1) - if reqFields&mask == 0 { - // new required field - reqFields |= mask - required-- - } - } else { - // This is imprecise. It can be fooled by a required field - // with a tag > 64 that is encoded twice; that's very rare. - // A fully correct implementation would require allocating - // a data structure, which we would like to avoid. - required-- - } - } - } - if err == nil { - if is_group { - return io.ErrUnexpectedEOF - } - if state.err != nil { - return state.err - } - if required > 0 { - // Not enough information to determine the exact field. If we use extra - // CPU, we could determine the field only if the missing required field - // has a tag <= 64 and we check reqFields. - return &RequiredNotSetError{"{Unknown}"} - } - } - return err -} - -// Individual type decoders -// For each, -// u is the decoded value, -// v is a pointer to the field (pointer) in the struct - -// Sizes of the pools to allocate inside the Buffer. -// The goal is modest amortization and allocation -// on at least 16-byte boundaries. 
-const ( - boolPoolSize = 16 - uint32PoolSize = 8 - uint64PoolSize = 4 -) - -// Decode a bool. -func (o *Buffer) dec_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - if len(o.bools) == 0 { - o.bools = make([]bool, boolPoolSize) - } - o.bools[0] = u != 0 - *structPointer_Bool(base, p.field) = &o.bools[0] - o.bools = o.bools[1:] - return nil -} - -func (o *Buffer) dec_proto3_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - *structPointer_BoolVal(base, p.field) = u != 0 - return nil -} - -// Decode an int32. -func (o *Buffer) dec_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32_Set(structPointer_Word32(base, p.field), o, uint32(u)) - return nil -} - -func (o *Buffer) dec_proto3_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32Val_Set(structPointer_Word32Val(base, p.field), uint32(u)) - return nil -} - -// Decode an int64. -func (o *Buffer) dec_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64_Set(structPointer_Word64(base, p.field), o, u) - return nil -} - -func (o *Buffer) dec_proto3_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64Val_Set(structPointer_Word64Val(base, p.field), o, u) - return nil -} - -// Decode a string. -func (o *Buffer) dec_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_String(base, p.field) = &s - return nil -} - -func (o *Buffer) dec_proto3_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_StringVal(base, p.field) = s - return nil -} - -// Decode a slice of bytes ([]byte). -func (o *Buffer) dec_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - *structPointer_Bytes(base, p.field) = b - return nil -} - -// Decode a slice of bools ([]bool). -func (o *Buffer) dec_slice_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - v := structPointer_BoolSlice(base, p.field) - *v = append(*v, u != 0) - return nil -} - -// Decode a slice of bools ([]bool) in packed format. -func (o *Buffer) dec_slice_packed_bool(p *Properties, base structPointer) error { - v := structPointer_BoolSlice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded bools - fin := o.index + nb - if fin < o.index { - return errOverflow - } - - y := *v - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - y = append(y, u != 0) - } - - *v = y - return nil -} - -// Decode a slice of int32s ([]int32). -func (o *Buffer) dec_slice_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - structPointer_Word32Slice(base, p.field).Append(uint32(u)) - return nil -} - -// Decode a slice of int32s ([]int32) in packed format. 
-func (o *Buffer) dec_slice_packed_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int32s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(uint32(u)) - } - return nil -} - -// Decode a slice of int64s ([]int64). -func (o *Buffer) dec_slice_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - - structPointer_Word64Slice(base, p.field).Append(u) - return nil -} - -// Decode a slice of int64s ([]int64) in packed format. -func (o *Buffer) dec_slice_packed_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int64s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(u) - } - return nil -} - -// Decode a slice of strings ([]string). -func (o *Buffer) dec_slice_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - v := structPointer_StringSlice(base, p.field) - *v = append(*v, s) - return nil -} - -// Decode a slice of slice of bytes ([][]byte). -func (o *Buffer) dec_slice_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - v := structPointer_BytesSlice(base, p.field) - *v = append(*v, b) - return nil -} - -// Decode a map field. -func (o *Buffer) dec_new_map(p *Properties, base structPointer) error { - raw, err := o.DecodeRawBytes(false) - if err != nil { - return err - } - oi := o.index // index at the end of this map entry - o.index -= len(raw) // move buffer back to start of map entry - - mptr := structPointer_NewAt(base, p.field, p.mtype) // *map[K]V - if mptr.Elem().IsNil() { - mptr.Elem().Set(reflect.MakeMap(mptr.Type().Elem())) - } - v := mptr.Elem() // map[K]V - - // Prepare addressable doubly-indirect placeholders for the key and value types. - // See enc_new_map for why. - keyptr := reflect.New(reflect.PtrTo(p.mtype.Key())).Elem() // addressable *K - keybase := toStructPointer(keyptr.Addr()) // **K - - var valbase structPointer - var valptr reflect.Value - switch p.mtype.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valptr = reflect.ValueOf(&dummy) // *[]byte - valbase = toStructPointer(valptr) // *[]byte - case reflect.Ptr: - // message; valptr is **Msg; need to allocate the intermediate pointer - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valptr.Set(reflect.New(valptr.Type().Elem())) - valbase = toStructPointer(valptr) - default: - // everything else - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valbase = toStructPointer(valptr.Addr()) // **V - } - - // Decode. - // This parses a restricted wire format, namely the encoding of a message - // with two fields. See enc_new_map for the format. - for o.index < oi { - // tagcode for key and value properties are always a single byte - // because they have tags 1 and 2. 
- tagcode := o.buf[o.index] - o.index++ - switch tagcode { - case p.mkeyprop.tagcode[0]: - if err := p.mkeyprop.dec(o, p.mkeyprop, keybase); err != nil { - return err - } - case p.mvalprop.tagcode[0]: - if err := p.mvalprop.dec(o, p.mvalprop, valbase); err != nil { - return err - } - default: - // TODO: Should we silently skip this instead? - return fmt.Errorf("proto: bad map data tag %d", raw[0]) - } - } - keyelem, valelem := keyptr.Elem(), valptr.Elem() - if !keyelem.IsValid() { - keyelem = reflect.Zero(p.mtype.Key()) - } - if !valelem.IsValid() { - valelem = reflect.Zero(p.mtype.Elem()) - } - - v.SetMapIndex(keyelem, valelem) - return nil -} - -// Decode a group. -func (o *Buffer) dec_struct_group(p *Properties, base structPointer) error { - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - return o.unmarshalType(p.stype, p.sprop, true, bas) -} - -// Decode an embedded message. -func (o *Buffer) dec_struct_message(p *Properties, base structPointer) (err error) { - raw, e := o.DecodeRawBytes(false) - if e != nil { - return e - } - - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := structPointer_Interface(bas, p.stype) - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, false, bas) - o.buf = obuf - o.index = oi - - return err -} - -// Decode a slice of embedded messages. -func (o *Buffer) dec_slice_struct_message(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, false, base) -} - -// Decode a slice of embedded groups. -func (o *Buffer) dec_slice_struct_group(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, true, base) -} - -// Decode a slice of structs ([]*struct). -func (o *Buffer) dec_slice_struct(p *Properties, is_group bool, base structPointer) error { - v := reflect.New(p.stype) - bas := toStructPointer(v) - structPointer_StructPointerSlice(base, p.field).Append(bas) - - if is_group { - err := o.unmarshalType(p.stype, p.sprop, is_group, bas) - return err - } - - raw, err := o.DecodeRawBytes(false) - if err != nil { + if u, ok := pb.(Unmarshaler); ok { + // NOTE: The history of proto have unfortunately been inconsistent + // whether Unmarshaler should or should not implicitly clear itself. + // Some implementations do, most do not. + // Thus, calling this here may or may not do what people want. + // + // See https://github.com/golang/protobuf/issues/424 + err := u.Unmarshal(p.buf[p.index:]) + p.index = len(p.buf) return err } - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := v.Interface() - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, is_group, bas) - - o.buf = obuf - o.index = oi - + // Slow workaround for messages that aren't Unmarshalers. + // This includes some hand-coded .pb.go files and + // bootstrap protos. + // TODO: fix all of those and then add Unmarshal to + // the Message interface. Then: + // The cast above and code below can be deleted. 
+ // The old unmarshaler can be deleted. + // Clients can call Unmarshal directly (can already do that, actually). + var info InternalMessageInfo + err := info.Unmarshal(pb, p.buf[p.index:]) + p.index = len(p.buf) return err } diff --git a/vendor/github.com/golang/protobuf/proto/decode_test.go b/vendor/github.com/golang/protobuf/proto/decode_test.go index 2c4c31d..949be3a 100644 --- a/vendor/github.com/golang/protobuf/proto/decode_test.go +++ b/vendor/github.com/golang/protobuf/proto/decode_test.go @@ -41,10 +41,7 @@ import ( tpb "github.com/golang/protobuf/proto/proto3_proto" ) -var ( - bytesBlackhole []byte - msgBlackhole = new(tpb.Message) -) +var msgBlackhole = new(tpb.Message) // BenchmarkVarint32ArraySmall shows the performance on an array of small int32 fields (1 and // 2 bytes long). diff --git a/vendor/github.com/golang/protobuf/proto/deprecated.go b/vendor/github.com/golang/protobuf/proto/deprecated.go new file mode 100644 index 0000000..35b882c --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/deprecated.go @@ -0,0 +1,63 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import "errors" + +// Deprecated: do not use. +type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 } + +// Deprecated: do not use. +func GetStats() Stats { return Stats{} } + +// Deprecated: do not use. +func MarshalMessageSet(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: do not use. +func UnmarshalMessageSet([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: do not use. +func MarshalMessageSetJSON(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: do not use. +func UnmarshalMessageSetJSON([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: do not use. 
+func RegisterMessageSetType(Message, int32, string) {} diff --git a/vendor/github.com/golang/protobuf/proto/discard.go b/vendor/github.com/golang/protobuf/proto/discard.go new file mode 100644 index 0000000..dea2617 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/discard.go @@ -0,0 +1,350 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "fmt" + "reflect" + "strings" + "sync" + "sync/atomic" +) + +type generatedDiscarder interface { + XXX_DiscardUnknown() +} + +// DiscardUnknown recursively discards all unknown fields from this message +// and all embedded messages. +// +// When unmarshaling a message with unrecognized fields, the tags and values +// of such fields are preserved in the Message. This allows a later call to +// marshal to be able to produce a message that continues to have those +// unrecognized fields. To avoid this, DiscardUnknown is used to +// explicitly clear the unknown fields after unmarshaling. +// +// For proto2 messages, the unknown fields of message extensions are only +// discarded from messages that have been accessed via GetExtension. +func DiscardUnknown(m Message) { + if m, ok := m.(generatedDiscarder); ok { + m.XXX_DiscardUnknown() + return + } + // TODO: Dynamically populate a InternalMessageInfo for legacy messages, + // but the master branch has no implementation for InternalMessageInfo, + // so it would be more work to replicate that approach. + discardLegacy(m) +} + +// DiscardUnknown recursively discards all unknown fields. 
+func (a *InternalMessageInfo) DiscardUnknown(m Message) { + di := atomicLoadDiscardInfo(&a.discard) + if di == nil { + di = getDiscardInfo(reflect.TypeOf(m).Elem()) + atomicStoreDiscardInfo(&a.discard, di) + } + di.discard(toPointer(&m)) +} + +type discardInfo struct { + typ reflect.Type + + initialized int32 // 0: only typ is valid, 1: everything is valid + lock sync.Mutex + + fields []discardFieldInfo + unrecognized field +} + +type discardFieldInfo struct { + field field // Offset of field, guaranteed to be valid + discard func(src pointer) +} + +var ( + discardInfoMap = map[reflect.Type]*discardInfo{} + discardInfoLock sync.Mutex +) + +func getDiscardInfo(t reflect.Type) *discardInfo { + discardInfoLock.Lock() + defer discardInfoLock.Unlock() + di := discardInfoMap[t] + if di == nil { + di = &discardInfo{typ: t} + discardInfoMap[t] = di + } + return di +} + +func (di *discardInfo) discard(src pointer) { + if src.isNil() { + return // Nothing to do. + } + + if atomic.LoadInt32(&di.initialized) == 0 { + di.computeDiscardInfo() + } + + for _, fi := range di.fields { + sfp := src.offset(fi.field) + fi.discard(sfp) + } + + // For proto2 messages, only discard unknown fields in message extensions + // that have been accessed via GetExtension. + if em, err := extendable(src.asPointerTo(di.typ).Interface()); err == nil { + // Ignore lock since DiscardUnknown is not concurrency safe. + emm, _ := em.extensionsRead() + for _, mx := range emm { + if m, ok := mx.value.(Message); ok { + DiscardUnknown(m) + } + } + } + + if di.unrecognized.IsValid() { + *src.offset(di.unrecognized).toBytes() = nil + } +} + +func (di *discardInfo) computeDiscardInfo() { + di.lock.Lock() + defer di.lock.Unlock() + if di.initialized != 0 { + return + } + t := di.typ + n := t.NumField() + + for i := 0; i < n; i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + + dfi := discardFieldInfo{field: toField(&f)} + tf := f.Type + + // Unwrap tf to get its most basic type. + var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic(fmt.Sprintf("%v.%s cannot be a slice of pointers to primitive types", t, f.Name)) + } + + switch tf.Kind() { + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("%v.%s cannot be a direct struct value", t, f.Name)) + case isSlice: // E.g., []*pb.T + di := getDiscardInfo(tf) + dfi.discard = func(src pointer) { + sps := src.getPointerSlice() + for _, sp := range sps { + if !sp.isNil() { + di.discard(sp) + } + } + } + default: // E.g., *pb.T + di := getDiscardInfo(tf) + dfi.discard = func(src pointer) { + sp := src.getPointer() + if !sp.isNil() { + di.discard(sp) + } + } + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%v.%s cannot be a pointer to a map or a slice of map values", t, f.Name)) + default: // E.g., map[K]V + if tf.Elem().Kind() == reflect.Ptr { // Proto struct (e.g., *T) + dfi.discard = func(src pointer) { + sm := src.asPointerTo(tf).Elem() + if sm.Len() == 0 { + return + } + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + DiscardUnknown(val.Interface().(Message)) + } + } + } else { + dfi.discard = func(pointer) {} // Noop + } + } + case reflect.Interface: + // Must be oneof field. 
+ switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%v.%s cannot be a pointer to a interface or a slice of interface values", t, f.Name)) + default: // E.g., interface{} + // TODO: Make this faster? + dfi.discard = func(src pointer) { + su := src.asPointerTo(tf).Elem() + if !su.IsNil() { + sv := su.Elem().Elem().Field(0) + if sv.Kind() == reflect.Ptr && sv.IsNil() { + return + } + switch sv.Type().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + DiscardUnknown(sv.Interface().(Message)) + } + } + } + } + default: + continue + } + di.fields = append(di.fields, dfi) + } + + di.unrecognized = invalidField + if f, ok := t.FieldByName("XXX_unrecognized"); ok { + if f.Type != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + di.unrecognized = toField(&f) + } + + atomic.StoreInt32(&di.initialized, 1) +} + +func discardLegacy(m Message) { + v := reflect.ValueOf(m) + if v.Kind() != reflect.Ptr || v.IsNil() { + return + } + v = v.Elem() + if v.Kind() != reflect.Struct { + return + } + t := v.Type() + + for i := 0; i < v.NumField(); i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + vf := v.Field(i) + tf := f.Type + + // Unwrap tf to get its most basic type. + var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic(fmt.Sprintf("%T.%s cannot be a slice of pointers to primitive types", m, f.Name)) + } + + switch tf.Kind() { + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("%T.%s cannot be a direct struct value", m, f.Name)) + case isSlice: // E.g., []*pb.T + for j := 0; j < vf.Len(); j++ { + discardLegacy(vf.Index(j).Interface().(Message)) + } + default: // E.g., *pb.T + discardLegacy(vf.Interface().(Message)) + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%T.%s cannot be a pointer to a map or a slice of map values", m, f.Name)) + default: // E.g., map[K]V + tv := vf.Type().Elem() + if tv.Kind() == reflect.Ptr && tv.Implements(protoMessageType) { // Proto struct (e.g., *T) + for _, key := range vf.MapKeys() { + val := vf.MapIndex(key) + discardLegacy(val.Interface().(Message)) + } + } + } + case reflect.Interface: + // Must be oneof field. + switch { + case isPointer || isSlice: + panic(fmt.Sprintf("%T.%s cannot be a pointer to a interface or a slice of interface values", m, f.Name)) + default: // E.g., test_proto.isCommunique_Union interface + if !vf.IsNil() && f.Tag.Get("protobuf_oneof") != "" { + vf = vf.Elem() // E.g., *test_proto.Communique_Msg + if !vf.IsNil() { + vf = vf.Elem() // E.g., test_proto.Communique_Msg + vf = vf.Field(0) // E.g., Proto struct (e.g., *T) or primitive value + if vf.Kind() == reflect.Ptr { + discardLegacy(vf.Interface().(Message)) + } + } + } + } + } + } + + if vf := v.FieldByName("XXX_unrecognized"); vf.IsValid() { + if vf.Type() != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + vf.Set(reflect.ValueOf([]byte(nil))) + } + + // For proto2 messages, only discard unknown fields in message extensions + // that have been accessed via GetExtension. + if em, err := extendable(m); err == nil { + // Ignore lock since discardLegacy is not concurrency safe. 
+ emm, _ := em.extensionsRead() + for _, mx := range emm { + if m, ok := mx.value.(Message); ok { + discardLegacy(m) + } + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/discard_test.go b/vendor/github.com/golang/protobuf/proto/discard_test.go new file mode 100644 index 0000000..a2ff550 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/discard_test.go @@ -0,0 +1,170 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package proto_test + +import ( + "testing" + + "github.com/golang/protobuf/proto" + + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + pb "github.com/golang/protobuf/proto/test_proto" +) + +func TestDiscardUnknown(t *testing.T) { + tests := []struct { + desc string + in, want proto.Message + }{{ + desc: "Nil", + in: nil, want: nil, // Should not panic + }, { + desc: "NilPtr", + in: (*proto3pb.Message)(nil), want: (*proto3pb.Message)(nil), // Should not panic + }, { + desc: "Nested", + in: &proto3pb.Message{ + Name: "Aaron", + Nested: &proto3pb.Nested{Cute: true, XXX_unrecognized: []byte("blah")}, + XXX_unrecognized: []byte("blah"), + }, + want: &proto3pb.Message{ + Name: "Aaron", + Nested: &proto3pb.Nested{Cute: true}, + }, + }, { + desc: "Slice", + in: &proto3pb.Message{ + Name: "Aaron", + Children: []*proto3pb.Message{ + {Name: "Sarah", XXX_unrecognized: []byte("blah")}, + {Name: "Abraham", XXX_unrecognized: []byte("blah")}, + }, + XXX_unrecognized: []byte("blah"), + }, + want: &proto3pb.Message{ + Name: "Aaron", + Children: []*proto3pb.Message{ + {Name: "Sarah"}, + {Name: "Abraham"}, + }, + }, + }, { + desc: "OneOf", + in: &pb.Communique{ + Union: &pb.Communique_Msg{&pb.Strings{ + StringField: proto.String("123"), + XXX_unrecognized: []byte("blah"), + }}, + XXX_unrecognized: []byte("blah"), + }, + want: &pb.Communique{ + Union: &pb.Communique_Msg{&pb.Strings{StringField: proto.String("123")}}, + }, + }, { + desc: "Map", + in: &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4002: &pb.FloatingPoint{ + Exact: proto.Bool(true), + XXX_unrecognized: []byte("blah"), + }, + }}, + want: &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4002: &pb.FloatingPoint{Exact: proto.Bool(true)}, + }}, + }, { + desc: "Extension", + in: func() proto.Message { + m := &pb.MyMessage{ + Count: proto.Int32(42), + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(6), + XXX_unrecognized: []byte("blah"), + }, + XXX_unrecognized: []byte("blah"), + } + proto.SetExtension(m, pb.E_Ext_More, &pb.Ext{ + Data: proto.String("extension"), + XXX_unrecognized: []byte("blah"), + }) + return m + }(), + want: func() proto.Message { + m := &pb.MyMessage{ + Count: proto.Int32(42), + Somegroup: &pb.MyMessage_SomeGroup{GroupField: proto.Int32(6)}, + } + proto.SetExtension(m, pb.E_Ext_More, &pb.Ext{Data: proto.String("extension")}) + return m + }(), + }} + + // Test the legacy code path. + for _, tt := range tests { + // Clone the input so that we don't alter the original. + in := tt.in + if in != nil { + in = proto.Clone(tt.in) + } + + var m LegacyMessage + m.Message, _ = in.(*proto3pb.Message) + m.Communique, _ = in.(*pb.Communique) + m.MessageWithMap, _ = in.(*pb.MessageWithMap) + m.MyMessage, _ = in.(*pb.MyMessage) + proto.DiscardUnknown(&m) + if !proto.Equal(in, tt.want) { + t.Errorf("test %s/Legacy, expected unknown fields to be discarded\ngot %v\nwant %v", tt.desc, in, tt.want) + } + } + + for _, tt := range tests { + proto.DiscardUnknown(tt.in) + if !proto.Equal(tt.in, tt.want) { + t.Errorf("test %s, expected unknown fields to be discarded\ngot %v\nwant %v", tt.desc, tt.in, tt.want) + } + } +} + +// LegacyMessage is a proto.Message that has several nested messages. +// This does not have the XXX_DiscardUnknown method and so forces DiscardUnknown +// to use the legacy fallback logic. 
+type LegacyMessage struct { + Message *proto3pb.Message + Communique *pb.Communique + MessageWithMap *pb.MessageWithMap + MyMessage *pb.MyMessage +} + +func (m *LegacyMessage) Reset() { *m = LegacyMessage{} } +func (m *LegacyMessage) String() string { return proto.CompactTextString(m) } +func (*LegacyMessage) ProtoMessage() {} diff --git a/vendor/github.com/golang/protobuf/proto/encode.go b/vendor/github.com/golang/protobuf/proto/encode.go index 8b84d1b..3abfed2 100644 --- a/vendor/github.com/golang/protobuf/proto/encode.go +++ b/vendor/github.com/golang/protobuf/proto/encode.go @@ -37,28 +37,9 @@ package proto import ( "errors" - "fmt" "reflect" - "sort" ) -// RequiredNotSetError is the error returned if Marshal is called with -// a protocol buffer struct whose required fields have not -// all been initialized. It is also the error returned if Unmarshal is -// called with an encoded protocol buffer that does not include all the -// required fields. -// -// When printed, RequiredNotSetError reports the first unset required field in a -// message. If the field cannot be precisely determined, it is reported as -// "{Unknown}". -type RequiredNotSetError struct { - field string -} - -func (e *RequiredNotSetError) Error() string { - return fmt.Sprintf("proto: required field %q not set", e.field) -} - var ( // errRepeatedHasNil is the error returned if Marshal is called with // a struct with a repeated field containing a nil element. @@ -82,10 +63,6 @@ var ( const maxVarintBytes = 10 // maximum length of a varint -// maxMarshalSize is the largest allowed size of an encoded protobuf, -// since C++ and Java use signed int32s for the size. -const maxMarshalSize = 1<<31 - 1 - // EncodeVarint returns the varint encoding of x. // This is the format for the // int32, int64, uint32, uint64, bool, and enum @@ -119,18 +96,27 @@ func (p *Buffer) EncodeVarint(x uint64) error { // SizeVarint returns the varint encoding size of an integer. func SizeVarint(x uint64) int { - return sizeVarint(x) -} - -func sizeVarint(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n + switch { + case x < 1<<7: + return 1 + case x < 1<<14: + return 2 + case x < 1<<21: + return 3 + case x < 1<<28: + return 4 + case x < 1<<35: + return 5 + case x < 1<<42: + return 6 + case x < 1<<49: + return 7 + case x < 1<<56: + return 8 + case x < 1<<63: + return 9 + } + return 10 } // EncodeFixed64 writes a 64-bit integer to the Buffer. @@ -149,10 +135,6 @@ func (p *Buffer) EncodeFixed64(x uint64) error { return nil } -func sizeFixed64(x uint64) int { - return 8 -} - // EncodeFixed32 writes a 32-bit integer to the Buffer. // This is the format for the // fixed32, sfixed32, and float protocol buffer types. @@ -165,20 +147,12 @@ func (p *Buffer) EncodeFixed32(x uint64) error { return nil } -func sizeFixed32(x uint64) int { - return 4 -} - // EncodeZigzag64 writes a zigzag-encoded 64-bit integer // to the Buffer. // This is the format used for the sint64 protocol buffer type. func (p *Buffer) EncodeZigzag64(x uint64) error { // use signed number to get arithmetic right shift. 
- return p.EncodeVarint((x << 1) ^ uint64((int64(x) >> 63))) -} - -func sizeZigzag64(x uint64) int { - return sizeVarint((x << 1) ^ uint64((int64(x) >> 63))) + return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } // EncodeZigzag32 writes a zigzag-encoded 32-bit integer @@ -189,10 +163,6 @@ func (p *Buffer) EncodeZigzag32(x uint64) error { return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) } -func sizeZigzag32(x uint64) int { - return sizeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - // EncodeRawBytes writes a count-delimited byte buffer to the Buffer. // This is the format used for the bytes protocol buffer // type and for embedded messages. @@ -202,11 +172,6 @@ func (p *Buffer) EncodeRawBytes(b []byte) error { return nil } -func sizeRawBytes(b []byte) int { - return sizeVarint(uint64(len(b))) + - len(b) -} - // EncodeStringBytes writes an encoded string to the Buffer. // This is the format used for the proto2 string type. func (p *Buffer) EncodeStringBytes(s string) error { @@ -215,319 +180,17 @@ func (p *Buffer) EncodeStringBytes(s string) error { return nil } -func sizeStringBytes(s string) int { - return sizeVarint(uint64(len(s))) + - len(s) -} - // Marshaler is the interface representing objects that can marshal themselves. type Marshaler interface { Marshal() ([]byte, error) } -// Marshal takes the protocol buffer -// and encodes it into the wire format, returning the data. -func Marshal(pb Message) ([]byte, error) { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - return m.Marshal() - } - p := NewBuffer(nil) - err := p.Marshal(pb) - if p.buf == nil && err == nil { - // Return a non-nil slice on success. - return []byte{}, nil - } - return p.buf, err -} - // EncodeMessage writes the protocol buffer to the Buffer, // prefixed by a varint-encoded length. func (p *Buffer) EncodeMessage(pb Message) error { - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - var state errorState - err = p.enc_len_struct(GetProperties(t.Elem()), base, &state) - } - return err -} - -// Marshal takes the protocol buffer -// and encodes it into the wire format, writing the result to the -// Buffer. -func (p *Buffer) Marshal(pb Message) error { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - data, err := m.Marshal() - p.buf = append(p.buf, data...) - return err - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - err = p.enc_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Encode++ // Parens are to work around a goimports bug. - } - - if len(p.buf) > maxMarshalSize { - return ErrTooLarge - } - return err -} - -// Size returns the encoded size of a protocol buffer. -func Size(pb Message) (n int) { - // Can the object marshal itself? If so, Size is slow. - // TODO: add Size to Marshaler, or add a Sizer interface. - if m, ok := pb.(Marshaler); ok { - b, _ := m.Marshal() - return len(b) - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return 0 - } - if err == nil { - n = size_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Size++ // Parens are to work around a goimports bug. - } - - return -} - -// Individual type encoders. - -// Encode a bool. 
-func (o *Buffer) enc_bool(p *Properties, base structPointer) error { - v := *structPointer_Bool(base, p.field) - if v == nil { - return ErrNil - } - x := 0 - if *v { - x = 1 - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_bool(p *Properties, base structPointer) error { - v := *structPointer_BoolVal(base, p.field) - if !v { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, 1) - return nil -} - -func size_bool(p *Properties, base structPointer) int { - v := *structPointer_Bool(base, p.field) - if v == nil { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -func size_proto3_bool(p *Properties, base structPointer) int { - v := *structPointer_BoolVal(base, p.field) - if !v && !p.oneof { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -// Encode an int32. -func (o *Buffer) enc_int32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode a uint32. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := word32_Get(v) - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := word32_Get(v) - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode an int64. -func (o *Buffer) enc_int64(p *Properties, base structPointer) error { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return ErrNil - } - x := word64_Get(v) - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, x) - return nil -} - -func (o *Buffer) enc_proto3_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, x) - return nil -} - -func size_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return 0 - } - x := word64_Get(v) - n += len(p.tagcode) - n += p.valSize(x) - return -} - -func size_proto3_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(x) - return -} - -// Encode a string. -func (o *Buffer) enc_string(p *Properties, base structPointer) error { - v := *structPointer_String(base, p.field) - if v == nil { - return ErrNil - } - x := *v - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(x) - return nil -} - -func (o *Buffer) enc_proto3_string(p *Properties, base structPointer) error { - v := *structPointer_StringVal(base, p.field) - if v == "" { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(v) - return nil -} - -func size_string(p *Properties, base structPointer) (n int) { - v := *structPointer_String(base, p.field) - if v == nil { - return 0 - } - x := *v - n += len(p.tagcode) - n += sizeStringBytes(x) - return -} - -func size_proto3_string(p *Properties, base structPointer) (n int) { - v := *structPointer_StringVal(base, p.field) - if v == "" && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeStringBytes(v) - return + siz := Size(pb) + p.EncodeVarint(uint64(siz)) + return p.Marshal(pb) } // All protocol buffer fields are nillable, but be careful. @@ -538,825 +201,3 @@ func isNil(v reflect.Value) bool { } return false } - -// Encode a message struct. -func (o *Buffer) enc_struct_message(p *Properties, base structPointer) error { - var state errorState - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return ErrNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - return state.err - } - - o.buf = append(o.buf, p.tagcode...) - return o.enc_len_struct(p.sprop, structp, &state) -} - -func size_struct_message(p *Properties, base structPointer) int { - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return 0 - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n0 := len(p.tagcode) - n1 := sizeRawBytes(data) - return n0 + n1 - } - - n0 := len(p.tagcode) - n1 := size_struct(p.sprop, structp) - n2 := sizeVarint(uint64(n1)) // size of encoded length - return n0 + n1 + n2 -} - -// Encode a group struct. 
-func (o *Buffer) enc_struct_group(p *Properties, base structPointer) error { - var state errorState - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return ErrNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - err := o.enc_struct(p.sprop, b) - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return state.err -} - -func size_struct_group(p *Properties, base structPointer) (n int) { - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return 0 - } - - n += sizeVarint(uint64((p.Tag << 3) | WireStartGroup)) - n += size_struct(p.sprop, b) - n += sizeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return -} - -// Encode a slice of bools ([]bool). -func (o *Buffer) enc_slice_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - for _, x := range s { - o.buf = append(o.buf, p.tagcode...) - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_bool(p *Properties, base structPointer) int { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - return l * (len(p.tagcode) + 1) // each bool takes exactly one byte -} - -// Encode a slice of bools ([]bool) in packed format. -func (o *Buffer) enc_slice_packed_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(l)) // each bool takes exactly one byte - for _, x := range s { - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_packed_bool(p *Properties, base structPointer) (n int) { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - n += len(p.tagcode) - n += sizeVarint(uint64(l)) - n += l // each bool takes exactly one byte - return -} - -// Encode a slice of bytes ([]byte). -func (o *Buffer) enc_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if s == nil { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func (o *Buffer) enc_proto3_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func size_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if s == nil && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -func size_proto3_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -// Encode a slice of int32s ([]int32). -func (o *Buffer) enc_slice_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of int32s ([]int32) in packed format. -func (o *Buffer) enc_slice_packed_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(buf, uint64(x)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - bufSize += p.valSize(uint64(x)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of uint32s ([]uint32). -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - x := s.Index(i) - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := s.Index(i) - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of uint32s ([]uint32) in packed format. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_packed_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, uint64(s.Index(i))) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(uint64(s.Index(i))) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of int64s ([]int64). -func (o *Buffer) enc_slice_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, s.Index(i)) - } - return nil -} - -func size_slice_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - n += p.valSize(s.Index(i)) - } - return -} - -// Encode a slice of int64s ([]int64) in packed format. -func (o *Buffer) enc_slice_packed_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, s.Index(i)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(s.Index(i)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of slice of bytes ([][]byte). -func (o *Buffer) enc_slice_slice_byte(p *Properties, base structPointer) error { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(ss[i]) - } - return nil -} - -func size_slice_slice_byte(p *Properties, base structPointer) (n int) { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return 0 - } - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeRawBytes(ss[i]) - } - return -} - -// Encode a slice of strings ([]string). -func (o *Buffer) enc_slice_string(p *Properties, base structPointer) error { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(ss[i]) - } - return nil -} - -func size_slice_string(p *Properties, base structPointer) (n int) { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeStringBytes(ss[i]) - } - return -} - -// Encode a slice of message structs ([]*struct). -func (o *Buffer) enc_slice_struct_message(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return errRepeatedHasNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - continue - } - - o.buf = append(o.buf, p.tagcode...) - err := o.enc_len_struct(p.sprop, structp, &state) - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - } - return state.err -} - -func size_slice_struct_message(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return // return the size up to this point - } - - // Can the object marshal itself? 
- if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n += sizeRawBytes(data) - continue - } - - n0 := size_struct(p.sprop, structp) - n1 := sizeVarint(uint64(n0)) // size of encoded length - n += n0 + n1 - } - return -} - -// Encode a slice of group structs ([]*struct). -func (o *Buffer) enc_slice_struct_group(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return errRepeatedHasNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - - err := o.enc_struct(p.sprop, b) - - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - } - return state.err -} - -func size_slice_struct_group(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - n += l * sizeVarint(uint64((p.Tag<<3)|WireStartGroup)) - n += l * sizeVarint(uint64((p.Tag<<3)|WireEndGroup)) - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return // return size up to this point - } - - n += size_struct(p.sprop, b) - } - return -} - -// Encode an extension map. -func (o *Buffer) enc_map(p *Properties, base structPointer) error { - exts := structPointer_ExtMap(base, p.field) - if err := encodeExtensionsMap(*exts); err != nil { - return err - } - - return o.enc_map_body(*exts) -} - -func (o *Buffer) enc_exts(p *Properties, base structPointer) error { - exts := structPointer_Extensions(base, p.field) - - v, mu := exts.extensionsRead() - if v == nil { - return nil - } - - mu.Lock() - defer mu.Unlock() - if err := encodeExtensionsMap(v); err != nil { - return err - } - - return o.enc_map_body(v) -} - -func (o *Buffer) enc_map_body(v map[int32]Extension) error { - // Fast-path for common cases: zero or one extensions. - if len(v) <= 1 { - for _, e := range v { - o.buf = append(o.buf, e.enc...) - } - return nil - } - - // Sort keys to provide a deterministic encoding. - keys := make([]int, 0, len(v)) - for k := range v { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, k := range keys { - o.buf = append(o.buf, v[int32(k)].enc...) - } - return nil -} - -func size_map(p *Properties, base structPointer) int { - v := structPointer_ExtMap(base, p.field) - return extensionsMapSize(*v) -} - -func size_exts(p *Properties, base structPointer) int { - v := structPointer_Extensions(base, p.field) - return extensionsSize(v) -} - -// Encode a map field. -func (o *Buffer) enc_new_map(p *Properties, base structPointer) error { - var state errorState // XXX: or do we need to plumb this through? - - /* - A map defined as - map map_field = N; - is encoded in the same way as - message MapFieldEntry { - key_type key = 1; - value_type value = 2; - } - repeated MapFieldEntry map_field = N; - */ - - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - if v.Len() == 0 { - return nil - } - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - enc := func() error { - if err := p.mkeyprop.enc(o, p.mkeyprop, keybase); err != nil { - return err - } - if err := p.mvalprop.enc(o, p.mvalprop, valbase); err != nil && err != ErrNil { - return err - } - return nil - } - - // Don't sort map keys. It is not required by the spec, and C++ doesn't do it. 
- for _, key := range v.MapKeys() { - val := v.MapIndex(key) - - keycopy.Set(key) - valcopy.Set(val) - - o.buf = append(o.buf, p.tagcode...) - if err := o.enc_len_thing(enc, &state); err != nil { - return err - } - } - return nil -} - -func size_new_map(p *Properties, base structPointer) int { - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - n := 0 - for _, key := range v.MapKeys() { - val := v.MapIndex(key) - keycopy.Set(key) - valcopy.Set(val) - - // Tag codes for key and val are the responsibility of the sub-sizer. - keysize := p.mkeyprop.size(p.mkeyprop, keybase) - valsize := p.mvalprop.size(p.mvalprop, valbase) - entry := keysize + valsize - // Add on tag code and length of map entry itself. - n += len(p.tagcode) + sizeVarint(uint64(entry)) + entry - } - return n -} - -// mapEncodeScratch returns a new reflect.Value matching the map's value type, -// and a structPointer suitable for passing to an encoder or sizer. -func mapEncodeScratch(mapType reflect.Type) (keycopy, valcopy reflect.Value, keybase, valbase structPointer) { - // Prepare addressable doubly-indirect placeholders for the key and value types. - // This is needed because the element-type encoders expect **T, but the map iteration produces T. - - keycopy = reflect.New(mapType.Key()).Elem() // addressable K - keyptr := reflect.New(reflect.PtrTo(keycopy.Type())).Elem() // addressable *K - keyptr.Set(keycopy.Addr()) // - keybase = toStructPointer(keyptr.Addr()) // **K - - // Value types are more varied and require special handling. - switch mapType.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valcopy = reflect.ValueOf(&dummy).Elem() // addressable []byte - valbase = toStructPointer(valcopy.Addr()) - case reflect.Ptr: - // message; the generated field type is map[K]*Msg (so V is *Msg), - // so we only need one level of indirection. - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valbase = toStructPointer(valcopy.Addr()) - default: - // everything else - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valptr := reflect.New(reflect.PtrTo(valcopy.Type())).Elem() // addressable *V - valptr.Set(valcopy.Addr()) // - valbase = toStructPointer(valptr.Addr()) // **V - } - return -} - -// Encode a struct. -func (o *Buffer) enc_struct(prop *StructProperties, base structPointer) error { - var state errorState - // Encode fields in tag order so that decoders may use optimizations - // that depend on the ordering. - // https://developers.google.com/protocol-buffers/docs/encoding#order - for _, i := range prop.order { - p := prop.Prop[i] - if p.enc != nil { - err := p.enc(o, p, base) - if err != nil { - if err == ErrNil { - if p.Required && state.err == nil { - state.err = &RequiredNotSetError{p.Name} - } - } else if err == errRepeatedHasNil { - // Give more context to nil values in repeated fields. - return errors.New("repeated field " + p.OrigName + " has nil element") - } else if !state.shouldContinue(err, p) { - return err - } - } - if len(o.buf) > maxMarshalSize { - return ErrTooLarge - } - } - } - - // Do oneof fields. - if prop.oneofMarshaler != nil { - m := structPointer_Interface(base, prop.stype).(Message) - if err := prop.oneofMarshaler(m, o); err == ErrNil { - return errOneofHasNil - } else if err != nil { - return err - } - } - - // Add unrecognized fields at the end. 
- if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - if len(o.buf)+len(v) > maxMarshalSize { - return ErrTooLarge - } - if len(v) > 0 { - o.buf = append(o.buf, v...) - } - } - - return state.err -} - -func size_struct(prop *StructProperties, base structPointer) (n int) { - for _, i := range prop.order { - p := prop.Prop[i] - if p.size != nil { - n += p.size(p, base) - } - } - - // Add unrecognized fields at the end. - if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - n += len(v) - } - - // Factor in any oneof fields. - if prop.oneofSizer != nil { - m := structPointer_Interface(base, prop.stype).(Message) - n += prop.oneofSizer(m) - } - - return -} - -var zeroes [20]byte // longer than any conceivable sizeVarint - -// Encode a struct, preceded by its encoded length (as a varint). -func (o *Buffer) enc_len_struct(prop *StructProperties, base structPointer, state *errorState) error { - return o.enc_len_thing(func() error { return o.enc_struct(prop, base) }, state) -} - -// Encode something, preceded by its encoded length (as a varint). -func (o *Buffer) enc_len_thing(enc func() error, state *errorState) error { - iLen := len(o.buf) - o.buf = append(o.buf, 0, 0, 0, 0) // reserve four bytes for length - iMsg := len(o.buf) - err := enc() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - lMsg := len(o.buf) - iMsg - lLen := sizeVarint(uint64(lMsg)) - switch x := lLen - (iMsg - iLen); { - case x > 0: // actual length is x bytes larger than the space we reserved - // Move msg x bytes right. - o.buf = append(o.buf, zeroes[:x]...) - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - case x < 0: // actual length is x bytes smaller than the space we reserved - // Move msg x bytes left. - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - o.buf = o.buf[:len(o.buf)+x] // x is negative - } - // Encode the length in the reserved space. - o.buf = o.buf[:iLen] - o.EncodeVarint(uint64(lMsg)) - o.buf = o.buf[:len(o.buf)+lMsg] - return state.err -} - -// errorState maintains the first error that occurs and updates that error -// with additional context. -type errorState struct { - err error -} - -// shouldContinue reports whether encoding should continue upon encountering the -// given error. If the error is RequiredNotSetError, shouldContinue returns true -// and, if this is the first appearance of that error, remembers it for future -// reporting. -// -// If prop is not nil, it may update any error with additional context about the -// field with the error. -func (s *errorState) shouldContinue(err error, prop *Properties) bool { - // Ignore unset required fields. - reqNotSet, ok := err.(*RequiredNotSetError) - if !ok { - return false - } - if s.err == nil { - if prop != nil { - err = &RequiredNotSetError{prop.Name + "." 
+ reqNotSet.field} - } - s.err = err - } - return true -} diff --git a/vendor/github.com/golang/protobuf/proto/equal.go b/vendor/github.com/golang/protobuf/proto/equal.go index 2ed1cf5..f9b6e41 100644 --- a/vendor/github.com/golang/protobuf/proto/equal.go +++ b/vendor/github.com/golang/protobuf/proto/equal.go @@ -109,15 +109,6 @@ func equalStruct(v1, v2 reflect.Value) bool { // set/unset mismatch return false } - b1, ok := f1.Interface().(raw) - if ok { - b2 := f2.Interface().(raw) - // RawMessage - if !bytes.Equal(b1.Bytes(), b2.Bytes()) { - return false - } - continue - } f1, f2 = f1.Elem(), f2.Elem() } if !equalAny(f1, f2, sprop.Prop[i]) { @@ -146,11 +137,7 @@ func equalStruct(v1, v2 reflect.Value) bool { u1 := uf.Bytes() u2 := v2.FieldByName("XXX_unrecognized").Bytes() - if !bytes.Equal(u1, u2) { - return false - } - - return true + return bytes.Equal(u1, u2) } // v1 and v2 are known to have the same type. @@ -259,7 +246,17 @@ func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { return false } - m1, m2 := e1.value, e2.value + m1 := extensionAsLegacyType(e1.value) + m2 := extensionAsLegacyType(e2.value) + + if m1 == nil && m2 == nil { + // Both have only encoded form. + if bytes.Equal(e1.enc, e2.enc) { + continue + } + // The bytes are different, but the extensions might still be + // equal. We need to decode them to compare. + } if m1 != nil && m2 != nil { // Both are unencoded. @@ -276,8 +273,12 @@ func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { desc = m[extNum] } if desc == nil { + // If both have only encoded form and the bytes are the same, + // it is handled above. We get here when the bytes are different. + // We don't know how to decode it, so just compare them as byte + // slices. log.Printf("proto: don't know how to compare extension %d of %v", extNum, base) - continue + return false } var err error if m1 == nil { diff --git a/vendor/github.com/golang/protobuf/proto/equal_test.go b/vendor/github.com/golang/protobuf/proto/equal_test.go index a2febb3..93ff88f 100644 --- a/vendor/github.com/golang/protobuf/proto/equal_test.go +++ b/vendor/github.com/golang/protobuf/proto/equal_test.go @@ -36,7 +36,7 @@ import ( . "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) // Four identical base messages. @@ -45,6 +45,9 @@ var messageWithoutExtension = &pb.MyMessage{Count: Int32(7)} var messageWithExtension1a = &pb.MyMessage{Count: Int32(7)} var messageWithExtension1b = &pb.MyMessage{Count: Int32(7)} var messageWithExtension2 = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3a = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3b = &pb.MyMessage{Count: Int32(7)} +var messageWithExtension3c = &pb.MyMessage{Count: Int32(7)} // Two messages with non-message extensions. var messageWithInt32Extension1 = &pb.MyMessage{Count: Int32(8)} @@ -83,6 +86,20 @@ func init() { if err := SetExtension(messageWithInt32Extension1, pb.E_Ext_Number, Int32(24)); err != nil { panic("SetExtension on Int32-2 failed: " + err.Error()) } + + // messageWithExtension3{a,b,c} has unregistered extension. 
+ if RegisteredExtensions(messageWithExtension3a)[200] != nil { + panic("expect extension 200 unregistered") + } + bytes := []byte{ + 0xc0, 0x0c, 0x01, // id=200, wiretype=0 (varint), data=1 + } + bytes2 := []byte{ + 0xc0, 0x0c, 0x02, // id=200, wiretype=0 (varint), data=2 + } + SetRawExtension(messageWithExtension3a, 200, bytes) + SetRawExtension(messageWithExtension3b, 200, bytes) + SetRawExtension(messageWithExtension3c, 200, bytes2) } var EqualTests = []struct { @@ -142,6 +159,9 @@ var EqualTests = []struct { {"int32 extension vs. itself", messageWithInt32Extension1, messageWithInt32Extension1, true}, {"int32 extension vs. a different int32", messageWithInt32Extension1, messageWithInt32Extension2, false}, + {"unregistered extension same", messageWithExtension3a, messageWithExtension3b, true}, + {"unregistered extension different", messageWithExtension3a, messageWithExtension3c, false}, + { "message with group", &pb.MyMessage{ diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go index eaad218..fa88add 100644 --- a/vendor/github.com/golang/protobuf/proto/extensions.go +++ b/vendor/github.com/golang/protobuf/proto/extensions.go @@ -38,6 +38,7 @@ package proto import ( "errors" "fmt" + "io" "reflect" "strconv" "sync" @@ -91,14 +92,29 @@ func (n notLocker) Unlock() {} // extendable returns the extendableProto interface for the given generated proto message. // If the proto message has the old extension format, it returns a wrapper that implements // the extendableProto interface. -func extendable(p interface{}) (extendableProto, bool) { - if ep, ok := p.(extendableProto); ok { - return ep, ok - } - if ep, ok := p.(extendableProtoV1); ok { - return extensionAdapter{ep}, ok +func extendable(p interface{}) (extendableProto, error) { + switch p := p.(type) { + case extendableProto: + if isNilPtr(p) { + return nil, fmt.Errorf("proto: nil %T is not extendable", p) + } + return p, nil + case extendableProtoV1: + if isNilPtr(p) { + return nil, fmt.Errorf("proto: nil %T is not extendable", p) + } + return extensionAdapter{p}, nil } - return nil, false + // Don't allocate a specific error containing %T: + // this is the hot path for Clone and MarshalText. + return nil, errNotExtendable +} + +var errNotExtendable = errors.New("proto: not an extendable proto.Message") + +func isNilPtr(x interface{}) bool { + v := reflect.ValueOf(x) + return v.Kind() == reflect.Ptr && v.IsNil() } // XXX_InternalExtensions is an internal representation of proto extensions. @@ -143,9 +159,6 @@ func (e *XXX_InternalExtensions) extensionsRead() (map[int32]Extension, sync.Loc return e.p.extensionMap, &e.p.mu } -var extendableProtoType = reflect.TypeOf((*extendableProto)(nil)).Elem() -var extendableProtoV1Type = reflect.TypeOf((*extendableProtoV1)(nil)).Elem() - // ExtensionDesc represents an extension specification. // Used in generated code from the protocol compiler. type ExtensionDesc struct { @@ -172,15 +185,31 @@ type Extension struct { // extension will have only enc set. When such an extension is // accessed using GetExtension (or GetExtensions) desc and value // will be set. - desc *ExtensionDesc + desc *ExtensionDesc + + // value is a concrete value for the extension field. Let the type of + // desc.ExtensionType be the "API type" and the type of Extension.value + // be the "storage type". The API type and storage type are the same except: + // * For scalars (except []byte), the API type uses *T, + // while the storage type uses T. 
+ // * For repeated fields, the API type uses []T, while the storage type + // uses *[]T. + // + // The reason for the divergence is so that the storage type more naturally + // matches what is expected of when retrieving the values through the + // protobuf reflection APIs. + // + // The value may only be populated if desc is also populated. value interface{} - enc []byte + + // enc is the raw bytes for the extension field. + enc []byte } // SetRawExtension is for testing only. func SetRawExtension(base Message, id int32, b []byte) { - epb, ok := extendable(base) - if !ok { + epb, err := extendable(base) + if err != nil { return } extmap := epb.extensionsWrite() @@ -205,7 +234,7 @@ func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error { pbi = ea.extendableProtoV1 } if a, b := reflect.TypeOf(pbi), reflect.TypeOf(extension.ExtendedType); a != b { - return errors.New("proto: bad extended type; " + b.String() + " does not extend " + a.String()) + return fmt.Errorf("proto: bad extended type; %v does not extend %v", b, a) } // Check the range. if !isExtensionField(pb, extension.Field) { @@ -250,85 +279,11 @@ func extensionProperties(ed *ExtensionDesc) *Properties { return prop } -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensions(e *XXX_InternalExtensions) error { - m, mu := e.extensionsRead() - if m == nil { - return nil // fast path - } - mu.Lock() - defer mu.Unlock() - return encodeExtensionsMap(m) -} - -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensionsMap(m map[int32]Extension) error { - for k, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - p := NewBuffer(nil) - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - if err := props.enc(p, props, toStructPointer(x)); err != nil { - return err - } - e.enc = p.buf - m[k] = e - } - return nil -} - -func extensionsSize(e *XXX_InternalExtensions) (n int) { - m, mu := e.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - defer mu.Unlock() - return extensionsMapSize(m) -} - -func extensionsMapSize(m map[int32]Extension) (n int) { - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - n += props.size(props, toStructPointer(x)) - } - return -} - // HasExtension returns whether the given extension is present in pb. func HasExtension(pb Message, extension *ExtensionDesc) bool { // TODO: Check types, field numbers, etc.? 
- epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return false } extmap, mu := epb.extensionsRead() @@ -336,15 +291,15 @@ func HasExtension(pb Message, extension *ExtensionDesc) bool { return false } mu.Lock() - _, ok = extmap[extension.Field] + _, ok := extmap[extension.Field] mu.Unlock() return ok } // ClearExtension removes the given extension from pb. func ClearExtension(pb Message, extension *ExtensionDesc) { - epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return } // TODO: Check types, field numbers, etc.? @@ -352,16 +307,26 @@ func ClearExtension(pb Message, extension *ExtensionDesc) { delete(extmap, extension.Field) } -// GetExtension parses and returns the given extension of pb. -// If the extension is not present and has no default value it returns ErrMissingExtension. +// GetExtension retrieves a proto2 extended field from pb. +// +// If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil), +// then GetExtension parses the encoded field and returns a Go value of the specified type. +// If the field is not present, then the default value is returned (if one is specified), +// otherwise ErrMissingExtension is reported. +// +// If the descriptor is not type complete (i.e., ExtensionDesc.ExtensionType is nil), +// then GetExtension returns the raw encoded bytes of the field extension. func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return nil, err } - if err := checkExtensionTypes(epb, extension); err != nil { - return nil, err + if extension.ExtendedType != nil { + // can only check type if this is a complete descriptor + if err := checkExtensionTypes(epb, extension); err != nil { + return nil, err + } } emap, mu := epb.extensionsRead() @@ -385,7 +350,12 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { // descriptors with the same field number. return nil, errors.New("proto: descriptor conflict") } - return e.value, nil + return extensionAsLegacyType(e.value), nil + } + + if extension.ExtensionType == nil { + // incomplete descriptor + return e.enc, nil } v, err := decodeExtension(e.enc, extension) @@ -395,16 +365,21 @@ func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { // Remember the decoded version and drop the encoded version. // That way it is safe to mutate what we return. - e.value = v + e.value = extensionAsStorageType(v) e.desc = extension e.enc = nil emap[extension.Field] = e - return e.value, nil + return extensionAsLegacyType(e.value), nil } // defaultExtensionValue returns the default value for extension. // If no default for an extension is defined ErrMissingExtension is returned. func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { + if extension.ExtensionType == nil { + // incomplete descriptor, so no default + return nil, ErrMissingExtension + } + t := reflect.TypeOf(extension.ExtensionType) props := extensionProperties(extension) @@ -439,31 +414,28 @@ func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { // decodeExtension decodes an extension encoded in b. 
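To make the two GetExtension modes described in the rewritten doc comment above concrete, a hedged sketch follows (the message and extension types are borrowed from the test_proto package used elsewhere in this patch, and the flow mirrors TestGetExtensionForIncompleteDesc further below; this is illustrative only):

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/test_proto"
)

func main() {
	src := &pb.MyMessage{Count: proto.Int32(1)}
	if err := proto.SetExtension(src, pb.E_Ext_More, &pb.Ext{Data: proto.String("hi")}); err != nil {
		panic(err)
	}
	b, _ := proto.Marshal(src)

	msg := &pb.MyMessage{}
	_ = proto.Unmarshal(b, msg) // the extension is now held only in encoded (wire) form

	// Incomplete descriptor (ExtensionType nil): the raw encoded bytes of the
	// field, including its tag, are returned as-is.
	raw, _ := proto.GetExtension(msg, &proto.ExtensionDesc{Field: pb.E_Ext_More.Field})
	fmt.Printf("%T\n", raw) // []uint8

	// Type-complete descriptor: the bytes are decoded into the Go type.
	v, _ := proto.GetExtension(msg, pb.E_Ext_More)
	fmt.Printf("%T\n", v) // *test_proto.Ext
}
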
func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { - o := NewBuffer(b) - t := reflect.TypeOf(extension.ExtensionType) - - props := extensionProperties(extension) + unmarshal := typeUnmarshaler(t, extension.Tag) // t is a pointer to a struct, pointer to basic type or a slice. - // Allocate a "field" to store the pointer/slice itself; the - // pointer/slice will be stored here. We pass - // the address of this field to props.dec. - // This passes a zero field and a *t and lets props.dec - // interpret it as a *struct{ x t }. + // Allocate space to store the pointer/slice. value := reflect.New(t).Elem() + var err error for { - // Discard wire type and field number varint. It isn't needed. - if _, err := o.DecodeVarint(); err != nil { - return nil, err + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF } + b = b[n:] + wire := int(x) & 7 - if err := props.dec(o, props, toStructPointer(value.Addr())); err != nil { + b, err = unmarshal(b, valToPointer(value.Addr()), wire) + if err != nil { return nil, err } - if o.index >= len(o.buf) { + if len(b) == 0 { break } } @@ -473,9 +445,9 @@ func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { // GetExtensions returns a slice of the extensions present in pb that are also listed in es. // The returned slice has the same length as es; missing extensions will appear as nil elements. func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return nil, err } extensions = make([]interface{}, len(es)) for i, e := range es { @@ -494,9 +466,9 @@ func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, e // For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing // just the Field field, which defines the extension's field number. func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { - epb, ok := extendable(pb) - if !ok { - return nil, fmt.Errorf("proto: %T is not an extendable proto.Message", pb) + epb, err := extendable(pb) + if err != nil { + return nil, err } registeredExtensions := RegisteredExtensions(pb) @@ -523,16 +495,16 @@ func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { // SetExtension sets the specified extension of pb to the specified value. func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { - epb, ok := extendable(pb) - if !ok { - return errors.New("proto: not an extendable proto") + epb, err := extendable(pb) + if err != nil { + return err } if err := checkExtensionTypes(epb, extension); err != nil { return err } typ := reflect.TypeOf(extension.ExtensionType) if typ != reflect.TypeOf(value) { - return errors.New("proto: bad extension value type") + return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", value, extension.ExtensionType) } // nil extension values need to be caught early, because the // encoder can't distinguish an ErrNil due to a nil extension @@ -544,14 +516,14 @@ func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error } extmap := epb.extensionsWrite() - extmap[extension.Field] = Extension{desc: extension, value: value} + extmap[extension.Field] = Extension{desc: extension, value: extensionAsStorageType(value)} return nil } // ClearAllExtensions clears all extensions from pb. 
func ClearAllExtensions(pb Message) { - epb, ok := extendable(pb) - if !ok { + epb, err := extendable(pb) + if err != nil { return } m := epb.extensionsWrite() @@ -585,3 +557,51 @@ func RegisterExtension(desc *ExtensionDesc) { func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc { return extensionMaps[reflect.TypeOf(pb).Elem()] } + +// extensionAsLegacyType converts an value in the storage type as the API type. +// See Extension.value. +func extensionAsLegacyType(v interface{}) interface{} { + switch rv := reflect.ValueOf(v); rv.Kind() { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + // Represent primitive types as a pointer to the value. + rv2 := reflect.New(rv.Type()) + rv2.Elem().Set(rv) + v = rv2.Interface() + case reflect.Ptr: + // Represent slice types as the value itself. + switch rv.Type().Elem().Kind() { + case reflect.Slice: + if rv.IsNil() { + v = reflect.Zero(rv.Type().Elem()).Interface() + } else { + v = rv.Elem().Interface() + } + } + } + return v +} + +// extensionAsStorageType converts an value in the API type as the storage type. +// See Extension.value. +func extensionAsStorageType(v interface{}) interface{} { + switch rv := reflect.ValueOf(v); rv.Kind() { + case reflect.Ptr: + // Represent slice types as the value itself. + switch rv.Type().Elem().Kind() { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + if rv.IsNil() { + v = reflect.Zero(rv.Type().Elem()).Interface() + } else { + v = rv.Elem().Interface() + } + } + case reflect.Slice: + // Represent slice types as a pointer to the value. + if rv.Type().Elem().Kind() != reflect.Uint8 { + rv2 := reflect.New(rv.Type()) + rv2.Elem().Set(rv) + v = rv2.Interface() + } + } + return v +} diff --git a/vendor/github.com/golang/protobuf/proto/extensions_test.go b/vendor/github.com/golang/protobuf/proto/extensions_test.go index b6d9114..f232a27 100644 --- a/vendor/github.com/golang/protobuf/proto/extensions_test.go +++ b/vendor/github.com/golang/protobuf/proto/extensions_test.go @@ -34,13 +34,15 @@ package proto_test import ( "bytes" "fmt" + "io" "reflect" "sort" + "strings" + "sync" "testing" "github.com/golang/protobuf/proto" - pb "github.com/golang/protobuf/proto/testdata" - "golang.org/x/sync/errgroup" + pb "github.com/golang/protobuf/proto/test_proto" ) func TestGetExtensionsWithMissingExtensions(t *testing.T) { @@ -64,7 +66,107 @@ func TestGetExtensionsWithMissingExtensions(t *testing.T) { } } -func TestExtensionDescsWithMissingExtensions(t *testing.T) { +func TestGetExtensionWithEmptyBuffer(t *testing.T) { + // Make sure that GetExtension returns an error if its + // undecoded buffer is empty. 
+ msg := &pb.MyMessage{} + proto.SetRawExtension(msg, pb.E_Ext_More.Field, []byte{}) + _, err := proto.GetExtension(msg, pb.E_Ext_More) + if want := io.ErrUnexpectedEOF; err != want { + t.Errorf("unexpected error in GetExtension from empty buffer: got %v, want %v", err, want) + } +} + +func TestGetExtensionForIncompleteDesc(t *testing.T) { + msg := &pb.MyMessage{Count: proto.Int32(0)} + extdesc1 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 123456789, + Name: "a.b", + Tag: "varint,123456789,opt", + } + ext1 := proto.Bool(true) + if err := proto.SetExtension(msg, extdesc1, ext1); err != nil { + t.Fatalf("Could not set ext1: %s", err) + } + extdesc2 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 123456790, + Name: "a.c", + Tag: "bytes,123456790,opt", + } + ext2 := []byte{0, 1, 2, 3, 4, 5, 6, 7} + if err := proto.SetExtension(msg, extdesc2, ext2); err != nil { + t.Fatalf("Could not set ext2: %s", err) + } + extdesc3 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*pb.Ext)(nil), + Field: 123456791, + Name: "a.d", + Tag: "bytes,123456791,opt", + } + ext3 := &pb.Ext{Data: proto.String("foo")} + if err := proto.SetExtension(msg, extdesc3, ext3); err != nil { + t.Fatalf("Could not set ext3: %s", err) + } + + b, err := proto.Marshal(msg) + if err != nil { + t.Fatalf("Could not marshal msg: %v", err) + } + if err := proto.Unmarshal(b, msg); err != nil { + t.Fatalf("Could not unmarshal into msg: %v", err) + } + + var expected proto.Buffer + if err := expected.EncodeVarint(uint64((extdesc1.Field << 3) | proto.WireVarint)); err != nil { + t.Fatalf("failed to compute expected prefix for ext1: %s", err) + } + if err := expected.EncodeVarint(1 /* bool true */); err != nil { + t.Fatalf("failed to compute expected value for ext1: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc1.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext1: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext1: got %v, want %v", b, expected.Bytes()) + } + + expected = proto.Buffer{} // reset + if err := expected.EncodeVarint(uint64((extdesc2.Field << 3) | proto.WireBytes)); err != nil { + t.Fatalf("failed to compute expected prefix for ext2: %s", err) + } + if err := expected.EncodeRawBytes(ext2); err != nil { + t.Fatalf("failed to compute expected value for ext2: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc2.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext2: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext2: got %v, want %v", b, expected.Bytes()) + } + + expected = proto.Buffer{} // reset + if err := expected.EncodeVarint(uint64((extdesc3.Field << 3) | proto.WireBytes)); err != nil { + t.Fatalf("failed to compute expected prefix for ext3: %s", err) + } + if b, err := proto.Marshal(ext3); err != nil { + t.Fatalf("failed to compute expected value for ext3: %s", err) + } else if err := expected.EncodeRawBytes(b); err != nil { + t.Fatalf("failed to compute expected value for ext3: %s", err) + } + + if b, err := proto.GetExtension(msg, &proto.ExtensionDesc{Field: extdesc3.Field}); err != nil { + t.Fatalf("Failed to get raw value for ext3: %s", err) + } else if !reflect.DeepEqual(b, expected.Bytes()) { + t.Fatalf("Raw value for ext3: got %v, want %v", b, expected.Bytes()) + } +} + +func 
TestExtensionDescsWithUnregisteredExtensions(t *testing.T) { msg := &pb.MyMessage{Count: proto.Int32(0)} extdesc1 := pb.E_Ext_More if descs, err := proto.ExtensionDescs(msg); len(descs) != 0 || err != nil { @@ -100,7 +202,7 @@ func TestExtensionDescsWithMissingExtensions(t *testing.T) { t.Fatalf("proto.ExtensionDescs: got error %v", err) } sortExtDescs(descs) - wantDescs := []*proto.ExtensionDesc{extdesc1, &proto.ExtensionDesc{Field: extdesc2.Field}} + wantDescs := []*proto.ExtensionDesc{extdesc1, {Field: extdesc2.Field}} if !reflect.DeepEqual(descs, wantDescs) { t.Errorf("proto.ExtensionDescs(msg) sorted extension ids: got %+v, want %+v", descs, wantDescs) } @@ -200,7 +302,7 @@ func TestGetExtensionDefaults(t *testing.T) { {pb.E_DefaultSfixed64, setInt64, int64(51)}, {pb.E_DefaultBool, setBool, true}, {pb.E_DefaultBool, setBool2, true}, - {pb.E_DefaultString, setString, "Hello, string"}, + {pb.E_DefaultString, setString, "Hello, string,def=foo"}, {pb.E_DefaultBytes, setBytes, []byte("Hello, bytes")}, {pb.E_DefaultEnum, setEnum, pb.DefaultsMessage_ONE}, } @@ -287,6 +389,44 @@ func TestGetExtensionDefaults(t *testing.T) { } } +func TestNilMessage(t *testing.T) { + name := "nil interface" + if got, err := proto.GetExtension(nil, pb.E_Ext_More); err == nil { + t.Errorf("%s: got %T %v, expected to fail", name, got, got) + } else if !strings.Contains(err.Error(), "extendable") { + t.Errorf("%s: got error %v, expected not-extendable error", name, err) + } + + // Regression tests: all functions of the Extension API + // used to panic when passed (*M)(nil), where M is a concrete message + // type. Now they handle this gracefully as a no-op or reported error. + var nilMsg *pb.MyMessage + desc := pb.E_Ext_More + + isNotExtendable := func(err error) bool { + return strings.Contains(fmt.Sprint(err), "not extendable") + } + + if proto.HasExtension(nilMsg, desc) { + t.Error("HasExtension(nil) = true") + } + + if _, err := proto.GetExtensions(nilMsg, []*proto.ExtensionDesc{desc}); !isNotExtendable(err) { + t.Errorf("GetExtensions(nil) = %q (wrong error)", err) + } + + if _, err := proto.ExtensionDescs(nilMsg); !isNotExtendable(err) { + t.Errorf("ExtensionDescs(nil) = %q (wrong error)", err) + } + + if err := proto.SetExtension(nilMsg, desc, nil); !isNotExtendable(err) { + t.Errorf("SetExtension(nil) = %q (wrong error)", err) + } + + proto.ClearExtension(nilMsg, desc) // no-op + proto.ClearAllExtensions(nilMsg) // no-op +} + func TestExtensionsRoundTrip(t *testing.T) { msg := &pb.MyMessage{} ext1 := &pb.Ext{ @@ -311,7 +451,7 @@ func TestExtensionsRoundTrip(t *testing.T) { } x, ok := e.(*pb.Ext) if !ok { - t.Errorf("e has type %T, expected testdata.Ext", e) + t.Errorf("e has type %T, expected test_proto.Ext", e) } else if *x.Data != "there" { t.Errorf("SetExtension failed to overwrite, got %+v, not 'there'", x) } @@ -339,7 +479,7 @@ func TestNilExtension(t *testing.T) { } if err := proto.SetExtension(msg, pb.E_Ext_More, (*pb.Ext)(nil)); err == nil { t.Error("expected SetExtension to fail due to a nil extension") - } else if want := "proto: SetExtension called with nil value of type *testdata.Ext"; err.Error() != want { + } else if want := fmt.Sprintf("proto: SetExtension called with nil value of type %T", new(pb.Ext)); err.Error() != want { t.Errorf("expected error %v, got %v", want, err) } // Note: if the behavior of Marshal is ever changed to ignore nil extensions, update @@ -402,8 +542,13 @@ func TestMarshalUnmarshalRepeatedExtension(t *testing.T) { if ext == nil { t.Fatalf("[%s] Invalid extension", 
test.name) } - if !reflect.DeepEqual(ext, test.ext) { - t.Errorf("[%s] Wrong value for ComplexExtension: got: %v want: %v\n", test.name, ext, test.ext) + if len(ext) != len(test.ext) { + t.Errorf("[%s] Wrong length of ComplexExtension: got: %v want: %v\n", test.name, len(ext), len(test.ext)) + } + for i := range test.ext { + if !proto.Equal(ext[i], test.ext[i]) { + t.Errorf("[%s] Wrong value for ComplexExtension[%d]: got: %v want: %v\n", test.name, i, ext[i], test.ext[i]) + } } } } @@ -477,8 +622,8 @@ func TestUnmarshalRepeatingNonRepeatedExtension(t *testing.T) { if ext == nil { t.Fatalf("[%s] Invalid extension", test.name) } - if !reflect.DeepEqual(*ext, want) { - t.Errorf("[%s] Wrong value for ComplexExtension: got: %s want: %s\n", test.name, ext, want) + if !proto.Equal(ext, &want) { + t.Errorf("[%s] Wrong value for ComplexExtension: got: %s want: %s\n", test.name, ext, &want) } } } @@ -509,28 +654,39 @@ func TestClearAllExtensions(t *testing.T) { } func TestMarshalRace(t *testing.T) { - // unregistered extension - desc := &proto.ExtensionDesc{ - ExtendedType: (*pb.MyMessage)(nil), - ExtensionType: (*bool)(nil), - Field: 101010100, - Name: "emptyextension", - Tag: "varint,0,opt", + ext := &pb.Ext{} + m := &pb.MyMessage{Count: proto.Int32(4)} + if err := proto.SetExtension(m, pb.E_Ext_More, ext); err != nil { + t.Fatalf("proto.SetExtension(m, desc, true): got error %q, want nil", err) } - m := &pb.MyMessage{Count: proto.Int32(4)} - if err := proto.SetExtension(m, desc, proto.Bool(true)); err != nil { - t.Errorf("proto.SetExtension(m, desc, true): got error %q, want nil", err) + b, err := proto.Marshal(m) + if err != nil { + t.Fatalf("Could not marshal message: %v", err) } + if err := proto.Unmarshal(b, m); err != nil { + t.Fatalf("Could not unmarshal message: %v", err) + } + // after Unmarshal, the extension is in undecoded form. + // GetExtension will decode it lazily. Make sure this does + // not race against Marshal. - var g errgroup.Group + wg := sync.WaitGroup{} + errs := make(chan error, 3) for n := 3; n > 0; n-- { - g.Go(func() error { + wg.Add(1) + go func() { + defer wg.Done() _, err := proto.Marshal(m) - return err - }) + errs <- err + }() } - if err := g.Wait(); err != nil { - t.Fatal(err) + wg.Wait() + close(errs) + + for err = range errs { + if err != nil { + t.Fatal(err) + } } } diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go index 1c22550..fdd328b 100644 --- a/vendor/github.com/golang/protobuf/proto/lib.go +++ b/vendor/github.com/golang/protobuf/proto/lib.go @@ -273,32 +273,73 @@ import ( "sync" ) -// Message is implemented by generated protocol buffer messages. -type Message interface { - Reset() - String() string - ProtoMessage() +// RequiredNotSetError is an error type returned by either Marshal or Unmarshal. +// Marshal reports this when a required field is not initialized. +// Unmarshal reports this when a required field is missing from the wire data. +type RequiredNotSetError struct{ field string } + +func (e *RequiredNotSetError) Error() string { + if e.field == "" { + return fmt.Sprintf("proto: required field not set") + } + return fmt.Sprintf("proto: required field %q not set", e.field) +} +func (e *RequiredNotSetError) RequiredNotSet() bool { + return true } -// Stats records allocation details about the protocol buffer encoders -// and decoders. Useful for tuning the library itself. 
-type Stats struct { - Emalloc uint64 // mallocs in encode - Dmalloc uint64 // mallocs in decode - Encode uint64 // number of encodes - Decode uint64 // number of decodes - Chit uint64 // number of cache hits - Cmiss uint64 // number of cache misses - Size uint64 // number of sizes +type invalidUTF8Error struct{ field string } + +func (e *invalidUTF8Error) Error() string { + if e.field == "" { + return "proto: invalid UTF-8 detected" + } + return fmt.Sprintf("proto: field %q contains invalid UTF-8", e.field) +} +func (e *invalidUTF8Error) InvalidUTF8() bool { + return true } -// Set to true to enable stats collection. -const collectStats = false +// errInvalidUTF8 is a sentinel error to identify fields with invalid UTF-8. +// This error should not be exposed to the external API as such errors should +// be recreated with the field information. +var errInvalidUTF8 = &invalidUTF8Error{} -var stats Stats +// isNonFatal reports whether the error is either a RequiredNotSet error +// or a InvalidUTF8 error. +func isNonFatal(err error) bool { + if re, ok := err.(interface{ RequiredNotSet() bool }); ok && re.RequiredNotSet() { + return true + } + if re, ok := err.(interface{ InvalidUTF8() bool }); ok && re.InvalidUTF8() { + return true + } + return false +} -// GetStats returns a copy of the global Stats structure. -func GetStats() Stats { return stats } +type nonFatal struct{ E error } + +// Merge merges err into nf and reports whether it was successful. +// Otherwise it returns false for any fatal non-nil errors. +func (nf *nonFatal) Merge(err error) (ok bool) { + if err == nil { + return true // not an error + } + if !isNonFatal(err) { + return false // fatal error + } + if nf.E == nil { + nf.E = err // store first instance of non-fatal error + } + return true +} + +// Message is implemented by generated protocol buffer messages. +type Message interface { + Reset() + String() string + ProtoMessage() +} // A Buffer is a buffer manager for marshaling and unmarshaling // protocol buffers. It may be reused between invocations to @@ -309,16 +350,7 @@ type Buffer struct { buf []byte // encode/decode byte stream index int // read point - // pools of basic types to amortize allocation. - bools []bool - uint32s []uint32 - uint64s []uint64 - - // extra pools, only used with pointer_reflect.go - int32s []int32 - int64s []int64 - float32s []float32 - float64s []float64 + deterministic bool } // NewBuffer allocates a new Buffer and initializes its internal data to @@ -343,6 +375,30 @@ func (p *Buffer) SetBuf(s []byte) { // Bytes returns the contents of the Buffer. func (p *Buffer) Bytes() []byte { return p.buf } +// SetDeterministic sets whether to use deterministic serialization. +// +// Deterministic serialization guarantees that for a given binary, equal +// messages will always be serialized to the same bytes. This implies: +// +// - Repeated serialization of a message will return the same bytes. +// - Different processes of the same binary (which may be executing on +// different machines) will serialize equal messages to the same bytes. +// +// Note that the deterministic serialization is NOT canonical across +// languages. It is not guaranteed to remain stable over time. It is unstable +// across different builds with schema changes due to unknown fields. +// Users who need canonical serialization (e.g., persistent storage in a +// canonical form, fingerprinting, etc.) should define their own +// canonicalization specification and implement their own serializer rather +// than relying on this API. 
+// +// If deterministic serialization is requested, map entries will be sorted +// by keys in lexographical order. This is an implementation detail and +// subject to change. +func (p *Buffer) SetDeterministic(deterministic bool) { + p.deterministic = deterministic +} + /* * Helper routines for simplifying the creation of optional fields of basic type. */ @@ -831,22 +887,12 @@ func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMes return sf, false, nil } +// mapKeys returns a sort.Interface to be used for sorting the map keys. // Map fields may have key types of non-float scalars, strings and enums. -// The easiest way to sort them in some deterministic order is to use fmt. -// If this turns out to be inefficient we can always consider other options, -// such as doing a Schwartzian transform. - func mapKeys(vs []reflect.Value) sort.Interface { - s := mapKeySorter{ - vs: vs, - // default Less function: textual comparison - less: func(a, b reflect.Value) bool { - return fmt.Sprint(a.Interface()) < fmt.Sprint(b.Interface()) - }, - } + s := mapKeySorter{vs: vs} - // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps; - // numeric keys are sorted numerically. + // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps. if len(vs) == 0 { return s } @@ -855,6 +901,12 @@ func mapKeys(vs []reflect.Value) sort.Interface { s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } case reflect.Uint32, reflect.Uint64: s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } + case reflect.Bool: + s.less = func(a, b reflect.Value) bool { return !a.Bool() && b.Bool() } // false < true + case reflect.String: + s.less = func(a, b reflect.Value) bool { return a.String() < b.String() } + default: + panic(fmt.Sprintf("unsupported map key type: %v", vs[0].Kind())) } return s @@ -888,10 +940,26 @@ func isProto3Zero(v reflect.Value) bool { return false } -// ProtoPackageIsVersion2 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion2 = true +const ( + // ProtoPackageIsVersion3 is referenced from generated protocol buffer files + // to assert that that code is compatible with this version of the proto package. + ProtoPackageIsVersion3 = true + + // ProtoPackageIsVersion2 is referenced from generated protocol buffer files + // to assert that that code is compatible with this version of the proto package. + ProtoPackageIsVersion2 = true -// ProtoPackageIsVersion1 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion1 = true + // ProtoPackageIsVersion1 is referenced from generated protocol buffer files + // to assert that that code is compatible with this version of the proto package. + ProtoPackageIsVersion1 = true +) + +// InternalMessageInfo is a type used internally by generated .pb.go files. +// This type is not intended to be used by non-generated code. +// This type is not subject to any compatibility guarantee. 
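Aside (not part of the patch): the new Buffer.SetDeterministic and the RequiredNotSetError plumbing introduced in this lib.go hunk are the user-visible pieces of the change. A minimal usage sketch, assuming the test_proto message types vendored alongside this package:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/test_proto"
)

func main() {
	m := &pb.MyMessage{Count: proto.Int32(4)}

	// Deterministic output: map entries are emitted in sorted key order,
	// so repeated marshals of an equal message produce identical bytes.
	var buf proto.Buffer
	buf.SetDeterministic(true)
	err := buf.Marshal(m)

	// RequiredNotSetError is "non-fatal": the encoded bytes are still usable.
	if rns, ok := err.(interface{ RequiredNotSet() bool }); ok && rns.RequiredNotSet() {
		fmt.Println("marshaled despite a missing required field:", err)
	} else if err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", buf.Bytes())
}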
+type InternalMessageInfo struct { + marshal *marshalInfo + unmarshal *unmarshalInfo + merge *mergeInfo + discard *discardInfo +} diff --git a/vendor/github.com/golang/protobuf/proto/map_test.go b/vendor/github.com/golang/protobuf/proto/map_test.go index 313e879..b1e1529 100644 --- a/vendor/github.com/golang/protobuf/proto/map_test.go +++ b/vendor/github.com/golang/protobuf/proto/map_test.go @@ -2,12 +2,36 @@ package proto_test import ( "fmt" + "reflect" "testing" "github.com/golang/protobuf/proto" ppb "github.com/golang/protobuf/proto/proto3_proto" ) +func TestMap(t *testing.T) { + var b []byte + fmt.Sscanf("a2010c0a044b657931120456616c31a201130a044b657932120556616c3261120456616c32a201240a044b6579330d05000000120556616c33621a0556616c3361120456616c331505000000a20100a201260a044b657934130a07536f6d6555524c1209536f6d655469746c651a08536e69707065743114", "%x", &b) + + var m ppb.Message + if err := proto.Unmarshal(b, &m); err != nil { + t.Fatalf("proto.Unmarshal error: %v", err) + } + + got := m.StringMap + want := map[string]string{ + "": "", + "Key1": "Val1", + "Key2": "Val2", + "Key3": "Val3", + "Key4": "", + } + + if !reflect.DeepEqual(got, want) { + t.Errorf("maps differ:\ngot %#v\nwant %#v", got, want) + } +} + func marshalled() []byte { m := &ppb.IntMaps{} for i := 0; i < 1000; i++ { diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go index fd982de..f48a756 100644 --- a/vendor/github.com/golang/protobuf/proto/message_set.go +++ b/vendor/github.com/golang/protobuf/proto/message_set.go @@ -36,12 +36,7 @@ package proto */ import ( - "bytes" - "encoding/json" "errors" - "fmt" - "reflect" - "sort" ) // errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. @@ -94,10 +89,7 @@ func (ms *messageSet) find(pb Message) *_MessageSet_Item { } func (ms *messageSet) Has(pb Message) bool { - if ms.find(pb) != nil { - return true - } - return false + return ms.find(pb) != nil } func (ms *messageSet) Unmarshal(pb Message) error { @@ -147,50 +139,9 @@ func skipVarint(buf []byte) []byte { return buf[i+1:] } -// MarshalMessageSet encodes the extension map represented by m in the message set wire format. -// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option. -func MarshalMessageSet(exts interface{}) ([]byte, error) { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - if err := encodeExtensions(exts); err != nil { - return nil, err - } - m, _ = exts.extensionsRead() - case map[int32]Extension: - if err := encodeExtensionsMap(exts); err != nil { - return nil, err - } - m = exts - default: - return nil, errors.New("proto: not an extension map") - } - - // Sort extension IDs to provide a deterministic encoding. - // See also enc_map in encode.go. - ids := make([]int, 0, len(m)) - for id := range m { - ids = append(ids, int(id)) - } - sort.Ints(ids) - - ms := &messageSet{Item: make([]*_MessageSet_Item, 0, len(m))} - for _, id := range ids { - e := m[int32(id)] - // Remove the wire type and field number varint, as well as the length varint. - msg := skipVarint(skipVarint(e.enc)) - - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: Int32(int32(id)), - Message: msg, - }) - } - return Marshal(ms) -} - -// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. -// It is called by generated Unmarshal methods on protocol buffer messages with the message_set_wire_format option. 
-func UnmarshalMessageSet(buf []byte, exts interface{}) error { +// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. +// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option. +func unmarshalMessageSet(buf []byte, exts interface{}) error { var m map[int32]Extension switch exts := exts.(type) { case *XXX_InternalExtensions: @@ -228,84 +179,3 @@ func UnmarshalMessageSet(buf []byte, exts interface{}) error { } return nil } - -// MarshalMessageSetJSON encodes the extension map represented by m in JSON format. -// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - m, _ = exts.extensionsRead() - case map[int32]Extension: - m = exts - default: - return nil, errors.New("proto: not an extension map") - } - var b bytes.Buffer - b.WriteByte('{') - - // Process the map in key order for deterministic output. - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) // int32Slice defined in text.go - - for i, id := range ids { - ext := m[id] - if i > 0 { - b.WriteByte(',') - } - - msd, ok := messageSetMap[id] - if !ok { - // Unknown type; we can't render it, so skip it. - continue - } - fmt.Fprintf(&b, `"[%s]":`, msd.name) - - x := ext.value - if x == nil { - x = reflect.New(msd.t.Elem()).Interface() - if err := Unmarshal(ext.enc, x.(Message)); err != nil { - return nil, err - } - } - d, err := json.Marshal(x) - if err != nil { - return nil, err - } - b.Write(d) - } - b.WriteByte('}') - return b.Bytes(), nil -} - -// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format. -// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error { - // Common-case fast path. - if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) { - return nil - } - - // This is fairly tricky, and it's not clear that it is needed. - return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented") -} - -// A global registry of types that can be used in a MessageSet. - -var messageSetMap = make(map[int32]messageSetDesc) - -type messageSetDesc struct { - t reflect.Type // pointer to struct - name string -} - -// RegisterMessageSetType is called from the generated code. -func RegisterMessageSetType(m Message, fieldNum int32, name string) { - messageSetMap[fieldNum] = messageSetDesc{ - t: reflect.TypeOf(m), - name: name, - } -} diff --git a/vendor/github.com/golang/protobuf/proto/message_set_test.go b/vendor/github.com/golang/protobuf/proto/message_set_test.go index 353a3ea..1bd11aa 100644 --- a/vendor/github.com/golang/protobuf/proto/message_set_test.go +++ b/vendor/github.com/golang/protobuf/proto/message_set_test.go @@ -29,38 +29,60 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -package proto +package proto_test import ( "bytes" + "fmt" "testing" + + "github.com/golang/protobuf/proto" + . "github.com/golang/protobuf/proto/test_proto" ) func TestUnmarshalMessageSetWithDuplicate(t *testing.T) { - // Check that a repeated message set entry will be concatenated. 
- in := &messageSet{ - Item: []*_MessageSet_Item{ - {TypeId: Int32(12345), Message: []byte("hoo")}, - {TypeId: Int32(12345), Message: []byte("hah")}, - }, + /* + Message{ + Tag{1, StartGroup}, + Message{ + Tag{2, Varint}, Uvarint(12345), + Tag{3, Bytes}, Bytes("hoo"), + }, + Tag{1, EndGroup}, + Tag{1, StartGroup}, + Message{ + Tag{2, Varint}, Uvarint(12345), + Tag{3, Bytes}, Bytes("hah"), + }, + Tag{1, EndGroup}, + } + */ + var in []byte + fmt.Sscanf("0b10b9601a03686f6f0c0b10b9601a036861680c", "%x", &in) + + /* + Message{ + Tag{1, StartGroup}, + Message{ + Tag{2, Varint}, Uvarint(12345), + Tag{3, Bytes}, Bytes("hoohah"), + }, + Tag{1, EndGroup}, + } + */ + var want []byte + fmt.Sscanf("0b10b9601a06686f6f6861680c", "%x", &want) + + var m MyMessageSet + if err := proto.Unmarshal(in, &m); err != nil { + t.Fatalf("unexpected Unmarshal error: %v", err) } - b, err := Marshal(in) + got, err := proto.Marshal(&m) if err != nil { - t.Fatalf("Marshal: %v", err) + t.Fatalf("unexpected Marshal error: %v", err) } - t.Logf("Marshaled bytes: %q", b) - var extensions XXX_InternalExtensions - if err := UnmarshalMessageSet(b, &extensions); err != nil { - t.Fatalf("UnmarshalMessageSet: %v", err) - } - ext, ok := extensions.p.extensionMap[12345] - if !ok { - t.Fatalf("Didn't retrieve extension 12345; map is %v", extensions.p.extensionMap) - } - // Skip wire type/field number and length varints. - got := skipVarint(skipVarint(ext.enc)) - if want := []byte("hoohah"); !bytes.Equal(got, want) { - t.Errorf("Combined extension is %q, want %q", got, want) + if !bytes.Equal(got, want) { + t.Errorf("output mismatch:\ngot %x\nwant %x", got, want) } } diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go index fb512e2..94fa919 100644 --- a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go +++ b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go @@ -29,7 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// +build appengine js +// +build purego appengine js // This file contains an implementation of proto field accesses using package reflect. // It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can @@ -38,32 +38,13 @@ package proto import ( - "math" "reflect" + "sync" ) -// A structPointer is a pointer to a struct. -type structPointer struct { - v reflect.Value -} - -// toStructPointer returns a structPointer equivalent to the given reflect value. -// The reflect value must itself be a pointer to a struct. -func toStructPointer(v reflect.Value) structPointer { - return structPointer{v} -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p.v.IsNil() -} +const unsafeAllowed = false -// Interface returns the struct pointer as an interface value. -func structPointer_Interface(p structPointer, _ reflect.Type) interface{} { - return p.v.Interface() -} - -// A field identifies a field in a struct, accessible from a structPointer. +// A field identifies a field in a struct, accessible from a pointer. // In this implementation, a field is identified by the sequence of field indices // passed to reflect's FieldByIndex. type field []int @@ -76,409 +57,304 @@ func toField(f *reflect.StructField) field { // invalidField is an invalid field identifier. var invalidField = field(nil) +// zeroField is a noop when calling pointer.offset. 
+var zeroField = field([]int{}) + // IsValid reports whether the field identifier is valid. func (f field) IsValid() bool { return f != nil } -// field returns the given field in the struct as a reflect value. -func structPointer_field(p structPointer, f field) reflect.Value { - // Special case: an extension map entry with a value of type T - // passes a *T to the struct-handling code with a zero field, - // expecting that it will be treated as equivalent to *struct{ X T }, - // which has the same memory layout. We have to handle that case - // specially, because reflect will panic if we call FieldByIndex on a - // non-struct. - if f == nil { - return p.v.Elem() - } - - return p.v.Elem().FieldByIndex(f) +// The pointer type is for the table-driven decoder. +// The implementation here uses a reflect.Value of pointer type to +// create a generic pointer. In pointer_unsafe.go we use unsafe +// instead of reflect to implement the same (but faster) interface. +type pointer struct { + v reflect.Value } -// ifield returns the given field in the struct as an interface value. -func structPointer_ifield(p structPointer, f field) interface{} { - return structPointer_field(p, f).Addr().Interface() +// toPointer converts an interface of pointer type to a pointer +// that points to the same target. +func toPointer(i *Message) pointer { + return pointer{v: reflect.ValueOf(*i)} } -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return structPointer_ifield(p, f).(*[]byte) +// toAddrPointer converts an interface to a pointer that points to +// the interface data. +func toAddrPointer(i *interface{}, isptr, deref bool) pointer { + v := reflect.ValueOf(*i) + u := reflect.New(v.Type()) + u.Elem().Set(v) + if deref { + u = u.Elem() + } + return pointer{v: u} } -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return structPointer_ifield(p, f).(*[][]byte) +// valToPointer converts v to a pointer. v must be of pointer type. +func valToPointer(v reflect.Value) pointer { + return pointer{v: v} } -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return structPointer_ifield(p, f).(**bool) +// offset converts from a pointer to a structure to a pointer to +// one of its fields. +func (p pointer) offset(f field) pointer { + return pointer{v: p.v.Elem().FieldByIndex(f).Addr()} } -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return structPointer_ifield(p, f).(*bool) +func (p pointer) isNil() bool { + return p.v.IsNil() } -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return structPointer_ifield(p, f).(*[]bool) +// grow updates the slice s in place to make it one element longer. +// s must be addressable. +// Returns the (addressable) new element. +func grow(s reflect.Value) reflect.Value { + n, m := s.Len(), s.Cap() + if n < m { + s.SetLen(n + 1) + } else { + s.Set(reflect.Append(s, reflect.Zero(s.Type().Elem()))) + } + return s.Index(n) } -// String returns the address of a *string field in the struct. 
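Aside (not part of the patch): in the purego pointer_reflect.go implementation above, a field is just a sequence of struct-field indices, and offset reaches it with FieldByIndex(...).Addr(). A small standalone sketch of that mechanism, using illustrative types rather than the library's own:

package main

import (
	"fmt"
	"reflect"
)

type message struct {
	Count *int32
	Name  string
}

// fieldAddr returns a pointer to the struct field identified by index,
// the same FieldByIndex(...).Addr() walk the reflect-based pointer uses.
func fieldAddr(structPtr interface{}, index []int) interface{} {
	v := reflect.ValueOf(structPtr).Elem()
	return v.FieldByIndex(index).Addr().Interface()
}

func main() {
	m := &message{}
	name := fieldAddr(m, []int{1}).(*string)
	*name = "hello"
	fmt.Println(m.Name) // hello
}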
-func structPointer_String(p structPointer, f field) **string { - return structPointer_ifield(p, f).(**string) +func (p pointer) toInt64() *int64 { + return p.v.Interface().(*int64) } - -// StringVal returns the address of a string field in the struct. -func structPointer_StringVal(p structPointer, f field) *string { - return structPointer_ifield(p, f).(*string) +func (p pointer) toInt64Ptr() **int64 { + return p.v.Interface().(**int64) } - -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return structPointer_ifield(p, f).(*[]string) +func (p pointer) toInt64Slice() *[]int64 { + return p.v.Interface().(*[]int64) } -// Extensions returns the address of an extension map field in the struct. -func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return structPointer_ifield(p, f).(*XXX_InternalExtensions) -} +var int32ptr = reflect.TypeOf((*int32)(nil)) -// ExtMap returns the address of an extension map field in the struct. -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return structPointer_ifield(p, f).(*map[int32]Extension) +func (p pointer) toInt32() *int32 { + return p.v.Convert(int32ptr).Interface().(*int32) } -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return structPointer_field(p, f).Addr() +// The toInt32Ptr/Slice methods don't work because of enums. +// Instead, we must use set/get methods for the int32ptr/slice case. +/* + func (p pointer) toInt32Ptr() **int32 { + return p.v.Interface().(**int32) } - -// SetStructPointer writes a *struct field in the struct. -func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - structPointer_field(p, f).Set(q.v) + func (p pointer) toInt32Slice() *[]int32 { + return p.v.Interface().(*[]int32) } - -// GetStructPointer reads a *struct field in the struct. -func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return structPointer{structPointer_field(p, f)} +*/ +func (p pointer) getInt32Ptr() *int32 { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + return p.v.Elem().Interface().(*int32) + } + // an enum + return p.v.Elem().Convert(int32PtrType).Interface().(*int32) +} +func (p pointer) setInt32Ptr(v int32) { + // Allocate value in a *int32. Possibly convert that to a *enum. + // Then assign it to a **int32 or **enum. + // Note: we can convert *int32 to *enum, but we can't convert + // **int32 to **enum! + p.v.Elem().Set(reflect.ValueOf(&v).Convert(p.v.Type().Elem())) +} + +// getInt32Slice copies []int32 from p as a new slice. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) getInt32Slice() []int32 { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + return p.v.Elem().Interface().([]int32) + } + // an enum + // Allocate a []int32, then assign []enum's values into it. + // Note: we can't convert []enum to []int32. + slice := p.v.Elem() + s := make([]int32, slice.Len()) + for i := 0; i < slice.Len(); i++ { + s[i] = int32(slice.Index(i).Int()) + } + return s } -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) structPointerSlice { - return structPointerSlice{structPointer_field(p, f)} +// setInt32Slice copies []int32 into p as a new slice. 
+// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) setInt32Slice(v []int32) { + if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { + // raw int32 type + p.v.Elem().Set(reflect.ValueOf(v)) + return + } + // an enum + // Allocate a []enum, then assign []int32's values into it. + // Note: we can't convert []enum to []int32. + slice := reflect.MakeSlice(p.v.Type().Elem(), len(v), cap(v)) + for i, x := range v { + slice.Index(i).SetInt(int64(x)) + } + p.v.Elem().Set(slice) } - -// A structPointerSlice represents the address of a slice of pointers to structs -// (themselves messages or groups). That is, v.Type() is *[]*struct{...}. -type structPointerSlice struct { - v reflect.Value +func (p pointer) appendInt32Slice(v int32) { + grow(p.v.Elem()).SetInt(int64(v)) } -func (p structPointerSlice) Len() int { return p.v.Len() } -func (p structPointerSlice) Index(i int) structPointer { return structPointer{p.v.Index(i)} } -func (p structPointerSlice) Append(q structPointer) { - p.v.Set(reflect.Append(p.v, q.v)) +func (p pointer) toUint64() *uint64 { + return p.v.Interface().(*uint64) } - -var ( - int32Type = reflect.TypeOf(int32(0)) - uint32Type = reflect.TypeOf(uint32(0)) - float32Type = reflect.TypeOf(float32(0)) - int64Type = reflect.TypeOf(int64(0)) - uint64Type = reflect.TypeOf(uint64(0)) - float64Type = reflect.TypeOf(float64(0)) -) - -// A word32 represents a field of type *int32, *uint32, *float32, or *enum. -// That is, v.Type() is *int32, *uint32, *float32, or *enum and v is assignable. -type word32 struct { - v reflect.Value +func (p pointer) toUint64Ptr() **uint64 { + return p.v.Interface().(**uint64) } - -// IsNil reports whether p is nil. -func word32_IsNil(p word32) bool { - return p.v.IsNil() +func (p pointer) toUint64Slice() *[]uint64 { + return p.v.Interface().(*[]uint64) } - -// Set sets p to point at a newly allocated word with bits set to x. -func word32_Set(p word32, o *Buffer, x uint32) { - t := p.v.Type().Elem() - switch t { - case int32Type: - if len(o.int32s) == 0 { - o.int32s = make([]int32, uint32PoolSize) - } - o.int32s[0] = int32(x) - p.v.Set(reflect.ValueOf(&o.int32s[0])) - o.int32s = o.int32s[1:] - return - case uint32Type: - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) - } - o.uint32s[0] = x - p.v.Set(reflect.ValueOf(&o.uint32s[0])) - o.uint32s = o.uint32s[1:] - return - case float32Type: - if len(o.float32s) == 0 { - o.float32s = make([]float32, uint32PoolSize) - } - o.float32s[0] = math.Float32frombits(x) - p.v.Set(reflect.ValueOf(&o.float32s[0])) - o.float32s = o.float32s[1:] - return - } - - // must be enum - p.v.Set(reflect.New(t)) - p.v.Elem().SetInt(int64(int32(x))) +func (p pointer) toUint32() *uint32 { + return p.v.Interface().(*uint32) } - -// Get gets the bits pointed at by p, as a uint32. -func word32_Get(p word32) uint32 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") +func (p pointer) toUint32Ptr() **uint32 { + return p.v.Interface().(**uint32) } - -// Word32 returns a reference to a *int32, *uint32, *float32, or *enum field in the struct. 
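Aside (not part of the patch): the getInt32Ptr/setInt32Slice contortions above exist because of how Go's conversion rules treat generated enum types (named types whose underlying type is int32). A tiny illustration, with E standing in for a generated enum:

package main

import "fmt"

// E stands in for a generated protobuf enum type.
type E int32

func main() {
	v := E(3)
	p := &v

	// *E converts to *int32: both point to types with underlying type int32.
	q := (*int32)(p)
	fmt.Println(*q) // 3

	// But the conversion does not lift through another level of indirection
	// or through slices, which is why the pointer code copies element by element:
	//
	//	pp := (**int32)(&p)     // compile error: cannot convert **E to **int32
	//	s := []int32([]E{1, 2}) // compile error: cannot convert []E to []int32
}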
-func structPointer_Word32(p structPointer, f field) word32 { - return word32{structPointer_field(p, f)} +func (p pointer) toUint32Slice() *[]uint32 { + return p.v.Interface().(*[]uint32) } - -// A word32Val represents a field of type int32, uint32, float32, or enum. -// That is, v.Type() is int32, uint32, float32, or enum and v is assignable. -type word32Val struct { - v reflect.Value +func (p pointer) toBool() *bool { + return p.v.Interface().(*bool) } - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - switch p.v.Type() { - case int32Type: - p.v.SetInt(int64(x)) - return - case uint32Type: - p.v.SetUint(uint64(x)) - return - case float32Type: - p.v.SetFloat(float64(math.Float32frombits(x))) - return - } - - // must be enum - p.v.SetInt(int64(int32(x))) +func (p pointer) toBoolPtr() **bool { + return p.v.Interface().(**bool) } - -// Get gets the bits pointed at by p, as a uint32. -func word32Val_Get(p word32Val) uint32 { - elem := p.v - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") +func (p pointer) toBoolSlice() *[]bool { + return p.v.Interface().(*[]bool) } - -// Word32Val returns a reference to a int32, uint32, float32, or enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val{structPointer_field(p, f)} +func (p pointer) toFloat64() *float64 { + return p.v.Interface().(*float64) } - -// A word32Slice is a slice of 32-bit values. -// That is, v.Type() is []int32, []uint32, []float32, or []enum. -type word32Slice struct { - v reflect.Value +func (p pointer) toFloat64Ptr() **float64 { + return p.v.Interface().(**float64) } - -func (p word32Slice) Append(x uint32) { - n, m := p.v.Len(), p.v.Cap() - if n < m { - p.v.SetLen(n + 1) - } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) - } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int32: - elem.SetInt(int64(int32(x))) - case reflect.Uint32: - elem.SetUint(uint64(x)) - case reflect.Float32: - elem.SetFloat(float64(math.Float32frombits(x))) - } +func (p pointer) toFloat64Slice() *[]float64 { + return p.v.Interface().(*[]float64) } - -func (p word32Slice) Len() int { - return p.v.Len() +func (p pointer) toFloat32() *float32 { + return p.v.Interface().(*float32) } - -func (p word32Slice) Index(i int) uint32 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") +func (p pointer) toFloat32Ptr() **float32 { + return p.v.Interface().(**float32) } - -// Word32Slice returns a reference to a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) word32Slice { - return word32Slice{structPointer_field(p, f)} +func (p pointer) toFloat32Slice() *[]float32 { + return p.v.Interface().(*[]float32) } - -// word64 is like word32 but for 64-bit values. 
-type word64 struct { - v reflect.Value +func (p pointer) toString() *string { + return p.v.Interface().(*string) } - -func word64_Set(p word64, o *Buffer, x uint64) { - t := p.v.Type().Elem() - switch t { - case int64Type: - if len(o.int64s) == 0 { - o.int64s = make([]int64, uint64PoolSize) - } - o.int64s[0] = int64(x) - p.v.Set(reflect.ValueOf(&o.int64s[0])) - o.int64s = o.int64s[1:] - return - case uint64Type: - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) - } - o.uint64s[0] = x - p.v.Set(reflect.ValueOf(&o.uint64s[0])) - o.uint64s = o.uint64s[1:] - return - case float64Type: - if len(o.float64s) == 0 { - o.float64s = make([]float64, uint64PoolSize) - } - o.float64s[0] = math.Float64frombits(x) - p.v.Set(reflect.ValueOf(&o.float64s[0])) - o.float64s = o.float64s[1:] - return - } - panic("unreachable") +func (p pointer) toStringPtr() **string { + return p.v.Interface().(**string) } - -func word64_IsNil(p word64) bool { - return p.v.IsNil() +func (p pointer) toStringSlice() *[]string { + return p.v.Interface().(*[]string) } - -func word64_Get(p word64) uint64 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) - } - panic("unreachable") +func (p pointer) toBytes() *[]byte { + return p.v.Interface().(*[]byte) } - -func structPointer_Word64(p structPointer, f field) word64 { - return word64{structPointer_field(p, f)} +func (p pointer) toBytesSlice() *[][]byte { + return p.v.Interface().(*[][]byte) +} +func (p pointer) toExtensions() *XXX_InternalExtensions { + return p.v.Interface().(*XXX_InternalExtensions) +} +func (p pointer) toOldExtensions() *map[int32]Extension { + return p.v.Interface().(*map[int32]Extension) +} +func (p pointer) getPointer() pointer { + return pointer{v: p.v.Elem()} +} +func (p pointer) setPointer(q pointer) { + p.v.Elem().Set(q.v) +} +func (p pointer) appendPointer(q pointer) { + grow(p.v.Elem()).Set(q.v) } -// word64Val is like word32Val but for 64-bit values. -type word64Val struct { - v reflect.Value +// getPointerSlice copies []*T from p as a new []pointer. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) getPointerSlice() []pointer { + if p.v.IsNil() { + return nil + } + n := p.v.Elem().Len() + s := make([]pointer, n) + for i := 0; i < n; i++ { + s[i] = pointer{v: p.v.Elem().Index(i)} + } + return s } -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - switch p.v.Type() { - case int64Type: - p.v.SetInt(int64(x)) - return - case uint64Type: - p.v.SetUint(x) - return - case float64Type: - p.v.SetFloat(math.Float64frombits(x)) +// setPointerSlice copies []pointer into p as a new []*T. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) setPointerSlice(v []pointer) { + if v == nil { + p.v.Elem().Set(reflect.New(p.v.Elem().Type()).Elem()) return } - panic("unreachable") + s := reflect.MakeSlice(p.v.Elem().Type(), 0, len(v)) + for _, p := range v { + s = reflect.Append(s, p.v) + } + p.v.Elem().Set(s) } -func word64Val_Get(p word64Val) uint64 { - elem := p.v - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) +// getInterfacePointer returns a pointer that points to the +// interface data of the interface pointed by p. 
+func (p pointer) getInterfacePointer() pointer { + if p.v.Elem().IsNil() { + return pointer{v: p.v.Elem()} } - panic("unreachable") + return pointer{v: p.v.Elem().Elem().Elem().Field(0).Addr()} // *interface -> interface -> *struct -> struct } -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val{structPointer_field(p, f)} +func (p pointer) asPointerTo(t reflect.Type) reflect.Value { + // TODO: check that p.v.Type().Elem() == t? + return p.v } -type word64Slice struct { - v reflect.Value +func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p } - -func (p word64Slice) Append(x uint64) { - n, m := p.v.Len(), p.v.Cap() - if n < m { - p.v.SetLen(n + 1) - } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) - } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int64: - elem.SetInt(int64(int64(x))) - case reflect.Uint64: - elem.SetUint(uint64(x)) - case reflect.Float64: - elem.SetFloat(float64(math.Float64frombits(x))) - } +func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v } - -func (p word64Slice) Len() int { - return p.v.Len() +func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p } - -func (p word64Slice) Index(i int) uint64 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return uint64(elem.Uint()) - case reflect.Float64: - return math.Float64bits(float64(elem.Float())) - } - panic("unreachable") +func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v } - -func structPointer_Word64Slice(p structPointer, f field) word64Slice { - return word64Slice{structPointer_field(p, f)} +func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v } +func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { + atomicLock.Lock() + defer atomicLock.Unlock() + return *p +} +func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { + atomicLock.Lock() + defer atomicLock.Unlock() + *p = v +} + +var atomicLock sync.Mutex diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go index 6b5567d..dbfffe0 100644 --- a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go +++ b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go @@ -29,7 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// +build !appengine,!js +// +build !purego,!appengine,!js // This file contains the implementation of the proto field accesses using package unsafe. @@ -37,38 +37,13 @@ package proto import ( "reflect" + "sync/atomic" "unsafe" ) -// NOTE: These type_Foo functions would more idiomatically be methods, -// but Go does not allow methods on pointer types, and we must preserve -// some pointer type for the garbage collector. We use these -// funcs with clunky names as our poor approximation to methods. -// -// An alternative would be -// type structPointer struct { p unsafe.Pointer } -// but that does not registerize as well. 
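Aside (not part of the patch): the atomicLoad*/atomicStore* helpers at the end of the reflect-based file above fall back to a single package-level sync.Mutex, because the purego build cannot use sync/atomic on unsafe.Pointer the way pointer_unsafe.go does. The pattern, reduced to a sketch with illustrative types:

package main

import (
	"fmt"
	"sync"
)

// info stands in for unmarshalInfo, marshalInfo, etc. in the real code.
type info struct{ name string }

var mu sync.Mutex // one lock serializes every "atomic" load and store

func loadInfo(p **info) *info {
	mu.Lock()
	defer mu.Unlock()
	return *p
}

func storeInfo(p **info, v *info) {
	mu.Lock()
	defer mu.Unlock()
	*p = v
}

func main() {
	var cached *info
	if loadInfo(&cached) == nil {
		storeInfo(&cached, &info{name: "lazily built"})
	}
	fmt.Println(loadInfo(&cached).name)
}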
- -// A structPointer is a pointer to a struct. -type structPointer unsafe.Pointer - -// toStructPointer returns a structPointer equivalent to the given reflect value. -func toStructPointer(v reflect.Value) structPointer { - return structPointer(unsafe.Pointer(v.Pointer())) -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p == nil -} - -// Interface returns the struct pointer, assumed to have element type t, -// as an interface value. -func structPointer_Interface(p structPointer, t reflect.Type) interface{} { - return reflect.NewAt(t, unsafe.Pointer(p)).Interface() -} +const unsafeAllowed = true -// A field identifies a field in a struct, accessible from a structPointer. +// A field identifies a field in a struct, accessible from a pointer. // In this implementation, a field is identified by its byte offset from the start of the struct. type field uintptr @@ -80,191 +55,259 @@ func toField(f *reflect.StructField) field { // invalidField is an invalid field identifier. const invalidField = ^field(0) +// zeroField is a noop when calling pointer.offset. +const zeroField = field(0) + // IsValid reports whether the field identifier is valid. func (f field) IsValid() bool { - return f != ^field(0) + return f != invalidField +} + +// The pointer type below is for the new table-driven encoder/decoder. +// The implementation here uses unsafe.Pointer to create a generic pointer. +// In pointer_reflect.go we use reflect instead of unsafe to implement +// the same (but slower) interface. +type pointer struct { + p unsafe.Pointer +} + +// size of pointer +var ptrSize = unsafe.Sizeof(uintptr(0)) + +// toPointer converts an interface of pointer type to a pointer +// that points to the same target. +func toPointer(i *Message) pointer { + // Super-tricky - read pointer out of data word of interface value. + // Saves ~25ns over the equivalent: + // return valToPointer(reflect.ValueOf(*i)) + return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} +} + +// toAddrPointer converts an interface to a pointer that points to +// the interface data. +func toAddrPointer(i *interface{}, isptr, deref bool) (p pointer) { + // Super-tricky - read or get the address of data word of interface value. + if isptr { + // The interface is of pointer type, thus it is a direct interface. + // The data word is the pointer data itself. We take its address. + p = pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)} + } else { + // The interface is not of pointer type. The data word is the pointer + // to the data. + p = pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} + } + if deref { + p.p = *(*unsafe.Pointer)(p.p) + } + return p } -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return (*[]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// valToPointer converts v to a pointer. v must be of pointer type. +func valToPointer(v reflect.Value) pointer { + return pointer{p: unsafe.Pointer(v.Pointer())} } -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return (*[][]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// offset converts from a pointer to a structure to a pointer to +// one of its fields. +func (p pointer) offset(f field) pointer { + // For safety, we should panic if !f.IsValid, however calling panic causes + // this to no longer be inlineable, which is a serious performance cost. 
+ /* + if !f.IsValid() { + panic("invalid field") + } + */ + return pointer{p: unsafe.Pointer(uintptr(p.p) + uintptr(f))} } -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return (**bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) isNil() bool { + return p.p == nil } -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return (*bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toInt64() *int64 { + return (*int64)(p.p) } - -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return (*[]bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toInt64Ptr() **int64 { + return (**int64)(p.p) } - -// String returns the address of a *string field in the struct. -func structPointer_String(p structPointer, f field) **string { - return (**string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toInt64Slice() *[]int64 { + return (*[]int64)(p.p) } - -// StringVal returns the address of a string field in the struct. -func structPointer_StringVal(p structPointer, f field) *string { - return (*string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toInt32() *int32 { + return (*int32)(p.p) } -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return (*[]string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// See pointer_reflect.go for why toInt32Ptr/Slice doesn't exist. +/* + func (p pointer) toInt32Ptr() **int32 { + return (**int32)(p.p) + } + func (p pointer) toInt32Slice() *[]int32 { + return (*[]int32)(p.p) + } +*/ +func (p pointer) getInt32Ptr() *int32 { + return *(**int32)(p.p) } - -// ExtMap returns the address of an extension map field in the struct. -func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return (*XXX_InternalExtensions)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) setInt32Ptr(v int32) { + *(**int32)(p.p) = &v } -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return (*map[int32]Extension)(unsafe.Pointer(uintptr(p) + uintptr(f))) +// getInt32Slice loads a []int32 from p. +// The value returned is aliased with the original slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) getInt32Slice() []int32 { + return *(*[]int32)(p.p) } -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return reflect.NewAt(typ, unsafe.Pointer(uintptr(p)+uintptr(f))) +// setInt32Slice stores a []int32 to p. +// The value set is aliased with the input slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) setInt32Slice(v []int32) { + *(*[]int32)(p.p) = v } -// SetStructPointer writes a *struct field in the struct. -func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) = q +// TODO: Can we get rid of appendInt32Slice and use setInt32Slice instead? +func (p pointer) appendInt32Slice(v int32) { + s := (*[]int32)(p.p) + *s = append(*s, v) } -// GetStructPointer reads a *struct field in the struct. 
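Aside (not part of the patch): toPointer and toAddrPointer above read the data word straight out of an interface header to avoid a reflect.ValueOf call. A standalone sketch of that trick follows; it relies on the gc runtime's current two-word interface layout, which the language spec does not guarantee:

package main

import (
	"fmt"
	"unsafe"
)

type msg struct{ n int }

func main() {
	m := &msg{n: 5}
	var i interface{} = m

	// An interface value is (type word, data word); for a pointer-shaped value
	// the data word is the *msg itself.
	data := (*[2]unsafe.Pointer)(unsafe.Pointer(&i))[1]
	fmt.Println((*msg)(data).n) // 5
}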
-func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toUint64() *uint64 { + return (*uint64)(p.p) } - -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) *structPointerSlice { - return (*structPointerSlice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toUint64Ptr() **uint64 { + return (**uint64)(p.p) } - -// A structPointerSlice represents a slice of pointers to structs (themselves submessages or groups). -type structPointerSlice []structPointer - -func (v *structPointerSlice) Len() int { return len(*v) } -func (v *structPointerSlice) Index(i int) structPointer { return (*v)[i] } -func (v *structPointerSlice) Append(p structPointer) { *v = append(*v, p) } - -// A word32 is the address of a "pointer to 32-bit value" field. -type word32 **uint32 - -// IsNil reports whether *v is nil. -func word32_IsNil(p word32) bool { - return *p == nil +func (p pointer) toUint64Slice() *[]uint64 { + return (*[]uint64)(p.p) } - -// Set sets *v to point at a newly allocated word set to x. -func word32_Set(p word32, o *Buffer, x uint32) { - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) - } - o.uint32s[0] = x - *p = &o.uint32s[0] - o.uint32s = o.uint32s[1:] +func (p pointer) toUint32() *uint32 { + return (*uint32)(p.p) } - -// Get gets the value pointed at by *v. -func word32_Get(p word32) uint32 { - return **p +func (p pointer) toUint32Ptr() **uint32 { + return (**uint32)(p.p) } - -// Word32 returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32(p structPointer, f field) word32 { - return word32((**uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +func (p pointer) toUint32Slice() *[]uint32 { + return (*[]uint32)(p.p) } - -// A word32Val is the address of a 32-bit value field. -type word32Val *uint32 - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - *p = x +func (p pointer) toBool() *bool { + return (*bool)(p.p) } - -// Get gets the value pointed at by p. -func word32Val_Get(p word32Val) uint32 { - return *p +func (p pointer) toBoolPtr() **bool { + return (**bool)(p.p) } - -// Word32Val returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val((*uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +func (p pointer) toBoolSlice() *[]bool { + return (*[]bool)(p.p) } - -// A word32Slice is a slice of 32-bit values. -type word32Slice []uint32 - -func (v *word32Slice) Append(x uint32) { *v = append(*v, x) } -func (v *word32Slice) Len() int { return len(*v) } -func (v *word32Slice) Index(i int) uint32 { return (*v)[i] } - -// Word32Slice returns the address of a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) *word32Slice { - return (*word32Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func (p pointer) toFloat64() *float64 { + return (*float64)(p.p) } - -// word64 is like word32 but for 64-bit values. 
-type word64 **uint64 - -func word64_Set(p word64, o *Buffer, x uint64) { - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) - } - o.uint64s[0] = x - *p = &o.uint64s[0] - o.uint64s = o.uint64s[1:] +func (p pointer) toFloat64Ptr() **float64 { + return (**float64)(p.p) } - -func word64_IsNil(p word64) bool { - return *p == nil +func (p pointer) toFloat64Slice() *[]float64 { + return (*[]float64)(p.p) } - -func word64_Get(p word64) uint64 { - return **p +func (p pointer) toFloat32() *float32 { + return (*float32)(p.p) +} +func (p pointer) toFloat32Ptr() **float32 { + return (**float32)(p.p) +} +func (p pointer) toFloat32Slice() *[]float32 { + return (*[]float32)(p.p) +} +func (p pointer) toString() *string { + return (*string)(p.p) +} +func (p pointer) toStringPtr() **string { + return (**string)(p.p) +} +func (p pointer) toStringSlice() *[]string { + return (*[]string)(p.p) +} +func (p pointer) toBytes() *[]byte { + return (*[]byte)(p.p) +} +func (p pointer) toBytesSlice() *[][]byte { + return (*[][]byte)(p.p) +} +func (p pointer) toExtensions() *XXX_InternalExtensions { + return (*XXX_InternalExtensions)(p.p) +} +func (p pointer) toOldExtensions() *map[int32]Extension { + return (*map[int32]Extension)(p.p) } -func structPointer_Word64(p structPointer, f field) word64 { - return word64((**uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +// getPointerSlice loads []*T from p as a []pointer. +// The value returned is aliased with the original slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) getPointerSlice() []pointer { + // Super-tricky - p should point to a []*T where T is a + // message type. We load it as []pointer. + return *(*[]pointer)(p.p) } -// word64Val is like word32Val but for 64-bit values. -type word64Val *uint64 +// setPointerSlice stores []pointer into p as a []*T. +// The value set is aliased with the input slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) setPointerSlice(v []pointer) { + // Super-tricky - p should point to a []*T where T is a + // message type. We store it as []pointer. + *(*[]pointer)(p.p) = v +} -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - *p = x +// getPointer loads the pointer at p and returns it. +func (p pointer) getPointer() pointer { + return pointer{p: *(*unsafe.Pointer)(p.p)} } -func word64Val_Get(p word64Val) uint64 { - return *p +// setPointer stores the pointer q at p. +func (p pointer) setPointer(q pointer) { + *(*unsafe.Pointer)(p.p) = q.p } -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val((*uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +// append q to the slice pointed to by p. +func (p pointer) appendPointer(q pointer) { + s := (*[]unsafe.Pointer)(p.p) + *s = append(*s, q.p) } -// word64Slice is like word32Slice but for 64-bit values. -type word64Slice []uint64 +// getInterfacePointer returns a pointer that points to the +// interface data of the interface pointed by p. +func (p pointer) getInterfacePointer() pointer { + // Super-tricky - read pointer out of data word of interface value. + return pointer{p: (*(*[2]unsafe.Pointer)(p.p))[1]} +} -func (v *word64Slice) Append(x uint64) { *v = append(*v, x) } -func (v *word64Slice) Len() int { return len(*v) } -func (v *word64Slice) Index(i int) uint64 { return (*v)[i] } +// asPointerTo returns a reflect.Value that is a pointer to an +// object of type t stored at p. 
+func (p pointer) asPointerTo(t reflect.Type) reflect.Value { + return reflect.NewAt(t, p.p) +} -func structPointer_Word64Slice(p structPointer, f field) *word64Slice { - return (*word64Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { + return (*unmarshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { + return (*marshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { + return (*mergeInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) +} +func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { + return (*discardInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) +} +func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) } diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go index ec2289c..79668ff 100644 --- a/vendor/github.com/golang/protobuf/proto/properties.go +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -58,42 +58,6 @@ const ( WireFixed32 = 5 ) -const startSize = 10 // initial slice/string sizes - -// Encoders are defined in encode.go -// An encoder outputs the full representation of a field, including its -// tag and encoder type. -type encoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueEncoder encodes a single integer in a particular encoding. -type valueEncoder func(o *Buffer, x uint64) error - -// Sizers are defined in encode.go -// A sizer returns the encoded size of a field, including its tag and encoder -// type. -type sizer func(prop *Properties, base structPointer) int - -// A valueSizer returns the encoded size of a single integer in a particular -// encoding. -type valueSizer func(x uint64) int - -// Decoders are defined in decode.go -// A decoder creates a value from its wire representation. -// Unrecognized subelements are saved in unrec. -type decoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueDecoder decodes a single integer in a particular encoding. -type valueDecoder func(o *Buffer) (x uint64, err error) - -// A oneofMarshaler does the marshaling for all oneof fields in a message. -type oneofMarshaler func(Message, *Buffer) error - -// A oneofUnmarshaler does the unmarshaling for a oneof field in a message. -type oneofUnmarshaler func(Message, int, int, *Buffer) (bool, error) - -// A oneofSizer does the sizing for all oneof fields in a message. -type oneofSizer func(Message) int - // tagMap is an optimization over map[int]int for typical protocol buffer // use-cases. Encoded protocol buffers are often in tag order with small tag // numbers. 
@@ -140,13 +104,6 @@ type StructProperties struct { decoderTags tagMap // map from proto tag to struct field number decoderOrigNames map[string]int // map from original name to struct field number order []int // list of struct field numbers in tag order - unrecField field // field id of the XXX_unrecognized []byte field - extendable bool // is this an extendable proto - - oneofMarshaler oneofMarshaler - oneofUnmarshaler oneofUnmarshaler - oneofSizer oneofSizer - stype reflect.Type // OneofTypes contains information about the oneof fields in this message. // It is keyed by the original name of a field. @@ -182,41 +139,24 @@ type Properties struct { Repeated bool Packed bool // relevant for repeated primitives only Enum string // set for enum types only - proto3 bool // whether this is known to be a proto3 field; set for []byte only + proto3 bool // whether this is known to be a proto3 field oneof bool // whether this is a oneof field Default string // default value HasDefault bool // whether an explicit default was provided - def_uint64 uint64 - - enc encoder - valEnc valueEncoder // set for bool and numeric types only - field field - tagcode []byte // encoding of EncodeVarint((Tag<<3)|WireType) - tagbuf [8]byte - stype reflect.Type // set for struct types only - sprop *StructProperties // set for struct types only - isMarshaler bool - isUnmarshaler bool - - mtype reflect.Type // set for map types only - mkeyprop *Properties // set for map types only - mvalprop *Properties // set for map types only - - size sizer - valSize valueSizer // set for bool and numeric types only - - dec decoder - valDec valueDecoder // set for bool and numeric types only - - // If this is a packable field, this will be the decoder for the packed version of the field. - packedDec decoder + + stype reflect.Type // set for struct types only + sprop *StructProperties // set for struct types only + + mtype reflect.Type // set for map types only + MapKeyProp *Properties // set for map types only + MapValProp *Properties // set for map types only } // String formats the properties in the protobuf struct field tag style. func (p *Properties) String() string { s := p.Wire - s = "," + s += "," s += strconv.Itoa(p.Tag) if p.Required { s += ",req" @@ -262,29 +202,14 @@ func (p *Properties) Parse(s string) { switch p.Wire { case "varint": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeVarint - p.valDec = (*Buffer).DecodeVarint - p.valSize = sizeVarint case "fixed32": p.WireType = WireFixed32 - p.valEnc = (*Buffer).EncodeFixed32 - p.valDec = (*Buffer).DecodeFixed32 - p.valSize = sizeFixed32 case "fixed64": p.WireType = WireFixed64 - p.valEnc = (*Buffer).EncodeFixed64 - p.valDec = (*Buffer).DecodeFixed64 - p.valSize = sizeFixed64 case "zigzag32": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag32 - p.valDec = (*Buffer).DecodeZigzag32 - p.valSize = sizeZigzag32 case "zigzag64": p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag64 - p.valDec = (*Buffer).DecodeZigzag64 - p.valSize = sizeZigzag64 case "bytes", "group": p.WireType = WireBytes // no numeric converter for non-numeric types @@ -299,6 +224,7 @@ func (p *Properties) Parse(s string) { return } +outer: for i := 2; i < len(fields); i++ { f := fields[i] switch { @@ -326,255 +252,40 @@ func (p *Properties) Parse(s string) { if i+1 < len(fields) { // Commas aren't escaped, and def is always last. 
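Aside (not part of the patch): Properties.Parse above consumes the `protobuf:"..."` struct tags emitted by protoc-gen-go; the exported entry point for inspecting the parsed result is proto.GetProperties. A small sketch, again assuming the vendored test_proto package:

package main

import (
	"fmt"
	"reflect"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/test_proto"
)

func main() {
	// GetProperties parses every `protobuf:"..."` tag on the struct once
	// and caches the result.
	sprops := proto.GetProperties(reflect.TypeOf(pb.MyMessage{}))
	for _, p := range sprops.Prop {
		if p.Tag == 0 {
			continue // XXX_ bookkeeping fields carry no protobuf tag
		}
		fmt.Printf("field %-12s tag=%-3d wire=%s\n", p.OrigName, p.Tag, p.Wire)
	}
}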
p.Default += "," + strings.Join(fields[i+1:], ",") - break + break outer } } } } -func logNoSliceEnc(t1, t2 reflect.Type) { - fmt.Fprintf(os.Stderr, "proto: no slice oenc for %T = []%T\n", t1, t2) -} - var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() -// Initialize the fields for encoding and decoding. -func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { - p.enc = nil - p.dec = nil - p.size = nil - +// setFieldProps initializes the field properties for submessages and maps. +func (p *Properties) setFieldProps(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { switch t1 := typ; t1.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1) - - // proto3 scalar types - - case reflect.Bool: - p.enc = (*Buffer).enc_proto3_bool - p.dec = (*Buffer).dec_proto3_bool - p.size = size_proto3_bool - case reflect.Int32: - p.enc = (*Buffer).enc_proto3_int32 - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_proto3_uint32 - p.dec = (*Buffer).dec_proto3_int32 // can reuse - p.size = size_proto3_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_proto3_int64 - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_proto3_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_proto3_int64 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.String: - p.enc = (*Buffer).enc_proto3_string - p.dec = (*Buffer).dec_proto3_string - p.size = size_proto3_string - case reflect.Ptr: - switch t2 := t1.Elem(); t2.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no encoder function for %v -> %v\n", t1, t2) - break - case reflect.Bool: - p.enc = (*Buffer).enc_bool - p.dec = (*Buffer).dec_bool - p.size = size_bool - case reflect.Int32: - p.enc = (*Buffer).enc_int32 - p.dec = (*Buffer).dec_int32 - p.size = size_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_uint32 - p.dec = (*Buffer).dec_int32 // can reuse - p.size = size_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_int64 - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_int32 - p.size = size_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_int64 // can just treat them as bits - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.String: - p.enc = (*Buffer).enc_string - p.dec = (*Buffer).dec_string - p.size = size_string - case reflect.Struct: + if t1.Elem().Kind() == reflect.Struct { p.stype = t1.Elem() - p.isMarshaler = isMarshaler(t1) - p.isUnmarshaler = isUnmarshaler(t1) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_struct_message - p.dec = (*Buffer).dec_struct_message - p.size = size_struct_message - } else { - p.enc = (*Buffer).enc_struct_group - p.dec = (*Buffer).dec_struct_group - p.size = size_struct_group - } } case reflect.Slice: - switch t2 := t1.Elem(); t2.Kind() { - default: - logNoSliceEnc(t1, t2) - break - case reflect.Bool: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_bool - p.size = size_slice_packed_bool - } else { - p.enc = (*Buffer).enc_slice_bool - p.size = size_slice_bool - } - p.dec = (*Buffer).dec_slice_bool - p.packedDec = (*Buffer).dec_slice_packed_bool - case reflect.Int32: - if p.Packed { - p.enc = 
(*Buffer).enc_slice_packed_int32 - p.size = size_slice_packed_int32 - } else { - p.enc = (*Buffer).enc_slice_int32 - p.size = size_slice_int32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case reflect.Uint32: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case reflect.Int64, reflect.Uint64: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - case reflect.Uint8: - p.dec = (*Buffer).dec_slice_byte - if p.proto3 { - p.enc = (*Buffer).enc_proto3_slice_byte - p.size = size_proto3_slice_byte - } else { - p.enc = (*Buffer).enc_slice_byte - p.size = size_slice_byte - } - case reflect.Float32, reflect.Float64: - switch t2.Bits() { - case 32: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case 64: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - default: - logNoSliceEnc(t1, t2) - break - } - case reflect.String: - p.enc = (*Buffer).enc_slice_string - p.dec = (*Buffer).dec_slice_string - p.size = size_slice_string - case reflect.Ptr: - switch t3 := t2.Elem(); t3.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no ptr oenc for %T -> %T -> %T\n", t1, t2, t3) - break - case reflect.Struct: - p.stype = t2.Elem() - p.isMarshaler = isMarshaler(t2) - p.isUnmarshaler = isUnmarshaler(t2) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_slice_struct_message - p.dec = (*Buffer).dec_slice_struct_message - p.size = size_slice_struct_message - } else { - p.enc = (*Buffer).enc_slice_struct_group - p.dec = (*Buffer).dec_slice_struct_group - p.size = size_slice_struct_group - } - } - case reflect.Slice: - switch t2.Elem().Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no slice elem oenc for %T -> %T -> %T\n", t1, t2, t2.Elem()) - break - case reflect.Uint8: - p.enc = (*Buffer).enc_slice_slice_byte - p.dec = (*Buffer).dec_slice_slice_byte - p.size = size_slice_slice_byte - } + if t2 := t1.Elem(); t2.Kind() == reflect.Ptr && t2.Elem().Kind() == reflect.Struct { + p.stype = t2.Elem() } case reflect.Map: - p.enc = (*Buffer).enc_new_map - p.dec = (*Buffer).dec_new_map - p.size = size_new_map - p.mtype = t1 - p.mkeyprop = &Properties{} - p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) - p.mvalprop = &Properties{} + p.MapKeyProp = &Properties{} + p.MapKeyProp.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) + p.MapValProp = &Properties{} vtype := p.mtype.Elem() if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice { // The value type is not a message (*T) or bytes ([]byte), // so we need encoders for the pointer to this type. 
vtype = reflect.PtrTo(vtype) } - p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) - } - - // precalculate tag code - wire := p.WireType - if p.Packed { - wire = WireBytes + p.MapValProp.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) } - x := uint32(p.Tag)<<3 | uint32(wire) - i := 0 - for i = 0; x > 127; i++ { - p.tagbuf[i] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - p.tagbuf[i] = uint8(x) - p.tagcode = p.tagbuf[0 : i+1] if p.stype != nil { if lockGetProp { @@ -586,32 +297,9 @@ func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lock } var ( - marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() - unmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem() + marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() ) -// isMarshaler reports whether type t implements Marshaler. -func isMarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isMarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isMarshaler") - } - return t.Implements(marshalerType) -} - -// isUnmarshaler reports whether type t implements Unmarshaler. -func isUnmarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isUnmarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isUnmarshaler") - } - return t.Implements(unmarshalerType) -} - // Init populates the properties from a protocol buffer struct tag. func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { p.init(typ, name, tag, f, true) @@ -621,14 +309,11 @@ func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructF // "bytes,49,opt,def=hello!" p.Name = name p.OrigName = name - if f != nil { - p.field = toField(f) - } if tag == "" { return } p.Parse(tag) - p.setEncAndDec(typ, f, lockGetProp) + p.setFieldProps(typ, f, lockGetProp) } var ( @@ -649,9 +334,6 @@ func GetProperties(t reflect.Type) *StructProperties { sprop, ok := propertiesMap[t] propertiesMu.RUnlock() if ok { - if collectStats { - stats.Chit++ - } return sprop } @@ -661,26 +343,26 @@ func GetProperties(t reflect.Type) *StructProperties { return sprop } +type ( + oneofFuncsIface interface { + XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) + } + oneofWrappersIface interface { + XXX_OneofWrappers() []interface{} + } +) + // getPropertiesLocked requires that propertiesMu is held. func getPropertiesLocked(t reflect.Type) *StructProperties { if prop, ok := propertiesMap[t]; ok { - if collectStats { - stats.Chit++ - } return prop } - if collectStats { - stats.Cmiss++ - } prop := new(StructProperties) // in case of recursive protos, fill this in now. 
propertiesMap[t] = prop // build properties - prop.extendable = reflect.PtrTo(t).Implements(extendableProtoType) || - reflect.PtrTo(t).Implements(extendableProtoV1Type) - prop.unrecField = invalidField prop.Prop = make([]*Properties, t.NumField()) prop.order = make([]int, t.NumField()) @@ -690,17 +372,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { name := f.Name p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) - if f.Name == "XXX_InternalExtensions" { // special case - p.enc = (*Buffer).enc_exts - p.dec = nil // not needed - p.size = size_exts - } else if f.Name == "XXX_extensions" { // special case - p.enc = (*Buffer).enc_map - p.dec = nil // not needed - p.size = size_map - } else if f.Name == "XXX_unrecognized" { // special case - prop.unrecField = toField(&f) - } oneof := f.Tag.Get("protobuf_oneof") // special case if oneof != "" { // Oneof fields don't use the traditional protobuf tag. @@ -715,22 +386,19 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { } print("\n") } - if p.enc == nil && !strings.HasPrefix(f.Name, "XXX_") && oneof == "" { - fmt.Fprintln(os.Stderr, "proto: no encoder for", f.Name, f.Type.String(), "[GetProperties]") - } } // Re-order prop.order. sort.Sort(prop) - type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) + var oots []interface{} + switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) { + case oneofFuncsIface: + _, _, _, oots = m.XXX_OneofFuncs() + case oneofWrappersIface: + oots = m.XXX_OneofWrappers() } - if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { - var oots []interface{} - prop.oneofMarshaler, prop.oneofUnmarshaler, prop.oneofSizer, oots = om.XXX_OneofFuncs() - prop.stype = t - + if len(oots) > 0 { // Interpret oneof metadata. prop.OneofTypes = make(map[string]*OneofProperties) for _, oot := range oots { @@ -779,30 +447,6 @@ func getPropertiesLocked(t reflect.Type) *StructProperties { return prop } -// Return the Properties object for the x[0]'th field of the structure. -func propByIndex(t reflect.Type, x []int) *Properties { - if len(x) != 1 { - fmt.Fprintf(os.Stderr, "proto: field index dimension %d (not 1) for type %s\n", len(x), t) - return nil - } - prop := GetProperties(t) - return prop.Prop[x[0]] -} - -// Get the address and type of a pointer to a struct from an interface. -func getbase(pb Message) (t reflect.Type, b structPointer, err error) { - if pb == nil { - err = ErrNil - return - } - // get the reflect type of the pointer to the struct. - t = reflect.TypeOf(pb) - // get the address of the struct. - value := reflect.ValueOf(pb) - b = toStructPointer(value) - return -} - // A global registry of enum types. // The generated code will register the generated maps by calling RegisterEnum. @@ -826,20 +470,42 @@ func EnumValueMap(enumType string) map[string]int32 { // A registry of all linked message types. // The string is a fully-qualified proto name ("pkg.Message"). var ( - protoTypes = make(map[string]reflect.Type) - revProtoTypes = make(map[reflect.Type]string) + protoTypedNils = make(map[string]Message) // a map from proto names to typed nil pointers + protoMapTypes = make(map[string]reflect.Type) // a map from proto names to map types + revProtoTypes = make(map[reflect.Type]string) ) // RegisterType is called from generated code and maps from the fully qualified // proto name to the type (pointer to struct) of the protocol buffer. 
func RegisterType(x Message, name string) { - if _, ok := protoTypes[name]; ok { + if _, ok := protoTypedNils[name]; ok { // TODO: Some day, make this a panic. log.Printf("proto: duplicate proto type registered: %s", name) return } t := reflect.TypeOf(x) - protoTypes[name] = t + if v := reflect.ValueOf(x); v.Kind() == reflect.Ptr && v.Pointer() == 0 { + // Generated code always calls RegisterType with nil x. + // This check is just for extra safety. + protoTypedNils[name] = x + } else { + protoTypedNils[name] = reflect.Zero(t).Interface().(Message) + } + revProtoTypes[t] = name +} + +// RegisterMapType is called from generated code and maps from the fully qualified +// proto name to the native map type of the proto map definition. +func RegisterMapType(x interface{}, name string) { + if reflect.TypeOf(x).Kind() != reflect.Map { + panic(fmt.Sprintf("RegisterMapType(%T, %q); want map", x, name)) + } + if _, ok := protoMapTypes[name]; ok { + log.Printf("proto: duplicate proto type registered: %s", name) + return + } + t := reflect.TypeOf(x) + protoMapTypes[name] = t revProtoTypes[t] = name } @@ -855,7 +521,14 @@ func MessageName(x Message) string { } // MessageType returns the message type (pointer to struct) for a named message. -func MessageType(name string) reflect.Type { return protoTypes[name] } +// The type is not guaranteed to implement proto.Message if the name refers to a +// map entry. +func MessageType(name string) reflect.Type { + if t, ok := protoTypedNils[name]; ok { + return reflect.TypeOf(t) + } + return protoMapTypes[name] +} // A registry of all linked proto files. var ( diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go index cc4d048..9a754c8 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go @@ -1,27 +1,15 @@ -// Code generated by protoc-gen-go. +// Code generated by protoc-gen-go. DO NOT EDIT. // source: proto3_proto/proto3.proto -// DO NOT EDIT! -/* -Package proto3_proto is a generated protocol buffer package. - -It is generated from these files: - proto3_proto/proto3.proto - -It has these top-level messages: - Message - Nested - MessageWithMap - IntMap - IntMaps -*/ package proto3_proto -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import google_protobuf "github.com/golang/protobuf/ptypes/any" -import testdata "github.com/golang/protobuf/proto/testdata" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + test_proto "github.com/golang/protobuf/proto/test_proto" + any "github.com/golang/protobuf/ptypes/any" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -32,7 +20,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type Message_Humour int32 @@ -49,6 +37,7 @@ var Message_Humour_name = map[int32]string{ 2: "SLAPSTICK", 3: "BILL_BAILEY", } + var Message_Humour_value = map[string]int32{ "UNKNOWN": 0, "PUNS": 1, @@ -59,33 +48,60 @@ var Message_Humour_value = map[string]int32{ func (x Message_Humour) String() string { return proto.EnumName(Message_Humour_name, int32(x)) } -func (Message_Humour) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +func (Message_Humour) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{0, 0} +} type Message struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"` - HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm" json:"height_in_cm,omitempty"` - Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` - ResultCount int64 `protobuf:"varint,7,opt,name=result_count,json=resultCount" json:"result_count,omitempty"` - TrueScotsman bool `protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman" json:"true_scotsman,omitempty"` - Score float32 `protobuf:"fixed32,9,opt,name=score" json:"score,omitempty"` - Key []uint64 `protobuf:"varint,5,rep,packed,name=key" json:"key,omitempty"` - ShortKey []int32 `protobuf:"varint,19,rep,packed,name=short_key,json=shortKey" json:"short_key,omitempty"` - Nested *Nested `protobuf:"bytes,6,opt,name=nested" json:"nested,omitempty"` - RFunny []Message_Humour `protobuf:"varint,16,rep,packed,name=r_funny,json=rFunny,enum=proto3_proto.Message_Humour" json:"r_funny,omitempty"` - Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Proto2Field *testdata.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field" json:"proto2_field,omitempty"` - Proto2Value map[string]*testdata.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Anything *google_protobuf.Any `protobuf:"bytes,14,opt,name=anything" json:"anything,omitempty"` - ManyThings []*google_protobuf.Any `protobuf:"bytes,15,rep,name=many_things,json=manyThings" json:"many_things,omitempty"` - Submessage *Message `protobuf:"bytes,17,opt,name=submessage" json:"submessage,omitempty"` - Children []*Message `protobuf:"bytes,18,rep,name=children" json:"children,omitempty"` -} - -func (m *Message) Reset() { *m = Message{} } -func (m *Message) String() string { return proto.CompactTextString(m) } -func (*Message) ProtoMessage() {} -func (*Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,proto3,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"` + HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm,proto3" json:"height_in_cm,omitempty"` + Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` + ResultCount int64 `protobuf:"varint,7,opt,name=result_count,json=resultCount,proto3" json:"result_count,omitempty"` + TrueScotsman bool 
`protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman,proto3" json:"true_scotsman,omitempty"` + Score float32 `protobuf:"fixed32,9,opt,name=score,proto3" json:"score,omitempty"` + Key []uint64 `protobuf:"varint,5,rep,packed,name=key,proto3" json:"key,omitempty"` + ShortKey []int32 `protobuf:"varint,19,rep,packed,name=short_key,json=shortKey,proto3" json:"short_key,omitempty"` + Nested *Nested `protobuf:"bytes,6,opt,name=nested,proto3" json:"nested,omitempty"` + RFunny []Message_Humour `protobuf:"varint,16,rep,packed,name=r_funny,json=rFunny,proto3,enum=proto3_proto.Message_Humour" json:"r_funny,omitempty"` + Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain,proto3" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Proto2Field *test_proto.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field,proto3" json:"proto2_field,omitempty"` + Proto2Value map[string]*test_proto.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value,proto3" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Anything *any.Any `protobuf:"bytes,14,opt,name=anything,proto3" json:"anything,omitempty"` + ManyThings []*any.Any `protobuf:"bytes,15,rep,name=many_things,json=manyThings,proto3" json:"many_things,omitempty"` + Submessage *Message `protobuf:"bytes,17,opt,name=submessage,proto3" json:"submessage,omitempty"` + Children []*Message `protobuf:"bytes,18,rep,name=children,proto3" json:"children,omitempty"` + StringMap map[string]string `protobuf:"bytes,20,rep,name=string_map,json=stringMap,proto3" json:"string_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Message) Reset() { *m = Message{} } +func (m *Message) String() string { return proto.CompactTextString(m) } +func (*Message) ProtoMessage() {} +func (*Message) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{0} +} + +func (m *Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Message.Unmarshal(m, b) +} +func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Message.Marshal(b, m, deterministic) +} +func (m *Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Message.Merge(m, src) +} +func (m *Message) XXX_Size() int { + return xxx_messageInfo_Message.Size(m) +} +func (m *Message) XXX_DiscardUnknown() { + xxx_messageInfo_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Message proto.InternalMessageInfo func (m *Message) GetName() string { if m != nil { @@ -171,28 +187,28 @@ func (m *Message) GetTerrain() map[string]*Nested { return nil } -func (m *Message) GetProto2Field() *testdata.SubDefaults { +func (m *Message) GetProto2Field() *test_proto.SubDefaults { if m != nil { return m.Proto2Field } return nil } -func (m *Message) GetProto2Value() map[string]*testdata.SubDefaults { +func (m *Message) GetProto2Value() map[string]*test_proto.SubDefaults { if m != nil { return m.Proto2Value } return nil } -func (m *Message) GetAnything() *google_protobuf.Any { +func (m *Message) GetAnything() *any.Any { if m != nil { return m.Anything } return nil } -func (m *Message) GetManyThings() []*google_protobuf.Any { +func (m *Message) GetManyThings() []*any.Any { if m != nil { return m.ManyThings } @@ -213,15 +229,45 @@ 
func (m *Message) GetChildren() []*Message { return nil } +func (m *Message) GetStringMap() map[string]string { + if m != nil { + return m.StringMap + } + return nil +} + type Nested struct { - Bunny string `protobuf:"bytes,1,opt,name=bunny" json:"bunny,omitempty"` - Cute bool `protobuf:"varint,2,opt,name=cute" json:"cute,omitempty"` + Bunny string `protobuf:"bytes,1,opt,name=bunny,proto3" json:"bunny,omitempty"` + Cute bool `protobuf:"varint,2,opt,name=cute,proto3" json:"cute,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Nested) Reset() { *m = Nested{} } +func (m *Nested) String() string { return proto.CompactTextString(m) } +func (*Nested) ProtoMessage() {} +func (*Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{1} +} + +func (m *Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Nested.Unmarshal(m, b) +} +func (m *Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Nested.Marshal(b, m, deterministic) +} +func (m *Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_Nested.Merge(m, src) +} +func (m *Nested) XXX_Size() int { + return xxx_messageInfo_Nested.Size(m) +} +func (m *Nested) XXX_DiscardUnknown() { + xxx_messageInfo_Nested.DiscardUnknown(m) } -func (m *Nested) Reset() { *m = Nested{} } -func (m *Nested) String() string { return proto.CompactTextString(m) } -func (*Nested) ProtoMessage() {} -func (*Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +var xxx_messageInfo_Nested proto.InternalMessageInfo func (m *Nested) GetBunny() string { if m != nil { @@ -238,13 +284,36 @@ func (m *Nested) GetCute() bool { } type MessageWithMap struct { - ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"` + ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping,json=byteMapping,proto3" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{2} +} + +func (m *MessageWithMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageWithMap.Unmarshal(m, b) +} +func (m *MessageWithMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageWithMap.Marshal(b, m, deterministic) +} +func (m *MessageWithMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageWithMap.Merge(m, src) +} +func (m *MessageWithMap) XXX_Size() int { + return xxx_messageInfo_MessageWithMap.Size(m) +} +func (m *MessageWithMap) XXX_DiscardUnknown() { + xxx_messageInfo_MessageWithMap.DiscardUnknown(m) } -func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } -func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } -func (*MessageWithMap) ProtoMessage() {} -func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +var xxx_messageInfo_MessageWithMap proto.InternalMessageInfo func (m *MessageWithMap) 
GetByteMapping() map[bool][]byte { if m != nil { @@ -254,13 +323,36 @@ func (m *MessageWithMap) GetByteMapping() map[bool][]byte { } type IntMap struct { - Rtt map[int32]int32 `protobuf:"bytes,1,rep,name=rtt" json:"rtt,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Rtt map[int32]int32 `protobuf:"bytes,1,rep,name=rtt,proto3" json:"rtt,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IntMap) Reset() { *m = IntMap{} } +func (m *IntMap) String() string { return proto.CompactTextString(m) } +func (*IntMap) ProtoMessage() {} +func (*IntMap) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{3} +} + +func (m *IntMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntMap.Unmarshal(m, b) +} +func (m *IntMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntMap.Marshal(b, m, deterministic) +} +func (m *IntMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntMap.Merge(m, src) +} +func (m *IntMap) XXX_Size() int { + return xxx_messageInfo_IntMap.Size(m) +} +func (m *IntMap) XXX_DiscardUnknown() { + xxx_messageInfo_IntMap.DiscardUnknown(m) } -func (m *IntMap) Reset() { *m = IntMap{} } -func (m *IntMap) String() string { return proto.CompactTextString(m) } -func (*IntMap) ProtoMessage() {} -func (*IntMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +var xxx_messageInfo_IntMap proto.InternalMessageInfo func (m *IntMap) GetRtt() map[int32]int32 { if m != nil { @@ -270,13 +362,36 @@ func (m *IntMap) GetRtt() map[int32]int32 { } type IntMaps struct { - Maps []*IntMap `protobuf:"bytes,1,rep,name=maps" json:"maps,omitempty"` + Maps []*IntMap `protobuf:"bytes,1,rep,name=maps,proto3" json:"maps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IntMaps) Reset() { *m = IntMaps{} } +func (m *IntMaps) String() string { return proto.CompactTextString(m) } +func (*IntMaps) ProtoMessage() {} +func (*IntMaps) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{4} +} + +func (m *IntMaps) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntMaps.Unmarshal(m, b) +} +func (m *IntMaps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntMaps.Marshal(b, m, deterministic) +} +func (m *IntMaps) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntMaps.Merge(m, src) +} +func (m *IntMaps) XXX_Size() int { + return xxx_messageInfo_IntMaps.Size(m) +} +func (m *IntMaps) XXX_DiscardUnknown() { + xxx_messageInfo_IntMaps.DiscardUnknown(m) } -func (m *IntMaps) Reset() { *m = IntMaps{} } -func (m *IntMaps) String() string { return proto.CompactTextString(m) } -func (*IntMaps) ProtoMessage() {} -func (*IntMaps) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +var xxx_messageInfo_IntMaps proto.InternalMessageInfo func (m *IntMaps) GetMaps() []*IntMap { if m != nil { @@ -285,63 +400,178 @@ func (m *IntMaps) GetMaps() []*IntMap { return nil } +type TestUTF8 struct { + Scalar string `protobuf:"bytes,1,opt,name=scalar,proto3" json:"scalar,omitempty"` + Vector []string `protobuf:"bytes,2,rep,name=vector,proto3" json:"vector,omitempty"` + // Types that are valid to be assigned to Oneof: + // *TestUTF8_Field + Oneof isTestUTF8_Oneof 
`protobuf_oneof:"oneof"` + MapKey map[string]int64 `protobuf:"bytes,4,rep,name=map_key,json=mapKey,proto3" json:"map_key,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + MapValue map[int64]string `protobuf:"bytes,5,rep,name=map_value,json=mapValue,proto3" json:"map_value,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestUTF8) Reset() { *m = TestUTF8{} } +func (m *TestUTF8) String() string { return proto.CompactTextString(m) } +func (*TestUTF8) ProtoMessage() {} +func (*TestUTF8) Descriptor() ([]byte, []int) { + return fileDescriptor_1c50d9b824d4ac38, []int{5} +} + +func (m *TestUTF8) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestUTF8.Unmarshal(m, b) +} +func (m *TestUTF8) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestUTF8.Marshal(b, m, deterministic) +} +func (m *TestUTF8) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestUTF8.Merge(m, src) +} +func (m *TestUTF8) XXX_Size() int { + return xxx_messageInfo_TestUTF8.Size(m) +} +func (m *TestUTF8) XXX_DiscardUnknown() { + xxx_messageInfo_TestUTF8.DiscardUnknown(m) +} + +var xxx_messageInfo_TestUTF8 proto.InternalMessageInfo + +func (m *TestUTF8) GetScalar() string { + if m != nil { + return m.Scalar + } + return "" +} + +func (m *TestUTF8) GetVector() []string { + if m != nil { + return m.Vector + } + return nil +} + +type isTestUTF8_Oneof interface { + isTestUTF8_Oneof() +} + +type TestUTF8_Field struct { + Field string `protobuf:"bytes,3,opt,name=field,proto3,oneof"` +} + +func (*TestUTF8_Field) isTestUTF8_Oneof() {} + +func (m *TestUTF8) GetOneof() isTestUTF8_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *TestUTF8) GetField() string { + if x, ok := m.GetOneof().(*TestUTF8_Field); ok { + return x.Field + } + return "" +} + +func (m *TestUTF8) GetMapKey() map[string]int64 { + if m != nil { + return m.MapKey + } + return nil +} + +func (m *TestUTF8) GetMapValue() map[int64]string { + if m != nil { + return m.MapValue + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*TestUTF8) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*TestUTF8_Field)(nil), + } +} + func init() { + proto.RegisterEnum("proto3_proto.Message_Humour", Message_Humour_name, Message_Humour_value) proto.RegisterType((*Message)(nil), "proto3_proto.Message") + proto.RegisterMapType((map[string]*test_proto.SubDefaults)(nil), "proto3_proto.Message.Proto2ValueEntry") + proto.RegisterMapType((map[string]string)(nil), "proto3_proto.Message.StringMapEntry") + proto.RegisterMapType((map[string]*Nested)(nil), "proto3_proto.Message.TerrainEntry") proto.RegisterType((*Nested)(nil), "proto3_proto.Nested") proto.RegisterType((*MessageWithMap)(nil), "proto3_proto.MessageWithMap") + proto.RegisterMapType((map[bool][]byte)(nil), "proto3_proto.MessageWithMap.ByteMappingEntry") proto.RegisterType((*IntMap)(nil), "proto3_proto.IntMap") + proto.RegisterMapType((map[int32]int32)(nil), "proto3_proto.IntMap.RttEntry") proto.RegisterType((*IntMaps)(nil), "proto3_proto.IntMaps") - proto.RegisterEnum("proto3_proto.Message_Humour", Message_Humour_name, Message_Humour_value) -} - -func init() { proto.RegisterFile("proto3_proto/proto3.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 733 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x84, 0x53, 0x6d, 0x6f, 0xf3, 0x34, - 0x14, 0x25, 0x4d, 0x5f, 0xd2, 0x9b, 0x74, 0x0b, 0x5e, 0x91, 0xbc, 0x02, 0x52, 0x28, 0x12, 0x8a, - 0x78, 0x49, 0xa1, 0xd3, 0xd0, 0x84, 0x10, 0x68, 0x1b, 0x9b, 0xa8, 0xd6, 0x95, 0xca, 0xdd, 0x98, - 0xf8, 0x14, 0xa5, 0xad, 0xdb, 0x46, 0x34, 0x4e, 0x49, 0x1c, 0xa4, 0xfc, 0x1d, 0xfe, 0x28, 0x8f, - 0x6c, 0xa7, 0x5d, 0x36, 0x65, 0xcf, 0xf3, 0x29, 0xf6, 0xf1, 0xb9, 0xf7, 0x9c, 0x1c, 0x5f, 0xc3, - 0xe9, 0x2e, 0x89, 0x79, 0x7c, 0xe6, 0xcb, 0xcf, 0x40, 0x6d, 0x3c, 0xf9, 0x41, 0x56, 0xf9, 0xa8, - 0x77, 0xba, 0x8e, 0xe3, 0xf5, 0x96, 0x2a, 0xca, 0x3c, 0x5b, 0x0d, 0x02, 0x96, 0x2b, 0x62, 0xef, - 0x84, 0xd3, 0x94, 0x2f, 0x03, 0x1e, 0x0c, 0xc4, 0x42, 0x81, 0xfd, 0xff, 0x5b, 0xd0, 0xba, 0xa7, - 0x69, 0x1a, 0xac, 0x29, 0x42, 0x50, 0x67, 0x41, 0x44, 0xb1, 0xe6, 0x68, 0x6e, 0x9b, 0xc8, 0x35, - 0xba, 0x00, 0x63, 0x13, 0x6e, 0x83, 0x24, 0xe4, 0x39, 0xae, 0x39, 0x9a, 0x7b, 0x34, 0xfc, 0xcc, - 0x2b, 0x0b, 0x7a, 0x45, 0xb1, 0xf7, 0x7b, 0x16, 0xc5, 0x59, 0x42, 0x0e, 0x6c, 0xe4, 0x80, 0xb5, - 0xa1, 0xe1, 0x7a, 0xc3, 0xfd, 0x90, 0xf9, 0x8b, 0x08, 0xeb, 0x8e, 0xe6, 0x76, 0x08, 0x28, 0x6c, - 0xc4, 0xae, 0x23, 0xa1, 0x27, 0xec, 0xe0, 0xba, 0xa3, 0xb9, 0x16, 0x91, 0x6b, 0xf4, 0x05, 0x58, - 0x09, 0x4d, 0xb3, 0x2d, 0xf7, 0x17, 0x71, 0xc6, 0x38, 0x6e, 0x39, 0x9a, 0xab, 0x13, 0x53, 0x61, - 0xd7, 0x02, 0x42, 0x5f, 0x42, 0x87, 0x27, 0x19, 0xf5, 0xd3, 0x45, 0xcc, 0xd3, 0x28, 0x60, 0xd8, - 0x70, 0x34, 0xd7, 0x20, 0x96, 0x00, 0x67, 0x05, 0x86, 0xba, 0xd0, 0x48, 0x17, 0x71, 0x42, 0x71, - 0xdb, 0xd1, 0xdc, 0x1a, 0x51, 0x1b, 0x64, 0x83, 0xfe, 0x37, 0xcd, 0x71, 0xc3, 0xd1, 0xdd, 0x3a, - 0x11, 0x4b, 0xf4, 0x29, 0xb4, 0xd3, 0x4d, 0x9c, 0x70, 0x5f, 0xe0, 0x27, 0x8e, 0xee, 0x36, 0x88, - 0x21, 0x81, 0x3b, 0x9a, 0xa3, 0x6f, 0xa1, 0xc9, 0x68, 0xca, 0xe9, 0x12, 0x37, 0x1d, 0xcd, 0x35, - 0x87, 0xdd, 0x97, 0xbf, 0x3e, 0x91, 0x67, 0xa4, 0xe0, 0xa0, 0x73, 0x68, 0x25, 0xfe, 0x2a, 0x63, - 0x2c, 0xc7, 0xb6, 0xa3, 0x7f, 0x30, 0xa9, 0x66, 0x72, 0x2b, 0xb8, 0xe8, 0x67, 0x68, 0x71, 0x9a, - 0x24, 0x41, 0xc8, 0x30, 0x38, 0xba, 0x6b, 0x0e, 0xfb, 0xd5, 0x65, 0x0f, 0x8a, 0x74, 0xc3, 0x78, - 0x92, 0x93, 0x7d, 0x09, 0xba, 0x00, 0x75, 0xff, 0x43, 0x7f, 0x15, 0xd2, 0xed, 0x12, 0x9b, 0xd2, - 0xe8, 0x27, 0xde, 0xfe, 0xae, 0xbd, 0x59, 
0x36, 0xff, 0x8d, 0xae, 0x82, 0x6c, 0xcb, 0x53, 0x62, - 0x2a, 0xea, 0xad, 0x60, 0xa2, 0xd1, 0xa1, 0xf2, 0xdf, 0x60, 0x9b, 0x51, 0xdc, 0x91, 0xe2, 0x5f, - 0x55, 0x8b, 0x4f, 0x25, 0xf3, 0x4f, 0x41, 0x54, 0x06, 0x8a, 0x56, 0x12, 0x41, 0xdf, 0x83, 0x11, - 0xb0, 0x9c, 0x6f, 0x42, 0xb6, 0xc6, 0x47, 0x45, 0x52, 0x6a, 0x0e, 0xbd, 0xfd, 0x1c, 0x7a, 0x97, - 0x2c, 0x27, 0x07, 0x16, 0x3a, 0x07, 0x33, 0x0a, 0x58, 0xee, 0xcb, 0x5d, 0x8a, 0x8f, 0xa5, 0x76, - 0x75, 0x11, 0x08, 0xe2, 0x83, 0xe4, 0xa1, 0x73, 0x80, 0x34, 0x9b, 0x47, 0xca, 0x14, 0xfe, 0xb8, - 0xf8, 0xd7, 0x2a, 0xc7, 0xa4, 0x44, 0x44, 0x3f, 0x80, 0xb1, 0xd8, 0x84, 0xdb, 0x65, 0x42, 0x19, - 0x46, 0x52, 0xea, 0x8d, 0xa2, 0x03, 0xad, 0x37, 0x05, 0xab, 0x1c, 0xf8, 0x7e, 0x72, 0xd4, 0xd3, - 0x90, 0x93, 0xf3, 0x35, 0x34, 0x54, 0x70, 0xb5, 0xf7, 0xcc, 0x86, 0xa2, 0xfc, 0x54, 0xbb, 0xd0, - 0x7a, 0x8f, 0x60, 0xbf, 0x4e, 0xb1, 0xa2, 0xeb, 0x37, 0x2f, 0xbb, 0xbe, 0x71, 0x91, 0xcf, 0x6d, - 0xfb, 0xbf, 0x42, 0x53, 0x0d, 0x14, 0x32, 0xa1, 0xf5, 0x38, 0xb9, 0x9b, 0xfc, 0xf1, 0x34, 0xb1, - 0x3f, 0x42, 0x06, 0xd4, 0xa7, 0x8f, 0x93, 0x99, 0xad, 0xa1, 0x0e, 0xb4, 0x67, 0xe3, 0xcb, 0xe9, - 0xec, 0x61, 0x74, 0x7d, 0x67, 0xd7, 0xd0, 0x31, 0x98, 0x57, 0xa3, 0xf1, 0xd8, 0xbf, 0xba, 0x1c, - 0x8d, 0x6f, 0xfe, 0xb2, 0xf5, 0xfe, 0x10, 0x9a, 0xca, 0xac, 0x78, 0x33, 0x73, 0x39, 0xbe, 0xca, - 0x8f, 0xda, 0x88, 0x57, 0xba, 0xc8, 0xb8, 0x32, 0x64, 0x10, 0xb9, 0xee, 0xff, 0xa7, 0xc1, 0x51, - 0x91, 0xd9, 0x53, 0xc8, 0x37, 0xf7, 0xc1, 0x0e, 0x4d, 0xc1, 0x9a, 0xe7, 0x9c, 0xfa, 0x51, 0xb0, - 0xdb, 0x89, 0x39, 0xd0, 0x64, 0xce, 0xdf, 0x55, 0xe6, 0x5c, 0xd4, 0x78, 0x57, 0x39, 0xa7, 0xf7, - 0x8a, 0x5f, 0x4c, 0xd5, 0xfc, 0x19, 0xe9, 0xfd, 0x02, 0xf6, 0x6b, 0x42, 0x39, 0x30, 0x43, 0x05, - 0xd6, 0x2d, 0x07, 0x66, 0x95, 0x93, 0xf9, 0x07, 0x9a, 0x23, 0xc6, 0x85, 0xb7, 0x01, 0xe8, 0x09, - 0xe7, 0x85, 0xa5, 0xcf, 0x5f, 0x5a, 0x52, 0x14, 0x8f, 0x70, 0xae, 0x2c, 0x08, 0x66, 0xef, 0x47, - 0x30, 0xf6, 0x40, 0x59, 0xb2, 0x51, 0x21, 0xd9, 0x28, 0x4b, 0x9e, 0x41, 0x4b, 0xf5, 0x4b, 0x91, - 0x0b, 0xf5, 0x28, 0xd8, 0xa5, 0x85, 0x68, 0xb7, 0x4a, 0x94, 0x48, 0xc6, 0xbc, 0xa9, 0x8e, 0xde, - 0x05, 0x00, 0x00, 0xff, 0xff, 0x75, 0x38, 0xad, 0x84, 0xe4, 0x05, 0x00, 0x00, + proto.RegisterType((*TestUTF8)(nil), "proto3_proto.TestUTF8") + proto.RegisterMapType((map[string]int64)(nil), "proto3_proto.TestUTF8.MapKeyEntry") + proto.RegisterMapType((map[int64]string)(nil), "proto3_proto.TestUTF8.MapValueEntry") +} + +func init() { proto.RegisterFile("proto3_proto/proto3.proto", fileDescriptor_1c50d9b824d4ac38) } + +var fileDescriptor_1c50d9b824d4ac38 = []byte{ + // 896 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0x6f, 0x6f, 0xdb, 0xb6, + 0x13, 0xae, 0x2c, 0xff, 0x91, 0xcf, 0x76, 0xea, 0x1f, 0x7f, 0x6e, 0xc7, 0x7a, 0x1b, 0xa0, 0x79, + 0xc3, 0x20, 0x0c, 0xab, 0xb2, 0xb9, 0xc8, 0x90, 0xb5, 0xc5, 0x86, 0x24, 0x6b, 0x50, 0x23, 0xb1, + 0x67, 0xd0, 0xce, 0x82, 0xbd, 0x12, 0x68, 0x87, 0xb6, 0x85, 0x59, 0x94, 0x27, 0x52, 0x05, 0xf4, + 0x05, 0xf6, 0x41, 0xf6, 0x95, 0xf6, 0x85, 0x06, 0x92, 0x72, 0x2a, 0x17, 0xea, 0xf2, 0x4a, 0xbc, + 0x47, 0xcf, 0xdd, 0x73, 0xbc, 0x3b, 0x1e, 0x3c, 0xdb, 0x25, 0xb1, 0x8c, 0x5f, 0x04, 0xfa, 0x73, + 0x6c, 0x0c, 0x5f, 0x7f, 0x50, 0xbb, 0xf8, 0xab, 0xff, 0x6c, 0x1d, 0xc7, 0xeb, 0x2d, 0x33, 0x94, + 0x45, 0xba, 0x3a, 0xa6, 0x3c, 0x33, 0xc4, 0xfe, 0x13, 0xc9, 0x84, 0xcc, 0x23, 0xa8, 0xa3, 0x81, + 0x07, 0x7f, 0x35, 0xa1, 0x31, 0x66, 0x42, 0xd0, 0x35, 0x43, 0x08, 0xaa, 0x9c, 0x46, 0x0c, 0x5b, + 0xae, 0xe5, 0x35, 0x89, 0x3e, 0xa3, 
0x53, 0x70, 0x36, 0xe1, 0x96, 0x26, 0xa1, 0xcc, 0x70, 0xc5, + 0xb5, 0xbc, 0xa3, 0xe1, 0x67, 0x7e, 0x51, 0xd2, 0xcf, 0x9d, 0xfd, 0xb7, 0x69, 0x14, 0xa7, 0x09, + 0xb9, 0x67, 0x23, 0x17, 0xda, 0x1b, 0x16, 0xae, 0x37, 0x32, 0x08, 0x79, 0xb0, 0x8c, 0xb0, 0xed, + 0x5a, 0x5e, 0x87, 0x80, 0xc1, 0x46, 0xfc, 0x22, 0x52, 0x7a, 0x77, 0x54, 0x52, 0x5c, 0x75, 0x2d, + 0xaf, 0x4d, 0xf4, 0x19, 0x7d, 0x01, 0xed, 0x84, 0x89, 0x74, 0x2b, 0x83, 0x65, 0x9c, 0x72, 0x89, + 0x1b, 0xae, 0xe5, 0xd9, 0xa4, 0x65, 0xb0, 0x0b, 0x05, 0xa1, 0x2f, 0xa1, 0x23, 0x93, 0x94, 0x05, + 0x62, 0x19, 0x4b, 0x11, 0x51, 0x8e, 0x1d, 0xd7, 0xf2, 0x1c, 0xd2, 0x56, 0xe0, 0x2c, 0xc7, 0x50, + 0x0f, 0x6a, 0x62, 0x19, 0x27, 0x0c, 0x37, 0x5d, 0xcb, 0xab, 0x10, 0x63, 0xa0, 0x2e, 0xd8, 0x7f, + 0xb0, 0x0c, 0xd7, 0x5c, 0xdb, 0xab, 0x12, 0x75, 0x44, 0x9f, 0x42, 0x53, 0x6c, 0xe2, 0x44, 0x06, + 0x0a, 0xff, 0xbf, 0x6b, 0x7b, 0x35, 0xe2, 0x68, 0xe0, 0x8a, 0x65, 0xe8, 0x5b, 0xa8, 0x73, 0x26, + 0x24, 0xbb, 0xc3, 0x75, 0xd7, 0xf2, 0x5a, 0xc3, 0xde, 0xe1, 0xd5, 0x27, 0xfa, 0x1f, 0xc9, 0x39, + 0xe8, 0x04, 0x1a, 0x49, 0xb0, 0x4a, 0x39, 0xcf, 0x70, 0xd7, 0xb5, 0x1f, 0xac, 0x54, 0x3d, 0xb9, + 0x54, 0x5c, 0xf4, 0x1a, 0x1a, 0x92, 0x25, 0x09, 0x0d, 0x39, 0x06, 0xd7, 0xf6, 0x5a, 0xc3, 0x41, + 0xb9, 0xdb, 0xdc, 0x90, 0xde, 0x70, 0x99, 0x64, 0x64, 0xef, 0x82, 0x5e, 0x82, 0x99, 0x80, 0x61, + 0xb0, 0x0a, 0xd9, 0xf6, 0x0e, 0xb7, 0x74, 0xa2, 0x9f, 0xf8, 0xef, 0xbb, 0xed, 0xcf, 0xd2, 0xc5, + 0x2f, 0x6c, 0x45, 0xd3, 0xad, 0x14, 0xa4, 0x65, 0xc8, 0x97, 0x8a, 0x8b, 0x46, 0xf7, 0xbe, 0xef, + 0xe8, 0x36, 0x65, 0xb8, 0xa3, 0xe5, 0xbf, 0x2e, 0x97, 0x9f, 0x6a, 0xe6, 0x6f, 0x8a, 0x68, 0x52, + 0xc8, 0x43, 0x69, 0x04, 0x7d, 0x07, 0x0e, 0xe5, 0x99, 0xdc, 0x84, 0x7c, 0x8d, 0x8f, 0xf2, 0x5a, + 0x99, 0x59, 0xf4, 0xf7, 0xb3, 0xe8, 0x9f, 0xf1, 0x8c, 0xdc, 0xb3, 0xd0, 0x09, 0xb4, 0x22, 0xca, + 0xb3, 0x40, 0x5b, 0x02, 0x3f, 0xd6, 0xda, 0xe5, 0x4e, 0xa0, 0x88, 0x73, 0xcd, 0x43, 0x27, 0x00, + 0x22, 0x5d, 0x44, 0x26, 0x29, 0xfc, 0x3f, 0x2d, 0xf5, 0xa4, 0x34, 0x63, 0x52, 0x20, 0xa2, 0xef, + 0xc1, 0x59, 0x6e, 0xc2, 0xed, 0x5d, 0xc2, 0x38, 0x46, 0x5a, 0xea, 0x23, 0x4e, 0xf7, 0x34, 0x74, + 0x01, 0x20, 0x64, 0x12, 0xf2, 0x75, 0x10, 0xd1, 0x1d, 0xee, 0x69, 0xa7, 0xaf, 0xca, 0x6b, 0x33, + 0xd3, 0xbc, 0x31, 0xdd, 0x99, 0xca, 0x34, 0xc5, 0xde, 0xee, 0x4f, 0xa1, 0x5d, 0xec, 0xdb, 0x7e, + 0x00, 0xcd, 0x0b, 0xd3, 0x03, 0xf8, 0x0d, 0xd4, 0x4c, 0xf5, 0x2b, 0xff, 0x31, 0x62, 0x86, 0xf2, + 0xb2, 0x72, 0x6a, 0xf5, 0x6f, 0xa1, 0xfb, 0x61, 0x2b, 0x4a, 0xa2, 0x3e, 0x3f, 0x8c, 0xfa, 0xd1, + 0x79, 0x28, 0x04, 0x7e, 0x0d, 0x47, 0x87, 0xf7, 0x28, 0x09, 0xdb, 0x2b, 0x86, 0x6d, 0x16, 0xbc, + 0x07, 0x3f, 0x43, 0xdd, 0xcc, 0x35, 0x6a, 0x41, 0xe3, 0x66, 0x72, 0x35, 0xf9, 0xf5, 0x76, 0xd2, + 0x7d, 0x84, 0x1c, 0xa8, 0x4e, 0x6f, 0x26, 0xb3, 0xae, 0x85, 0x3a, 0xd0, 0x9c, 0x5d, 0x9f, 0x4d, + 0x67, 0xf3, 0xd1, 0xc5, 0x55, 0xb7, 0x82, 0x1e, 0x43, 0xeb, 0x7c, 0x74, 0x7d, 0x1d, 0x9c, 0x9f, + 0x8d, 0xae, 0xdf, 0xfc, 0xde, 0xb5, 0x07, 0x43, 0xa8, 0x9b, 0xcb, 0x2a, 0x91, 0x85, 0x7e, 0x45, + 0x46, 0xd8, 0x18, 0x6a, 0x59, 0x2c, 0x53, 0x69, 0x94, 0x1d, 0xa2, 0xcf, 0x83, 0xbf, 0x2d, 0x38, + 0xca, 0x7b, 0x70, 0x1b, 0xca, 0xcd, 0x98, 0xee, 0xd0, 0x14, 0xda, 0x8b, 0x4c, 0x32, 0xd5, 0xb3, + 0x9d, 0x1a, 0x46, 0x4b, 0xf7, 0xed, 0x79, 0x69, 0xdf, 0x72, 0x1f, 0xff, 0x3c, 0x93, 0x6c, 0x6c, + 0xf8, 0xf9, 0x68, 0x2f, 0xde, 0x23, 0xfd, 0x9f, 0xa0, 0xfb, 0x21, 0xa1, 0x58, 0x19, 0xa7, 0xa4, + 0x32, 0xed, 0x62, 0x65, 0xfe, 0x84, 0xfa, 0x88, 0x4b, 0x95, 0xdb, 0x31, 0xd8, 0x89, 0x94, 0x79, + 0x4a, 0x9f, 0x1f, 0xa6, 0x64, 0x28, 0x3e, 0x91, 0xd2, 0xa4, 
0xa0, 0x98, 0xfd, 0x1f, 0xc0, 0xd9, + 0x03, 0x45, 0xc9, 0x5a, 0x89, 0x64, 0xad, 0x28, 0xf9, 0x02, 0x1a, 0x26, 0x9e, 0x40, 0x1e, 0x54, + 0x23, 0xba, 0x13, 0xb9, 0x68, 0xaf, 0x4c, 0x94, 0x68, 0xc6, 0xe0, 0x9f, 0x0a, 0x38, 0x73, 0x26, + 0xe4, 0xcd, 0xfc, 0xf2, 0x14, 0x3d, 0x85, 0xba, 0x58, 0xd2, 0x2d, 0x4d, 0xf2, 0x26, 0xe4, 0x96, + 0xc2, 0xdf, 0xb1, 0xa5, 0x8c, 0x13, 0x5c, 0x71, 0x6d, 0x85, 0x1b, 0x0b, 0x3d, 0x85, 0x9a, 0xd9, + 0x3f, 0x6a, 0xcb, 0x37, 0xdf, 0x3e, 0x22, 0xc6, 0x44, 0xaf, 0xa0, 0x11, 0xd1, 0x9d, 0x5e, 0xae, + 0xd5, 0xb2, 0xe5, 0xb6, 0x17, 0xf4, 0xc7, 0x74, 0x77, 0xc5, 0x32, 0x73, 0xf7, 0x7a, 0xa4, 0x0d, + 0x74, 0x06, 0x4d, 0xe5, 0x6c, 0x2e, 0x59, 0x2b, 0x7b, 0x80, 0x45, 0xf7, 0xc2, 0x6a, 0x72, 0xa2, + 0xdc, 0xec, 0xff, 0x08, 0xad, 0x42, 0xe4, 0x87, 0x26, 0xda, 0x2e, 0xbe, 0x87, 0x57, 0xd0, 0x39, + 0x88, 0x5a, 0x74, 0xb6, 0x1f, 0x78, 0x0e, 0xe7, 0x0d, 0xa8, 0xc5, 0x9c, 0xc5, 0xab, 0x45, 0xdd, + 0xe4, 0xfb, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xeb, 0x74, 0x17, 0x7f, 0xc3, 0x07, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto index 2048655..6adea22 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto @@ -32,7 +32,7 @@ syntax = "proto3"; import "google/protobuf/any.proto"; -import "testdata/test.proto"; +import "test_proto/test.proto"; package proto3_proto; @@ -58,14 +58,16 @@ message Message { repeated Humour r_funny = 16; map terrain = 10; - testdata.SubDefaults proto2_field = 11; - map proto2_value = 13; + test_proto.SubDefaults proto2_field = 11; + map proto2_value = 13; google.protobuf.Any anything = 14; repeated google.protobuf.Any many_things = 15; Message submessage = 17; repeated Message children = 18; + + map string_map = 20; } message Nested { @@ -85,3 +87,11 @@ message IntMap { message IntMaps { repeated IntMap maps = 1; } + +message TestUTF8 { + string scalar = 1; + repeated string vector = 2; + oneof oneof { string field = 3; } + map map_key = 4; + map map_value = 5; +} diff --git a/vendor/github.com/golang/protobuf/proto/proto3_test.go b/vendor/github.com/golang/protobuf/proto/proto3_test.go index 735837f..73eed6c 100644 --- a/vendor/github.com/golang/protobuf/proto/proto3_test.go +++ b/vendor/github.com/golang/protobuf/proto/proto3_test.go @@ -32,11 +32,12 @@ package proto_test import ( + "bytes" "testing" "github.com/golang/protobuf/proto" pb "github.com/golang/protobuf/proto/proto3_proto" - tpb "github.com/golang/protobuf/proto/testdata" + tpb "github.com/golang/protobuf/proto/test_proto" ) func TestProto3ZeroValues(t *testing.T) { @@ -133,3 +134,18 @@ func TestProto3SetDefaults(t *testing.T) { t.Errorf("with in = %v\nproto.SetDefaults(in) =>\ngot %v\nwant %v", in, got, want) } } + +func TestUnknownFieldPreservation(t *testing.T) { + b1 := "\x0a\x05David" // Known tag 1 + b2 := "\xc2\x0c\x06Google" // Unknown tag 200 + b := []byte(b1 + b2) + + m := new(pb.Message) + if err := proto.Unmarshal(b, m); err != nil { + t.Fatalf("proto.Unmarshal: %v", err) + } + + if !bytes.Equal(m.XXX_unrecognized, []byte(b2)) { + t.Fatalf("mismatching unknown fields:\ngot %q\nwant %q", m.XXX_unrecognized, b2) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/size2_test.go b/vendor/github.com/golang/protobuf/proto/size2_test.go index a2729c3..0b8eb85 100644 --- a/vendor/github.com/golang/protobuf/proto/size2_test.go +++ 
b/vendor/github.com/golang/protobuf/proto/size2_test.go @@ -32,6 +32,7 @@ package proto import ( + "math" "testing" ) @@ -51,11 +52,11 @@ func TestVarintSize(t *testing.T) { {128, 2}, {16383, 2}, {16384, 3}, - {1<<63 - 1, 9}, - {1 << 63, 10}, + {math.MaxInt64, 9}, + {math.MaxInt64 + 1, 10}, } for _, tc := range testCases { - size := sizeVarint(tc.n) + size := SizeVarint(tc.n) if size != tc.size { t.Errorf("sizeVarint(%d) = %d, want %d", tc.n, size, tc.size) } diff --git a/vendor/github.com/golang/protobuf/proto/size_test.go b/vendor/github.com/golang/protobuf/proto/size_test.go index af1034d..3abac41 100644 --- a/vendor/github.com/golang/protobuf/proto/size_test.go +++ b/vendor/github.com/golang/protobuf/proto/size_test.go @@ -38,7 +38,7 @@ import ( . "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" ) var messageWithExtension1 = &pb.MyMessage{Count: Int32(7)} @@ -59,6 +59,30 @@ func init() { } +// non-pointer custom message +type nonptrMessage struct{} + +func (m nonptrMessage) ProtoMessage() {} +func (m nonptrMessage) Reset() {} +func (m nonptrMessage) String() string { return "" } + +func (m nonptrMessage) Marshal() ([]byte, error) { + return []byte{42}, nil +} + +// custom message embedding a proto.Message +type messageWithEmbedding struct { + *pb.OtherMessage +} + +func (m *messageWithEmbedding) ProtoMessage() {} +func (m *messageWithEmbedding) Reset() {} +func (m *messageWithEmbedding) String() string { return "" } + +func (m *messageWithEmbedding) Marshal() ([]byte, error) { + return []byte{42}, nil +} + var SizeTests = []struct { desc string pb Message @@ -146,6 +170,9 @@ var SizeTests = []struct { {"oneof group", &pb.Oneof{Union: &pb.Oneof_FGroup{&pb.Oneof_F_Group{X: Int32(52)}}}}, {"oneof largest tag", &pb.Oneof{Union: &pb.Oneof_F_Largest_Tag{1}}}, {"multiple oneofs", &pb.Oneof{Union: &pb.Oneof_F_Int32{1}, Tormato: &pb.Oneof_Value{2}}}, + + {"non-pointer message", nonptrMessage{}}, + {"custom message with embedding", &messageWithEmbedding{&pb.OtherMessage{}}}, } func TestSize(t *testing.T) { diff --git a/vendor/github.com/golang/protobuf/proto/table_marshal.go b/vendor/github.com/golang/protobuf/proto/table_marshal.go new file mode 100644 index 0000000..5cb11fa --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_marshal.go @@ -0,0 +1,2776 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "errors" + "fmt" + "math" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "sync/atomic" + "unicode/utf8" +) + +// a sizer takes a pointer to a field and the size of its tag, computes the size of +// the encoded data. +type sizer func(pointer, int) int + +// a marshaler takes a byte slice, a pointer to a field, and its tag (in wire format), +// marshals the field to the end of the slice, returns the slice and error (if any). +type marshaler func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) + +// marshalInfo is the information used for marshaling a message. +type marshalInfo struct { + typ reflect.Type + fields []*marshalFieldInfo + unrecognized field // offset of XXX_unrecognized + extensions field // offset of XXX_InternalExtensions + v1extensions field // offset of XXX_extensions + sizecache field // offset of XXX_sizecache + initialized int32 // 0 -- only typ is set, 1 -- fully initialized + messageset bool // uses message set wire format + hasmarshaler bool // has custom marshaler + sync.RWMutex // protect extElems map, also for initialization + extElems map[int32]*marshalElemInfo // info of extension elements +} + +// marshalFieldInfo is the information used for marshaling a field of a message. +type marshalFieldInfo struct { + field field + wiretag uint64 // tag in wire format + tagsize int // size of tag in wire format + sizer sizer + marshaler marshaler + isPointer bool + required bool // field is required + name string // name of the field, for error reporting + oneofElems map[reflect.Type]*marshalElemInfo // info of oneof elements +} + +// marshalElemInfo is the information used for marshaling an extension or oneof element. +type marshalElemInfo struct { + wiretag uint64 // tag in wire format + tagsize int // size of tag in wire format + sizer sizer + marshaler marshaler + isptr bool // elem is pointer typed, thus interface of this type is a direct interface (extension only) + deref bool // dereference the pointer before operating on it; implies isptr +} + +var ( + marshalInfoMap = map[reflect.Type]*marshalInfo{} + marshalInfoLock sync.Mutex +) + +// getMarshalInfo returns the information to marshal a given type of message. +// The info it returns may not necessarily initialized. +// t is the type of the message (NOT the pointer to it). +func getMarshalInfo(t reflect.Type) *marshalInfo { + marshalInfoLock.Lock() + u, ok := marshalInfoMap[t] + if !ok { + u = &marshalInfo{typ: t} + marshalInfoMap[t] = u + } + marshalInfoLock.Unlock() + return u +} + +// Size is the entry point from generated code, +// and should be ONLY called by generated code. +// It computes the size of encoded data of msg. 
+// a is a pointer to a place to store cached marshal info. +func (a *InternalMessageInfo) Size(msg Message) int { + u := getMessageMarshalInfo(msg, a) + ptr := toPointer(&msg) + if ptr.isNil() { + // We get here if msg is a typed nil ((*SomeMessage)(nil)), + // so it satisfies the interface, and msg == nil wouldn't + // catch it. We don't want crash in this case. + return 0 + } + return u.size(ptr) +} + +// Marshal is the entry point from generated code, +// and should be ONLY called by generated code. +// It marshals msg to the end of b. +// a is a pointer to a place to store cached marshal info. +func (a *InternalMessageInfo) Marshal(b []byte, msg Message, deterministic bool) ([]byte, error) { + u := getMessageMarshalInfo(msg, a) + ptr := toPointer(&msg) + if ptr.isNil() { + // We get here if msg is a typed nil ((*SomeMessage)(nil)), + // so it satisfies the interface, and msg == nil wouldn't + // catch it. We don't want crash in this case. + return b, ErrNil + } + return u.marshal(b, ptr, deterministic) +} + +func getMessageMarshalInfo(msg interface{}, a *InternalMessageInfo) *marshalInfo { + // u := a.marshal, but atomically. + // We use an atomic here to ensure memory consistency. + u := atomicLoadMarshalInfo(&a.marshal) + if u == nil { + // Get marshal information from type of message. + t := reflect.ValueOf(msg).Type() + if t.Kind() != reflect.Ptr { + panic(fmt.Sprintf("cannot handle non-pointer message type %v", t)) + } + u = getMarshalInfo(t.Elem()) + // Store it in the cache for later users. + // a.marshal = u, but atomically. + atomicStoreMarshalInfo(&a.marshal, u) + } + return u +} + +// size is the main function to compute the size of the encoded data of a message. +// ptr is the pointer to the message. +func (u *marshalInfo) size(ptr pointer) int { + if atomic.LoadInt32(&u.initialized) == 0 { + u.computeMarshalInfo() + } + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + if u.hasmarshaler { + m := ptr.asPointerTo(u.typ).Interface().(Marshaler) + b, _ := m.Marshal() + return len(b) + } + + n := 0 + for _, f := range u.fields { + if f.isPointer && ptr.offset(f.field).getPointer().isNil() { + // nil pointer always marshals to nothing + continue + } + n += f.sizer(ptr.offset(f.field), f.tagsize) + } + if u.extensions.IsValid() { + e := ptr.offset(u.extensions).toExtensions() + if u.messageset { + n += u.sizeMessageSet(e) + } else { + n += u.sizeExtensions(e) + } + } + if u.v1extensions.IsValid() { + m := *ptr.offset(u.v1extensions).toOldExtensions() + n += u.sizeV1Extensions(m) + } + if u.unrecognized.IsValid() { + s := *ptr.offset(u.unrecognized).toBytes() + n += len(s) + } + // cache the result for use in marshal + if u.sizecache.IsValid() { + atomic.StoreInt32(ptr.offset(u.sizecache).toInt32(), int32(n)) + } + return n +} + +// cachedsize gets the size from cache. If there is no cache (i.e. message is not generated), +// fall back to compute the size. +func (u *marshalInfo) cachedsize(ptr pointer) int { + if u.sizecache.IsValid() { + return int(atomic.LoadInt32(ptr.offset(u.sizecache).toInt32())) + } + return u.size(ptr) +} + +// marshal is the main function to marshal a message. It takes a byte slice and appends +// the encoded data to the end of the slice, returns the slice and error (if any). +// ptr is the pointer to the message. +// If deterministic is true, map is marshaled in deterministic order. 
+func (u *marshalInfo) marshal(b []byte, ptr pointer, deterministic bool) ([]byte, error) { + if atomic.LoadInt32(&u.initialized) == 0 { + u.computeMarshalInfo() + } + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + if u.hasmarshaler { + m := ptr.asPointerTo(u.typ).Interface().(Marshaler) + b1, err := m.Marshal() + b = append(b, b1...) + return b, err + } + + var err, errLater error + // The old marshaler encodes extensions at beginning. + if u.extensions.IsValid() { + e := ptr.offset(u.extensions).toExtensions() + if u.messageset { + b, err = u.appendMessageSet(b, e, deterministic) + } else { + b, err = u.appendExtensions(b, e, deterministic) + } + if err != nil { + return b, err + } + } + if u.v1extensions.IsValid() { + m := *ptr.offset(u.v1extensions).toOldExtensions() + b, err = u.appendV1Extensions(b, m, deterministic) + if err != nil { + return b, err + } + } + for _, f := range u.fields { + if f.required { + if ptr.offset(f.field).getPointer().isNil() { + // Required field is not set. + // We record the error but keep going, to give a complete marshaling. + if errLater == nil { + errLater = &RequiredNotSetError{f.name} + } + continue + } + } + if f.isPointer && ptr.offset(f.field).getPointer().isNil() { + // nil pointer always marshals to nothing + continue + } + b, err = f.marshaler(b, ptr.offset(f.field), f.wiretag, deterministic) + if err != nil { + if err1, ok := err.(*RequiredNotSetError); ok { + // Required field in submessage is not set. + // We record the error but keep going, to give a complete marshaling. + if errLater == nil { + errLater = &RequiredNotSetError{f.name + "." + err1.field} + } + continue + } + if err == errRepeatedHasNil { + err = errors.New("proto: repeated field " + f.name + " has nil element") + } + if err == errInvalidUTF8 { + if errLater == nil { + fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name + errLater = &invalidUTF8Error{fullName} + } + continue + } + return b, err + } + } + if u.unrecognized.IsValid() { + s := *ptr.offset(u.unrecognized).toBytes() + b = append(b, s...) + } + return b, errLater +} + +// computeMarshalInfo initializes the marshal info. +func (u *marshalInfo) computeMarshalInfo() { + u.Lock() + defer u.Unlock() + if u.initialized != 0 { // non-atomic read is ok as it is protected by the lock + return + } + + t := u.typ + u.unrecognized = invalidField + u.extensions = invalidField + u.v1extensions = invalidField + u.sizecache = invalidField + + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. 
+ if reflect.PtrTo(t).Implements(marshalerType) { + u.hasmarshaler = true + atomic.StoreInt32(&u.initialized, 1) + return + } + + // get oneof implementers + var oneofImplementers []interface{} + switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) { + case oneofFuncsIface: + _, _, _, oneofImplementers = m.XXX_OneofFuncs() + case oneofWrappersIface: + oneofImplementers = m.XXX_OneofWrappers() + } + + n := t.NumField() + + // deal with XXX fields first + for i := 0; i < t.NumField(); i++ { + f := t.Field(i) + if !strings.HasPrefix(f.Name, "XXX_") { + continue + } + switch f.Name { + case "XXX_sizecache": + u.sizecache = toField(&f) + case "XXX_unrecognized": + u.unrecognized = toField(&f) + case "XXX_InternalExtensions": + u.extensions = toField(&f) + u.messageset = f.Tag.Get("protobuf_messageset") == "1" + case "XXX_extensions": + u.v1extensions = toField(&f) + case "XXX_NoUnkeyedLiteral": + // nothing to do + default: + panic("unknown XXX field: " + f.Name) + } + n-- + } + + // normal fields + fields := make([]marshalFieldInfo, n) // batch allocation + u.fields = make([]*marshalFieldInfo, 0, n) + for i, j := 0, 0; i < t.NumField(); i++ { + f := t.Field(i) + + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + field := &fields[j] + j++ + field.name = f.Name + u.fields = append(u.fields, field) + if f.Tag.Get("protobuf_oneof") != "" { + field.computeOneofFieldInfo(&f, oneofImplementers) + continue + } + if f.Tag.Get("protobuf") == "" { + // field has no tag (not in generated message), ignore it + u.fields = u.fields[:len(u.fields)-1] + j-- + continue + } + field.computeMarshalFieldInfo(&f) + } + + // fields are marshaled in tag order on the wire. + sort.Sort(byTag(u.fields)) + + atomic.StoreInt32(&u.initialized, 1) +} + +// helper for sorting fields by tag +type byTag []*marshalFieldInfo + +func (a byTag) Len() int { return len(a) } +func (a byTag) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byTag) Less(i, j int) bool { return a[i].wiretag < a[j].wiretag } + +// getExtElemInfo returns the information to marshal an extension element. +// The info it returns is initialized. +func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo { + // get from cache first + u.RLock() + e, ok := u.extElems[desc.Field] + u.RUnlock() + if ok { + return e + } + + t := reflect.TypeOf(desc.ExtensionType) // pointer or slice to basic type or struct + tags := strings.Split(desc.Tag, ",") + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + if t.Kind() == reflect.Ptr && t.Elem().Kind() != reflect.Struct { + t = t.Elem() + } + sizer, marshaler := typeMarshaler(t, tags, false, false) + var deref bool + if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { + t = reflect.PtrTo(t) + deref = true + } + e = &marshalElemInfo{ + wiretag: uint64(tag)<<3 | wt, + tagsize: SizeVarint(uint64(tag) << 3), + sizer: sizer, + marshaler: marshaler, + isptr: t.Kind() == reflect.Ptr, + deref: deref, + } + + // update cache + u.Lock() + if u.extElems == nil { + u.extElems = make(map[int32]*marshalElemInfo) + } + u.extElems[desc.Field] = e + u.Unlock() + return e +} + +// computeMarshalFieldInfo fills up the information to marshal a field. +func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) { + // parse protobuf tag of the field. + // tag has format of "bytes,49,opt,name=foo,def=hello!" 
+ tags := strings.Split(f.Tag.Get("protobuf"), ",") + if tags[0] == "" { + return + } + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + if tags[2] == "req" { + fi.required = true + } + fi.setTag(f, tag, wt) + fi.setMarshaler(f, tags) +} + +func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) { + fi.field = toField(f) + fi.wiretag = math.MaxInt32 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire. + fi.isPointer = true + fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f) + fi.oneofElems = make(map[reflect.Type]*marshalElemInfo) + + ityp := f.Type // interface type + for _, o := range oneofImplementers { + t := reflect.TypeOf(o) + if !t.Implements(ityp) { + continue + } + sf := t.Elem().Field(0) // oneof implementer is a struct with a single field + tags := strings.Split(sf.Tag.Get("protobuf"), ",") + tag, err := strconv.Atoi(tags[1]) + if err != nil { + panic("tag is not an integer") + } + wt := wiretype(tags[0]) + sizer, marshaler := typeMarshaler(sf.Type, tags, false, true) // oneof should not omit any zero value + fi.oneofElems[t.Elem()] = &marshalElemInfo{ + wiretag: uint64(tag)<<3 | wt, + tagsize: SizeVarint(uint64(tag) << 3), + sizer: sizer, + marshaler: marshaler, + } + } +} + +// wiretype returns the wire encoding of the type. +func wiretype(encoding string) uint64 { + switch encoding { + case "fixed32": + return WireFixed32 + case "fixed64": + return WireFixed64 + case "varint", "zigzag32", "zigzag64": + return WireVarint + case "bytes": + return WireBytes + case "group": + return WireStartGroup + } + panic("unknown wire type " + encoding) +} + +// setTag fills up the tag (in wire format) and its size in the info of a field. +func (fi *marshalFieldInfo) setTag(f *reflect.StructField, tag int, wt uint64) { + fi.field = toField(f) + fi.wiretag = uint64(tag)<<3 | wt + fi.tagsize = SizeVarint(uint64(tag) << 3) +} + +// setMarshaler fills up the sizer and marshaler in the info of a field. +func (fi *marshalFieldInfo) setMarshaler(f *reflect.StructField, tags []string) { + switch f.Type.Kind() { + case reflect.Map: + // map field + fi.isPointer = true + fi.sizer, fi.marshaler = makeMapMarshaler(f) + return + case reflect.Ptr, reflect.Slice: + fi.isPointer = true + } + fi.sizer, fi.marshaler = typeMarshaler(f.Type, tags, true, false) +} + +// typeMarshaler returns the sizer and marshaler of a given field. +// t is the type of the field. +// tags is the generated "protobuf" tag of the field. +// If nozero is true, zero value is not marshaled to the wire. +// If oneof is true, it is a oneof field. 
+func typeMarshaler(t reflect.Type, tags []string, nozero, oneof bool) (sizer, marshaler) { + encoding := tags[0] + + pointer := false + slice := false + if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { + slice = true + t = t.Elem() + } + if t.Kind() == reflect.Ptr { + pointer = true + t = t.Elem() + } + + packed := false + proto3 := false + validateUTF8 := true + for i := 2; i < len(tags); i++ { + if tags[i] == "packed" { + packed = true + } + if tags[i] == "proto3" { + proto3 = true + } + } + validateUTF8 = validateUTF8 && proto3 + + switch t.Kind() { + case reflect.Bool: + if pointer { + return sizeBoolPtr, appendBoolPtr + } + if slice { + if packed { + return sizeBoolPackedSlice, appendBoolPackedSlice + } + return sizeBoolSlice, appendBoolSlice + } + if nozero { + return sizeBoolValueNoZero, appendBoolValueNoZero + } + return sizeBoolValue, appendBoolValue + case reflect.Uint32: + switch encoding { + case "fixed32": + if pointer { + return sizeFixed32Ptr, appendFixed32Ptr + } + if slice { + if packed { + return sizeFixed32PackedSlice, appendFixed32PackedSlice + } + return sizeFixed32Slice, appendFixed32Slice + } + if nozero { + return sizeFixed32ValueNoZero, appendFixed32ValueNoZero + } + return sizeFixed32Value, appendFixed32Value + case "varint": + if pointer { + return sizeVarint32Ptr, appendVarint32Ptr + } + if slice { + if packed { + return sizeVarint32PackedSlice, appendVarint32PackedSlice + } + return sizeVarint32Slice, appendVarint32Slice + } + if nozero { + return sizeVarint32ValueNoZero, appendVarint32ValueNoZero + } + return sizeVarint32Value, appendVarint32Value + } + case reflect.Int32: + switch encoding { + case "fixed32": + if pointer { + return sizeFixedS32Ptr, appendFixedS32Ptr + } + if slice { + if packed { + return sizeFixedS32PackedSlice, appendFixedS32PackedSlice + } + return sizeFixedS32Slice, appendFixedS32Slice + } + if nozero { + return sizeFixedS32ValueNoZero, appendFixedS32ValueNoZero + } + return sizeFixedS32Value, appendFixedS32Value + case "varint": + if pointer { + return sizeVarintS32Ptr, appendVarintS32Ptr + } + if slice { + if packed { + return sizeVarintS32PackedSlice, appendVarintS32PackedSlice + } + return sizeVarintS32Slice, appendVarintS32Slice + } + if nozero { + return sizeVarintS32ValueNoZero, appendVarintS32ValueNoZero + } + return sizeVarintS32Value, appendVarintS32Value + case "zigzag32": + if pointer { + return sizeZigzag32Ptr, appendZigzag32Ptr + } + if slice { + if packed { + return sizeZigzag32PackedSlice, appendZigzag32PackedSlice + } + return sizeZigzag32Slice, appendZigzag32Slice + } + if nozero { + return sizeZigzag32ValueNoZero, appendZigzag32ValueNoZero + } + return sizeZigzag32Value, appendZigzag32Value + } + case reflect.Uint64: + switch encoding { + case "fixed64": + if pointer { + return sizeFixed64Ptr, appendFixed64Ptr + } + if slice { + if packed { + return sizeFixed64PackedSlice, appendFixed64PackedSlice + } + return sizeFixed64Slice, appendFixed64Slice + } + if nozero { + return sizeFixed64ValueNoZero, appendFixed64ValueNoZero + } + return sizeFixed64Value, appendFixed64Value + case "varint": + if pointer { + return sizeVarint64Ptr, appendVarint64Ptr + } + if slice { + if packed { + return sizeVarint64PackedSlice, appendVarint64PackedSlice + } + return sizeVarint64Slice, appendVarint64Slice + } + if nozero { + return sizeVarint64ValueNoZero, appendVarint64ValueNoZero + } + return sizeVarint64Value, appendVarint64Value + } + case reflect.Int64: + switch encoding { + case "fixed64": + if pointer { + return 
sizeFixedS64Ptr, appendFixedS64Ptr + } + if slice { + if packed { + return sizeFixedS64PackedSlice, appendFixedS64PackedSlice + } + return sizeFixedS64Slice, appendFixedS64Slice + } + if nozero { + return sizeFixedS64ValueNoZero, appendFixedS64ValueNoZero + } + return sizeFixedS64Value, appendFixedS64Value + case "varint": + if pointer { + return sizeVarintS64Ptr, appendVarintS64Ptr + } + if slice { + if packed { + return sizeVarintS64PackedSlice, appendVarintS64PackedSlice + } + return sizeVarintS64Slice, appendVarintS64Slice + } + if nozero { + return sizeVarintS64ValueNoZero, appendVarintS64ValueNoZero + } + return sizeVarintS64Value, appendVarintS64Value + case "zigzag64": + if pointer { + return sizeZigzag64Ptr, appendZigzag64Ptr + } + if slice { + if packed { + return sizeZigzag64PackedSlice, appendZigzag64PackedSlice + } + return sizeZigzag64Slice, appendZigzag64Slice + } + if nozero { + return sizeZigzag64ValueNoZero, appendZigzag64ValueNoZero + } + return sizeZigzag64Value, appendZigzag64Value + } + case reflect.Float32: + if pointer { + return sizeFloat32Ptr, appendFloat32Ptr + } + if slice { + if packed { + return sizeFloat32PackedSlice, appendFloat32PackedSlice + } + return sizeFloat32Slice, appendFloat32Slice + } + if nozero { + return sizeFloat32ValueNoZero, appendFloat32ValueNoZero + } + return sizeFloat32Value, appendFloat32Value + case reflect.Float64: + if pointer { + return sizeFloat64Ptr, appendFloat64Ptr + } + if slice { + if packed { + return sizeFloat64PackedSlice, appendFloat64PackedSlice + } + return sizeFloat64Slice, appendFloat64Slice + } + if nozero { + return sizeFloat64ValueNoZero, appendFloat64ValueNoZero + } + return sizeFloat64Value, appendFloat64Value + case reflect.String: + if validateUTF8 { + if pointer { + return sizeStringPtr, appendUTF8StringPtr + } + if slice { + return sizeStringSlice, appendUTF8StringSlice + } + if nozero { + return sizeStringValueNoZero, appendUTF8StringValueNoZero + } + return sizeStringValue, appendUTF8StringValue + } + if pointer { + return sizeStringPtr, appendStringPtr + } + if slice { + return sizeStringSlice, appendStringSlice + } + if nozero { + return sizeStringValueNoZero, appendStringValueNoZero + } + return sizeStringValue, appendStringValue + case reflect.Slice: + if slice { + return sizeBytesSlice, appendBytesSlice + } + if oneof { + // Oneof bytes field may also have "proto3" tag. + // We want to marshal it as a oneof field. Do this + // check before the proto3 check. + return sizeBytesOneof, appendBytesOneof + } + if proto3 { + return sizeBytes3, appendBytes3 + } + return sizeBytes, appendBytes + case reflect.Struct: + switch encoding { + case "group": + if slice { + return makeGroupSliceMarshaler(getMarshalInfo(t)) + } + return makeGroupMarshaler(getMarshalInfo(t)) + case "bytes": + if slice { + return makeMessageSliceMarshaler(getMarshalInfo(t)) + } + return makeMessageMarshaler(getMarshalInfo(t)) + } + } + panic(fmt.Sprintf("unknown or mismatched type: type: %v, wire type: %v", t, encoding)) +} + +// Below are functions to size/marshal a specific type of a field. +// They are stored in the field's info, and called by function pointers. +// They have type sizer or marshaler. 
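// --- Illustrative sketch, not part of this vendored file or patch ---
// typeMarshaler above only selects the sizer/marshaler pair; the wire tag itself is
// precomputed by setTag as uint64(fieldNumber)<<3 | wireType, and tagsize is the
// varint size of that shifted tag. The self-contained snippet below works that
// arithmetic through for a hypothetical `bytes,2,opt` field. wireBytes and
// sizeVarint are local stand-ins assumed to mirror the package's WireBytes
// constant and SizeVarint helper.
package main

import "fmt"

const wireBytes = 2 // protobuf wire type for length-delimited fields

// sizeVarint returns the number of bytes needed to varint-encode x.
func sizeVarint(x uint64) int {
	n := 1
	for x >= 1<<7 {
		x >>= 7
		n++
	}
	return n
}

func main() {
	tag := uint64(2)                // field number taken from the "bytes,2,opt,..." tag
	wiretag := tag<<3 | wireBytes   // 2<<3 | 2 == 18, written before every occurrence of the field
	tagsize := sizeVarint(tag << 3) // 1 byte; the low 3 wire-type bits never change the length
	fmt.Println(wiretag, tagsize)   // prints: 18 1
}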
+ +func sizeFixed32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFixed32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFixed32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFixed32Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + return (4 + tagsize) * len(s) +} +func sizeFixed32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFixedS32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFixedS32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFixedS32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFixedS32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + return (4 + tagsize) * len(s) +} +func sizeFixedS32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFloat32Value(_ pointer, tagsize int) int { + return 4 + tagsize +} +func sizeFloat32ValueNoZero(ptr pointer, tagsize int) int { + v := math.Float32bits(*ptr.toFloat32()) + if v == 0 { + return 0 + } + return 4 + tagsize +} +func sizeFloat32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toFloat32Ptr() + if p == nil { + return 0 + } + return 4 + tagsize +} +func sizeFloat32Slice(ptr pointer, tagsize int) int { + s := *ptr.toFloat32Slice() + return (4 + tagsize) * len(s) +} +func sizeFloat32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toFloat32Slice() + if len(s) == 0 { + return 0 + } + return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize +} +func sizeFixed64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFixed64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + if v == 0 { + return 0 + } + return 8 + tagsize +} +func sizeFixed64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFixed64Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + return (8 + tagsize) * len(s) +} +func sizeFixed64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeFixedS64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFixedS64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return 8 + tagsize +} +func sizeFixedS64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFixedS64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + return (8 + tagsize) * len(s) +} +func sizeFixedS64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeFloat64Value(_ pointer, tagsize int) int { + return 8 + tagsize +} +func sizeFloat64ValueNoZero(ptr pointer, tagsize int) int { + v := math.Float64bits(*ptr.toFloat64()) + if v == 0 { + return 0 + } + return 8 + tagsize +} +func sizeFloat64Ptr(ptr pointer, 
tagsize int) int { + p := *ptr.toFloat64Ptr() + if p == nil { + return 0 + } + return 8 + tagsize +} +func sizeFloat64Slice(ptr pointer, tagsize int) int { + s := *ptr.toFloat64Slice() + return (8 + tagsize) * len(s) +} +func sizeFloat64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toFloat64Slice() + if len(s) == 0 { + return 0 + } + return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize +} +func sizeVarint32Value(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarint32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint32() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarint32Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint32Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarint32Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func sizeVarint32PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarintS32Value(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarintS32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func sizeVarintS32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarint64Value(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + return SizeVarint(v) + tagsize +} +func sizeVarint64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toUint64() + if v == 0 { + return 0 + } + return SizeVarint(v) + tagsize +} +func sizeVarint64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toUint64Ptr() + if p == nil { + return 0 + } + return SizeVarint(*p) + tagsize +} +func sizeVarint64Slice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(v) + tagsize + } + return n +} +func sizeVarint64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(v) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeVarintS64Value(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v)) + tagsize +} +func sizeVarintS64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + return SizeVarint(uint64(*p)) + tagsize +} +func sizeVarintS64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + tagsize + } + return n +} +func 
sizeVarintS64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeZigzag32Value(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt32() + if v == 0 { + return 0 + } + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32Ptr(ptr pointer, tagsize int) int { + p := ptr.getInt32Ptr() + if p == nil { + return 0 + } + v := *p + return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize +} +func sizeZigzag32Slice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize + } + return n +} +func sizeZigzag32PackedSlice(ptr pointer, tagsize int) int { + s := ptr.getInt32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeZigzag64Value(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64ValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toInt64() + if v == 0 { + return 0 + } + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64Ptr(ptr pointer, tagsize int) int { + p := *ptr.toInt64Ptr() + if p == nil { + return 0 + } + v := *p + return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize +} +func sizeZigzag64Slice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize + } + return n +} +func sizeZigzag64PackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) + } + return n + SizeVarint(uint64(n)) + tagsize +} +func sizeBoolValue(_ pointer, tagsize int) int { + return 1 + tagsize +} +func sizeBoolValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toBool() + if !v { + return 0 + } + return 1 + tagsize +} +func sizeBoolPtr(ptr pointer, tagsize int) int { + p := *ptr.toBoolPtr() + if p == nil { + return 0 + } + return 1 + tagsize +} +func sizeBoolSlice(ptr pointer, tagsize int) int { + s := *ptr.toBoolSlice() + return (1 + tagsize) * len(s) +} +func sizeBoolPackedSlice(ptr pointer, tagsize int) int { + s := *ptr.toBoolSlice() + if len(s) == 0 { + return 0 + } + return len(s) + SizeVarint(uint64(len(s))) + tagsize +} +func sizeStringValue(ptr pointer, tagsize int) int { + v := *ptr.toString() + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringValueNoZero(ptr pointer, tagsize int) int { + v := *ptr.toString() + if v == "" { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringPtr(ptr pointer, tagsize int) int { + p := *ptr.toStringPtr() + if p == nil { + return 0 + } + v := *p + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeStringSlice(ptr pointer, tagsize int) int { + s := *ptr.toStringSlice() + n := 0 + for _, v := range s { + n += len(v) + SizeVarint(uint64(len(v))) + tagsize + } + return n +} +func 
sizeBytes(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + if v == nil { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytes3(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + if len(v) == 0 { + return 0 + } + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytesOneof(ptr pointer, tagsize int) int { + v := *ptr.toBytes() + return len(v) + SizeVarint(uint64(len(v))) + tagsize +} +func sizeBytesSlice(ptr pointer, tagsize int) int { + s := *ptr.toBytesSlice() + n := 0 + for _, v := range s { + n += len(v) + SizeVarint(uint64(len(v))) + tagsize + } + return n +} + +// appendFixed32 appends an encoded fixed32 to b. +func appendFixed32(b []byte, v uint32) []byte { + b = append(b, + byte(v), + byte(v>>8), + byte(v>>16), + byte(v>>24)) + return b +} + +// appendFixed64 appends an encoded fixed64 to b. +func appendFixed64(b []byte, v uint64) []byte { + b = append(b, + byte(v), + byte(v>>8), + byte(v>>16), + byte(v>>24), + byte(v>>32), + byte(v>>40), + byte(v>>48), + byte(v>>56)) + return b +} + +// appendVarint appends an encoded varint to b. +func appendVarint(b []byte, v uint64) []byte { + // TODO: make 1-byte (maybe 2-byte) case inline-able, once we + // have non-leaf inliner. + switch { + case v < 1<<7: + b = append(b, byte(v)) + case v < 1<<14: + b = append(b, + byte(v&0x7f|0x80), + byte(v>>7)) + case v < 1<<21: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte(v>>14)) + case v < 1<<28: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte(v>>21)) + case v < 1<<35: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte(v>>28)) + case v < 1<<42: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte(v>>35)) + case v < 1<<49: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte(v>>42)) + case v < 1<<56: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte(v>>49)) + case v < 1<<63: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte(v>>56)) + default: + b = append(b, + byte(v&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte((v>>56)&0x7f|0x80), + 1) + } + return b +} + +func appendFixed32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFixed32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFixed32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, *p) + return b, nil +} 
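// --- Illustrative sketch, not part of this vendored file or patch ---
// appendVarint above is an unrolled base-128 varint encoder: each output byte
// carries the next 7 low-order bits, and the high bit marks continuation. The loop
// below is a minimal equivalent (putVarint is a local name, not a package function)
// with one worked value: 300 encodes as 0xAC 0x02, matching the v < 1<<14 case above.
package main

import "fmt"

// putVarint appends the base-128 varint encoding of v to b, low 7 bits first.
func putVarint(b []byte, v uint64) []byte {
	for v >= 1<<7 {
		b = append(b, byte(v&0x7f|0x80)) // continuation bit set
		v >>= 7
	}
	return append(b, byte(v)) // final byte, high bit clear
}

func main() {
	fmt.Printf("% x\n", putVarint(nil, 300)) // ac 02  (300 = 0b1_0010_1100)
	fmt.Printf("% x\n", putVarint(nil, 1))   // 01
}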
+func appendFixed32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + } + return b, nil +} +func appendFixed32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, v) + } + return b, nil +} +func appendFixedS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + return b, nil +} +func appendFixedS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + return b, nil +} +func appendFixedS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(*p)) + return b, nil +} +func appendFixedS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, uint32(v)) + } + return b, nil +} +func appendFixedS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, uint32(v)) + } + return b, nil +} +func appendFloat32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float32bits(*ptr.toFloat32()) + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFloat32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float32bits(*ptr.toFloat32()) + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, v) + return b, nil +} +func appendFloat32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toFloat32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed32(b, math.Float32bits(*p)) + return b, nil +} +func appendFloat32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed32(b, math.Float32bits(v)) + } + return b, nil +} +func appendFloat32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(4*len(s))) + for _, v := range s { + b = appendFixed32(b, math.Float32bits(v)) + } + return b, nil +} +func appendFixed64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFixed64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFixed64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) 
{ + p := *ptr.toUint64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, *p) + return b, nil +} +func appendFixed64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + } + return b, nil +} +func appendFixed64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, v) + } + return b, nil +} +func appendFixedS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + return b, nil +} +func appendFixedS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + return b, nil +} +func appendFixedS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(*p)) + return b, nil +} +func appendFixedS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, uint64(v)) + } + return b, nil +} +func appendFixedS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, uint64(v)) + } + return b, nil +} +func appendFloat64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float64bits(*ptr.toFloat64()) + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFloat64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := math.Float64bits(*ptr.toFloat64()) + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, v) + return b, nil +} +func appendFloat64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toFloat64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendFixed64(b, math.Float64bits(*p)) + return b, nil +} +func appendFloat64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendFixed64(b, math.Float64bits(v)) + } + return b, nil +} +func appendFloat64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toFloat64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(8*len(s))) + for _, v := range s { + b = appendFixed64(b, math.Float64bits(v)) + } + return b, nil +} +func appendVarint32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarint32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint32() + if v == 0 { + return b, nil + } + b = 
appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarint32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarint32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarint32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarintS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarint64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + return b, nil +} +func appendVarint64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toUint64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + return b, nil +} +func appendVarint64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toUint64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, *p) + return b, nil +} +func appendVarint64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, v) + } + return b, nil +} +func appendVarint64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toUint64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(v) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, v) + } + return b, nil +} +func appendVarintS64Value(b []byte, ptr 
pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + return b, nil +} +func appendVarintS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(*p)) + return b, nil +} +func appendVarintS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendVarintS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v)) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v)) + } + return b, nil +} +func appendZigzag32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt32() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := ptr.getInt32Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + v := *p + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + return b, nil +} +func appendZigzag32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + } + return b, nil +} +func appendZigzag32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := ptr.getInt32Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + } + return b, nil +} +func appendZigzag64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func appendZigzag64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toInt64() + if v == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func appendZigzag64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toInt64Ptr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + v := *p + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + return b, nil +} +func 
appendZigzag64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + } + return b, nil +} +func appendZigzag64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toInt64Slice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + // compute size + n := 0 + for _, v := range s { + n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) + } + b = appendVarint(b, uint64(n)) + for _, v := range s { + b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) + } + return b, nil +} +func appendBoolValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBool() + b = appendVarint(b, wiretag) + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + return b, nil +} +func appendBoolValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBool() + if !v { + return b, nil + } + b = appendVarint(b, wiretag) + b = append(b, 1) + return b, nil +} + +func appendBoolPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toBoolPtr() + if p == nil { + return b, nil + } + b = appendVarint(b, wiretag) + if *p { + b = append(b, 1) + } else { + b = append(b, 0) + } + return b, nil +} +func appendBoolSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBoolSlice() + for _, v := range s { + b = appendVarint(b, wiretag) + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + } + return b, nil +} +func appendBoolPackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBoolSlice() + if len(s) == 0 { + return b, nil + } + b = appendVarint(b, wiretag&^7|WireBytes) + b = appendVarint(b, uint64(len(s))) + for _, v := range s { + if v { + b = append(b, 1) + } else { + b = append(b, 0) + } + } + return b, nil +} +func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toString() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toString() + if v == "" { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + p := *ptr.toStringPtr() + if p == nil { + return b, nil + } + v := *p + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendStringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toStringSlice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + } + return b, nil +} +func appendUTF8StringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + var invalidUTF8 bool + v := *ptr.toString() + if !utf8.ValidString(v) { + invalidUTF8 = true + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) 
+ if invalidUTF8 { + return b, errInvalidUTF8 + } + return b, nil +} +func appendUTF8StringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + var invalidUTF8 bool + v := *ptr.toString() + if v == "" { + return b, nil + } + if !utf8.ValidString(v) { + invalidUTF8 = true + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + if invalidUTF8 { + return b, errInvalidUTF8 + } + return b, nil +} +func appendUTF8StringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + var invalidUTF8 bool + p := *ptr.toStringPtr() + if p == nil { + return b, nil + } + v := *p + if !utf8.ValidString(v) { + invalidUTF8 = true + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + if invalidUTF8 { + return b, errInvalidUTF8 + } + return b, nil +} +func appendUTF8StringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + var invalidUTF8 bool + s := *ptr.toStringSlice() + for _, v := range s { + if !utf8.ValidString(v) { + invalidUTF8 = true + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + } + if invalidUTF8 { + return b, errInvalidUTF8 + } + return b, nil +} +func appendBytes(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + if v == nil { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytes3(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + if len(v) == 0 { + return b, nil + } + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytesOneof(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + v := *ptr.toBytes() + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + return b, nil +} +func appendBytesSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { + s := *ptr.toBytesSlice() + for _, v := range s { + b = appendVarint(b, wiretag) + b = appendVarint(b, uint64(len(v))) + b = append(b, v...) + } + return b, nil +} + +// makeGroupMarshaler returns the sizer and marshaler for a group. +// u is the marshal info of the underlying message. +func makeGroupMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + p := ptr.getPointer() + if p.isNil() { + return 0 + } + return u.size(p) + 2*tagsize + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + p := ptr.getPointer() + if p.isNil() { + return b, nil + } + var err error + b = appendVarint(b, wiretag) // start group + b, err = u.marshal(b, p, deterministic) + b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group + return b, err + } +} + +// makeGroupSliceMarshaler returns the sizer and marshaler for a group slice. +// u is the marshal info of the underlying message. 
+func makeGroupSliceMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + s := ptr.getPointerSlice() + n := 0 + for _, v := range s { + if v.isNil() { + continue + } + n += u.size(v) + 2*tagsize + } + return n + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + s := ptr.getPointerSlice() + var err error + var nerr nonFatal + for _, v := range s { + if v.isNil() { + return b, errRepeatedHasNil + } + b = appendVarint(b, wiretag) // start group + b, err = u.marshal(b, v, deterministic) + b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group + if !nerr.Merge(err) { + if err == ErrNil { + err = errRepeatedHasNil + } + return b, err + } + } + return b, nerr.E + } +} + +// makeMessageMarshaler returns the sizer and marshaler for a message field. +// u is the marshal info of the message. +func makeMessageMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + p := ptr.getPointer() + if p.isNil() { + return 0 + } + siz := u.size(p) + return siz + SizeVarint(uint64(siz)) + tagsize + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + p := ptr.getPointer() + if p.isNil() { + return b, nil + } + b = appendVarint(b, wiretag) + siz := u.cachedsize(p) + b = appendVarint(b, uint64(siz)) + return u.marshal(b, p, deterministic) + } +} + +// makeMessageSliceMarshaler returns the sizer and marshaler for a message slice. +// u is the marshal info of the message. +func makeMessageSliceMarshaler(u *marshalInfo) (sizer, marshaler) { + return func(ptr pointer, tagsize int) int { + s := ptr.getPointerSlice() + n := 0 + for _, v := range s { + if v.isNil() { + continue + } + siz := u.size(v) + n += siz + SizeVarint(uint64(siz)) + tagsize + } + return n + }, + func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { + s := ptr.getPointerSlice() + var err error + var nerr nonFatal + for _, v := range s { + if v.isNil() { + return b, errRepeatedHasNil + } + b = appendVarint(b, wiretag) + siz := u.cachedsize(v) + b = appendVarint(b, uint64(siz)) + b, err = u.marshal(b, v, deterministic) + + if !nerr.Merge(err) { + if err == ErrNil { + err = errRepeatedHasNil + } + return b, err + } + } + return b, nerr.E + } +} + +// makeMapMarshaler returns the sizer and marshaler for a map field. +// f is the pointer to the reflect data structure of the field. +func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) { + // figure out key and value type + t := f.Type + keyType := t.Key() + valType := t.Elem() + keyTags := strings.Split(f.Tag.Get("protobuf_key"), ",") + valTags := strings.Split(f.Tag.Get("protobuf_val"), ",") + keySizer, keyMarshaler := typeMarshaler(keyType, keyTags, false, false) // don't omit zero value in map + valSizer, valMarshaler := typeMarshaler(valType, valTags, false, false) // don't omit zero value in map + keyWireTag := 1<<3 | wiretype(keyTags[0]) + valWireTag := 2<<3 | wiretype(valTags[0]) + + // We create an interface to get the addresses of the map key and value. + // If value is pointer-typed, the interface is a direct interface, the + // idata itself is the value. Otherwise, the idata is the pointer to the + // value. + // Key cannot be pointer-typed. + valIsPtr := valType.Kind() == reflect.Ptr + + // If value is a message with nested maps, calling + // valSizer in marshal may be quadratic. We should use + // cached version in marshal (but not in size). 
+ // If value is not message type, we don't have size cache, + // but it cannot be nested either. Just use valSizer. + valCachedSizer := valSizer + if valIsPtr && valType.Elem().Kind() == reflect.Struct { + u := getMarshalInfo(valType.Elem()) + valCachedSizer = func(ptr pointer, tagsize int) int { + // Same as message sizer, but use cache. + p := ptr.getPointer() + if p.isNil() { + return 0 + } + siz := u.cachedsize(p) + return siz + SizeVarint(uint64(siz)) + tagsize + } + } + return func(ptr pointer, tagsize int) int { + m := ptr.asPointerTo(t).Elem() // the map + n := 0 + for _, k := range m.MapKeys() { + ki := k.Interface() + vi := m.MapIndex(k).Interface() + kaddr := toAddrPointer(&ki, false, false) // pointer to key + vaddr := toAddrPointer(&vi, valIsPtr, false) // pointer to value + siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) + n += siz + SizeVarint(uint64(siz)) + tagsize + } + return n + }, + func(b []byte, ptr pointer, tag uint64, deterministic bool) ([]byte, error) { + m := ptr.asPointerTo(t).Elem() // the map + var err error + keys := m.MapKeys() + if len(keys) > 1 && deterministic { + sort.Sort(mapKeys(keys)) + } + + var nerr nonFatal + for _, k := range keys { + ki := k.Interface() + vi := m.MapIndex(k).Interface() + kaddr := toAddrPointer(&ki, false, false) // pointer to key + vaddr := toAddrPointer(&vi, valIsPtr, false) // pointer to value + b = appendVarint(b, tag) + siz := keySizer(kaddr, 1) + valCachedSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) + b = appendVarint(b, uint64(siz)) + b, err = keyMarshaler(b, kaddr, keyWireTag, deterministic) + if !nerr.Merge(err) { + return b, err + } + b, err = valMarshaler(b, vaddr, valWireTag, deterministic) + if err != ErrNil && !nerr.Merge(err) { // allow nil value in map + return b, err + } + } + return b, nerr.E + } +} + +// makeOneOfMarshaler returns the sizer and marshaler for a oneof field. +// fi is the marshal info of the field. +// f is the pointer to the reflect data structure of the field. +func makeOneOfMarshaler(fi *marshalFieldInfo, f *reflect.StructField) (sizer, marshaler) { + // Oneof field is an interface. We need to get the actual data type on the fly. + t := f.Type + return func(ptr pointer, _ int) int { + p := ptr.getInterfacePointer() + if p.isNil() { + return 0 + } + v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct + telem := v.Type() + e := fi.oneofElems[telem] + return e.sizer(p, e.tagsize) + }, + func(b []byte, ptr pointer, _ uint64, deterministic bool) ([]byte, error) { + p := ptr.getInterfacePointer() + if p.isNil() { + return b, nil + } + v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct + telem := v.Type() + if telem.Field(0).Type.Kind() == reflect.Ptr && p.getPointer().isNil() { + return b, errOneofHasNil + } + e := fi.oneofElems[telem] + return e.marshaler(b, p, e.wiretag, deterministic) + } +} + +// sizeExtensions computes the size of encoded data for a XXX_InternalExtensions field. +func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int { + m, mu := ext.extensionsRead() + if m == nil { + return 0 + } + mu.Lock() + + n := 0 + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + n += len(e.enc) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. 
+ ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + n += ei.sizer(p, ei.tagsize) + } + mu.Unlock() + return n +} + +// appendExtensions marshals a XXX_InternalExtensions field to the end of byte slice b. +func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { + m, mu := ext.extensionsRead() + if m == nil { + return b, nil + } + mu.Lock() + defer mu.Unlock() + + var err error + var nerr nonFatal + + // Fast-path for common cases: zero or one extensions. + // Don't bother sorting the keys. + if len(m) <= 1 { + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if !nerr.Merge(err) { + return b, err + } + } + return b, nerr.E + } + + // Sort the keys to provide a deterministic encoding. + // Not sure this is required, but the old code does it. + keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + for _, k := range keys { + e := m[int32(k)] + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if !nerr.Merge(err) { + return b, err + } + } + return b, nerr.E +} + +// message set format is: +// message MessageSet { +// repeated group Item = 1 { +// required int32 type_id = 2; +// required string message = 3; +// }; +// } + +// sizeMessageSet computes the size of encoded data for a XXX_InternalExtensions field +// in message set format (above). +func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int { + m, mu := ext.extensionsRead() + if m == nil { + return 0 + } + mu.Lock() + + n := 0 + for id, e := range m { + n += 2 // start group, end group. tag = 1 (size=1) + n += SizeVarint(uint64(id)) + 1 // type_id, tag = 2 (size=1) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + siz := len(msgWithLen) + n += siz + 1 // message, tag = 3 (size=1) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + n += ei.sizer(p, 1) // message, tag = 3 (size=1) + } + mu.Unlock() + return n +} + +// appendMessageSet marshals a XXX_InternalExtensions field in message set format (above) +// to the end of byte slice b. 
+func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { + m, mu := ext.extensionsRead() + if m == nil { + return b, nil + } + mu.Lock() + defer mu.Unlock() + + var err error + var nerr nonFatal + + // Fast-path for common cases: zero or one extensions. + // Don't bother sorting the keys. + if len(m) <= 1 { + for id, e := range m { + b = append(b, 1<<3|WireStartGroup) + b = append(b, 2<<3|WireVarint) + b = appendVarint(b, uint64(id)) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + b = append(b, 3<<3|WireBytes) + b = append(b, msgWithLen...) + b = append(b, 1<<3|WireEndGroup) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) + if !nerr.Merge(err) { + return b, err + } + b = append(b, 1<<3|WireEndGroup) + } + return b, nerr.E + } + + // Sort the keys to provide a deterministic encoding. + keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + for _, id := range keys { + e := m[int32(id)] + b = append(b, 1<<3|WireStartGroup) + b = append(b, 2<<3|WireVarint) + b = appendVarint(b, uint64(id)) + + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint + b = append(b, 3<<3|WireBytes) + b = append(b, msgWithLen...) + b = append(b, 1<<3|WireEndGroup) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) + b = append(b, 1<<3|WireEndGroup) + if !nerr.Merge(err) { + return b, err + } + } + return b, nerr.E +} + +// sizeV1Extensions computes the size of encoded data for a V1-API extension field. +func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int { + if m == nil { + return 0 + } + + n := 0 + for _, e := range m { + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + n += len(e.enc) + continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + n += ei.sizer(p, ei.tagsize) + } + return n +} + +// appendV1Extensions marshals a V1-API extension field to the end of byte slice b. +func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, deterministic bool) ([]byte, error) { + if m == nil { + return b, nil + } + + // Sort the keys to provide a deterministic encoding. + keys := make([]int, 0, len(m)) + for k := range m { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + var err error + var nerr nonFatal + for _, k := range keys { + e := m[int32(k)] + if e.value == nil || e.desc == nil { + // Extension is only in its encoded form. + b = append(b, e.enc...) 
+ continue + } + + // We don't skip extensions that have an encoded form set, + // because the extension value may have been mutated after + // the last time this function was called. + + ei := u.getExtElemInfo(e.desc) + v := e.value + p := toAddrPointer(&v, ei.isptr, ei.deref) + b, err = ei.marshaler(b, p, ei.wiretag, deterministic) + if !nerr.Merge(err) { + return b, err + } + } + return b, nerr.E +} + +// newMarshaler is the interface representing objects that can marshal themselves. +// +// This exists to support protoc-gen-go generated messages. +// The proto package will stop type-asserting to this interface in the future. +// +// DO NOT DEPEND ON THIS. +type newMarshaler interface { + XXX_Size() int + XXX_Marshal(b []byte, deterministic bool) ([]byte, error) +} + +// Size returns the encoded size of a protocol buffer message. +// This is the main entry point. +func Size(pb Message) int { + if m, ok := pb.(newMarshaler); ok { + return m.XXX_Size() + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + b, _ := m.Marshal() + return len(b) + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return 0 + } + var info InternalMessageInfo + return info.Size(pb) +} + +// Marshal takes a protocol buffer message +// and encodes it into the wire format, returning the data. +// This is the main entry point. +func Marshal(pb Message) ([]byte, error) { + if m, ok := pb.(newMarshaler); ok { + siz := m.XXX_Size() + b := make([]byte, 0, siz) + return m.XXX_Marshal(b, false) + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + return m.Marshal() + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return nil, ErrNil + } + var info InternalMessageInfo + siz := info.Size(pb) + b := make([]byte, 0, siz) + return info.Marshal(b, pb, false) +} + +// Marshal takes a protocol buffer message +// and encodes it into the wire format, writing the result to the +// Buffer. +// This is an alternative entry point. It is not necessary to use +// a Buffer for most applications. +func (p *Buffer) Marshal(pb Message) error { + var err error + if m, ok := pb.(newMarshaler); ok { + siz := m.XXX_Size() + p.grow(siz) // make sure buf has enough capacity + p.buf, err = m.XXX_Marshal(p.buf, p.deterministic) + return err + } + if m, ok := pb.(Marshaler); ok { + // If the message can marshal itself, let it do it, for compatibility. + // NOTE: This is not efficient. + b, err := m.Marshal() + p.buf = append(p.buf, b...) + return err + } + // in case somehow we didn't generate the wrapper + if pb == nil { + return ErrNil + } + var info InternalMessageInfo + siz := info.Size(pb) + p.grow(siz) // make sure buf has enough capacity + p.buf, err = info.Marshal(p.buf, pb, p.deterministic) + return err +} + +// grow grows the buffer's capacity, if necessary, to guarantee space for +// another n bytes. After grow(n), at least n bytes can be written to the +// buffer without another allocation. +func (p *Buffer) grow(n int) { + need := len(p.buf) + n + if need <= cap(p.buf) { + return + } + newCap := len(p.buf) * 2 + if newCap < need { + newCap = need + } + p.buf = append(make([]byte, 0, newCap), p.buf...) 
+} diff --git a/vendor/github.com/golang/protobuf/proto/table_merge.go b/vendor/github.com/golang/protobuf/proto/table_merge.go new file mode 100644 index 0000000..5525def --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_merge.go @@ -0,0 +1,654 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "fmt" + "reflect" + "strings" + "sync" + "sync/atomic" +) + +// Merge merges the src message into dst. +// This assumes that dst and src of the same type and are non-nil. +func (a *InternalMessageInfo) Merge(dst, src Message) { + mi := atomicLoadMergeInfo(&a.merge) + if mi == nil { + mi = getMergeInfo(reflect.TypeOf(dst).Elem()) + atomicStoreMergeInfo(&a.merge, mi) + } + mi.merge(toPointer(&dst), toPointer(&src)) +} + +type mergeInfo struct { + typ reflect.Type + + initialized int32 // 0: only typ is valid, 1: everything is valid + lock sync.Mutex + + fields []mergeFieldInfo + unrecognized field // Offset of XXX_unrecognized +} + +type mergeFieldInfo struct { + field field // Offset of field, guaranteed to be valid + + // isPointer reports whether the value in the field is a pointer. + // This is true for the following situations: + // * Pointer to struct + // * Pointer to basic type (proto2 only) + // * Slice (first value in slice header is a pointer) + // * String (first value in string header is a pointer) + isPointer bool + + // basicWidth reports the width of the field assuming that it is directly + // embedded in the struct (as is the case for basic types in proto3). + // The possible values are: + // 0: invalid + // 1: bool + // 4: int32, uint32, float32 + // 8: int64, uint64, float64 + basicWidth int + + // Where dst and src are pointers to the types being merged. 
+ merge func(dst, src pointer) +} + +var ( + mergeInfoMap = map[reflect.Type]*mergeInfo{} + mergeInfoLock sync.Mutex +) + +func getMergeInfo(t reflect.Type) *mergeInfo { + mergeInfoLock.Lock() + defer mergeInfoLock.Unlock() + mi := mergeInfoMap[t] + if mi == nil { + mi = &mergeInfo{typ: t} + mergeInfoMap[t] = mi + } + return mi +} + +// merge merges src into dst assuming they are both of type *mi.typ. +func (mi *mergeInfo) merge(dst, src pointer) { + if dst.isNil() { + panic("proto: nil destination") + } + if src.isNil() { + return // Nothing to do. + } + + if atomic.LoadInt32(&mi.initialized) == 0 { + mi.computeMergeInfo() + } + + for _, fi := range mi.fields { + sfp := src.offset(fi.field) + + // As an optimization, we can avoid the merge function call cost + // if we know for sure that the source will have no effect + // by checking if it is the zero value. + if unsafeAllowed { + if fi.isPointer && sfp.getPointer().isNil() { // Could be slice or string + continue + } + if fi.basicWidth > 0 { + switch { + case fi.basicWidth == 1 && !*sfp.toBool(): + continue + case fi.basicWidth == 4 && *sfp.toUint32() == 0: + continue + case fi.basicWidth == 8 && *sfp.toUint64() == 0: + continue + } + } + } + + dfp := dst.offset(fi.field) + fi.merge(dfp, sfp) + } + + // TODO: Make this faster? + out := dst.asPointerTo(mi.typ).Elem() + in := src.asPointerTo(mi.typ).Elem() + if emIn, err := extendable(in.Addr().Interface()); err == nil { + emOut, _ := extendable(out.Addr().Interface()) + mIn, muIn := emIn.extensionsRead() + if mIn != nil { + mOut := emOut.extensionsWrite() + muIn.Lock() + mergeExtension(mOut, mIn) + muIn.Unlock() + } + } + + if mi.unrecognized.IsValid() { + if b := *src.offset(mi.unrecognized).toBytes(); len(b) > 0 { + *dst.offset(mi.unrecognized).toBytes() = append([]byte(nil), b...) + } + } +} + +func (mi *mergeInfo) computeMergeInfo() { + mi.lock.Lock() + defer mi.lock.Unlock() + if mi.initialized != 0 { + return + } + t := mi.typ + n := t.NumField() + + props := GetProperties(t) + for i := 0; i < n; i++ { + f := t.Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + + mfi := mergeFieldInfo{field: toField(&f)} + tf := f.Type + + // As an optimization, we can avoid the merge function call cost + // if we know for sure that the source will have no effect + // by checking if it is the zero value. + if unsafeAllowed { + switch tf.Kind() { + case reflect.Ptr, reflect.Slice, reflect.String: + // As a special case, we assume slices and strings are pointers + // since we know that the first field in the SliceSlice or + // StringHeader is a data pointer. + mfi.isPointer = true + case reflect.Bool: + mfi.basicWidth = 1 + case reflect.Int32, reflect.Uint32, reflect.Float32: + mfi.basicWidth = 4 + case reflect.Int64, reflect.Uint64, reflect.Float64: + mfi.basicWidth = 8 + } + } + + // Unwrap tf to get at its most basic type. + var isPointer, isSlice bool + if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { + isSlice = true + tf = tf.Elem() + } + if tf.Kind() == reflect.Ptr { + isPointer = true + tf = tf.Elem() + } + if isPointer && isSlice && tf.Kind() != reflect.Struct { + panic("both pointer and slice for basic type in " + tf.Name()) + } + + switch tf.Kind() { + case reflect.Int32: + switch { + case isSlice: // E.g., []int32 + mfi.merge = func(dst, src pointer) { + // NOTE: toInt32Slice is not defined (see pointer_reflect.go). + /* + sfsp := src.toInt32Slice() + if *sfsp != nil { + dfsp := dst.toInt32Slice() + *dfsp = append(*dfsp, *sfsp...) 
+ if *dfsp == nil { + *dfsp = []int64{} + } + } + */ + sfs := src.getInt32Slice() + if sfs != nil { + dfs := dst.getInt32Slice() + dfs = append(dfs, sfs...) + if dfs == nil { + dfs = []int32{} + } + dst.setInt32Slice(dfs) + } + } + case isPointer: // E.g., *int32 + mfi.merge = func(dst, src pointer) { + // NOTE: toInt32Ptr is not defined (see pointer_reflect.go). + /* + sfpp := src.toInt32Ptr() + if *sfpp != nil { + dfpp := dst.toInt32Ptr() + if *dfpp == nil { + *dfpp = Int32(**sfpp) + } else { + **dfpp = **sfpp + } + } + */ + sfp := src.getInt32Ptr() + if sfp != nil { + dfp := dst.getInt32Ptr() + if dfp == nil { + dst.setInt32Ptr(*sfp) + } else { + *dfp = *sfp + } + } + } + default: // E.g., int32 + mfi.merge = func(dst, src pointer) { + if v := *src.toInt32(); v != 0 { + *dst.toInt32() = v + } + } + } + case reflect.Int64: + switch { + case isSlice: // E.g., []int64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toInt64Slice() + if *sfsp != nil { + dfsp := dst.toInt64Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []int64{} + } + } + } + case isPointer: // E.g., *int64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toInt64Ptr() + if *sfpp != nil { + dfpp := dst.toInt64Ptr() + if *dfpp == nil { + *dfpp = Int64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., int64 + mfi.merge = func(dst, src pointer) { + if v := *src.toInt64(); v != 0 { + *dst.toInt64() = v + } + } + } + case reflect.Uint32: + switch { + case isSlice: // E.g., []uint32 + mfi.merge = func(dst, src pointer) { + sfsp := src.toUint32Slice() + if *sfsp != nil { + dfsp := dst.toUint32Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []uint32{} + } + } + } + case isPointer: // E.g., *uint32 + mfi.merge = func(dst, src pointer) { + sfpp := src.toUint32Ptr() + if *sfpp != nil { + dfpp := dst.toUint32Ptr() + if *dfpp == nil { + *dfpp = Uint32(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., uint32 + mfi.merge = func(dst, src pointer) { + if v := *src.toUint32(); v != 0 { + *dst.toUint32() = v + } + } + } + case reflect.Uint64: + switch { + case isSlice: // E.g., []uint64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toUint64Slice() + if *sfsp != nil { + dfsp := dst.toUint64Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []uint64{} + } + } + } + case isPointer: // E.g., *uint64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toUint64Ptr() + if *sfpp != nil { + dfpp := dst.toUint64Ptr() + if *dfpp == nil { + *dfpp = Uint64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., uint64 + mfi.merge = func(dst, src pointer) { + if v := *src.toUint64(); v != 0 { + *dst.toUint64() = v + } + } + } + case reflect.Float32: + switch { + case isSlice: // E.g., []float32 + mfi.merge = func(dst, src pointer) { + sfsp := src.toFloat32Slice() + if *sfsp != nil { + dfsp := dst.toFloat32Slice() + *dfsp = append(*dfsp, *sfsp...) 
+ if *dfsp == nil { + *dfsp = []float32{} + } + } + } + case isPointer: // E.g., *float32 + mfi.merge = func(dst, src pointer) { + sfpp := src.toFloat32Ptr() + if *sfpp != nil { + dfpp := dst.toFloat32Ptr() + if *dfpp == nil { + *dfpp = Float32(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., float32 + mfi.merge = func(dst, src pointer) { + if v := *src.toFloat32(); v != 0 { + *dst.toFloat32() = v + } + } + } + case reflect.Float64: + switch { + case isSlice: // E.g., []float64 + mfi.merge = func(dst, src pointer) { + sfsp := src.toFloat64Slice() + if *sfsp != nil { + dfsp := dst.toFloat64Slice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []float64{} + } + } + } + case isPointer: // E.g., *float64 + mfi.merge = func(dst, src pointer) { + sfpp := src.toFloat64Ptr() + if *sfpp != nil { + dfpp := dst.toFloat64Ptr() + if *dfpp == nil { + *dfpp = Float64(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., float64 + mfi.merge = func(dst, src pointer) { + if v := *src.toFloat64(); v != 0 { + *dst.toFloat64() = v + } + } + } + case reflect.Bool: + switch { + case isSlice: // E.g., []bool + mfi.merge = func(dst, src pointer) { + sfsp := src.toBoolSlice() + if *sfsp != nil { + dfsp := dst.toBoolSlice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []bool{} + } + } + } + case isPointer: // E.g., *bool + mfi.merge = func(dst, src pointer) { + sfpp := src.toBoolPtr() + if *sfpp != nil { + dfpp := dst.toBoolPtr() + if *dfpp == nil { + *dfpp = Bool(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., bool + mfi.merge = func(dst, src pointer) { + if v := *src.toBool(); v { + *dst.toBool() = v + } + } + } + case reflect.String: + switch { + case isSlice: // E.g., []string + mfi.merge = func(dst, src pointer) { + sfsp := src.toStringSlice() + if *sfsp != nil { + dfsp := dst.toStringSlice() + *dfsp = append(*dfsp, *sfsp...) + if *dfsp == nil { + *dfsp = []string{} + } + } + } + case isPointer: // E.g., *string + mfi.merge = func(dst, src pointer) { + sfpp := src.toStringPtr() + if *sfpp != nil { + dfpp := dst.toStringPtr() + if *dfpp == nil { + *dfpp = String(**sfpp) + } else { + **dfpp = **sfpp + } + } + } + default: // E.g., string + mfi.merge = func(dst, src pointer) { + if v := *src.toString(); v != "" { + *dst.toString() = v + } + } + } + case reflect.Slice: + isProto3 := props.Prop[i].proto3 + switch { + case isPointer: + panic("bad pointer in byte slice case in " + tf.Name()) + case tf.Elem().Kind() != reflect.Uint8: + panic("bad element kind in byte slice case in " + tf.Name()) + case isSlice: // E.g., [][]byte + mfi.merge = func(dst, src pointer) { + sbsp := src.toBytesSlice() + if *sbsp != nil { + dbsp := dst.toBytesSlice() + for _, sb := range *sbsp { + if sb == nil { + *dbsp = append(*dbsp, nil) + } else { + *dbsp = append(*dbsp, append([]byte{}, sb...)) + } + } + if *dbsp == nil { + *dbsp = [][]byte{} + } + } + } + default: // E.g., []byte + mfi.merge = func(dst, src pointer) { + sbp := src.toBytes() + if *sbp != nil { + dbp := dst.toBytes() + if !isProto3 || len(*sbp) > 0 { + *dbp = append([]byte{}, *sbp...) 
+ } + } + } + } + case reflect.Struct: + switch { + case !isPointer: + panic(fmt.Sprintf("message field %s without pointer", tf)) + case isSlice: // E.g., []*pb.T + mi := getMergeInfo(tf) + mfi.merge = func(dst, src pointer) { + sps := src.getPointerSlice() + if sps != nil { + dps := dst.getPointerSlice() + for _, sp := range sps { + var dp pointer + if !sp.isNil() { + dp = valToPointer(reflect.New(tf)) + mi.merge(dp, sp) + } + dps = append(dps, dp) + } + if dps == nil { + dps = []pointer{} + } + dst.setPointerSlice(dps) + } + } + default: // E.g., *pb.T + mi := getMergeInfo(tf) + mfi.merge = func(dst, src pointer) { + sp := src.getPointer() + if !sp.isNil() { + dp := dst.getPointer() + if dp.isNil() { + dp = valToPointer(reflect.New(tf)) + dst.setPointer(dp) + } + mi.merge(dp, sp) + } + } + } + case reflect.Map: + switch { + case isPointer || isSlice: + panic("bad pointer or slice in map case in " + tf.Name()) + default: // E.g., map[K]V + mfi.merge = func(dst, src pointer) { + sm := src.asPointerTo(tf).Elem() + if sm.Len() == 0 { + return + } + dm := dst.asPointerTo(tf).Elem() + if dm.IsNil() { + dm.Set(reflect.MakeMap(tf)) + } + + switch tf.Elem().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + val = reflect.ValueOf(Clone(val.Interface().(Message))) + dm.SetMapIndex(key, val) + } + case reflect.Slice: // E.g. Bytes type (e.g., []byte) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) + dm.SetMapIndex(key, val) + } + default: // Basic type (e.g., string) + for _, key := range sm.MapKeys() { + val := sm.MapIndex(key) + dm.SetMapIndex(key, val) + } + } + } + } + case reflect.Interface: + // Must be oneof field. + switch { + case isPointer || isSlice: + panic("bad pointer or slice in interface case in " + tf.Name()) + default: // E.g., interface{} + // TODO: Make this faster? + mfi.merge = func(dst, src pointer) { + su := src.asPointerTo(tf).Elem() + if !su.IsNil() { + du := dst.asPointerTo(tf).Elem() + typ := su.Elem().Type() + if du.IsNil() || du.Elem().Type() != typ { + du.Set(reflect.New(typ.Elem())) // Initialize interface if empty + } + sv := su.Elem().Elem().Field(0) + if sv.Kind() == reflect.Ptr && sv.IsNil() { + return + } + dv := du.Elem().Elem().Field(0) + if dv.Kind() == reflect.Ptr && dv.IsNil() { + dv.Set(reflect.New(sv.Type().Elem())) // Initialize proto message if empty + } + switch sv.Type().Kind() { + case reflect.Ptr: // Proto struct (e.g., *T) + Merge(dv.Interface().(Message), sv.Interface().(Message)) + case reflect.Slice: // E.g. 
Bytes type (e.g., []byte) + dv.Set(reflect.ValueOf(append([]byte{}, sv.Bytes()...))) + default: // Basic type (e.g., string) + dv.Set(sv) + } + } + } + } + default: + panic(fmt.Sprintf("merger not found for type:%s", tf)) + } + mi.fields = append(mi.fields, mfi) + } + + mi.unrecognized = invalidField + if f, ok := t.FieldByName("XXX_unrecognized"); ok { + if f.Type != reflect.TypeOf([]byte{}) { + panic("expected XXX_unrecognized to be of type []byte") + } + mi.unrecognized = toField(&f) + } + + atomic.StoreInt32(&mi.initialized, 1) +} diff --git a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go new file mode 100644 index 0000000..acee2fc --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go @@ -0,0 +1,2053 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "errors" + "fmt" + "io" + "math" + "reflect" + "strconv" + "strings" + "sync" + "sync/atomic" + "unicode/utf8" +) + +// Unmarshal is the entry point from the generated .pb.go files. +// This function is not intended to be used by non-generated code. +// This function is not subject to any compatibility guarantee. +// msg contains a pointer to a protocol buffer struct. +// b is the data to be unmarshaled into the protocol buffer. +// a is a pointer to a place to store cached unmarshal information. +func (a *InternalMessageInfo) Unmarshal(msg Message, b []byte) error { + // Load the unmarshal information for this message type. + // The atomic load ensures memory consistency. + u := atomicLoadUnmarshalInfo(&a.unmarshal) + if u == nil { + // Slow path: find unmarshal info for msg, update a with it. + u = getUnmarshalInfo(reflect.TypeOf(msg).Elem()) + atomicStoreUnmarshalInfo(&a.unmarshal, u) + } + // Then do the unmarshaling. 
+ err := u.unmarshal(toPointer(&msg), b) + return err +} + +type unmarshalInfo struct { + typ reflect.Type // type of the protobuf struct + + // 0 = only typ field is initialized + // 1 = completely initialized + initialized int32 + lock sync.Mutex // prevents double initialization + dense []unmarshalFieldInfo // fields indexed by tag # + sparse map[uint64]unmarshalFieldInfo // fields indexed by tag # + reqFields []string // names of required fields + reqMask uint64 // 1< 0 { + // Read tag and wire type. + // Special case 1 and 2 byte varints. + var x uint64 + if b[0] < 128 { + x = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + x = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + x, n = decodeVarint(b) + if n == 0 { + return io.ErrUnexpectedEOF + } + b = b[n:] + } + tag := x >> 3 + wire := int(x) & 7 + + // Dispatch on the tag to one of the unmarshal* functions below. + var f unmarshalFieldInfo + if tag < uint64(len(u.dense)) { + f = u.dense[tag] + } else { + f = u.sparse[tag] + } + if fn := f.unmarshal; fn != nil { + var err error + b, err = fn(b, m.offset(f.field), wire) + if err == nil { + reqMask |= f.reqMask + continue + } + if r, ok := err.(*RequiredNotSetError); ok { + // Remember this error, but keep parsing. We need to produce + // a full parse even if a required field is missing. + if errLater == nil { + errLater = r + } + reqMask |= f.reqMask + continue + } + if err != errInternalBadWireType { + if err == errInvalidUTF8 { + if errLater == nil { + fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name + errLater = &invalidUTF8Error{fullName} + } + continue + } + return err + } + // Fragments with bad wire type are treated as unknown fields. + } + + // Unknown tag. + if !u.unrecognized.IsValid() { + // Don't keep unrecognized data; just skip it. + var err error + b, err = skipField(b, wire) + if err != nil { + return err + } + continue + } + // Keep unrecognized data around. + // maybe in extensions, maybe in the unrecognized field. + z := m.offset(u.unrecognized).toBytes() + var emap map[int32]Extension + var e Extension + for _, r := range u.extensionRanges { + if uint64(r.Start) <= tag && tag <= uint64(r.End) { + if u.extensions.IsValid() { + mp := m.offset(u.extensions).toExtensions() + emap = mp.extensionsWrite() + e = emap[int32(tag)] + z = &e.enc + break + } + if u.oldExtensions.IsValid() { + p := m.offset(u.oldExtensions).toOldExtensions() + emap = *p + if emap == nil { + emap = map[int32]Extension{} + *p = emap + } + e = emap[int32(tag)] + z = &e.enc + break + } + panic("no extensions field available") + } + } + + // Use wire type to skip data. + var err error + b0 := b + b, err = skipField(b, wire) + if err != nil { + return err + } + *z = encodeVarint(*z, tag<<3|uint64(wire)) + *z = append(*z, b0[:len(b0)-len(b)]...) + + if emap != nil { + emap[int32(tag)] = e + } + } + if reqMask != u.reqMask && errLater == nil { + // A required field of this message is missing. + for _, n := range u.reqFields { + if reqMask&1 == 0 { + errLater = &RequiredNotSetError{n} + } + reqMask >>= 1 + } + } + return errLater +} + +// computeUnmarshalInfo fills in u with information for use +// in unmarshaling protocol buffers of type u.typ. +func (u *unmarshalInfo) computeUnmarshalInfo() { + u.lock.Lock() + defer u.lock.Unlock() + if u.initialized != 0 { + return + } + t := u.typ + n := t.NumField() + + // Set up the "not found" value for the unrecognized byte buffer. + // This is the default for proto3. 
+ u.unrecognized = invalidField + u.extensions = invalidField + u.oldExtensions = invalidField + + // List of the generated type and offset for each oneof field. + type oneofField struct { + ityp reflect.Type // interface type of oneof field + field field // offset in containing message + } + var oneofFields []oneofField + + for i := 0; i < n; i++ { + f := t.Field(i) + if f.Name == "XXX_unrecognized" { + // The byte slice used to hold unrecognized input is special. + if f.Type != reflect.TypeOf(([]byte)(nil)) { + panic("bad type for XXX_unrecognized field: " + f.Type.Name()) + } + u.unrecognized = toField(&f) + continue + } + if f.Name == "XXX_InternalExtensions" { + // Ditto here. + if f.Type != reflect.TypeOf(XXX_InternalExtensions{}) { + panic("bad type for XXX_InternalExtensions field: " + f.Type.Name()) + } + u.extensions = toField(&f) + if f.Tag.Get("protobuf_messageset") == "1" { + u.isMessageSet = true + } + continue + } + if f.Name == "XXX_extensions" { + // An older form of the extensions field. + if f.Type != reflect.TypeOf((map[int32]Extension)(nil)) { + panic("bad type for XXX_extensions field: " + f.Type.Name()) + } + u.oldExtensions = toField(&f) + continue + } + if f.Name == "XXX_NoUnkeyedLiteral" || f.Name == "XXX_sizecache" { + continue + } + + oneof := f.Tag.Get("protobuf_oneof") + if oneof != "" { + oneofFields = append(oneofFields, oneofField{f.Type, toField(&f)}) + // The rest of oneof processing happens below. + continue + } + + tags := f.Tag.Get("protobuf") + tagArray := strings.Split(tags, ",") + if len(tagArray) < 2 { + panic("protobuf tag not enough fields in " + t.Name() + "." + f.Name + ": " + tags) + } + tag, err := strconv.Atoi(tagArray[1]) + if err != nil { + panic("protobuf tag field not an integer: " + tagArray[1]) + } + + name := "" + for _, tag := range tagArray[3:] { + if strings.HasPrefix(tag, "name=") { + name = tag[5:] + } + } + + // Extract unmarshaling function from the field (its type and tags). + unmarshal := fieldUnmarshaler(&f) + + // Required field? + var reqMask uint64 + if tagArray[2] == "req" { + bit := len(u.reqFields) + u.reqFields = append(u.reqFields, name) + reqMask = uint64(1) << uint(bit) + // TODO: if we have more than 64 required fields, we end up + // not verifying that all required fields are present. + // Fix this, perhaps using a count of required fields? + } + + // Store the info in the correct slot in the message. + u.setTag(tag, toField(&f), unmarshal, reqMask, name) + } + + // Find any types associated with oneof fields. + var oneofImplementers []interface{} + switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) { + case oneofFuncsIface: + _, _, _, oneofImplementers = m.XXX_OneofFuncs() + case oneofWrappersIface: + oneofImplementers = m.XXX_OneofWrappers() + } + for _, v := range oneofImplementers { + tptr := reflect.TypeOf(v) // *Msg_X + typ := tptr.Elem() // Msg_X + + f := typ.Field(0) // oneof implementers have one field + baseUnmarshal := fieldUnmarshaler(&f) + tags := strings.Split(f.Tag.Get("protobuf"), ",") + fieldNum, err := strconv.Atoi(tags[1]) + if err != nil { + panic("protobuf tag field not an integer: " + tags[1]) + } + var name string + for _, tag := range tags { + if strings.HasPrefix(tag, "name=") { + name = strings.TrimPrefix(tag, "name=") + break + } + } + + // Find the oneof field that this struct implements. + // Might take O(n^2) to process all of the oneofs, but who cares. 
+ for _, of := range oneofFields { + if tptr.Implements(of.ityp) { + // We have found the corresponding interface for this struct. + // That lets us know where this struct should be stored + // when we encounter it during unmarshaling. + unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal) + u.setTag(fieldNum, of.field, unmarshal, 0, name) + } + } + + } + + // Get extension ranges, if any. + fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray") + if fn.IsValid() { + if !u.extensions.IsValid() && !u.oldExtensions.IsValid() { + panic("a message with extensions, but no extensions field in " + t.Name()) + } + u.extensionRanges = fn.Call(nil)[0].Interface().([]ExtensionRange) + } + + // Explicitly disallow tag 0. This will ensure we flag an error + // when decoding a buffer of all zeros. Without this code, we + // would decode and skip an all-zero buffer of even length. + // [0 0] is [tag=0/wiretype=varint varint-encoded-0]. + u.setTag(0, zeroField, func(b []byte, f pointer, w int) ([]byte, error) { + return nil, fmt.Errorf("proto: %s: illegal tag 0 (wire type %d)", t, w) + }, 0, "") + + // Set mask for required field check. + u.reqMask = uint64(1)<= 0 && (tag < 16 || tag < 2*n) { // TODO: what are the right numbers here? + for len(u.dense) <= tag { + u.dense = append(u.dense, unmarshalFieldInfo{}) + } + u.dense[tag] = i + return + } + if u.sparse == nil { + u.sparse = map[uint64]unmarshalFieldInfo{} + } + u.sparse[uint64(tag)] = i +} + +// fieldUnmarshaler returns an unmarshaler for the given field. +func fieldUnmarshaler(f *reflect.StructField) unmarshaler { + if f.Type.Kind() == reflect.Map { + return makeUnmarshalMap(f) + } + return typeUnmarshaler(f.Type, f.Tag.Get("protobuf")) +} + +// typeUnmarshaler returns an unmarshaler for the given field type / field tag pair. +func typeUnmarshaler(t reflect.Type, tags string) unmarshaler { + tagArray := strings.Split(tags, ",") + encoding := tagArray[0] + name := "unknown" + proto3 := false + validateUTF8 := true + for _, tag := range tagArray[3:] { + if strings.HasPrefix(tag, "name=") { + name = tag[5:] + } + if tag == "proto3" { + proto3 = true + } + } + validateUTF8 = validateUTF8 && proto3 + + // Figure out packaging (pointer, slice, or both) + slice := false + pointer := false + if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { + slice = true + t = t.Elem() + } + if t.Kind() == reflect.Ptr { + pointer = true + t = t.Elem() + } + + // We'll never have both pointer and slice for basic types. 
+ if pointer && slice && t.Kind() != reflect.Struct { + panic("both pointer and slice for basic type in " + t.Name()) + } + + switch t.Kind() { + case reflect.Bool: + if pointer { + return unmarshalBoolPtr + } + if slice { + return unmarshalBoolSlice + } + return unmarshalBoolValue + case reflect.Int32: + switch encoding { + case "fixed32": + if pointer { + return unmarshalFixedS32Ptr + } + if slice { + return unmarshalFixedS32Slice + } + return unmarshalFixedS32Value + case "varint": + // this could be int32 or enum + if pointer { + return unmarshalInt32Ptr + } + if slice { + return unmarshalInt32Slice + } + return unmarshalInt32Value + case "zigzag32": + if pointer { + return unmarshalSint32Ptr + } + if slice { + return unmarshalSint32Slice + } + return unmarshalSint32Value + } + case reflect.Int64: + switch encoding { + case "fixed64": + if pointer { + return unmarshalFixedS64Ptr + } + if slice { + return unmarshalFixedS64Slice + } + return unmarshalFixedS64Value + case "varint": + if pointer { + return unmarshalInt64Ptr + } + if slice { + return unmarshalInt64Slice + } + return unmarshalInt64Value + case "zigzag64": + if pointer { + return unmarshalSint64Ptr + } + if slice { + return unmarshalSint64Slice + } + return unmarshalSint64Value + } + case reflect.Uint32: + switch encoding { + case "fixed32": + if pointer { + return unmarshalFixed32Ptr + } + if slice { + return unmarshalFixed32Slice + } + return unmarshalFixed32Value + case "varint": + if pointer { + return unmarshalUint32Ptr + } + if slice { + return unmarshalUint32Slice + } + return unmarshalUint32Value + } + case reflect.Uint64: + switch encoding { + case "fixed64": + if pointer { + return unmarshalFixed64Ptr + } + if slice { + return unmarshalFixed64Slice + } + return unmarshalFixed64Value + case "varint": + if pointer { + return unmarshalUint64Ptr + } + if slice { + return unmarshalUint64Slice + } + return unmarshalUint64Value + } + case reflect.Float32: + if pointer { + return unmarshalFloat32Ptr + } + if slice { + return unmarshalFloat32Slice + } + return unmarshalFloat32Value + case reflect.Float64: + if pointer { + return unmarshalFloat64Ptr + } + if slice { + return unmarshalFloat64Slice + } + return unmarshalFloat64Value + case reflect.Map: + panic("map type in typeUnmarshaler in " + t.Name()) + case reflect.Slice: + if pointer { + panic("bad pointer in slice case in " + t.Name()) + } + if slice { + return unmarshalBytesSlice + } + return unmarshalBytesValue + case reflect.String: + if validateUTF8 { + if pointer { + return unmarshalUTF8StringPtr + } + if slice { + return unmarshalUTF8StringSlice + } + return unmarshalUTF8StringValue + } + if pointer { + return unmarshalStringPtr + } + if slice { + return unmarshalStringSlice + } + return unmarshalStringValue + case reflect.Struct: + // message or group field + if !pointer { + panic(fmt.Sprintf("message/group field %s:%s without pointer", t, encoding)) + } + switch encoding { + case "bytes": + if slice { + return makeUnmarshalMessageSlicePtr(getUnmarshalInfo(t), name) + } + return makeUnmarshalMessagePtr(getUnmarshalInfo(t), name) + case "group": + if slice { + return makeUnmarshalGroupSlicePtr(getUnmarshalInfo(t), name) + } + return makeUnmarshalGroupPtr(getUnmarshalInfo(t), name) + } + } + panic(fmt.Sprintf("unmarshaler not found type:%s encoding:%s", t, encoding)) +} + +// Below are all the unmarshalers for individual fields of various types. 
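For reference, the varint-based unmarshalers that follow (the int32/int64/uint32/uint64/bool variants and their sint zigzag forms) are built from two primitives that appear throughout this vendored code: base-128 varint decoding (decodeVarint, which returns the decoded value and the number of bytes consumed) and, for sint fields, the zigzag transform written as int64(x>>1) ^ int64(x)<<63>>63. The following is a minimal, self-contained sketch of those two steps only; the helper names decodeVarintSketch and unzigzag64 are illustrative and are not part of the vendored package.

package main

import "fmt"

// decodeVarintSketch reads one base-128 varint from the front of b and
// returns the decoded value plus the number of bytes consumed (0 when b is
// truncated). It mirrors what the decodeVarint helper used below does.
func decodeVarintSketch(b []byte) (uint64, int) {
	var x uint64
	var shift uint
	for i, c := range b {
		x |= uint64(c&0x7f) << shift
		if c < 0x80 {
			return x, i + 1
		}
		shift += 7
	}
	return 0, 0 // truncated varint
}

// unzigzag64 undoes the zigzag encoding used for sint64 fields; it is the
// same expression the sint unmarshalers apply: int64(x>>1) ^ int64(x)<<63>>63.
func unzigzag64(x uint64) int64 {
	return int64(x>>1) ^ int64(x)<<63>>63
}

func main() {
	// 300 is encoded as the two-byte varint 0xAC 0x02.
	v, n := decodeVarintSketch([]byte{0xAC, 0x02})
	fmt.Println(v, n) // 300 2

	// The sint64 value -1 is zigzag-encoded as the varint 1.
	fmt.Println(unzigzag64(1)) // -1
}

The packed-repeated slice variants in this file apply the same varint primitive in a loop, after first reading the byte length of the packed payload in their WireBytes branch.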
+ +func unmarshalInt64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + *f.toInt64() = v + return b, nil +} + +func unmarshalInt64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + *f.toInt64Ptr() = &v + return b, nil +} + +func unmarshalInt64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + s := f.toInt64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x) + s := f.toInt64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalSint64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + *f.toInt64() = v + return b, nil +} + +func unmarshalSint64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + *f.toInt64Ptr() = &v + return b, nil +} + +func unmarshalSint64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + s := f.toInt64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int64(x>>1) ^ int64(x)<<63>>63 + s := f.toInt64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalUint64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + *f.toUint64() = v + return b, nil +} + +func unmarshalUint64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + *f.toUint64Ptr() = &v + return b, nil +} + +func unmarshalUint64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 
{ + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + s := f.toUint64Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint64(x) + s := f.toUint64Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalInt32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + *f.toInt32() = v + return b, nil +} + +func unmarshalInt32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.setInt32Ptr(v) + return b, nil +} + +func unmarshalInt32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.appendInt32Slice(v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x) + f.appendInt32Slice(v) + return b, nil +} + +func unmarshalSint32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + *f.toInt32() = v + return b, nil +} + +func unmarshalSint32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.setInt32Ptr(v) + return b, nil +} + +func unmarshalSint32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.appendInt32Slice(v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := int32(x>>1) ^ int32(x)<<31>>31 + f.appendInt32Slice(v) + return b, nil +} + +func unmarshalUint32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + *f.toUint32() = v + return b, nil +} + +func unmarshalUint32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + *f.toUint32Ptr() = &v + return b, nil +} + +func unmarshalUint32Slice(b []byte, f pointer, w int) 
([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + s := f.toUint32Slice() + *s = append(*s, v) + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + v := uint32(x) + s := f.toUint32Slice() + *s = append(*s, v) + return b, nil +} + +func unmarshalFixed64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + *f.toUint64() = v + return b[8:], nil +} + +func unmarshalFixed64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + *f.toUint64Ptr() = &v + return b[8:], nil +} + +func unmarshalFixed64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + s := f.toUint64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + s := f.toUint64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFixedS64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + *f.toInt64() = v + return b[8:], nil +} + +func unmarshalFixedS64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + *f.toInt64Ptr() = &v + return b[8:], nil +} + +func unmarshalFixedS64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | 
int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + s := f.toInt64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 + s := f.toInt64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFixed32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + *f.toUint32() = v + return b[4:], nil +} + +func unmarshalFixed32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + *f.toUint32Ptr() = &v + return b[4:], nil +} + +func unmarshalFixed32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + s := f.toUint32Slice() + *s = append(*s, v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + s := f.toUint32Slice() + *s = append(*s, v) + return b[4:], nil +} + +func unmarshalFixedS32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + *f.toInt32() = v + return b[4:], nil +} + +func unmarshalFixedS32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.setInt32Ptr(v) + return b[4:], nil +} + +func unmarshalFixedS32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.appendInt32Slice(v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 + f.appendInt32Slice(v) + return b[4:], nil +} + +func unmarshalBoolValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + // Note: any length varint is allowed, even though any sane + // encoder will use one byte. 
+ // See https://github.com/golang/protobuf/issues/76 + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + // TODO: check if x>1? Tests seem to indicate no. + v := x != 0 + *f.toBool() = v + return b[n:], nil +} + +func unmarshalBoolPtr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + *f.toBoolPtr() = &v + return b[n:], nil +} + +func unmarshalBoolSlice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + x, n = decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + s := f.toBoolSlice() + *s = append(*s, v) + b = b[n:] + } + return res, nil + } + if w != WireVarint { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + v := x != 0 + s := f.toBoolSlice() + *s = append(*s, v) + return b[n:], nil +} + +func unmarshalFloat64Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + *f.toFloat64() = v + return b[8:], nil +} + +func unmarshalFloat64Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + *f.toFloat64Ptr() = &v + return b[8:], nil +} + +func unmarshalFloat64Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + s := f.toFloat64Slice() + *s = append(*s, v) + b = b[8:] + } + return res, nil + } + if w != WireFixed64 { + return b, errInternalBadWireType + } + if len(b) < 8 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) + s := f.toFloat64Slice() + *s = append(*s, v) + return b[8:], nil +} + +func unmarshalFloat32Value(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + *f.toFloat32() = v + return b[4:], nil +} + +func unmarshalFloat32Ptr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + 
v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + *f.toFloat32Ptr() = &v + return b[4:], nil +} + +func unmarshalFloat32Slice(b []byte, f pointer, w int) ([]byte, error) { + if w == WireBytes { // packed + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + res := b[x:] + b = b[:x] + for len(b) > 0 { + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + s := f.toFloat32Slice() + *s = append(*s, v) + b = b[4:] + } + return res, nil + } + if w != WireFixed32 { + return b, errInternalBadWireType + } + if len(b) < 4 { + return nil, io.ErrUnexpectedEOF + } + v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) + s := f.toFloat32Slice() + *s = append(*s, v) + return b[4:], nil +} + +func unmarshalStringValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + *f.toString() = v + return b[x:], nil +} + +func unmarshalStringPtr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + *f.toStringPtr() = &v + return b[x:], nil +} + +func unmarshalStringSlice(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + s := f.toStringSlice() + *s = append(*s, v) + return b[x:], nil +} + +func unmarshalUTF8StringValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + *f.toString() = v + if !utf8.ValidString(v) { + return b[x:], errInvalidUTF8 + } + return b[x:], nil +} + +func unmarshalUTF8StringPtr(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + *f.toStringPtr() = &v + if !utf8.ValidString(v) { + return b[x:], errInvalidUTF8 + } + return b[x:], nil +} + +func unmarshalUTF8StringSlice(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := string(b[:x]) + s := f.toStringSlice() + *s = append(*s, v) + if !utf8.ValidString(v) { + return b[x:], errInvalidUTF8 + } + return b[x:], nil +} + +var emptyBuf [0]byte + +func unmarshalBytesValue(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 
0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + // The use of append here is a trick which avoids the zeroing + // that would be required if we used a make/copy pair. + // We append to emptyBuf instead of nil because we want + // a non-nil result even when the length is 0. + v := append(emptyBuf[:], b[:x]...) + *f.toBytes() = v + return b[x:], nil +} + +func unmarshalBytesSlice(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := append(emptyBuf[:], b[:x]...) + s := f.toBytesSlice() + *s = append(*s, v) + return b[x:], nil +} + +func makeUnmarshalMessagePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + // First read the message field to see if something is there. + // The semantics of multiple submessages are weird. Instead of + // the last one winning (as it is for all other fields), multiple + // submessages are merged. + v := f.getPointer() + if v.isNil() { + v = valToPointer(reflect.New(sub.typ)) + f.setPointer(v) + } + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + return b[x:], err + } +} + +func makeUnmarshalMessageSlicePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireBytes { + return b, errInternalBadWireType + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + v := valToPointer(reflect.New(sub.typ)) + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + f.appendPointer(v) + return b[x:], err + } +} + +func makeUnmarshalGroupPtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireStartGroup { + return b, errInternalBadWireType + } + x, y := findEndGroup(b) + if x < 0 { + return nil, io.ErrUnexpectedEOF + } + v := f.getPointer() + if v.isNil() { + v = valToPointer(reflect.New(sub.typ)) + f.setPointer(v) + } + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." + r.field + } else { + return nil, err + } + } + return b[y:], err + } +} + +func makeUnmarshalGroupSlicePtr(sub *unmarshalInfo, name string) unmarshaler { + return func(b []byte, f pointer, w int) ([]byte, error) { + if w != WireStartGroup { + return b, errInternalBadWireType + } + x, y := findEndGroup(b) + if x < 0 { + return nil, io.ErrUnexpectedEOF + } + v := valToPointer(reflect.New(sub.typ)) + err := sub.unmarshal(v, b[:x]) + if err != nil { + if r, ok := err.(*RequiredNotSetError); ok { + r.field = name + "." 
+ r.field + } else { + return nil, err + } + } + f.appendPointer(v) + return b[y:], err + } +} + +func makeUnmarshalMap(f *reflect.StructField) unmarshaler { + t := f.Type + kt := t.Key() + vt := t.Elem() + unmarshalKey := typeUnmarshaler(kt, f.Tag.Get("protobuf_key")) + unmarshalVal := typeUnmarshaler(vt, f.Tag.Get("protobuf_val")) + return func(b []byte, f pointer, w int) ([]byte, error) { + // The map entry is a submessage. Figure out how big it is. + if w != WireBytes { + return nil, fmt.Errorf("proto: bad wiretype for map field: got %d want %d", w, WireBytes) + } + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + b = b[n:] + if x > uint64(len(b)) { + return nil, io.ErrUnexpectedEOF + } + r := b[x:] // unused data to return + b = b[:x] // data for map entry + + // Note: we could use #keys * #values ~= 200 functions + // to do map decoding without reflection. Probably not worth it. + // Maps will be somewhat slow. Oh well. + + // Read key and value from data. + var nerr nonFatal + k := reflect.New(kt) + v := reflect.New(vt) + for len(b) > 0 { + x, n := decodeVarint(b) + if n == 0 { + return nil, io.ErrUnexpectedEOF + } + wire := int(x) & 7 + b = b[n:] + + var err error + switch x >> 3 { + case 1: + b, err = unmarshalKey(b, valToPointer(k), wire) + case 2: + b, err = unmarshalVal(b, valToPointer(v), wire) + default: + err = errInternalBadWireType // skip unknown tag + } + + if nerr.Merge(err) { + continue + } + if err != errInternalBadWireType { + return nil, err + } + + // Skip past unknown fields. + b, err = skipField(b, wire) + if err != nil { + return nil, err + } + } + + // Get map, allocate if needed. + m := f.asPointerTo(t).Elem() // an addressable map[K]T + if m.IsNil() { + m.Set(reflect.MakeMap(t)) + } + + // Insert into map. + m.SetMapIndex(k.Elem(), v.Elem()) + + return r, nerr.E + } +} + +// makeUnmarshalOneof makes an unmarshaler for oneof fields. +// for: +// message Msg { +// oneof F { +// int64 X = 1; +// float64 Y = 2; +// } +// } +// typ is the type of the concrete entry for a oneof case (e.g. Msg_X). +// ityp is the interface type of the oneof field (e.g. isMsg_F). +// unmarshal is the unmarshaler for the base type of the oneof case (e.g. int64). +// Note that this function will be called once for each case in the oneof. +func makeUnmarshalOneof(typ, ityp reflect.Type, unmarshal unmarshaler) unmarshaler { + sf := typ.Field(0) + field0 := toField(&sf) + return func(b []byte, f pointer, w int) ([]byte, error) { + // Allocate holder for value. + v := reflect.New(typ) + + // Unmarshal data into holder. + // We unmarshal into the first field of the holder object. + var err error + var nerr nonFatal + b, err = unmarshal(b, valToPointer(v).offset(field0), w) + if !nerr.Merge(err) { + return nil, err + } + + // Write pointer to holder into target field. + f.asPointerTo(ityp).Elem().Set(v) + + return b, nerr.E + } +} + +// Error used by decode internally. +var errInternalBadWireType = errors.New("proto: internal error: bad wiretype") + +// skipField skips past a field of type wire and returns the remaining bytes. 
+func skipField(b []byte, wire int) ([]byte, error) { + switch wire { + case WireVarint: + _, k := decodeVarint(b) + if k == 0 { + return b, io.ErrUnexpectedEOF + } + b = b[k:] + case WireFixed32: + if len(b) < 4 { + return b, io.ErrUnexpectedEOF + } + b = b[4:] + case WireFixed64: + if len(b) < 8 { + return b, io.ErrUnexpectedEOF + } + b = b[8:] + case WireBytes: + m, k := decodeVarint(b) + if k == 0 || uint64(len(b)-k) < m { + return b, io.ErrUnexpectedEOF + } + b = b[uint64(k)+m:] + case WireStartGroup: + _, i := findEndGroup(b) + if i == -1 { + return b, io.ErrUnexpectedEOF + } + b = b[i:] + default: + return b, fmt.Errorf("proto: can't skip unknown wire type %d", wire) + } + return b, nil +} + +// findEndGroup finds the index of the next EndGroup tag. +// Groups may be nested, so the "next" EndGroup tag is the first +// unpaired EndGroup. +// findEndGroup returns the indexes of the start and end of the EndGroup tag. +// Returns (-1,-1) if it can't find one. +func findEndGroup(b []byte) (int, int) { + depth := 1 + i := 0 + for { + x, n := decodeVarint(b[i:]) + if n == 0 { + return -1, -1 + } + j := i + i += n + switch x & 7 { + case WireVarint: + _, k := decodeVarint(b[i:]) + if k == 0 { + return -1, -1 + } + i += k + case WireFixed32: + if len(b)-4 < i { + return -1, -1 + } + i += 4 + case WireFixed64: + if len(b)-8 < i { + return -1, -1 + } + i += 8 + case WireBytes: + m, k := decodeVarint(b[i:]) + if k == 0 { + return -1, -1 + } + i += k + if uint64(len(b)-i) < m { + return -1, -1 + } + i += int(m) + case WireStartGroup: + depth++ + case WireEndGroup: + depth-- + if depth == 0 { + return j, i + } + default: + return -1, -1 + } + } +} + +// encodeVarint appends a varint-encoded integer to b and returns the result. +func encodeVarint(b []byte, x uint64) []byte { + for x >= 1<<7 { + b = append(b, byte(x&0x7f|0x80)) + x >>= 7 + } + return append(b, byte(x)) +} + +// decodeVarint reads a varint-encoded integer from b. +// Returns the decoded integer and the number of bytes read. +// If there is an error, it returns 0,0. 
+func decodeVarint(b []byte) (uint64, int) { + var x, y uint64 + if len(b) == 0 { + goto bad + } + x = uint64(b[0]) + if x < 0x80 { + return x, 1 + } + x -= 0x80 + + if len(b) <= 1 { + goto bad + } + y = uint64(b[1]) + x += y << 7 + if y < 0x80 { + return x, 2 + } + x -= 0x80 << 7 + + if len(b) <= 2 { + goto bad + } + y = uint64(b[2]) + x += y << 14 + if y < 0x80 { + return x, 3 + } + x -= 0x80 << 14 + + if len(b) <= 3 { + goto bad + } + y = uint64(b[3]) + x += y << 21 + if y < 0x80 { + return x, 4 + } + x -= 0x80 << 21 + + if len(b) <= 4 { + goto bad + } + y = uint64(b[4]) + x += y << 28 + if y < 0x80 { + return x, 5 + } + x -= 0x80 << 28 + + if len(b) <= 5 { + goto bad + } + y = uint64(b[5]) + x += y << 35 + if y < 0x80 { + return x, 6 + } + x -= 0x80 << 35 + + if len(b) <= 6 { + goto bad + } + y = uint64(b[6]) + x += y << 42 + if y < 0x80 { + return x, 7 + } + x -= 0x80 << 42 + + if len(b) <= 7 { + goto bad + } + y = uint64(b[7]) + x += y << 49 + if y < 0x80 { + return x, 8 + } + x -= 0x80 << 49 + + if len(b) <= 8 { + goto bad + } + y = uint64(b[8]) + x += y << 56 + if y < 0x80 { + return x, 9 + } + x -= 0x80 << 56 + + if len(b) <= 9 { + goto bad + } + y = uint64(b[9]) + x += y << 63 + if y < 2 { + return x, 10 + } + +bad: + return 0, 0 +} diff --git a/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go b/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go new file mode 100644 index 0000000..624e834 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/test_proto/test.pb.go @@ -0,0 +1,4926 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: test_proto/test.proto + +package test_proto + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type FOO int32 + +const ( + FOO_FOO1 FOO = 1 +) + +var FOO_name = map[int32]string{ + 1: "FOO1", +} + +var FOO_value = map[string]int32{ + "FOO1": 1, +} + +func (x FOO) Enum() *FOO { + p := new(FOO) + *p = x + return p +} + +func (x FOO) String() string { + return proto.EnumName(FOO_name, int32(x)) +} + +func (x *FOO) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FOO_value, data, "FOO") + if err != nil { + return err + } + *x = FOO(value) + return nil +} + +func (FOO) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{0} +} + +// An enum, for completeness. 
+type GoTest_KIND int32 + +const ( + GoTest_VOID GoTest_KIND = 0 + // Basic types + GoTest_BOOL GoTest_KIND = 1 + GoTest_BYTES GoTest_KIND = 2 + GoTest_FINGERPRINT GoTest_KIND = 3 + GoTest_FLOAT GoTest_KIND = 4 + GoTest_INT GoTest_KIND = 5 + GoTest_STRING GoTest_KIND = 6 + GoTest_TIME GoTest_KIND = 7 + // Groupings + GoTest_TUPLE GoTest_KIND = 8 + GoTest_ARRAY GoTest_KIND = 9 + GoTest_MAP GoTest_KIND = 10 + // Table types + GoTest_TABLE GoTest_KIND = 11 + // Functions + GoTest_FUNCTION GoTest_KIND = 12 +) + +var GoTest_KIND_name = map[int32]string{ + 0: "VOID", + 1: "BOOL", + 2: "BYTES", + 3: "FINGERPRINT", + 4: "FLOAT", + 5: "INT", + 6: "STRING", + 7: "TIME", + 8: "TUPLE", + 9: "ARRAY", + 10: "MAP", + 11: "TABLE", + 12: "FUNCTION", +} + +var GoTest_KIND_value = map[string]int32{ + "VOID": 0, + "BOOL": 1, + "BYTES": 2, + "FINGERPRINT": 3, + "FLOAT": 4, + "INT": 5, + "STRING": 6, + "TIME": 7, + "TUPLE": 8, + "ARRAY": 9, + "MAP": 10, + "TABLE": 11, + "FUNCTION": 12, +} + +func (x GoTest_KIND) Enum() *GoTest_KIND { + p := new(GoTest_KIND) + *p = x + return p +} + +func (x GoTest_KIND) String() string { + return proto.EnumName(GoTest_KIND_name, int32(x)) +} + +func (x *GoTest_KIND) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(GoTest_KIND_value, data, "GoTest_KIND") + if err != nil { + return err + } + *x = GoTest_KIND(value) + return nil +} + +func (GoTest_KIND) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{2, 0} +} + +type MyMessage_Color int32 + +const ( + MyMessage_RED MyMessage_Color = 0 + MyMessage_GREEN MyMessage_Color = 1 + MyMessage_BLUE MyMessage_Color = 2 +) + +var MyMessage_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} + +var MyMessage_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x MyMessage_Color) Enum() *MyMessage_Color { + p := new(MyMessage_Color) + *p = x + return p +} + +func (x MyMessage_Color) String() string { + return proto.EnumName(MyMessage_Color_name, int32(x)) +} + +func (x *MyMessage_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(MyMessage_Color_value, data, "MyMessage_Color") + if err != nil { + return err + } + *x = MyMessage_Color(value) + return nil +} + +func (MyMessage_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{13, 0} +} + +type DefaultsMessage_DefaultsEnum int32 + +const ( + DefaultsMessage_ZERO DefaultsMessage_DefaultsEnum = 0 + DefaultsMessage_ONE DefaultsMessage_DefaultsEnum = 1 + DefaultsMessage_TWO DefaultsMessage_DefaultsEnum = 2 +) + +var DefaultsMessage_DefaultsEnum_name = map[int32]string{ + 0: "ZERO", + 1: "ONE", + 2: "TWO", +} + +var DefaultsMessage_DefaultsEnum_value = map[string]int32{ + "ZERO": 0, + "ONE": 1, + "TWO": 2, +} + +func (x DefaultsMessage_DefaultsEnum) Enum() *DefaultsMessage_DefaultsEnum { + p := new(DefaultsMessage_DefaultsEnum) + *p = x + return p +} + +func (x DefaultsMessage_DefaultsEnum) String() string { + return proto.EnumName(DefaultsMessage_DefaultsEnum_name, int32(x)) +} + +func (x *DefaultsMessage_DefaultsEnum) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(DefaultsMessage_DefaultsEnum_value, data, "DefaultsMessage_DefaultsEnum") + if err != nil { + return err + } + *x = DefaultsMessage_DefaultsEnum(value) + return nil +} + +func (DefaultsMessage_DefaultsEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{16, 0} +} + +type Defaults_Color int32 + 
+const ( + Defaults_RED Defaults_Color = 0 + Defaults_GREEN Defaults_Color = 1 + Defaults_BLUE Defaults_Color = 2 +) + +var Defaults_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} + +var Defaults_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Defaults_Color) Enum() *Defaults_Color { + p := new(Defaults_Color) + *p = x + return p +} + +func (x Defaults_Color) String() string { + return proto.EnumName(Defaults_Color_name, int32(x)) +} + +func (x *Defaults_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Defaults_Color_value, data, "Defaults_Color") + if err != nil { + return err + } + *x = Defaults_Color(value) + return nil +} + +func (Defaults_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{21, 0} +} + +type RepeatedEnum_Color int32 + +const ( + RepeatedEnum_RED RepeatedEnum_Color = 1 +) + +var RepeatedEnum_Color_name = map[int32]string{ + 1: "RED", +} + +var RepeatedEnum_Color_value = map[string]int32{ + "RED": 1, +} + +func (x RepeatedEnum_Color) Enum() *RepeatedEnum_Color { + p := new(RepeatedEnum_Color) + *p = x + return p +} + +func (x RepeatedEnum_Color) String() string { + return proto.EnumName(RepeatedEnum_Color_name, int32(x)) +} + +func (x *RepeatedEnum_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(RepeatedEnum_Color_value, data, "RepeatedEnum_Color") + if err != nil { + return err + } + *x = RepeatedEnum_Color(value) + return nil +} + +func (RepeatedEnum_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{23, 0} +} + +type GoEnum struct { + Foo *FOO `protobuf:"varint,1,req,name=foo,enum=test_proto.FOO" json:"foo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoEnum) Reset() { *m = GoEnum{} } +func (m *GoEnum) String() string { return proto.CompactTextString(m) } +func (*GoEnum) ProtoMessage() {} +func (*GoEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{0} +} + +func (m *GoEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoEnum.Unmarshal(m, b) +} +func (m *GoEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoEnum.Marshal(b, m, deterministic) +} +func (m *GoEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoEnum.Merge(m, src) +} +func (m *GoEnum) XXX_Size() int { + return xxx_messageInfo_GoEnum.Size(m) +} +func (m *GoEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GoEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GoEnum proto.InternalMessageInfo + +func (m *GoEnum) GetFoo() FOO { + if m != nil && m.Foo != nil { + return *m.Foo + } + return FOO_FOO1 +} + +type GoTestField struct { + Label *string `protobuf:"bytes,1,req,name=Label" json:"Label,omitempty"` + Type *string `protobuf:"bytes,2,req,name=Type" json:"Type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestField) Reset() { *m = GoTestField{} } +func (m *GoTestField) String() string { return proto.CompactTextString(m) } +func (*GoTestField) ProtoMessage() {} +func (*GoTestField) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{1} +} + +func (m *GoTestField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestField.Unmarshal(m, b) +} +func (m *GoTestField) XXX_Marshal(b []byte, deterministic bool) 
([]byte, error) { + return xxx_messageInfo_GoTestField.Marshal(b, m, deterministic) +} +func (m *GoTestField) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestField.Merge(m, src) +} +func (m *GoTestField) XXX_Size() int { + return xxx_messageInfo_GoTestField.Size(m) +} +func (m *GoTestField) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestField.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestField proto.InternalMessageInfo + +func (m *GoTestField) GetLabel() string { + if m != nil && m.Label != nil { + return *m.Label + } + return "" +} + +func (m *GoTestField) GetType() string { + if m != nil && m.Type != nil { + return *m.Type + } + return "" +} + +type GoTest struct { + // Some typical parameters + Kind *GoTest_KIND `protobuf:"varint,1,req,name=Kind,enum=test_proto.GoTest_KIND" json:"Kind,omitempty"` + Table *string `protobuf:"bytes,2,opt,name=Table" json:"Table,omitempty"` + Param *int32 `protobuf:"varint,3,opt,name=Param" json:"Param,omitempty"` + // Required, repeated and optional foreign fields. + RequiredField *GoTestField `protobuf:"bytes,4,req,name=RequiredField" json:"RequiredField,omitempty"` + RepeatedField []*GoTestField `protobuf:"bytes,5,rep,name=RepeatedField" json:"RepeatedField,omitempty"` + OptionalField *GoTestField `protobuf:"bytes,6,opt,name=OptionalField" json:"OptionalField,omitempty"` + // Required fields of all basic types + F_BoolRequired *bool `protobuf:"varint,10,req,name=F_Bool_required,json=FBoolRequired" json:"F_Bool_required,omitempty"` + F_Int32Required *int32 `protobuf:"varint,11,req,name=F_Int32_required,json=FInt32Required" json:"F_Int32_required,omitempty"` + F_Int64Required *int64 `protobuf:"varint,12,req,name=F_Int64_required,json=FInt64Required" json:"F_Int64_required,omitempty"` + F_Fixed32Required *uint32 `protobuf:"fixed32,13,req,name=F_Fixed32_required,json=FFixed32Required" json:"F_Fixed32_required,omitempty"` + F_Fixed64Required *uint64 `protobuf:"fixed64,14,req,name=F_Fixed64_required,json=FFixed64Required" json:"F_Fixed64_required,omitempty"` + F_Uint32Required *uint32 `protobuf:"varint,15,req,name=F_Uint32_required,json=FUint32Required" json:"F_Uint32_required,omitempty"` + F_Uint64Required *uint64 `protobuf:"varint,16,req,name=F_Uint64_required,json=FUint64Required" json:"F_Uint64_required,omitempty"` + F_FloatRequired *float32 `protobuf:"fixed32,17,req,name=F_Float_required,json=FFloatRequired" json:"F_Float_required,omitempty"` + F_DoubleRequired *float64 `protobuf:"fixed64,18,req,name=F_Double_required,json=FDoubleRequired" json:"F_Double_required,omitempty"` + F_StringRequired *string `protobuf:"bytes,19,req,name=F_String_required,json=FStringRequired" json:"F_String_required,omitempty"` + F_BytesRequired []byte `protobuf:"bytes,101,req,name=F_Bytes_required,json=FBytesRequired" json:"F_Bytes_required,omitempty"` + F_Sint32Required *int32 `protobuf:"zigzag32,102,req,name=F_Sint32_required,json=FSint32Required" json:"F_Sint32_required,omitempty"` + F_Sint64Required *int64 `protobuf:"zigzag64,103,req,name=F_Sint64_required,json=FSint64Required" json:"F_Sint64_required,omitempty"` + F_Sfixed32Required *int32 `protobuf:"fixed32,104,req,name=F_Sfixed32_required,json=FSfixed32Required" json:"F_Sfixed32_required,omitempty"` + F_Sfixed64Required *int64 `protobuf:"fixed64,105,req,name=F_Sfixed64_required,json=FSfixed64Required" json:"F_Sfixed64_required,omitempty"` + // Repeated fields of all basic types + F_BoolRepeated []bool `protobuf:"varint,20,rep,name=F_Bool_repeated,json=FBoolRepeated" json:"F_Bool_repeated,omitempty"` + 
F_Int32Repeated []int32 `protobuf:"varint,21,rep,name=F_Int32_repeated,json=FInt32Repeated" json:"F_Int32_repeated,omitempty"` + F_Int64Repeated []int64 `protobuf:"varint,22,rep,name=F_Int64_repeated,json=FInt64Repeated" json:"F_Int64_repeated,omitempty"` + F_Fixed32Repeated []uint32 `protobuf:"fixed32,23,rep,name=F_Fixed32_repeated,json=FFixed32Repeated" json:"F_Fixed32_repeated,omitempty"` + F_Fixed64Repeated []uint64 `protobuf:"fixed64,24,rep,name=F_Fixed64_repeated,json=FFixed64Repeated" json:"F_Fixed64_repeated,omitempty"` + F_Uint32Repeated []uint32 `protobuf:"varint,25,rep,name=F_Uint32_repeated,json=FUint32Repeated" json:"F_Uint32_repeated,omitempty"` + F_Uint64Repeated []uint64 `protobuf:"varint,26,rep,name=F_Uint64_repeated,json=FUint64Repeated" json:"F_Uint64_repeated,omitempty"` + F_FloatRepeated []float32 `protobuf:"fixed32,27,rep,name=F_Float_repeated,json=FFloatRepeated" json:"F_Float_repeated,omitempty"` + F_DoubleRepeated []float64 `protobuf:"fixed64,28,rep,name=F_Double_repeated,json=FDoubleRepeated" json:"F_Double_repeated,omitempty"` + F_StringRepeated []string `protobuf:"bytes,29,rep,name=F_String_repeated,json=FStringRepeated" json:"F_String_repeated,omitempty"` + F_BytesRepeated [][]byte `protobuf:"bytes,201,rep,name=F_Bytes_repeated,json=FBytesRepeated" json:"F_Bytes_repeated,omitempty"` + F_Sint32Repeated []int32 `protobuf:"zigzag32,202,rep,name=F_Sint32_repeated,json=FSint32Repeated" json:"F_Sint32_repeated,omitempty"` + F_Sint64Repeated []int64 `protobuf:"zigzag64,203,rep,name=F_Sint64_repeated,json=FSint64Repeated" json:"F_Sint64_repeated,omitempty"` + F_Sfixed32Repeated []int32 `protobuf:"fixed32,204,rep,name=F_Sfixed32_repeated,json=FSfixed32Repeated" json:"F_Sfixed32_repeated,omitempty"` + F_Sfixed64Repeated []int64 `protobuf:"fixed64,205,rep,name=F_Sfixed64_repeated,json=FSfixed64Repeated" json:"F_Sfixed64_repeated,omitempty"` + // Optional fields of all basic types + F_BoolOptional *bool `protobuf:"varint,30,opt,name=F_Bool_optional,json=FBoolOptional" json:"F_Bool_optional,omitempty"` + F_Int32Optional *int32 `protobuf:"varint,31,opt,name=F_Int32_optional,json=FInt32Optional" json:"F_Int32_optional,omitempty"` + F_Int64Optional *int64 `protobuf:"varint,32,opt,name=F_Int64_optional,json=FInt64Optional" json:"F_Int64_optional,omitempty"` + F_Fixed32Optional *uint32 `protobuf:"fixed32,33,opt,name=F_Fixed32_optional,json=FFixed32Optional" json:"F_Fixed32_optional,omitempty"` + F_Fixed64Optional *uint64 `protobuf:"fixed64,34,opt,name=F_Fixed64_optional,json=FFixed64Optional" json:"F_Fixed64_optional,omitempty"` + F_Uint32Optional *uint32 `protobuf:"varint,35,opt,name=F_Uint32_optional,json=FUint32Optional" json:"F_Uint32_optional,omitempty"` + F_Uint64Optional *uint64 `protobuf:"varint,36,opt,name=F_Uint64_optional,json=FUint64Optional" json:"F_Uint64_optional,omitempty"` + F_FloatOptional *float32 `protobuf:"fixed32,37,opt,name=F_Float_optional,json=FFloatOptional" json:"F_Float_optional,omitempty"` + F_DoubleOptional *float64 `protobuf:"fixed64,38,opt,name=F_Double_optional,json=FDoubleOptional" json:"F_Double_optional,omitempty"` + F_StringOptional *string `protobuf:"bytes,39,opt,name=F_String_optional,json=FStringOptional" json:"F_String_optional,omitempty"` + F_BytesOptional []byte `protobuf:"bytes,301,opt,name=F_Bytes_optional,json=FBytesOptional" json:"F_Bytes_optional,omitempty"` + F_Sint32Optional *int32 `protobuf:"zigzag32,302,opt,name=F_Sint32_optional,json=FSint32Optional" json:"F_Sint32_optional,omitempty"` + F_Sint64Optional *int64 
`protobuf:"zigzag64,303,opt,name=F_Sint64_optional,json=FSint64Optional" json:"F_Sint64_optional,omitempty"` + F_Sfixed32Optional *int32 `protobuf:"fixed32,304,opt,name=F_Sfixed32_optional,json=FSfixed32Optional" json:"F_Sfixed32_optional,omitempty"` + F_Sfixed64Optional *int64 `protobuf:"fixed64,305,opt,name=F_Sfixed64_optional,json=FSfixed64Optional" json:"F_Sfixed64_optional,omitempty"` + // Default-valued fields of all basic types + F_BoolDefaulted *bool `protobuf:"varint,40,opt,name=F_Bool_defaulted,json=FBoolDefaulted,def=1" json:"F_Bool_defaulted,omitempty"` + F_Int32Defaulted *int32 `protobuf:"varint,41,opt,name=F_Int32_defaulted,json=FInt32Defaulted,def=32" json:"F_Int32_defaulted,omitempty"` + F_Int64Defaulted *int64 `protobuf:"varint,42,opt,name=F_Int64_defaulted,json=FInt64Defaulted,def=64" json:"F_Int64_defaulted,omitempty"` + F_Fixed32Defaulted *uint32 `protobuf:"fixed32,43,opt,name=F_Fixed32_defaulted,json=FFixed32Defaulted,def=320" json:"F_Fixed32_defaulted,omitempty"` + F_Fixed64Defaulted *uint64 `protobuf:"fixed64,44,opt,name=F_Fixed64_defaulted,json=FFixed64Defaulted,def=640" json:"F_Fixed64_defaulted,omitempty"` + F_Uint32Defaulted *uint32 `protobuf:"varint,45,opt,name=F_Uint32_defaulted,json=FUint32Defaulted,def=3200" json:"F_Uint32_defaulted,omitempty"` + F_Uint64Defaulted *uint64 `protobuf:"varint,46,opt,name=F_Uint64_defaulted,json=FUint64Defaulted,def=6400" json:"F_Uint64_defaulted,omitempty"` + F_FloatDefaulted *float32 `protobuf:"fixed32,47,opt,name=F_Float_defaulted,json=FFloatDefaulted,def=314159" json:"F_Float_defaulted,omitempty"` + F_DoubleDefaulted *float64 `protobuf:"fixed64,48,opt,name=F_Double_defaulted,json=FDoubleDefaulted,def=271828" json:"F_Double_defaulted,omitempty"` + F_StringDefaulted *string `protobuf:"bytes,49,opt,name=F_String_defaulted,json=FStringDefaulted,def=hello, \"world!\"\n" json:"F_String_defaulted,omitempty"` + F_BytesDefaulted []byte `protobuf:"bytes,401,opt,name=F_Bytes_defaulted,json=FBytesDefaulted,def=Bignose" json:"F_Bytes_defaulted,omitempty"` + F_Sint32Defaulted *int32 `protobuf:"zigzag32,402,opt,name=F_Sint32_defaulted,json=FSint32Defaulted,def=-32" json:"F_Sint32_defaulted,omitempty"` + F_Sint64Defaulted *int64 `protobuf:"zigzag64,403,opt,name=F_Sint64_defaulted,json=FSint64Defaulted,def=-64" json:"F_Sint64_defaulted,omitempty"` + F_Sfixed32Defaulted *int32 `protobuf:"fixed32,404,opt,name=F_Sfixed32_defaulted,json=FSfixed32Defaulted,def=-32" json:"F_Sfixed32_defaulted,omitempty"` + F_Sfixed64Defaulted *int64 `protobuf:"fixed64,405,opt,name=F_Sfixed64_defaulted,json=FSfixed64Defaulted,def=-64" json:"F_Sfixed64_defaulted,omitempty"` + // Packed repeated fields (no string or bytes). 
+ F_BoolRepeatedPacked []bool `protobuf:"varint,50,rep,packed,name=F_Bool_repeated_packed,json=FBoolRepeatedPacked" json:"F_Bool_repeated_packed,omitempty"` + F_Int32RepeatedPacked []int32 `protobuf:"varint,51,rep,packed,name=F_Int32_repeated_packed,json=FInt32RepeatedPacked" json:"F_Int32_repeated_packed,omitempty"` + F_Int64RepeatedPacked []int64 `protobuf:"varint,52,rep,packed,name=F_Int64_repeated_packed,json=FInt64RepeatedPacked" json:"F_Int64_repeated_packed,omitempty"` + F_Fixed32RepeatedPacked []uint32 `protobuf:"fixed32,53,rep,packed,name=F_Fixed32_repeated_packed,json=FFixed32RepeatedPacked" json:"F_Fixed32_repeated_packed,omitempty"` + F_Fixed64RepeatedPacked []uint64 `protobuf:"fixed64,54,rep,packed,name=F_Fixed64_repeated_packed,json=FFixed64RepeatedPacked" json:"F_Fixed64_repeated_packed,omitempty"` + F_Uint32RepeatedPacked []uint32 `protobuf:"varint,55,rep,packed,name=F_Uint32_repeated_packed,json=FUint32RepeatedPacked" json:"F_Uint32_repeated_packed,omitempty"` + F_Uint64RepeatedPacked []uint64 `protobuf:"varint,56,rep,packed,name=F_Uint64_repeated_packed,json=FUint64RepeatedPacked" json:"F_Uint64_repeated_packed,omitempty"` + F_FloatRepeatedPacked []float32 `protobuf:"fixed32,57,rep,packed,name=F_Float_repeated_packed,json=FFloatRepeatedPacked" json:"F_Float_repeated_packed,omitempty"` + F_DoubleRepeatedPacked []float64 `protobuf:"fixed64,58,rep,packed,name=F_Double_repeated_packed,json=FDoubleRepeatedPacked" json:"F_Double_repeated_packed,omitempty"` + F_Sint32RepeatedPacked []int32 `protobuf:"zigzag32,502,rep,packed,name=F_Sint32_repeated_packed,json=FSint32RepeatedPacked" json:"F_Sint32_repeated_packed,omitempty"` + F_Sint64RepeatedPacked []int64 `protobuf:"zigzag64,503,rep,packed,name=F_Sint64_repeated_packed,json=FSint64RepeatedPacked" json:"F_Sint64_repeated_packed,omitempty"` + F_Sfixed32RepeatedPacked []int32 `protobuf:"fixed32,504,rep,packed,name=F_Sfixed32_repeated_packed,json=FSfixed32RepeatedPacked" json:"F_Sfixed32_repeated_packed,omitempty"` + F_Sfixed64RepeatedPacked []int64 `protobuf:"fixed64,505,rep,packed,name=F_Sfixed64_repeated_packed,json=FSfixed64RepeatedPacked" json:"F_Sfixed64_repeated_packed,omitempty"` + Requiredgroup *GoTest_RequiredGroup `protobuf:"group,70,req,name=RequiredGroup,json=requiredgroup" json:"requiredgroup,omitempty"` + Repeatedgroup []*GoTest_RepeatedGroup `protobuf:"group,80,rep,name=RepeatedGroup,json=repeatedgroup" json:"repeatedgroup,omitempty"` + Optionalgroup *GoTest_OptionalGroup `protobuf:"group,90,opt,name=OptionalGroup,json=optionalgroup" json:"optionalgroup,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest) Reset() { *m = GoTest{} } +func (m *GoTest) String() string { return proto.CompactTextString(m) } +func (*GoTest) ProtoMessage() {} +func (*GoTest) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{2} +} + +func (m *GoTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest.Unmarshal(m, b) +} +func (m *GoTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest.Marshal(b, m, deterministic) +} +func (m *GoTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest.Merge(m, src) +} +func (m *GoTest) XXX_Size() int { + return xxx_messageInfo_GoTest.Size(m) +} +func (m *GoTest) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest proto.InternalMessageInfo + +const Default_GoTest_F_BoolDefaulted bool 
= true +const Default_GoTest_F_Int32Defaulted int32 = 32 +const Default_GoTest_F_Int64Defaulted int64 = 64 +const Default_GoTest_F_Fixed32Defaulted uint32 = 320 +const Default_GoTest_F_Fixed64Defaulted uint64 = 640 +const Default_GoTest_F_Uint32Defaulted uint32 = 3200 +const Default_GoTest_F_Uint64Defaulted uint64 = 6400 +const Default_GoTest_F_FloatDefaulted float32 = 314159 +const Default_GoTest_F_DoubleDefaulted float64 = 271828 +const Default_GoTest_F_StringDefaulted string = "hello, \"world!\"\n" + +var Default_GoTest_F_BytesDefaulted []byte = []byte("Bignose") + +const Default_GoTest_F_Sint32Defaulted int32 = -32 +const Default_GoTest_F_Sint64Defaulted int64 = -64 +const Default_GoTest_F_Sfixed32Defaulted int32 = -32 +const Default_GoTest_F_Sfixed64Defaulted int64 = -64 + +func (m *GoTest) GetKind() GoTest_KIND { + if m != nil && m.Kind != nil { + return *m.Kind + } + return GoTest_VOID +} + +func (m *GoTest) GetTable() string { + if m != nil && m.Table != nil { + return *m.Table + } + return "" +} + +func (m *GoTest) GetParam() int32 { + if m != nil && m.Param != nil { + return *m.Param + } + return 0 +} + +func (m *GoTest) GetRequiredField() *GoTestField { + if m != nil { + return m.RequiredField + } + return nil +} + +func (m *GoTest) GetRepeatedField() []*GoTestField { + if m != nil { + return m.RepeatedField + } + return nil +} + +func (m *GoTest) GetOptionalField() *GoTestField { + if m != nil { + return m.OptionalField + } + return nil +} + +func (m *GoTest) GetF_BoolRequired() bool { + if m != nil && m.F_BoolRequired != nil { + return *m.F_BoolRequired + } + return false +} + +func (m *GoTest) GetF_Int32Required() int32 { + if m != nil && m.F_Int32Required != nil { + return *m.F_Int32Required + } + return 0 +} + +func (m *GoTest) GetF_Int64Required() int64 { + if m != nil && m.F_Int64Required != nil { + return *m.F_Int64Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Required() uint32 { + if m != nil && m.F_Fixed32Required != nil { + return *m.F_Fixed32Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Required() uint64 { + if m != nil && m.F_Fixed64Required != nil { + return *m.F_Fixed64Required + } + return 0 +} + +func (m *GoTest) GetF_Uint32Required() uint32 { + if m != nil && m.F_Uint32Required != nil { + return *m.F_Uint32Required + } + return 0 +} + +func (m *GoTest) GetF_Uint64Required() uint64 { + if m != nil && m.F_Uint64Required != nil { + return *m.F_Uint64Required + } + return 0 +} + +func (m *GoTest) GetF_FloatRequired() float32 { + if m != nil && m.F_FloatRequired != nil { + return *m.F_FloatRequired + } + return 0 +} + +func (m *GoTest) GetF_DoubleRequired() float64 { + if m != nil && m.F_DoubleRequired != nil { + return *m.F_DoubleRequired + } + return 0 +} + +func (m *GoTest) GetF_StringRequired() string { + if m != nil && m.F_StringRequired != nil { + return *m.F_StringRequired + } + return "" +} + +func (m *GoTest) GetF_BytesRequired() []byte { + if m != nil { + return m.F_BytesRequired + } + return nil +} + +func (m *GoTest) GetF_Sint32Required() int32 { + if m != nil && m.F_Sint32Required != nil { + return *m.F_Sint32Required + } + return 0 +} + +func (m *GoTest) GetF_Sint64Required() int64 { + if m != nil && m.F_Sint64Required != nil { + return *m.F_Sint64Required + } + return 0 +} + +func (m *GoTest) GetF_Sfixed32Required() int32 { + if m != nil && m.F_Sfixed32Required != nil { + return *m.F_Sfixed32Required + } + return 0 +} + +func (m *GoTest) GetF_Sfixed64Required() int64 { + if m != nil && m.F_Sfixed64Required != nil { + return 
*m.F_Sfixed64Required + } + return 0 +} + +func (m *GoTest) GetF_BoolRepeated() []bool { + if m != nil { + return m.F_BoolRepeated + } + return nil +} + +func (m *GoTest) GetF_Int32Repeated() []int32 { + if m != nil { + return m.F_Int32Repeated + } + return nil +} + +func (m *GoTest) GetF_Int64Repeated() []int64 { + if m != nil { + return m.F_Int64Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed32Repeated() []uint32 { + if m != nil { + return m.F_Fixed32Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed64Repeated() []uint64 { + if m != nil { + return m.F_Fixed64Repeated + } + return nil +} + +func (m *GoTest) GetF_Uint32Repeated() []uint32 { + if m != nil { + return m.F_Uint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Uint64Repeated() []uint64 { + if m != nil { + return m.F_Uint64Repeated + } + return nil +} + +func (m *GoTest) GetF_FloatRepeated() []float32 { + if m != nil { + return m.F_FloatRepeated + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeated() []float64 { + if m != nil { + return m.F_DoubleRepeated + } + return nil +} + +func (m *GoTest) GetF_StringRepeated() []string { + if m != nil { + return m.F_StringRepeated + } + return nil +} + +func (m *GoTest) GetF_BytesRepeated() [][]byte { + if m != nil { + return m.F_BytesRepeated + } + return nil +} + +func (m *GoTest) GetF_Sint32Repeated() []int32 { + if m != nil { + return m.F_Sint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Sint64Repeated() []int64 { + if m != nil { + return m.F_Sint64Repeated + } + return nil +} + +func (m *GoTest) GetF_Sfixed32Repeated() []int32 { + if m != nil { + return m.F_Sfixed32Repeated + } + return nil +} + +func (m *GoTest) GetF_Sfixed64Repeated() []int64 { + if m != nil { + return m.F_Sfixed64Repeated + } + return nil +} + +func (m *GoTest) GetF_BoolOptional() bool { + if m != nil && m.F_BoolOptional != nil { + return *m.F_BoolOptional + } + return false +} + +func (m *GoTest) GetF_Int32Optional() int32 { + if m != nil && m.F_Int32Optional != nil { + return *m.F_Int32Optional + } + return 0 +} + +func (m *GoTest) GetF_Int64Optional() int64 { + if m != nil && m.F_Int64Optional != nil { + return *m.F_Int64Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Optional() uint32 { + if m != nil && m.F_Fixed32Optional != nil { + return *m.F_Fixed32Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Optional() uint64 { + if m != nil && m.F_Fixed64Optional != nil { + return *m.F_Fixed64Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint32Optional() uint32 { + if m != nil && m.F_Uint32Optional != nil { + return *m.F_Uint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint64Optional() uint64 { + if m != nil && m.F_Uint64Optional != nil { + return *m.F_Uint64Optional + } + return 0 +} + +func (m *GoTest) GetF_FloatOptional() float32 { + if m != nil && m.F_FloatOptional != nil { + return *m.F_FloatOptional + } + return 0 +} + +func (m *GoTest) GetF_DoubleOptional() float64 { + if m != nil && m.F_DoubleOptional != nil { + return *m.F_DoubleOptional + } + return 0 +} + +func (m *GoTest) GetF_StringOptional() string { + if m != nil && m.F_StringOptional != nil { + return *m.F_StringOptional + } + return "" +} + +func (m *GoTest) GetF_BytesOptional() []byte { + if m != nil { + return m.F_BytesOptional + } + return nil +} + +func (m *GoTest) GetF_Sint32Optional() int32 { + if m != nil && m.F_Sint32Optional != nil { + return *m.F_Sint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Sint64Optional() int64 { + if m != nil && m.F_Sint64Optional 
!= nil { + return *m.F_Sint64Optional + } + return 0 +} + +func (m *GoTest) GetF_Sfixed32Optional() int32 { + if m != nil && m.F_Sfixed32Optional != nil { + return *m.F_Sfixed32Optional + } + return 0 +} + +func (m *GoTest) GetF_Sfixed64Optional() int64 { + if m != nil && m.F_Sfixed64Optional != nil { + return *m.F_Sfixed64Optional + } + return 0 +} + +func (m *GoTest) GetF_BoolDefaulted() bool { + if m != nil && m.F_BoolDefaulted != nil { + return *m.F_BoolDefaulted + } + return Default_GoTest_F_BoolDefaulted +} + +func (m *GoTest) GetF_Int32Defaulted() int32 { + if m != nil && m.F_Int32Defaulted != nil { + return *m.F_Int32Defaulted + } + return Default_GoTest_F_Int32Defaulted +} + +func (m *GoTest) GetF_Int64Defaulted() int64 { + if m != nil && m.F_Int64Defaulted != nil { + return *m.F_Int64Defaulted + } + return Default_GoTest_F_Int64Defaulted +} + +func (m *GoTest) GetF_Fixed32Defaulted() uint32 { + if m != nil && m.F_Fixed32Defaulted != nil { + return *m.F_Fixed32Defaulted + } + return Default_GoTest_F_Fixed32Defaulted +} + +func (m *GoTest) GetF_Fixed64Defaulted() uint64 { + if m != nil && m.F_Fixed64Defaulted != nil { + return *m.F_Fixed64Defaulted + } + return Default_GoTest_F_Fixed64Defaulted +} + +func (m *GoTest) GetF_Uint32Defaulted() uint32 { + if m != nil && m.F_Uint32Defaulted != nil { + return *m.F_Uint32Defaulted + } + return Default_GoTest_F_Uint32Defaulted +} + +func (m *GoTest) GetF_Uint64Defaulted() uint64 { + if m != nil && m.F_Uint64Defaulted != nil { + return *m.F_Uint64Defaulted + } + return Default_GoTest_F_Uint64Defaulted +} + +func (m *GoTest) GetF_FloatDefaulted() float32 { + if m != nil && m.F_FloatDefaulted != nil { + return *m.F_FloatDefaulted + } + return Default_GoTest_F_FloatDefaulted +} + +func (m *GoTest) GetF_DoubleDefaulted() float64 { + if m != nil && m.F_DoubleDefaulted != nil { + return *m.F_DoubleDefaulted + } + return Default_GoTest_F_DoubleDefaulted +} + +func (m *GoTest) GetF_StringDefaulted() string { + if m != nil && m.F_StringDefaulted != nil { + return *m.F_StringDefaulted + } + return Default_GoTest_F_StringDefaulted +} + +func (m *GoTest) GetF_BytesDefaulted() []byte { + if m != nil && m.F_BytesDefaulted != nil { + return m.F_BytesDefaulted + } + return append([]byte(nil), Default_GoTest_F_BytesDefaulted...) 
+} + +func (m *GoTest) GetF_Sint32Defaulted() int32 { + if m != nil && m.F_Sint32Defaulted != nil { + return *m.F_Sint32Defaulted + } + return Default_GoTest_F_Sint32Defaulted +} + +func (m *GoTest) GetF_Sint64Defaulted() int64 { + if m != nil && m.F_Sint64Defaulted != nil { + return *m.F_Sint64Defaulted + } + return Default_GoTest_F_Sint64Defaulted +} + +func (m *GoTest) GetF_Sfixed32Defaulted() int32 { + if m != nil && m.F_Sfixed32Defaulted != nil { + return *m.F_Sfixed32Defaulted + } + return Default_GoTest_F_Sfixed32Defaulted +} + +func (m *GoTest) GetF_Sfixed64Defaulted() int64 { + if m != nil && m.F_Sfixed64Defaulted != nil { + return *m.F_Sfixed64Defaulted + } + return Default_GoTest_F_Sfixed64Defaulted +} + +func (m *GoTest) GetF_BoolRepeatedPacked() []bool { + if m != nil { + return m.F_BoolRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int32RepeatedPacked() []int32 { + if m != nil { + return m.F_Int32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int64RepeatedPacked() []int64 { + if m != nil { + return m.F_Int64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Fixed32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Fixed64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Uint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Uint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_FloatRepeatedPacked() []float32 { + if m != nil { + return m.F_FloatRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeatedPacked() []float64 { + if m != nil { + return m.F_DoubleRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint32RepeatedPacked() []int32 { + if m != nil { + return m.F_Sint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint64RepeatedPacked() []int64 { + if m != nil { + return m.F_Sint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sfixed32RepeatedPacked() []int32 { + if m != nil { + return m.F_Sfixed32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sfixed64RepeatedPacked() []int64 { + if m != nil { + return m.F_Sfixed64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetRequiredgroup() *GoTest_RequiredGroup { + if m != nil { + return m.Requiredgroup + } + return nil +} + +func (m *GoTest) GetRepeatedgroup() []*GoTest_RepeatedGroup { + if m != nil { + return m.Repeatedgroup + } + return nil +} + +func (m *GoTest) GetOptionalgroup() *GoTest_OptionalGroup { + if m != nil { + return m.Optionalgroup + } + return nil +} + +// Required, repeated, and optional groups. 
+type GoTest_RequiredGroup struct { + RequiredField *string `protobuf:"bytes,71,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_RequiredGroup) Reset() { *m = GoTest_RequiredGroup{} } +func (m *GoTest_RequiredGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RequiredGroup) ProtoMessage() {} +func (*GoTest_RequiredGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{2, 0} +} + +func (m *GoTest_RequiredGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest_RequiredGroup.Unmarshal(m, b) +} +func (m *GoTest_RequiredGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_RequiredGroup.Marshal(b, m, deterministic) +} +func (m *GoTest_RequiredGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_RequiredGroup.Merge(m, src) +} +func (m *GoTest_RequiredGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_RequiredGroup.Size(m) +} +func (m *GoTest_RequiredGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_RequiredGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_RequiredGroup proto.InternalMessageInfo + +func (m *GoTest_RequiredGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_RepeatedGroup struct { + RequiredField *string `protobuf:"bytes,81,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_RepeatedGroup) Reset() { *m = GoTest_RepeatedGroup{} } +func (m *GoTest_RepeatedGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RepeatedGroup) ProtoMessage() {} +func (*GoTest_RepeatedGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{2, 1} +} + +func (m *GoTest_RepeatedGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTest_RepeatedGroup.Unmarshal(m, b) +} +func (m *GoTest_RepeatedGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_RepeatedGroup.Marshal(b, m, deterministic) +} +func (m *GoTest_RepeatedGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_RepeatedGroup.Merge(m, src) +} +func (m *GoTest_RepeatedGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_RepeatedGroup.Size(m) +} +func (m *GoTest_RepeatedGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_RepeatedGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_RepeatedGroup proto.InternalMessageInfo + +func (m *GoTest_RepeatedGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_OptionalGroup struct { + RequiredField *string `protobuf:"bytes,91,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTest_OptionalGroup) Reset() { *m = GoTest_OptionalGroup{} } +func (m *GoTest_OptionalGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_OptionalGroup) ProtoMessage() {} +func (*GoTest_OptionalGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{2, 2} +} + +func (m *GoTest_OptionalGroup) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_GoTest_OptionalGroup.Unmarshal(m, b) +} +func (m *GoTest_OptionalGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTest_OptionalGroup.Marshal(b, m, deterministic) +} +func (m *GoTest_OptionalGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTest_OptionalGroup.Merge(m, src) +} +func (m *GoTest_OptionalGroup) XXX_Size() int { + return xxx_messageInfo_GoTest_OptionalGroup.Size(m) +} +func (m *GoTest_OptionalGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoTest_OptionalGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTest_OptionalGroup proto.InternalMessageInfo + +func (m *GoTest_OptionalGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +// For testing a group containing a required field. +type GoTestRequiredGroupField struct { + Group *GoTestRequiredGroupField_Group `protobuf:"group,1,req,name=Group,json=group" json:"group,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestRequiredGroupField) Reset() { *m = GoTestRequiredGroupField{} } +func (m *GoTestRequiredGroupField) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField) ProtoMessage() {} +func (*GoTestRequiredGroupField) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{3} +} + +func (m *GoTestRequiredGroupField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestRequiredGroupField.Unmarshal(m, b) +} +func (m *GoTestRequiredGroupField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTestRequiredGroupField.Marshal(b, m, deterministic) +} +func (m *GoTestRequiredGroupField) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestRequiredGroupField.Merge(m, src) +} +func (m *GoTestRequiredGroupField) XXX_Size() int { + return xxx_messageInfo_GoTestRequiredGroupField.Size(m) +} +func (m *GoTestRequiredGroupField) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestRequiredGroupField.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestRequiredGroupField proto.InternalMessageInfo + +func (m *GoTestRequiredGroupField) GetGroup() *GoTestRequiredGroupField_Group { + if m != nil { + return m.Group + } + return nil +} + +type GoTestRequiredGroupField_Group struct { + Field *int32 `protobuf:"varint,2,req,name=Field" json:"Field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoTestRequiredGroupField_Group) Reset() { *m = GoTestRequiredGroupField_Group{} } +func (m *GoTestRequiredGroupField_Group) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField_Group) ProtoMessage() {} +func (*GoTestRequiredGroupField_Group) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{3, 0} +} + +func (m *GoTestRequiredGroupField_Group) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoTestRequiredGroupField_Group.Unmarshal(m, b) +} +func (m *GoTestRequiredGroupField_Group) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoTestRequiredGroupField_Group.Marshal(b, m, deterministic) +} +func (m *GoTestRequiredGroupField_Group) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoTestRequiredGroupField_Group.Merge(m, src) +} +func (m *GoTestRequiredGroupField_Group) XXX_Size() int { + return 
xxx_messageInfo_GoTestRequiredGroupField_Group.Size(m) +} +func (m *GoTestRequiredGroupField_Group) XXX_DiscardUnknown() { + xxx_messageInfo_GoTestRequiredGroupField_Group.DiscardUnknown(m) +} + +var xxx_messageInfo_GoTestRequiredGroupField_Group proto.InternalMessageInfo + +func (m *GoTestRequiredGroupField_Group) GetField() int32 { + if m != nil && m.Field != nil { + return *m.Field + } + return 0 +} + +// For testing skipping of unrecognized fields. +// Numbers are all big, larger than tag numbers in GoTestField, +// the message used in the corresponding test. +type GoSkipTest struct { + SkipInt32 *int32 `protobuf:"varint,11,req,name=skip_int32,json=skipInt32" json:"skip_int32,omitempty"` + SkipFixed32 *uint32 `protobuf:"fixed32,12,req,name=skip_fixed32,json=skipFixed32" json:"skip_fixed32,omitempty"` + SkipFixed64 *uint64 `protobuf:"fixed64,13,req,name=skip_fixed64,json=skipFixed64" json:"skip_fixed64,omitempty"` + SkipString *string `protobuf:"bytes,14,req,name=skip_string,json=skipString" json:"skip_string,omitempty"` + Skipgroup *GoSkipTest_SkipGroup `protobuf:"group,15,req,name=SkipGroup,json=skipgroup" json:"skipgroup,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoSkipTest) Reset() { *m = GoSkipTest{} } +func (m *GoSkipTest) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest) ProtoMessage() {} +func (*GoSkipTest) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{4} +} + +func (m *GoSkipTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoSkipTest.Unmarshal(m, b) +} +func (m *GoSkipTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoSkipTest.Marshal(b, m, deterministic) +} +func (m *GoSkipTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoSkipTest.Merge(m, src) +} +func (m *GoSkipTest) XXX_Size() int { + return xxx_messageInfo_GoSkipTest.Size(m) +} +func (m *GoSkipTest) XXX_DiscardUnknown() { + xxx_messageInfo_GoSkipTest.DiscardUnknown(m) +} + +var xxx_messageInfo_GoSkipTest proto.InternalMessageInfo + +func (m *GoSkipTest) GetSkipInt32() int32 { + if m != nil && m.SkipInt32 != nil { + return *m.SkipInt32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed32() uint32 { + if m != nil && m.SkipFixed32 != nil { + return *m.SkipFixed32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed64() uint64 { + if m != nil && m.SkipFixed64 != nil { + return *m.SkipFixed64 + } + return 0 +} + +func (m *GoSkipTest) GetSkipString() string { + if m != nil && m.SkipString != nil { + return *m.SkipString + } + return "" +} + +func (m *GoSkipTest) GetSkipgroup() *GoSkipTest_SkipGroup { + if m != nil { + return m.Skipgroup + } + return nil +} + +type GoSkipTest_SkipGroup struct { + GroupInt32 *int32 `protobuf:"varint,16,req,name=group_int32,json=groupInt32" json:"group_int32,omitempty"` + GroupString *string `protobuf:"bytes,17,req,name=group_string,json=groupString" json:"group_string,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoSkipTest_SkipGroup) Reset() { *m = GoSkipTest_SkipGroup{} } +func (m *GoSkipTest_SkipGroup) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest_SkipGroup) ProtoMessage() {} +func (*GoSkipTest_SkipGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{4, 0} +} + +func (m *GoSkipTest_SkipGroup) XXX_Unmarshal(b []byte) error { 
+ return xxx_messageInfo_GoSkipTest_SkipGroup.Unmarshal(m, b) +} +func (m *GoSkipTest_SkipGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoSkipTest_SkipGroup.Marshal(b, m, deterministic) +} +func (m *GoSkipTest_SkipGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoSkipTest_SkipGroup.Merge(m, src) +} +func (m *GoSkipTest_SkipGroup) XXX_Size() int { + return xxx_messageInfo_GoSkipTest_SkipGroup.Size(m) +} +func (m *GoSkipTest_SkipGroup) XXX_DiscardUnknown() { + xxx_messageInfo_GoSkipTest_SkipGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_GoSkipTest_SkipGroup proto.InternalMessageInfo + +func (m *GoSkipTest_SkipGroup) GetGroupInt32() int32 { + if m != nil && m.GroupInt32 != nil { + return *m.GroupInt32 + } + return 0 +} + +func (m *GoSkipTest_SkipGroup) GetGroupString() string { + if m != nil && m.GroupString != nil { + return *m.GroupString + } + return "" +} + +// For testing packed/non-packed decoder switching. +// A serialized instance of one should be deserializable as the other. +type NonPackedTest struct { + A []int32 `protobuf:"varint,1,rep,name=a" json:"a,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NonPackedTest) Reset() { *m = NonPackedTest{} } +func (m *NonPackedTest) String() string { return proto.CompactTextString(m) } +func (*NonPackedTest) ProtoMessage() {} +func (*NonPackedTest) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{5} +} + +func (m *NonPackedTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NonPackedTest.Unmarshal(m, b) +} +func (m *NonPackedTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NonPackedTest.Marshal(b, m, deterministic) +} +func (m *NonPackedTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_NonPackedTest.Merge(m, src) +} +func (m *NonPackedTest) XXX_Size() int { + return xxx_messageInfo_NonPackedTest.Size(m) +} +func (m *NonPackedTest) XXX_DiscardUnknown() { + xxx_messageInfo_NonPackedTest.DiscardUnknown(m) +} + +var xxx_messageInfo_NonPackedTest proto.InternalMessageInfo + +func (m *NonPackedTest) GetA() []int32 { + if m != nil { + return m.A + } + return nil +} + +type PackedTest struct { + B []int32 `protobuf:"varint,1,rep,packed,name=b" json:"b,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackedTest) Reset() { *m = PackedTest{} } +func (m *PackedTest) String() string { return proto.CompactTextString(m) } +func (*PackedTest) ProtoMessage() {} +func (*PackedTest) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{6} +} + +func (m *PackedTest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackedTest.Unmarshal(m, b) +} +func (m *PackedTest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackedTest.Marshal(b, m, deterministic) +} +func (m *PackedTest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackedTest.Merge(m, src) +} +func (m *PackedTest) XXX_Size() int { + return xxx_messageInfo_PackedTest.Size(m) +} +func (m *PackedTest) XXX_DiscardUnknown() { + xxx_messageInfo_PackedTest.DiscardUnknown(m) +} + +var xxx_messageInfo_PackedTest proto.InternalMessageInfo + +func (m *PackedTest) GetB() []int32 { + if m != nil { + return m.B + } + return nil +} + +type MaxTag struct { + // Maximum possible tag number. 
+ LastField *string `protobuf:"bytes,536870911,opt,name=last_field,json=lastField" json:"last_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaxTag) Reset() { *m = MaxTag{} } +func (m *MaxTag) String() string { return proto.CompactTextString(m) } +func (*MaxTag) ProtoMessage() {} +func (*MaxTag) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{7} +} + +func (m *MaxTag) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaxTag.Unmarshal(m, b) +} +func (m *MaxTag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaxTag.Marshal(b, m, deterministic) +} +func (m *MaxTag) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaxTag.Merge(m, src) +} +func (m *MaxTag) XXX_Size() int { + return xxx_messageInfo_MaxTag.Size(m) +} +func (m *MaxTag) XXX_DiscardUnknown() { + xxx_messageInfo_MaxTag.DiscardUnknown(m) +} + +var xxx_messageInfo_MaxTag proto.InternalMessageInfo + +func (m *MaxTag) GetLastField() string { + if m != nil && m.LastField != nil { + return *m.LastField + } + return "" +} + +type OldMessage struct { + Nested *OldMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` + Num *int32 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldMessage) Reset() { *m = OldMessage{} } +func (m *OldMessage) String() string { return proto.CompactTextString(m) } +func (*OldMessage) ProtoMessage() {} +func (*OldMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{8} +} + +func (m *OldMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldMessage.Unmarshal(m, b) +} +func (m *OldMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldMessage.Marshal(b, m, deterministic) +} +func (m *OldMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldMessage.Merge(m, src) +} +func (m *OldMessage) XXX_Size() int { + return xxx_messageInfo_OldMessage.Size(m) +} +func (m *OldMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OldMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OldMessage proto.InternalMessageInfo + +func (m *OldMessage) GetNested() *OldMessage_Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *OldMessage) GetNum() int32 { + if m != nil && m.Num != nil { + return *m.Num + } + return 0 +} + +type OldMessage_Nested struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldMessage_Nested) Reset() { *m = OldMessage_Nested{} } +func (m *OldMessage_Nested) String() string { return proto.CompactTextString(m) } +func (*OldMessage_Nested) ProtoMessage() {} +func (*OldMessage_Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{8, 0} +} + +func (m *OldMessage_Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldMessage_Nested.Unmarshal(m, b) +} +func (m *OldMessage_Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldMessage_Nested.Marshal(b, m, deterministic) +} +func (m *OldMessage_Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldMessage_Nested.Merge(m, src) +} +func (m *OldMessage_Nested) XXX_Size() int { + return 
xxx_messageInfo_OldMessage_Nested.Size(m) +} +func (m *OldMessage_Nested) XXX_DiscardUnknown() { + xxx_messageInfo_OldMessage_Nested.DiscardUnknown(m) +} + +var xxx_messageInfo_OldMessage_Nested proto.InternalMessageInfo + +func (m *OldMessage_Nested) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +// NewMessage is wire compatible with OldMessage; +// imagine it as a future version. +type NewMessage struct { + Nested *NewMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` + // This is an int32 in OldMessage. + Num *int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NewMessage) Reset() { *m = NewMessage{} } +func (m *NewMessage) String() string { return proto.CompactTextString(m) } +func (*NewMessage) ProtoMessage() {} +func (*NewMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{9} +} + +func (m *NewMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NewMessage.Unmarshal(m, b) +} +func (m *NewMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NewMessage.Marshal(b, m, deterministic) +} +func (m *NewMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_NewMessage.Merge(m, src) +} +func (m *NewMessage) XXX_Size() int { + return xxx_messageInfo_NewMessage.Size(m) +} +func (m *NewMessage) XXX_DiscardUnknown() { + xxx_messageInfo_NewMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_NewMessage proto.InternalMessageInfo + +func (m *NewMessage) GetNested() *NewMessage_Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *NewMessage) GetNum() int64 { + if m != nil && m.Num != nil { + return *m.Num + } + return 0 +} + +type NewMessage_Nested struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + FoodGroup *string `protobuf:"bytes,2,opt,name=food_group,json=foodGroup" json:"food_group,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NewMessage_Nested) Reset() { *m = NewMessage_Nested{} } +func (m *NewMessage_Nested) String() string { return proto.CompactTextString(m) } +func (*NewMessage_Nested) ProtoMessage() {} +func (*NewMessage_Nested) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{9, 0} +} + +func (m *NewMessage_Nested) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NewMessage_Nested.Unmarshal(m, b) +} +func (m *NewMessage_Nested) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NewMessage_Nested.Marshal(b, m, deterministic) +} +func (m *NewMessage_Nested) XXX_Merge(src proto.Message) { + xxx_messageInfo_NewMessage_Nested.Merge(m, src) +} +func (m *NewMessage_Nested) XXX_Size() int { + return xxx_messageInfo_NewMessage_Nested.Size(m) +} +func (m *NewMessage_Nested) XXX_DiscardUnknown() { + xxx_messageInfo_NewMessage_Nested.DiscardUnknown(m) +} + +var xxx_messageInfo_NewMessage_Nested proto.InternalMessageInfo + +func (m *NewMessage_Nested) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *NewMessage_Nested) GetFoodGroup() string { + if m != nil && m.FoodGroup != nil { + return *m.FoodGroup + } + return "" +} + +type InnerMessage struct { + Host *string `protobuf:"bytes,1,req,name=host" json:"host,omitempty"` + Port *int32 
`protobuf:"varint,2,opt,name=port,def=4000" json:"port,omitempty"` + Connected *bool `protobuf:"varint,3,opt,name=connected" json:"connected,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InnerMessage) Reset() { *m = InnerMessage{} } +func (m *InnerMessage) String() string { return proto.CompactTextString(m) } +func (*InnerMessage) ProtoMessage() {} +func (*InnerMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{10} +} + +func (m *InnerMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InnerMessage.Unmarshal(m, b) +} +func (m *InnerMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InnerMessage.Marshal(b, m, deterministic) +} +func (m *InnerMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_InnerMessage.Merge(m, src) +} +func (m *InnerMessage) XXX_Size() int { + return xxx_messageInfo_InnerMessage.Size(m) +} +func (m *InnerMessage) XXX_DiscardUnknown() { + xxx_messageInfo_InnerMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_InnerMessage proto.InternalMessageInfo + +const Default_InnerMessage_Port int32 = 4000 + +func (m *InnerMessage) GetHost() string { + if m != nil && m.Host != nil { + return *m.Host + } + return "" +} + +func (m *InnerMessage) GetPort() int32 { + if m != nil && m.Port != nil { + return *m.Port + } + return Default_InnerMessage_Port +} + +func (m *InnerMessage) GetConnected() bool { + if m != nil && m.Connected != nil { + return *m.Connected + } + return false +} + +type OtherMessage struct { + Key *int64 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` + Weight *float32 `protobuf:"fixed32,3,opt,name=weight" json:"weight,omitempty"` + Inner *InnerMessage `protobuf:"bytes,4,opt,name=inner" json:"inner,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OtherMessage) Reset() { *m = OtherMessage{} } +func (m *OtherMessage) String() string { return proto.CompactTextString(m) } +func (*OtherMessage) ProtoMessage() {} +func (*OtherMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{11} +} + +var extRange_OtherMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*OtherMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OtherMessage +} + +func (m *OtherMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OtherMessage.Unmarshal(m, b) +} +func (m *OtherMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherMessage.Marshal(b, m, deterministic) +} +func (m *OtherMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherMessage.Merge(m, src) +} +func (m *OtherMessage) XXX_Size() int { + return xxx_messageInfo_OtherMessage.Size(m) +} +func (m *OtherMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OtherMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherMessage proto.InternalMessageInfo + +func (m *OtherMessage) GetKey() int64 { + if m != nil && m.Key != nil { + return *m.Key + } + return 0 +} + +func (m *OtherMessage) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *OtherMessage) GetWeight() float32 { + if m != nil && m.Weight != nil { + return *m.Weight + } + return 0 +} + +func (m *OtherMessage) 
GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +type RequiredInnerMessage struct { + LeoFinallyWonAnOscar *InnerMessage `protobuf:"bytes,1,req,name=leo_finally_won_an_oscar,json=leoFinallyWonAnOscar" json:"leo_finally_won_an_oscar,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequiredInnerMessage) Reset() { *m = RequiredInnerMessage{} } +func (m *RequiredInnerMessage) String() string { return proto.CompactTextString(m) } +func (*RequiredInnerMessage) ProtoMessage() {} +func (*RequiredInnerMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{12} +} + +func (m *RequiredInnerMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequiredInnerMessage.Unmarshal(m, b) +} +func (m *RequiredInnerMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequiredInnerMessage.Marshal(b, m, deterministic) +} +func (m *RequiredInnerMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequiredInnerMessage.Merge(m, src) +} +func (m *RequiredInnerMessage) XXX_Size() int { + return xxx_messageInfo_RequiredInnerMessage.Size(m) +} +func (m *RequiredInnerMessage) XXX_DiscardUnknown() { + xxx_messageInfo_RequiredInnerMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_RequiredInnerMessage proto.InternalMessageInfo + +func (m *RequiredInnerMessage) GetLeoFinallyWonAnOscar() *InnerMessage { + if m != nil { + return m.LeoFinallyWonAnOscar + } + return nil +} + +type MyMessage struct { + Count *int32 `protobuf:"varint,1,req,name=count" json:"count,omitempty"` + Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + Quote *string `protobuf:"bytes,3,opt,name=quote" json:"quote,omitempty"` + Pet []string `protobuf:"bytes,4,rep,name=pet" json:"pet,omitempty"` + Inner *InnerMessage `protobuf:"bytes,5,opt,name=inner" json:"inner,omitempty"` + Others []*OtherMessage `protobuf:"bytes,6,rep,name=others" json:"others,omitempty"` + WeMustGoDeeper *RequiredInnerMessage `protobuf:"bytes,13,opt,name=we_must_go_deeper,json=weMustGoDeeper" json:"we_must_go_deeper,omitempty"` + RepInner []*InnerMessage `protobuf:"bytes,12,rep,name=rep_inner,json=repInner" json:"rep_inner,omitempty"` + Bikeshed *MyMessage_Color `protobuf:"varint,7,opt,name=bikeshed,enum=test_proto.MyMessage_Color" json:"bikeshed,omitempty"` + Somegroup *MyMessage_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` + // This field becomes [][]byte in the generated code. 
+ RepBytes [][]byte `protobuf:"bytes,10,rep,name=rep_bytes,json=repBytes" json:"rep_bytes,omitempty"` + Bigfloat *float64 `protobuf:"fixed64,11,opt,name=bigfloat" json:"bigfloat,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessage) Reset() { *m = MyMessage{} } +func (m *MyMessage) String() string { return proto.CompactTextString(m) } +func (*MyMessage) ProtoMessage() {} +func (*MyMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{13} +} + +var extRange_MyMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*MyMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessage +} + +func (m *MyMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessage.Unmarshal(m, b) +} +func (m *MyMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MyMessage.Marshal(b, m, deterministic) +} +func (m *MyMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessage.Merge(m, src) +} +func (m *MyMessage) XXX_Size() int { + return xxx_messageInfo_MyMessage.Size(m) +} +func (m *MyMessage) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessage proto.InternalMessageInfo + +func (m *MyMessage) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +func (m *MyMessage) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MyMessage) GetQuote() string { + if m != nil && m.Quote != nil { + return *m.Quote + } + return "" +} + +func (m *MyMessage) GetPet() []string { + if m != nil { + return m.Pet + } + return nil +} + +func (m *MyMessage) GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +func (m *MyMessage) GetOthers() []*OtherMessage { + if m != nil { + return m.Others + } + return nil +} + +func (m *MyMessage) GetWeMustGoDeeper() *RequiredInnerMessage { + if m != nil { + return m.WeMustGoDeeper + } + return nil +} + +func (m *MyMessage) GetRepInner() []*InnerMessage { + if m != nil { + return m.RepInner + } + return nil +} + +func (m *MyMessage) GetBikeshed() MyMessage_Color { + if m != nil && m.Bikeshed != nil { + return *m.Bikeshed + } + return MyMessage_RED +} + +func (m *MyMessage) GetSomegroup() *MyMessage_SomeGroup { + if m != nil { + return m.Somegroup + } + return nil +} + +func (m *MyMessage) GetRepBytes() [][]byte { + if m != nil { + return m.RepBytes + } + return nil +} + +func (m *MyMessage) GetBigfloat() float64 { + if m != nil && m.Bigfloat != nil { + return *m.Bigfloat + } + return 0 +} + +type MyMessage_SomeGroup struct { + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessage_SomeGroup) Reset() { *m = MyMessage_SomeGroup{} } +func (m *MyMessage_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*MyMessage_SomeGroup) ProtoMessage() {} +func (*MyMessage_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{13, 0} +} + +func (m *MyMessage_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessage_SomeGroup.Unmarshal(m, b) +} +func (m *MyMessage_SomeGroup) XXX_Marshal(b []byte, deterministic bool) 
([]byte, error) { + return xxx_messageInfo_MyMessage_SomeGroup.Marshal(b, m, deterministic) +} +func (m *MyMessage_SomeGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessage_SomeGroup.Merge(m, src) +} +func (m *MyMessage_SomeGroup) XXX_Size() int { + return xxx_messageInfo_MyMessage_SomeGroup.Size(m) +} +func (m *MyMessage_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessage_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessage_SomeGroup proto.InternalMessageInfo + +func (m *MyMessage_SomeGroup) GetGroupField() int32 { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return 0 +} + +type Ext struct { + Data *string `protobuf:"bytes,1,opt,name=data" json:"data,omitempty"` + MapField map[int32]int32 `protobuf:"bytes,2,rep,name=map_field,json=mapField" json:"map_field,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Ext) Reset() { *m = Ext{} } +func (m *Ext) String() string { return proto.CompactTextString(m) } +func (*Ext) ProtoMessage() {} +func (*Ext) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{14} +} + +func (m *Ext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Ext.Unmarshal(m, b) +} +func (m *Ext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Ext.Marshal(b, m, deterministic) +} +func (m *Ext) XXX_Merge(src proto.Message) { + xxx_messageInfo_Ext.Merge(m, src) +} +func (m *Ext) XXX_Size() int { + return xxx_messageInfo_Ext.Size(m) +} +func (m *Ext) XXX_DiscardUnknown() { + xxx_messageInfo_Ext.DiscardUnknown(m) +} + +var xxx_messageInfo_Ext proto.InternalMessageInfo + +func (m *Ext) GetData() string { + if m != nil && m.Data != nil { + return *m.Data + } + return "" +} + +func (m *Ext) GetMapField() map[int32]int32 { + if m != nil { + return m.MapField + } + return nil +} + +var E_Ext_More = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*Ext)(nil), + Field: 103, + Name: "test_proto.Ext.more", + Tag: "bytes,103,opt,name=more", + Filename: "test_proto/test.proto", +} + +var E_Ext_Text = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*string)(nil), + Field: 104, + Name: "test_proto.Ext.text", + Tag: "bytes,104,opt,name=text", + Filename: "test_proto/test.proto", +} + +var E_Ext_Number = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 105, + Name: "test_proto.Ext.number", + Tag: "varint,105,opt,name=number", + Filename: "test_proto/test.proto", +} + +type ComplexExtension struct { + First *int32 `protobuf:"varint,1,opt,name=first" json:"first,omitempty"` + Second *int32 `protobuf:"varint,2,opt,name=second" json:"second,omitempty"` + Third []int32 `protobuf:"varint,3,rep,name=third" json:"third,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComplexExtension) Reset() { *m = ComplexExtension{} } +func (m *ComplexExtension) String() string { return proto.CompactTextString(m) } +func (*ComplexExtension) ProtoMessage() {} +func (*ComplexExtension) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{15} +} + +func (m *ComplexExtension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComplexExtension.Unmarshal(m, b) +} +func (m *ComplexExtension) XXX_Marshal(b []byte, 
deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComplexExtension.Marshal(b, m, deterministic) +} +func (m *ComplexExtension) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComplexExtension.Merge(m, src) +} +func (m *ComplexExtension) XXX_Size() int { + return xxx_messageInfo_ComplexExtension.Size(m) +} +func (m *ComplexExtension) XXX_DiscardUnknown() { + xxx_messageInfo_ComplexExtension.DiscardUnknown(m) +} + +var xxx_messageInfo_ComplexExtension proto.InternalMessageInfo + +func (m *ComplexExtension) GetFirst() int32 { + if m != nil && m.First != nil { + return *m.First + } + return 0 +} + +func (m *ComplexExtension) GetSecond() int32 { + if m != nil && m.Second != nil { + return *m.Second + } + return 0 +} + +func (m *ComplexExtension) GetThird() []int32 { + if m != nil { + return m.Third + } + return nil +} + +type DefaultsMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DefaultsMessage) Reset() { *m = DefaultsMessage{} } +func (m *DefaultsMessage) String() string { return proto.CompactTextString(m) } +func (*DefaultsMessage) ProtoMessage() {} +func (*DefaultsMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{16} +} + +var extRange_DefaultsMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*DefaultsMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_DefaultsMessage +} + +func (m *DefaultsMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DefaultsMessage.Unmarshal(m, b) +} +func (m *DefaultsMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DefaultsMessage.Marshal(b, m, deterministic) +} +func (m *DefaultsMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_DefaultsMessage.Merge(m, src) +} +func (m *DefaultsMessage) XXX_Size() int { + return xxx_messageInfo_DefaultsMessage.Size(m) +} +func (m *DefaultsMessage) XXX_DiscardUnknown() { + xxx_messageInfo_DefaultsMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_DefaultsMessage proto.InternalMessageInfo + +type MyMessageSet struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MyMessageSet) Reset() { *m = MyMessageSet{} } +func (m *MyMessageSet) String() string { return proto.CompactTextString(m) } +func (*MyMessageSet) ProtoMessage() {} +func (*MyMessageSet) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{17} +} + +var extRange_MyMessageSet = []proto.ExtensionRange{ + {Start: 100, End: 2147483646}, +} + +func (*MyMessageSet) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessageSet +} + +func (m *MyMessageSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MyMessageSet.Unmarshal(m, b) +} +func (m *MyMessageSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MyMessageSet.Marshal(b, m, deterministic) +} +func (m *MyMessageSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_MyMessageSet.Merge(m, src) +} +func (m *MyMessageSet) XXX_Size() int { + return xxx_messageInfo_MyMessageSet.Size(m) +} +func (m *MyMessageSet) XXX_DiscardUnknown() { + xxx_messageInfo_MyMessageSet.DiscardUnknown(m) +} + +var xxx_messageInfo_MyMessageSet proto.InternalMessageInfo + +type Empty struct { + XXX_NoUnkeyedLiteral 
struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{18} +} + +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (m *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(m, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo + +type MessageList struct { + Message []*MessageList_Message `protobuf:"group,1,rep,name=Message,json=message" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageList) Reset() { *m = MessageList{} } +func (m *MessageList) String() string { return proto.CompactTextString(m) } +func (*MessageList) ProtoMessage() {} +func (*MessageList) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{19} +} + +func (m *MessageList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageList.Unmarshal(m, b) +} +func (m *MessageList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageList.Marshal(b, m, deterministic) +} +func (m *MessageList) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageList.Merge(m, src) +} +func (m *MessageList) XXX_Size() int { + return xxx_messageInfo_MessageList.Size(m) +} +func (m *MessageList) XXX_DiscardUnknown() { + xxx_messageInfo_MessageList.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageList proto.InternalMessageInfo + +func (m *MessageList) GetMessage() []*MessageList_Message { + if m != nil { + return m.Message + } + return nil +} + +type MessageList_Message struct { + Name *string `protobuf:"bytes,2,req,name=name" json:"name,omitempty"` + Count *int32 `protobuf:"varint,3,req,name=count" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageList_Message) Reset() { *m = MessageList_Message{} } +func (m *MessageList_Message) String() string { return proto.CompactTextString(m) } +func (*MessageList_Message) ProtoMessage() {} +func (*MessageList_Message) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{19, 0} +} + +func (m *MessageList_Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageList_Message.Unmarshal(m, b) +} +func (m *MessageList_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageList_Message.Marshal(b, m, deterministic) +} +func (m *MessageList_Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageList_Message.Merge(m, src) +} +func (m *MessageList_Message) XXX_Size() int { + return xxx_messageInfo_MessageList_Message.Size(m) +} +func (m *MessageList_Message) XXX_DiscardUnknown() { + xxx_messageInfo_MessageList_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageList_Message proto.InternalMessageInfo + +func (m *MessageList_Message) GetName() string { + if m != nil && m.Name != 
nil { + return *m.Name + } + return "" +} + +func (m *MessageList_Message) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +type Strings struct { + StringField *string `protobuf:"bytes,1,opt,name=string_field,json=stringField" json:"string_field,omitempty"` + BytesField []byte `protobuf:"bytes,2,opt,name=bytes_field,json=bytesField" json:"bytes_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Strings) Reset() { *m = Strings{} } +func (m *Strings) String() string { return proto.CompactTextString(m) } +func (*Strings) ProtoMessage() {} +func (*Strings) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{20} +} + +func (m *Strings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Strings.Unmarshal(m, b) +} +func (m *Strings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Strings.Marshal(b, m, deterministic) +} +func (m *Strings) XXX_Merge(src proto.Message) { + xxx_messageInfo_Strings.Merge(m, src) +} +func (m *Strings) XXX_Size() int { + return xxx_messageInfo_Strings.Size(m) +} +func (m *Strings) XXX_DiscardUnknown() { + xxx_messageInfo_Strings.DiscardUnknown(m) +} + +var xxx_messageInfo_Strings proto.InternalMessageInfo + +func (m *Strings) GetStringField() string { + if m != nil && m.StringField != nil { + return *m.StringField + } + return "" +} + +func (m *Strings) GetBytesField() []byte { + if m != nil { + return m.BytesField + } + return nil +} + +type Defaults struct { + // Default-valued fields of all basic types. + // Same as GoTest, but copied here to make testing easier. + F_Bool *bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,def=1" json:"F_Bool,omitempty"` + F_Int32 *int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,def=32" json:"F_Int32,omitempty"` + F_Int64 *int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,def=64" json:"F_Int64,omitempty"` + F_Fixed32 *uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,def=320" json:"F_Fixed32,omitempty"` + F_Fixed64 *uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,def=640" json:"F_Fixed64,omitempty"` + F_Uint32 *uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,def=3200" json:"F_Uint32,omitempty"` + F_Uint64 *uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,def=6400" json:"F_Uint64,omitempty"` + F_Float *float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,def=314159" json:"F_Float,omitempty"` + F_Double *float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,def=271828" json:"F_Double,omitempty"` + F_String *string `protobuf:"bytes,10,opt,name=F_String,json=FString,def=hello, \"world!\"\n" json:"F_String,omitempty"` + F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,def=Bignose" json:"F_Bytes,omitempty"` + F_Sint32 *int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,def=-32" json:"F_Sint32,omitempty"` + F_Sint64 *int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,def=-64" json:"F_Sint64,omitempty"` + F_Enum *Defaults_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=test_proto.Defaults_Color,def=1" json:"F_Enum,omitempty"` + // More fields with crazy defaults. 
+ F_Pinf *float32 `protobuf:"fixed32,15,opt,name=F_Pinf,json=FPinf,def=inf" json:"F_Pinf,omitempty"` + F_Ninf *float32 `protobuf:"fixed32,16,opt,name=F_Ninf,json=FNinf,def=-inf" json:"F_Ninf,omitempty"` + F_Nan *float32 `protobuf:"fixed32,17,opt,name=F_Nan,json=FNan,def=nan" json:"F_Nan,omitempty"` + // Sub-message. + Sub *SubDefaults `protobuf:"bytes,18,opt,name=sub" json:"sub,omitempty"` + // Redundant but explicit defaults. + StrZero *string `protobuf:"bytes,19,opt,name=str_zero,json=strZero,def=" json:"str_zero,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Defaults) Reset() { *m = Defaults{} } +func (m *Defaults) String() string { return proto.CompactTextString(m) } +func (*Defaults) ProtoMessage() {} +func (*Defaults) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{21} +} + +func (m *Defaults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Defaults.Unmarshal(m, b) +} +func (m *Defaults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Defaults.Marshal(b, m, deterministic) +} +func (m *Defaults) XXX_Merge(src proto.Message) { + xxx_messageInfo_Defaults.Merge(m, src) +} +func (m *Defaults) XXX_Size() int { + return xxx_messageInfo_Defaults.Size(m) +} +func (m *Defaults) XXX_DiscardUnknown() { + xxx_messageInfo_Defaults.DiscardUnknown(m) +} + +var xxx_messageInfo_Defaults proto.InternalMessageInfo + +const Default_Defaults_F_Bool bool = true +const Default_Defaults_F_Int32 int32 = 32 +const Default_Defaults_F_Int64 int64 = 64 +const Default_Defaults_F_Fixed32 uint32 = 320 +const Default_Defaults_F_Fixed64 uint64 = 640 +const Default_Defaults_F_Uint32 uint32 = 3200 +const Default_Defaults_F_Uint64 uint64 = 6400 +const Default_Defaults_F_Float float32 = 314159 +const Default_Defaults_F_Double float64 = 271828 +const Default_Defaults_F_String string = "hello, \"world!\"\n" + +var Default_Defaults_F_Bytes []byte = []byte("Bignose") + +const Default_Defaults_F_Sint32 int32 = -32 +const Default_Defaults_F_Sint64 int64 = -64 +const Default_Defaults_F_Enum Defaults_Color = Defaults_GREEN + +var Default_Defaults_F_Pinf float32 = float32(math.Inf(1)) +var Default_Defaults_F_Ninf float32 = float32(math.Inf(-1)) +var Default_Defaults_F_Nan float32 = float32(math.NaN()) + +func (m *Defaults) GetF_Bool() bool { + if m != nil && m.F_Bool != nil { + return *m.F_Bool + } + return Default_Defaults_F_Bool +} + +func (m *Defaults) GetF_Int32() int32 { + if m != nil && m.F_Int32 != nil { + return *m.F_Int32 + } + return Default_Defaults_F_Int32 +} + +func (m *Defaults) GetF_Int64() int64 { + if m != nil && m.F_Int64 != nil { + return *m.F_Int64 + } + return Default_Defaults_F_Int64 +} + +func (m *Defaults) GetF_Fixed32() uint32 { + if m != nil && m.F_Fixed32 != nil { + return *m.F_Fixed32 + } + return Default_Defaults_F_Fixed32 +} + +func (m *Defaults) GetF_Fixed64() uint64 { + if m != nil && m.F_Fixed64 != nil { + return *m.F_Fixed64 + } + return Default_Defaults_F_Fixed64 +} + +func (m *Defaults) GetF_Uint32() uint32 { + if m != nil && m.F_Uint32 != nil { + return *m.F_Uint32 + } + return Default_Defaults_F_Uint32 +} + +func (m *Defaults) GetF_Uint64() uint64 { + if m != nil && m.F_Uint64 != nil { + return *m.F_Uint64 + } + return Default_Defaults_F_Uint64 +} + +func (m *Defaults) GetF_Float() float32 { + if m != nil && m.F_Float != nil { + return *m.F_Float + } + return Default_Defaults_F_Float +} + +func (m *Defaults) GetF_Double() 
float64 { + if m != nil && m.F_Double != nil { + return *m.F_Double + } + return Default_Defaults_F_Double +} + +func (m *Defaults) GetF_String() string { + if m != nil && m.F_String != nil { + return *m.F_String + } + return Default_Defaults_F_String +} + +func (m *Defaults) GetF_Bytes() []byte { + if m != nil && m.F_Bytes != nil { + return m.F_Bytes + } + return append([]byte(nil), Default_Defaults_F_Bytes...) +} + +func (m *Defaults) GetF_Sint32() int32 { + if m != nil && m.F_Sint32 != nil { + return *m.F_Sint32 + } + return Default_Defaults_F_Sint32 +} + +func (m *Defaults) GetF_Sint64() int64 { + if m != nil && m.F_Sint64 != nil { + return *m.F_Sint64 + } + return Default_Defaults_F_Sint64 +} + +func (m *Defaults) GetF_Enum() Defaults_Color { + if m != nil && m.F_Enum != nil { + return *m.F_Enum + } + return Default_Defaults_F_Enum +} + +func (m *Defaults) GetF_Pinf() float32 { + if m != nil && m.F_Pinf != nil { + return *m.F_Pinf + } + return Default_Defaults_F_Pinf +} + +func (m *Defaults) GetF_Ninf() float32 { + if m != nil && m.F_Ninf != nil { + return *m.F_Ninf + } + return Default_Defaults_F_Ninf +} + +func (m *Defaults) GetF_Nan() float32 { + if m != nil && m.F_Nan != nil { + return *m.F_Nan + } + return Default_Defaults_F_Nan +} + +func (m *Defaults) GetSub() *SubDefaults { + if m != nil { + return m.Sub + } + return nil +} + +func (m *Defaults) GetStrZero() string { + if m != nil && m.StrZero != nil { + return *m.StrZero + } + return "" +} + +type SubDefaults struct { + N *int64 `protobuf:"varint,1,opt,name=n,def=7" json:"n,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubDefaults) Reset() { *m = SubDefaults{} } +func (m *SubDefaults) String() string { return proto.CompactTextString(m) } +func (*SubDefaults) ProtoMessage() {} +func (*SubDefaults) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{22} +} + +func (m *SubDefaults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubDefaults.Unmarshal(m, b) +} +func (m *SubDefaults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubDefaults.Marshal(b, m, deterministic) +} +func (m *SubDefaults) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubDefaults.Merge(m, src) +} +func (m *SubDefaults) XXX_Size() int { + return xxx_messageInfo_SubDefaults.Size(m) +} +func (m *SubDefaults) XXX_DiscardUnknown() { + xxx_messageInfo_SubDefaults.DiscardUnknown(m) +} + +var xxx_messageInfo_SubDefaults proto.InternalMessageInfo + +const Default_SubDefaults_N int64 = 7 + +func (m *SubDefaults) GetN() int64 { + if m != nil && m.N != nil { + return *m.N + } + return Default_SubDefaults_N +} + +type RepeatedEnum struct { + Color []RepeatedEnum_Color `protobuf:"varint,1,rep,name=color,enum=test_proto.RepeatedEnum_Color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepeatedEnum) Reset() { *m = RepeatedEnum{} } +func (m *RepeatedEnum) String() string { return proto.CompactTextString(m) } +func (*RepeatedEnum) ProtoMessage() {} +func (*RepeatedEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{23} +} + +func (m *RepeatedEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepeatedEnum.Unmarshal(m, b) +} +func (m *RepeatedEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepeatedEnum.Marshal(b, m, 
deterministic) +} +func (m *RepeatedEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepeatedEnum.Merge(m, src) +} +func (m *RepeatedEnum) XXX_Size() int { + return xxx_messageInfo_RepeatedEnum.Size(m) +} +func (m *RepeatedEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RepeatedEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RepeatedEnum proto.InternalMessageInfo + +func (m *RepeatedEnum) GetColor() []RepeatedEnum_Color { + if m != nil { + return m.Color + } + return nil +} + +type MoreRepeated struct { + Bools []bool `protobuf:"varint,1,rep,name=bools" json:"bools,omitempty"` + BoolsPacked []bool `protobuf:"varint,2,rep,packed,name=bools_packed,json=boolsPacked" json:"bools_packed,omitempty"` + Ints []int32 `protobuf:"varint,3,rep,name=ints" json:"ints,omitempty"` + IntsPacked []int32 `protobuf:"varint,4,rep,packed,name=ints_packed,json=intsPacked" json:"ints_packed,omitempty"` + Int64SPacked []int64 `protobuf:"varint,7,rep,packed,name=int64s_packed,json=int64sPacked" json:"int64s_packed,omitempty"` + Strings []string `protobuf:"bytes,5,rep,name=strings" json:"strings,omitempty"` + Fixeds []uint32 `protobuf:"fixed32,6,rep,name=fixeds" json:"fixeds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MoreRepeated) Reset() { *m = MoreRepeated{} } +func (m *MoreRepeated) String() string { return proto.CompactTextString(m) } +func (*MoreRepeated) ProtoMessage() {} +func (*MoreRepeated) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{24} +} + +func (m *MoreRepeated) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MoreRepeated.Unmarshal(m, b) +} +func (m *MoreRepeated) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MoreRepeated.Marshal(b, m, deterministic) +} +func (m *MoreRepeated) XXX_Merge(src proto.Message) { + xxx_messageInfo_MoreRepeated.Merge(m, src) +} +func (m *MoreRepeated) XXX_Size() int { + return xxx_messageInfo_MoreRepeated.Size(m) +} +func (m *MoreRepeated) XXX_DiscardUnknown() { + xxx_messageInfo_MoreRepeated.DiscardUnknown(m) +} + +var xxx_messageInfo_MoreRepeated proto.InternalMessageInfo + +func (m *MoreRepeated) GetBools() []bool { + if m != nil { + return m.Bools + } + return nil +} + +func (m *MoreRepeated) GetBoolsPacked() []bool { + if m != nil { + return m.BoolsPacked + } + return nil +} + +func (m *MoreRepeated) GetInts() []int32 { + if m != nil { + return m.Ints + } + return nil +} + +func (m *MoreRepeated) GetIntsPacked() []int32 { + if m != nil { + return m.IntsPacked + } + return nil +} + +func (m *MoreRepeated) GetInt64SPacked() []int64 { + if m != nil { + return m.Int64SPacked + } + return nil +} + +func (m *MoreRepeated) GetStrings() []string { + if m != nil { + return m.Strings + } + return nil +} + +func (m *MoreRepeated) GetFixeds() []uint32 { + if m != nil { + return m.Fixeds + } + return nil +} + +type GroupOld struct { + G *GroupOld_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupOld) Reset() { *m = GroupOld{} } +func (m *GroupOld) String() string { return proto.CompactTextString(m) } +func (*GroupOld) ProtoMessage() {} +func (*GroupOld) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{25} +} + +func (m *GroupOld) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupOld.Unmarshal(m, b) +} +func (m 
*GroupOld) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupOld.Marshal(b, m, deterministic) +} +func (m *GroupOld) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupOld.Merge(m, src) +} +func (m *GroupOld) XXX_Size() int { + return xxx_messageInfo_GroupOld.Size(m) +} +func (m *GroupOld) XXX_DiscardUnknown() { + xxx_messageInfo_GroupOld.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupOld proto.InternalMessageInfo + +func (m *GroupOld) GetG() *GroupOld_G { + if m != nil { + return m.G + } + return nil +} + +type GroupOld_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupOld_G) Reset() { *m = GroupOld_G{} } +func (m *GroupOld_G) String() string { return proto.CompactTextString(m) } +func (*GroupOld_G) ProtoMessage() {} +func (*GroupOld_G) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{25, 0} +} + +func (m *GroupOld_G) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupOld_G.Unmarshal(m, b) +} +func (m *GroupOld_G) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupOld_G.Marshal(b, m, deterministic) +} +func (m *GroupOld_G) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupOld_G.Merge(m, src) +} +func (m *GroupOld_G) XXX_Size() int { + return xxx_messageInfo_GroupOld_G.Size(m) +} +func (m *GroupOld_G) XXX_DiscardUnknown() { + xxx_messageInfo_GroupOld_G.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupOld_G proto.InternalMessageInfo + +func (m *GroupOld_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type GroupNew struct { + G *GroupNew_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupNew) Reset() { *m = GroupNew{} } +func (m *GroupNew) String() string { return proto.CompactTextString(m) } +func (*GroupNew) ProtoMessage() {} +func (*GroupNew) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{26} +} + +func (m *GroupNew) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupNew.Unmarshal(m, b) +} +func (m *GroupNew) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupNew.Marshal(b, m, deterministic) +} +func (m *GroupNew) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupNew.Merge(m, src) +} +func (m *GroupNew) XXX_Size() int { + return xxx_messageInfo_GroupNew.Size(m) +} +func (m *GroupNew) XXX_DiscardUnknown() { + xxx_messageInfo_GroupNew.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupNew proto.InternalMessageInfo + +func (m *GroupNew) GetG() *GroupNew_G { + if m != nil { + return m.G + } + return nil +} + +type GroupNew_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + Y *int32 `protobuf:"varint,3,opt,name=y" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupNew_G) Reset() { *m = GroupNew_G{} } +func (m *GroupNew_G) String() string { return proto.CompactTextString(m) } +func (*GroupNew_G) ProtoMessage() {} +func (*GroupNew_G) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{26, 0} +} + +func (m *GroupNew_G) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupNew_G.Unmarshal(m, b) +} 
+func (m *GroupNew_G) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupNew_G.Marshal(b, m, deterministic) +} +func (m *GroupNew_G) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupNew_G.Merge(m, src) +} +func (m *GroupNew_G) XXX_Size() int { + return xxx_messageInfo_GroupNew_G.Size(m) +} +func (m *GroupNew_G) XXX_DiscardUnknown() { + xxx_messageInfo_GroupNew_G.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupNew_G proto.InternalMessageInfo + +func (m *GroupNew_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +func (m *GroupNew_G) GetY() int32 { + if m != nil && m.Y != nil { + return *m.Y + } + return 0 +} + +type FloatingPoint struct { + F *float64 `protobuf:"fixed64,1,req,name=f" json:"f,omitempty"` + Exact *bool `protobuf:"varint,2,opt,name=exact" json:"exact,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FloatingPoint) Reset() { *m = FloatingPoint{} } +func (m *FloatingPoint) String() string { return proto.CompactTextString(m) } +func (*FloatingPoint) ProtoMessage() {} +func (*FloatingPoint) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{27} +} + +func (m *FloatingPoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatingPoint.Unmarshal(m, b) +} +func (m *FloatingPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FloatingPoint.Marshal(b, m, deterministic) +} +func (m *FloatingPoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatingPoint.Merge(m, src) +} +func (m *FloatingPoint) XXX_Size() int { + return xxx_messageInfo_FloatingPoint.Size(m) +} +func (m *FloatingPoint) XXX_DiscardUnknown() { + xxx_messageInfo_FloatingPoint.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatingPoint proto.InternalMessageInfo + +func (m *FloatingPoint) GetF() float64 { + if m != nil && m.F != nil { + return *m.F + } + return 0 +} + +func (m *FloatingPoint) GetExact() bool { + if m != nil && m.Exact != nil { + return *m.Exact + } + return false +} + +type MessageWithMap struct { + NameMapping map[int32]string `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MsgMapping map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ByteMapping map[bool][]byte `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + StrToStr map[string]string `protobuf:"bytes,4,rep,name=str_to_str,json=strToStr" json:"str_to_str,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{28} +} + +func (m *MessageWithMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageWithMap.Unmarshal(m, b) +} +func (m *MessageWithMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + 
return xxx_messageInfo_MessageWithMap.Marshal(b, m, deterministic) +} +func (m *MessageWithMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageWithMap.Merge(m, src) +} +func (m *MessageWithMap) XXX_Size() int { + return xxx_messageInfo_MessageWithMap.Size(m) +} +func (m *MessageWithMap) XXX_DiscardUnknown() { + xxx_messageInfo_MessageWithMap.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageWithMap proto.InternalMessageInfo + +func (m *MessageWithMap) GetNameMapping() map[int32]string { + if m != nil { + return m.NameMapping + } + return nil +} + +func (m *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint { + if m != nil { + return m.MsgMapping + } + return nil +} + +func (m *MessageWithMap) GetByteMapping() map[bool][]byte { + if m != nil { + return m.ByteMapping + } + return nil +} + +func (m *MessageWithMap) GetStrToStr() map[string]string { + if m != nil { + return m.StrToStr + } + return nil +} + +type Oneof struct { + // Types that are valid to be assigned to Union: + // *Oneof_F_Bool + // *Oneof_F_Int32 + // *Oneof_F_Int64 + // *Oneof_F_Fixed32 + // *Oneof_F_Fixed64 + // *Oneof_F_Uint32 + // *Oneof_F_Uint64 + // *Oneof_F_Float + // *Oneof_F_Double + // *Oneof_F_String + // *Oneof_F_Bytes + // *Oneof_F_Sint32 + // *Oneof_F_Sint64 + // *Oneof_F_Enum + // *Oneof_F_Message + // *Oneof_FGroup + // *Oneof_F_Largest_Tag + Union isOneof_Union `protobuf_oneof:"union"` + // Types that are valid to be assigned to Tormato: + // *Oneof_Value + Tormato isOneof_Tormato `protobuf_oneof:"tormato"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Oneof) Reset() { *m = Oneof{} } +func (m *Oneof) String() string { return proto.CompactTextString(m) } +func (*Oneof) ProtoMessage() {} +func (*Oneof) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{29} +} + +func (m *Oneof) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Oneof.Unmarshal(m, b) +} +func (m *Oneof) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Oneof.Marshal(b, m, deterministic) +} +func (m *Oneof) XXX_Merge(src proto.Message) { + xxx_messageInfo_Oneof.Merge(m, src) +} +func (m *Oneof) XXX_Size() int { + return xxx_messageInfo_Oneof.Size(m) +} +func (m *Oneof) XXX_DiscardUnknown() { + xxx_messageInfo_Oneof.DiscardUnknown(m) +} + +var xxx_messageInfo_Oneof proto.InternalMessageInfo + +type isOneof_Union interface { + isOneof_Union() +} + +type Oneof_F_Bool struct { + F_Bool bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,oneof"` +} + +type Oneof_F_Int32 struct { + F_Int32 int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,oneof"` +} + +type Oneof_F_Int64 struct { + F_Int64 int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,oneof"` +} + +type Oneof_F_Fixed32 struct { + F_Fixed32 uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,oneof"` +} + +type Oneof_F_Fixed64 struct { + F_Fixed64 uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,oneof"` +} + +type Oneof_F_Uint32 struct { + F_Uint32 uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,oneof"` +} + +type Oneof_F_Uint64 struct { + F_Uint64 uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,oneof"` +} + +type Oneof_F_Float struct { + F_Float float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,oneof"` +} + +type Oneof_F_Double struct { + F_Double float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,oneof"` +} + +type Oneof_F_String struct { + F_String string 
`protobuf:"bytes,10,opt,name=F_String,json=FString,oneof"` +} + +type Oneof_F_Bytes struct { + F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,oneof"` +} + +type Oneof_F_Sint32 struct { + F_Sint32 int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,oneof"` +} + +type Oneof_F_Sint64 struct { + F_Sint64 int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,oneof"` +} + +type Oneof_F_Enum struct { + F_Enum MyMessage_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=test_proto.MyMessage_Color,oneof"` +} + +type Oneof_F_Message struct { + F_Message *GoTestField `protobuf:"bytes,15,opt,name=F_Message,json=FMessage,oneof"` +} + +type Oneof_FGroup struct { + FGroup *Oneof_F_Group `protobuf:"group,16,opt,name=F_Group,json=fGroup,oneof"` +} + +type Oneof_F_Largest_Tag struct { + F_Largest_Tag int32 `protobuf:"varint,536870911,opt,name=F_Largest_Tag,json=FLargestTag,oneof"` +} + +func (*Oneof_F_Bool) isOneof_Union() {} + +func (*Oneof_F_Int32) isOneof_Union() {} + +func (*Oneof_F_Int64) isOneof_Union() {} + +func (*Oneof_F_Fixed32) isOneof_Union() {} + +func (*Oneof_F_Fixed64) isOneof_Union() {} + +func (*Oneof_F_Uint32) isOneof_Union() {} + +func (*Oneof_F_Uint64) isOneof_Union() {} + +func (*Oneof_F_Float) isOneof_Union() {} + +func (*Oneof_F_Double) isOneof_Union() {} + +func (*Oneof_F_String) isOneof_Union() {} + +func (*Oneof_F_Bytes) isOneof_Union() {} + +func (*Oneof_F_Sint32) isOneof_Union() {} + +func (*Oneof_F_Sint64) isOneof_Union() {} + +func (*Oneof_F_Enum) isOneof_Union() {} + +func (*Oneof_F_Message) isOneof_Union() {} + +func (*Oneof_FGroup) isOneof_Union() {} + +func (*Oneof_F_Largest_Tag) isOneof_Union() {} + +func (m *Oneof) GetUnion() isOneof_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Oneof) GetF_Bool() bool { + if x, ok := m.GetUnion().(*Oneof_F_Bool); ok { + return x.F_Bool + } + return false +} + +func (m *Oneof) GetF_Int32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Int32); ok { + return x.F_Int32 + } + return 0 +} + +func (m *Oneof) GetF_Int64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Int64); ok { + return x.F_Int64 + } + return 0 +} + +func (m *Oneof) GetF_Fixed32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed32); ok { + return x.F_Fixed32 + } + return 0 +} + +func (m *Oneof) GetF_Fixed64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed64); ok { + return x.F_Fixed64 + } + return 0 +} + +func (m *Oneof) GetF_Uint32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Uint32); ok { + return x.F_Uint32 + } + return 0 +} + +func (m *Oneof) GetF_Uint64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Uint64); ok { + return x.F_Uint64 + } + return 0 +} + +func (m *Oneof) GetF_Float() float32 { + if x, ok := m.GetUnion().(*Oneof_F_Float); ok { + return x.F_Float + } + return 0 +} + +func (m *Oneof) GetF_Double() float64 { + if x, ok := m.GetUnion().(*Oneof_F_Double); ok { + return x.F_Double + } + return 0 +} + +func (m *Oneof) GetF_String() string { + if x, ok := m.GetUnion().(*Oneof_F_String); ok { + return x.F_String + } + return "" +} + +func (m *Oneof) GetF_Bytes() []byte { + if x, ok := m.GetUnion().(*Oneof_F_Bytes); ok { + return x.F_Bytes + } + return nil +} + +func (m *Oneof) GetF_Sint32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Sint32); ok { + return x.F_Sint32 + } + return 0 +} + +func (m *Oneof) GetF_Sint64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Sint64); ok { + return x.F_Sint64 + } + return 0 +} + +func (m *Oneof) GetF_Enum() MyMessage_Color { + if x, ok := 
m.GetUnion().(*Oneof_F_Enum); ok { + return x.F_Enum + } + return MyMessage_RED +} + +func (m *Oneof) GetF_Message() *GoTestField { + if x, ok := m.GetUnion().(*Oneof_F_Message); ok { + return x.F_Message + } + return nil +} + +func (m *Oneof) GetFGroup() *Oneof_F_Group { + if x, ok := m.GetUnion().(*Oneof_FGroup); ok { + return x.FGroup + } + return nil +} + +func (m *Oneof) GetF_Largest_Tag() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Largest_Tag); ok { + return x.F_Largest_Tag + } + return 0 +} + +type isOneof_Tormato interface { + isOneof_Tormato() +} + +type Oneof_Value struct { + Value int32 `protobuf:"varint,100,opt,name=value,oneof"` +} + +func (*Oneof_Value) isOneof_Tormato() {} + +func (m *Oneof) GetTormato() isOneof_Tormato { + if m != nil { + return m.Tormato + } + return nil +} + +func (m *Oneof) GetValue() int32 { + if x, ok := m.GetTormato().(*Oneof_Value); ok { + return x.Value + } + return 0 +} + +// XXX_OneofWrappers is for the internal use of the proto package. +func (*Oneof) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Oneof_F_Bool)(nil), + (*Oneof_F_Int32)(nil), + (*Oneof_F_Int64)(nil), + (*Oneof_F_Fixed32)(nil), + (*Oneof_F_Fixed64)(nil), + (*Oneof_F_Uint32)(nil), + (*Oneof_F_Uint64)(nil), + (*Oneof_F_Float)(nil), + (*Oneof_F_Double)(nil), + (*Oneof_F_String)(nil), + (*Oneof_F_Bytes)(nil), + (*Oneof_F_Sint32)(nil), + (*Oneof_F_Sint64)(nil), + (*Oneof_F_Enum)(nil), + (*Oneof_F_Message)(nil), + (*Oneof_FGroup)(nil), + (*Oneof_F_Largest_Tag)(nil), + (*Oneof_Value)(nil), + } +} + +type Oneof_F_Group struct { + X *int32 `protobuf:"varint,17,opt,name=x" json:"x,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Oneof_F_Group) Reset() { *m = Oneof_F_Group{} } +func (m *Oneof_F_Group) String() string { return proto.CompactTextString(m) } +func (*Oneof_F_Group) ProtoMessage() {} +func (*Oneof_F_Group) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{29, 0} +} + +func (m *Oneof_F_Group) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Oneof_F_Group.Unmarshal(m, b) +} +func (m *Oneof_F_Group) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Oneof_F_Group.Marshal(b, m, deterministic) +} +func (m *Oneof_F_Group) XXX_Merge(src proto.Message) { + xxx_messageInfo_Oneof_F_Group.Merge(m, src) +} +func (m *Oneof_F_Group) XXX_Size() int { + return xxx_messageInfo_Oneof_F_Group.Size(m) +} +func (m *Oneof_F_Group) XXX_DiscardUnknown() { + xxx_messageInfo_Oneof_F_Group.DiscardUnknown(m) +} + +var xxx_messageInfo_Oneof_F_Group proto.InternalMessageInfo + +func (m *Oneof_F_Group) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type Communique struct { + MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` + // This is a oneof, called "union". 
+ // + // Types that are valid to be assigned to Union: + // *Communique_Number + // *Communique_Name + // *Communique_Data + // *Communique_TempC + // *Communique_Col + // *Communique_Msg + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Communique) Reset() { *m = Communique{} } +func (m *Communique) String() string { return proto.CompactTextString(m) } +func (*Communique) ProtoMessage() {} +func (*Communique) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{30} +} + +func (m *Communique) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique.Unmarshal(m, b) +} +func (m *Communique) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique.Marshal(b, m, deterministic) +} +func (m *Communique) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique.Merge(m, src) +} +func (m *Communique) XXX_Size() int { + return xxx_messageInfo_Communique.Size(m) +} +func (m *Communique) XXX_DiscardUnknown() { + xxx_messageInfo_Communique.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique proto.InternalMessageInfo + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} + +type isCommunique_Union interface { + isCommunique_Union() +} + +type Communique_Number struct { + Number int32 `protobuf:"varint,5,opt,name=number,oneof"` +} + +type Communique_Name struct { + Name string `protobuf:"bytes,6,opt,name=name,oneof"` +} + +type Communique_Data struct { + Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` +} + +type Communique_TempC struct { + TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` +} + +type Communique_Col struct { + Col MyMessage_Color `protobuf:"varint,9,opt,name=col,enum=test_proto.MyMessage_Color,oneof"` +} + +type Communique_Msg struct { + Msg *Strings `protobuf:"bytes,10,opt,name=msg,oneof"` +} + +func (*Communique_Number) isCommunique_Union() {} + +func (*Communique_Name) isCommunique_Union() {} + +func (*Communique_Data) isCommunique_Union() {} + +func (*Communique_TempC) isCommunique_Union() {} + +func (*Communique_Col) isCommunique_Union() {} + +func (*Communique_Msg) isCommunique_Union() {} + +func (m *Communique) GetUnion() isCommunique_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Communique) GetNumber() int32 { + if x, ok := m.GetUnion().(*Communique_Number); ok { + return x.Number + } + return 0 +} + +func (m *Communique) GetName() string { + if x, ok := m.GetUnion().(*Communique_Name); ok { + return x.Name + } + return "" +} + +func (m *Communique) GetData() []byte { + if x, ok := m.GetUnion().(*Communique_Data); ok { + return x.Data + } + return nil +} + +func (m *Communique) GetTempC() float64 { + if x, ok := m.GetUnion().(*Communique_TempC); ok { + return x.TempC + } + return 0 +} + +func (m *Communique) GetCol() MyMessage_Color { + if x, ok := m.GetUnion().(*Communique_Col); ok { + return x.Col + } + return MyMessage_RED +} + +func (m *Communique) GetMsg() *Strings { + if x, ok := m.GetUnion().(*Communique_Msg); ok { + return x.Msg + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Communique) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Communique_Number)(nil), + (*Communique_Name)(nil), + (*Communique_Data)(nil), + (*Communique_TempC)(nil), + (*Communique_Col)(nil), + (*Communique_Msg)(nil), + } +} + +type TestUTF8 struct { + Scalar *string `protobuf:"bytes,1,opt,name=scalar" json:"scalar,omitempty"` + Vector []string `protobuf:"bytes,2,rep,name=vector" json:"vector,omitempty"` + // Types that are valid to be assigned to Oneof: + // *TestUTF8_Field + Oneof isTestUTF8_Oneof `protobuf_oneof:"oneof"` + MapKey map[string]int64 `protobuf:"bytes,4,rep,name=map_key,json=mapKey" json:"map_key,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapValue map[int64]string `protobuf:"bytes,5,rep,name=map_value,json=mapValue" json:"map_value,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestUTF8) Reset() { *m = TestUTF8{} } +func (m *TestUTF8) String() string { return proto.CompactTextString(m) } +func (*TestUTF8) ProtoMessage() {} +func (*TestUTF8) Descriptor() ([]byte, []int) { + return fileDescriptor_8ca34d01332f1402, []int{31} +} + +func (m *TestUTF8) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestUTF8.Unmarshal(m, b) +} +func (m *TestUTF8) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestUTF8.Marshal(b, m, deterministic) +} +func (m *TestUTF8) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestUTF8.Merge(m, src) +} +func (m *TestUTF8) XXX_Size() int { + return xxx_messageInfo_TestUTF8.Size(m) +} +func (m *TestUTF8) XXX_DiscardUnknown() { + xxx_messageInfo_TestUTF8.DiscardUnknown(m) +} + +var xxx_messageInfo_TestUTF8 proto.InternalMessageInfo + +func (m *TestUTF8) GetScalar() string { + if m != nil && m.Scalar != nil { + return *m.Scalar + } + return "" +} + +func (m *TestUTF8) GetVector() []string { + if m != nil { + return m.Vector + } + return nil +} + +type isTestUTF8_Oneof interface { + isTestUTF8_Oneof() +} + +type TestUTF8_Field struct { + Field string `protobuf:"bytes,3,opt,name=field,oneof"` +} + +func (*TestUTF8_Field) isTestUTF8_Oneof() {} + +func (m *TestUTF8) GetOneof() isTestUTF8_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *TestUTF8) GetField() string { + if x, ok := m.GetOneof().(*TestUTF8_Field); ok { + return x.Field + } + return "" +} + +func (m *TestUTF8) GetMapKey() map[string]int64 { + if m != nil { + return m.MapKey + } + return nil +} + +func (m *TestUTF8) GetMapValue() map[int64]string { + if m != nil { + return m.MapValue + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*TestUTF8) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*TestUTF8_Field)(nil), + } +} + +var E_Greeting = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: ([]string)(nil), + Field: 106, + Name: "test_proto.greeting", + Tag: "bytes,106,rep,name=greeting", + Filename: "test_proto/test.proto", +} + +var E_Complex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: (*ComplexExtension)(nil), + Field: 200, + Name: "test_proto.complex", + Tag: "bytes,200,opt,name=complex", + Filename: "test_proto/test.proto", +} + +var E_RComplex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: ([]*ComplexExtension)(nil), + Field: 201, + Name: "test_proto.r_complex", + Tag: "bytes,201,rep,name=r_complex", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 101, + Name: "test_proto.no_default_double", + Tag: "fixed64,101,opt,name=no_default_double", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 102, + Name: "test_proto.no_default_float", + Tag: "fixed32,102,opt,name=no_default_float", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 103, + Name: "test_proto.no_default_int32", + Tag: "varint,103,opt,name=no_default_int32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 104, + Name: "test_proto.no_default_int64", + Tag: "varint,104,opt,name=no_default_int64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 105, + Name: "test_proto.no_default_uint32", + Tag: "varint,105,opt,name=no_default_uint32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 106, + Name: "test_proto.no_default_uint64", + Tag: "varint,106,opt,name=no_default_uint64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 107, + Name: "test_proto.no_default_sint32", + Tag: "zigzag32,107,opt,name=no_default_sint32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 108, + Name: "test_proto.no_default_sint64", + Tag: "zigzag64,108,opt,name=no_default_sint64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 109, + Name: "test_proto.no_default_fixed32", + Tag: "fixed32,109,opt,name=no_default_fixed32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 110, + Name: "test_proto.no_default_fixed64", + Tag: "fixed64,110,opt,name=no_default_fixed64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSfixed32 = &proto.ExtensionDesc{ + 
ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 111, + Name: "test_proto.no_default_sfixed32", + Tag: "fixed32,111,opt,name=no_default_sfixed32", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 112, + Name: "test_proto.no_default_sfixed64", + Tag: "fixed64,112,opt,name=no_default_sfixed64", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 113, + Name: "test_proto.no_default_bool", + Tag: "varint,113,opt,name=no_default_bool", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 114, + Name: "test_proto.no_default_string", + Tag: "bytes,114,opt,name=no_default_string", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 115, + Name: "test_proto.no_default_bytes", + Tag: "bytes,115,opt,name=no_default_bytes", + Filename: "test_proto/test.proto", +} + +var E_NoDefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 116, + Name: "test_proto.no_default_enum", + Tag: "varint,116,opt,name=no_default_enum,enum=test_proto.DefaultsMessage_DefaultsEnum", + Filename: "test_proto/test.proto", +} + +var E_DefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 201, + Name: "test_proto.default_double", + Tag: "fixed64,201,opt,name=default_double,def=3.1415", + Filename: "test_proto/test.proto", +} + +var E_DefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 202, + Name: "test_proto.default_float", + Tag: "fixed32,202,opt,name=default_float,def=3.14", + Filename: "test_proto/test.proto", +} + +var E_DefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 203, + Name: "test_proto.default_int32", + Tag: "varint,203,opt,name=default_int32,def=42", + Filename: "test_proto/test.proto", +} + +var E_DefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 204, + Name: "test_proto.default_int64", + Tag: "varint,204,opt,name=default_int64,def=43", + Filename: "test_proto/test.proto", +} + +var E_DefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 205, + Name: "test_proto.default_uint32", + Tag: "varint,205,opt,name=default_uint32,def=44", + Filename: "test_proto/test.proto", +} + +var E_DefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 206, + Name: "test_proto.default_uint64", + Tag: "varint,206,opt,name=default_uint64,def=45", + Filename: "test_proto/test.proto", +} + +var E_DefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 207, + Name: "test_proto.default_sint32", + Tag: "zigzag32,207,opt,name=default_sint32,def=46", + Filename: "test_proto/test.proto", +} + +var E_DefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: 
(*int64)(nil), + Field: 208, + Name: "test_proto.default_sint64", + Tag: "zigzag64,208,opt,name=default_sint64,def=47", + Filename: "test_proto/test.proto", +} + +var E_DefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 209, + Name: "test_proto.default_fixed32", + Tag: "fixed32,209,opt,name=default_fixed32,def=48", + Filename: "test_proto/test.proto", +} + +var E_DefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 210, + Name: "test_proto.default_fixed64", + Tag: "fixed64,210,opt,name=default_fixed64,def=49", + Filename: "test_proto/test.proto", +} + +var E_DefaultSfixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 211, + Name: "test_proto.default_sfixed32", + Tag: "fixed32,211,opt,name=default_sfixed32,def=50", + Filename: "test_proto/test.proto", +} + +var E_DefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 212, + Name: "test_proto.default_sfixed64", + Tag: "fixed64,212,opt,name=default_sfixed64,def=51", + Filename: "test_proto/test.proto", +} + +var E_DefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 213, + Name: "test_proto.default_bool", + Tag: "varint,213,opt,name=default_bool,def=1", + Filename: "test_proto/test.proto", +} + +var E_DefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 214, + Name: "test_proto.default_string", + Tag: "bytes,214,opt,name=default_string,def=Hello, string,def=foo", + Filename: "test_proto/test.proto", +} + +var E_DefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 215, + Name: "test_proto.default_bytes", + Tag: "bytes,215,opt,name=default_bytes,def=Hello, bytes", + Filename: "test_proto/test.proto", +} + +var E_DefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 216, + Name: "test_proto.default_enum", + Tag: "varint,216,opt,name=default_enum,enum=test_proto.DefaultsMessage_DefaultsEnum,def=1", + Filename: "test_proto/test.proto", +} + +var E_X201 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 201, + Name: "test_proto.x201", + Tag: "bytes,201,opt,name=x201", + Filename: "test_proto/test.proto", +} + +var E_X202 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 202, + Name: "test_proto.x202", + Tag: "bytes,202,opt,name=x202", + Filename: "test_proto/test.proto", +} + +var E_X203 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 203, + Name: "test_proto.x203", + Tag: "bytes,203,opt,name=x203", + Filename: "test_proto/test.proto", +} + +var E_X204 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 204, + Name: "test_proto.x204", + Tag: "bytes,204,opt,name=x204", + Filename: "test_proto/test.proto", +} + +var E_X205 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 205, + Name: "test_proto.x205", + Tag: "bytes,205,opt,name=x205", + Filename: "test_proto/test.proto", +} + +var E_X206 = &proto.ExtensionDesc{ + ExtendedType: 
(*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 206, + Name: "test_proto.x206", + Tag: "bytes,206,opt,name=x206", + Filename: "test_proto/test.proto", +} + +var E_X207 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 207, + Name: "test_proto.x207", + Tag: "bytes,207,opt,name=x207", + Filename: "test_proto/test.proto", +} + +var E_X208 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 208, + Name: "test_proto.x208", + Tag: "bytes,208,opt,name=x208", + Filename: "test_proto/test.proto", +} + +var E_X209 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 209, + Name: "test_proto.x209", + Tag: "bytes,209,opt,name=x209", + Filename: "test_proto/test.proto", +} + +var E_X210 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 210, + Name: "test_proto.x210", + Tag: "bytes,210,opt,name=x210", + Filename: "test_proto/test.proto", +} + +var E_X211 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 211, + Name: "test_proto.x211", + Tag: "bytes,211,opt,name=x211", + Filename: "test_proto/test.proto", +} + +var E_X212 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 212, + Name: "test_proto.x212", + Tag: "bytes,212,opt,name=x212", + Filename: "test_proto/test.proto", +} + +var E_X213 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 213, + Name: "test_proto.x213", + Tag: "bytes,213,opt,name=x213", + Filename: "test_proto/test.proto", +} + +var E_X214 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 214, + Name: "test_proto.x214", + Tag: "bytes,214,opt,name=x214", + Filename: "test_proto/test.proto", +} + +var E_X215 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 215, + Name: "test_proto.x215", + Tag: "bytes,215,opt,name=x215", + Filename: "test_proto/test.proto", +} + +var E_X216 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 216, + Name: "test_proto.x216", + Tag: "bytes,216,opt,name=x216", + Filename: "test_proto/test.proto", +} + +var E_X217 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 217, + Name: "test_proto.x217", + Tag: "bytes,217,opt,name=x217", + Filename: "test_proto/test.proto", +} + +var E_X218 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 218, + Name: "test_proto.x218", + Tag: "bytes,218,opt,name=x218", + Filename: "test_proto/test.proto", +} + +var E_X219 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 219, + Name: "test_proto.x219", + Tag: "bytes,219,opt,name=x219", + Filename: "test_proto/test.proto", +} + +var E_X220 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 220, + Name: "test_proto.x220", + Tag: "bytes,220,opt,name=x220", + Filename: "test_proto/test.proto", +} + +var E_X221 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 221, + Name: "test_proto.x221", + Tag: "bytes,221,opt,name=x221", + Filename: "test_proto/test.proto", +} + +var E_X222 = &proto.ExtensionDesc{ + 
ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 222, + Name: "test_proto.x222", + Tag: "bytes,222,opt,name=x222", + Filename: "test_proto/test.proto", +} + +var E_X223 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 223, + Name: "test_proto.x223", + Tag: "bytes,223,opt,name=x223", + Filename: "test_proto/test.proto", +} + +var E_X224 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 224, + Name: "test_proto.x224", + Tag: "bytes,224,opt,name=x224", + Filename: "test_proto/test.proto", +} + +var E_X225 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 225, + Name: "test_proto.x225", + Tag: "bytes,225,opt,name=x225", + Filename: "test_proto/test.proto", +} + +var E_X226 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 226, + Name: "test_proto.x226", + Tag: "bytes,226,opt,name=x226", + Filename: "test_proto/test.proto", +} + +var E_X227 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 227, + Name: "test_proto.x227", + Tag: "bytes,227,opt,name=x227", + Filename: "test_proto/test.proto", +} + +var E_X228 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 228, + Name: "test_proto.x228", + Tag: "bytes,228,opt,name=x228", + Filename: "test_proto/test.proto", +} + +var E_X229 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 229, + Name: "test_proto.x229", + Tag: "bytes,229,opt,name=x229", + Filename: "test_proto/test.proto", +} + +var E_X230 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 230, + Name: "test_proto.x230", + Tag: "bytes,230,opt,name=x230", + Filename: "test_proto/test.proto", +} + +var E_X231 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 231, + Name: "test_proto.x231", + Tag: "bytes,231,opt,name=x231", + Filename: "test_proto/test.proto", +} + +var E_X232 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 232, + Name: "test_proto.x232", + Tag: "bytes,232,opt,name=x232", + Filename: "test_proto/test.proto", +} + +var E_X233 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 233, + Name: "test_proto.x233", + Tag: "bytes,233,opt,name=x233", + Filename: "test_proto/test.proto", +} + +var E_X234 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 234, + Name: "test_proto.x234", + Tag: "bytes,234,opt,name=x234", + Filename: "test_proto/test.proto", +} + +var E_X235 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 235, + Name: "test_proto.x235", + Tag: "bytes,235,opt,name=x235", + Filename: "test_proto/test.proto", +} + +var E_X236 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 236, + Name: "test_proto.x236", + Tag: "bytes,236,opt,name=x236", + Filename: "test_proto/test.proto", +} + +var E_X237 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 237, + Name: "test_proto.x237", + Tag: "bytes,237,opt,name=x237", + Filename: "test_proto/test.proto", +} + +var E_X238 = 
&proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 238, + Name: "test_proto.x238", + Tag: "bytes,238,opt,name=x238", + Filename: "test_proto/test.proto", +} + +var E_X239 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 239, + Name: "test_proto.x239", + Tag: "bytes,239,opt,name=x239", + Filename: "test_proto/test.proto", +} + +var E_X240 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 240, + Name: "test_proto.x240", + Tag: "bytes,240,opt,name=x240", + Filename: "test_proto/test.proto", +} + +var E_X241 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 241, + Name: "test_proto.x241", + Tag: "bytes,241,opt,name=x241", + Filename: "test_proto/test.proto", +} + +var E_X242 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 242, + Name: "test_proto.x242", + Tag: "bytes,242,opt,name=x242", + Filename: "test_proto/test.proto", +} + +var E_X243 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 243, + Name: "test_proto.x243", + Tag: "bytes,243,opt,name=x243", + Filename: "test_proto/test.proto", +} + +var E_X244 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 244, + Name: "test_proto.x244", + Tag: "bytes,244,opt,name=x244", + Filename: "test_proto/test.proto", +} + +var E_X245 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 245, + Name: "test_proto.x245", + Tag: "bytes,245,opt,name=x245", + Filename: "test_proto/test.proto", +} + +var E_X246 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 246, + Name: "test_proto.x246", + Tag: "bytes,246,opt,name=x246", + Filename: "test_proto/test.proto", +} + +var E_X247 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 247, + Name: "test_proto.x247", + Tag: "bytes,247,opt,name=x247", + Filename: "test_proto/test.proto", +} + +var E_X248 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 248, + Name: "test_proto.x248", + Tag: "bytes,248,opt,name=x248", + Filename: "test_proto/test.proto", +} + +var E_X249 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 249, + Name: "test_proto.x249", + Tag: "bytes,249,opt,name=x249", + Filename: "test_proto/test.proto", +} + +var E_X250 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 250, + Name: "test_proto.x250", + Tag: "bytes,250,opt,name=x250", + Filename: "test_proto/test.proto", +} + +func init() { + proto.RegisterEnum("test_proto.FOO", FOO_name, FOO_value) + proto.RegisterEnum("test_proto.GoTest_KIND", GoTest_KIND_name, GoTest_KIND_value) + proto.RegisterEnum("test_proto.MyMessage_Color", MyMessage_Color_name, MyMessage_Color_value) + proto.RegisterEnum("test_proto.DefaultsMessage_DefaultsEnum", DefaultsMessage_DefaultsEnum_name, DefaultsMessage_DefaultsEnum_value) + proto.RegisterEnum("test_proto.Defaults_Color", Defaults_Color_name, Defaults_Color_value) + proto.RegisterEnum("test_proto.RepeatedEnum_Color", RepeatedEnum_Color_name, RepeatedEnum_Color_value) + proto.RegisterType((*GoEnum)(nil), "test_proto.GoEnum") + 
proto.RegisterType((*GoTestField)(nil), "test_proto.GoTestField") + proto.RegisterType((*GoTest)(nil), "test_proto.GoTest") + proto.RegisterType((*GoTest_RequiredGroup)(nil), "test_proto.GoTest.RequiredGroup") + proto.RegisterType((*GoTest_RepeatedGroup)(nil), "test_proto.GoTest.RepeatedGroup") + proto.RegisterType((*GoTest_OptionalGroup)(nil), "test_proto.GoTest.OptionalGroup") + proto.RegisterType((*GoTestRequiredGroupField)(nil), "test_proto.GoTestRequiredGroupField") + proto.RegisterType((*GoTestRequiredGroupField_Group)(nil), "test_proto.GoTestRequiredGroupField.Group") + proto.RegisterType((*GoSkipTest)(nil), "test_proto.GoSkipTest") + proto.RegisterType((*GoSkipTest_SkipGroup)(nil), "test_proto.GoSkipTest.SkipGroup") + proto.RegisterType((*NonPackedTest)(nil), "test_proto.NonPackedTest") + proto.RegisterType((*PackedTest)(nil), "test_proto.PackedTest") + proto.RegisterType((*MaxTag)(nil), "test_proto.MaxTag") + proto.RegisterType((*OldMessage)(nil), "test_proto.OldMessage") + proto.RegisterType((*OldMessage_Nested)(nil), "test_proto.OldMessage.Nested") + proto.RegisterType((*NewMessage)(nil), "test_proto.NewMessage") + proto.RegisterType((*NewMessage_Nested)(nil), "test_proto.NewMessage.Nested") + proto.RegisterType((*InnerMessage)(nil), "test_proto.InnerMessage") + proto.RegisterType((*OtherMessage)(nil), "test_proto.OtherMessage") + proto.RegisterType((*RequiredInnerMessage)(nil), "test_proto.RequiredInnerMessage") + proto.RegisterType((*MyMessage)(nil), "test_proto.MyMessage") + proto.RegisterType((*MyMessage_SomeGroup)(nil), "test_proto.MyMessage.SomeGroup") + proto.RegisterExtension(E_Ext_More) + proto.RegisterExtension(E_Ext_Text) + proto.RegisterExtension(E_Ext_Number) + proto.RegisterType((*Ext)(nil), "test_proto.Ext") + proto.RegisterMapType((map[int32]int32)(nil), "test_proto.Ext.MapFieldEntry") + proto.RegisterType((*ComplexExtension)(nil), "test_proto.ComplexExtension") + proto.RegisterType((*DefaultsMessage)(nil), "test_proto.DefaultsMessage") + proto.RegisterType((*MyMessageSet)(nil), "test_proto.MyMessageSet") + proto.RegisterType((*Empty)(nil), "test_proto.Empty") + proto.RegisterType((*MessageList)(nil), "test_proto.MessageList") + proto.RegisterType((*MessageList_Message)(nil), "test_proto.MessageList.Message") + proto.RegisterType((*Strings)(nil), "test_proto.Strings") + proto.RegisterType((*Defaults)(nil), "test_proto.Defaults") + proto.RegisterType((*SubDefaults)(nil), "test_proto.SubDefaults") + proto.RegisterType((*RepeatedEnum)(nil), "test_proto.RepeatedEnum") + proto.RegisterType((*MoreRepeated)(nil), "test_proto.MoreRepeated") + proto.RegisterType((*GroupOld)(nil), "test_proto.GroupOld") + proto.RegisterType((*GroupOld_G)(nil), "test_proto.GroupOld.G") + proto.RegisterType((*GroupNew)(nil), "test_proto.GroupNew") + proto.RegisterType((*GroupNew_G)(nil), "test_proto.GroupNew.G") + proto.RegisterType((*FloatingPoint)(nil), "test_proto.FloatingPoint") + proto.RegisterType((*MessageWithMap)(nil), "test_proto.MessageWithMap") + proto.RegisterMapType((map[bool][]byte)(nil), "test_proto.MessageWithMap.ByteMappingEntry") + proto.RegisterMapType((map[int64]*FloatingPoint)(nil), "test_proto.MessageWithMap.MsgMappingEntry") + proto.RegisterMapType((map[int32]string)(nil), "test_proto.MessageWithMap.NameMappingEntry") + proto.RegisterMapType((map[string]string)(nil), "test_proto.MessageWithMap.StrToStrEntry") + proto.RegisterType((*Oneof)(nil), "test_proto.Oneof") + proto.RegisterType((*Oneof_F_Group)(nil), "test_proto.Oneof.F_Group") + 
proto.RegisterType((*Communique)(nil), "test_proto.Communique") + proto.RegisterType((*TestUTF8)(nil), "test_proto.TestUTF8") + proto.RegisterMapType((map[string]int64)(nil), "test_proto.TestUTF8.MapKeyEntry") + proto.RegisterMapType((map[int64]string)(nil), "test_proto.TestUTF8.MapValueEntry") + proto.RegisterExtension(E_Greeting) + proto.RegisterExtension(E_Complex) + proto.RegisterExtension(E_RComplex) + proto.RegisterExtension(E_NoDefaultDouble) + proto.RegisterExtension(E_NoDefaultFloat) + proto.RegisterExtension(E_NoDefaultInt32) + proto.RegisterExtension(E_NoDefaultInt64) + proto.RegisterExtension(E_NoDefaultUint32) + proto.RegisterExtension(E_NoDefaultUint64) + proto.RegisterExtension(E_NoDefaultSint32) + proto.RegisterExtension(E_NoDefaultSint64) + proto.RegisterExtension(E_NoDefaultFixed32) + proto.RegisterExtension(E_NoDefaultFixed64) + proto.RegisterExtension(E_NoDefaultSfixed32) + proto.RegisterExtension(E_NoDefaultSfixed64) + proto.RegisterExtension(E_NoDefaultBool) + proto.RegisterExtension(E_NoDefaultString) + proto.RegisterExtension(E_NoDefaultBytes) + proto.RegisterExtension(E_NoDefaultEnum) + proto.RegisterExtension(E_DefaultDouble) + proto.RegisterExtension(E_DefaultFloat) + proto.RegisterExtension(E_DefaultInt32) + proto.RegisterExtension(E_DefaultInt64) + proto.RegisterExtension(E_DefaultUint32) + proto.RegisterExtension(E_DefaultUint64) + proto.RegisterExtension(E_DefaultSint32) + proto.RegisterExtension(E_DefaultSint64) + proto.RegisterExtension(E_DefaultFixed32) + proto.RegisterExtension(E_DefaultFixed64) + proto.RegisterExtension(E_DefaultSfixed32) + proto.RegisterExtension(E_DefaultSfixed64) + proto.RegisterExtension(E_DefaultBool) + proto.RegisterExtension(E_DefaultString) + proto.RegisterExtension(E_DefaultBytes) + proto.RegisterExtension(E_DefaultEnum) + proto.RegisterExtension(E_X201) + proto.RegisterExtension(E_X202) + proto.RegisterExtension(E_X203) + proto.RegisterExtension(E_X204) + proto.RegisterExtension(E_X205) + proto.RegisterExtension(E_X206) + proto.RegisterExtension(E_X207) + proto.RegisterExtension(E_X208) + proto.RegisterExtension(E_X209) + proto.RegisterExtension(E_X210) + proto.RegisterExtension(E_X211) + proto.RegisterExtension(E_X212) + proto.RegisterExtension(E_X213) + proto.RegisterExtension(E_X214) + proto.RegisterExtension(E_X215) + proto.RegisterExtension(E_X216) + proto.RegisterExtension(E_X217) + proto.RegisterExtension(E_X218) + proto.RegisterExtension(E_X219) + proto.RegisterExtension(E_X220) + proto.RegisterExtension(E_X221) + proto.RegisterExtension(E_X222) + proto.RegisterExtension(E_X223) + proto.RegisterExtension(E_X224) + proto.RegisterExtension(E_X225) + proto.RegisterExtension(E_X226) + proto.RegisterExtension(E_X227) + proto.RegisterExtension(E_X228) + proto.RegisterExtension(E_X229) + proto.RegisterExtension(E_X230) + proto.RegisterExtension(E_X231) + proto.RegisterExtension(E_X232) + proto.RegisterExtension(E_X233) + proto.RegisterExtension(E_X234) + proto.RegisterExtension(E_X235) + proto.RegisterExtension(E_X236) + proto.RegisterExtension(E_X237) + proto.RegisterExtension(E_X238) + proto.RegisterExtension(E_X239) + proto.RegisterExtension(E_X240) + proto.RegisterExtension(E_X241) + proto.RegisterExtension(E_X242) + proto.RegisterExtension(E_X243) + proto.RegisterExtension(E_X244) + proto.RegisterExtension(E_X245) + proto.RegisterExtension(E_X246) + proto.RegisterExtension(E_X247) + proto.RegisterExtension(E_X248) + proto.RegisterExtension(E_X249) + proto.RegisterExtension(E_X250) +} + +func init() { 
proto.RegisterFile("test_proto/test.proto", fileDescriptor_8ca34d01332f1402) } + +var fileDescriptor_8ca34d01332f1402 = []byte{ + // 4795 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5b, 0xd9, 0x73, 0x1b, 0x47, + 0x7a, 0xd7, 0x0c, 0xee, 0x0f, 0x20, 0x31, 0x6c, 0xc9, 0x12, 0x44, 0x59, 0xd2, 0x08, 0x6b, 0xaf, + 0x61, 0xc9, 0xa2, 0x48, 0x60, 0x08, 0x49, 0x70, 0xec, 0x58, 0x07, 0x41, 0xb3, 0x24, 0x12, 0xf2, + 0x90, 0xb6, 0xb3, 0xca, 0x03, 0x0a, 0x24, 0x06, 0x20, 0x56, 0xc0, 0x0c, 0x0c, 0x0c, 0x56, 0x64, + 0x52, 0xa9, 0xf2, 0x63, 0xaa, 0xf2, 0x94, 0x4d, 0x52, 0x95, 0xf7, 0xbc, 0xe4, 0x25, 0xd7, 0x43, + 0xf2, 0x37, 0xc4, 0xd7, 0x7a, 0x77, 0xbd, 0x57, 0x92, 0x4d, 0x36, 0xf7, 0x9d, 0xcd, 0xbd, 0x47, + 0x5e, 0x9c, 0xea, 0xaf, 0x7b, 0x66, 0x7a, 0x06, 0x50, 0x93, 0x7c, 0xe2, 0x74, 0xf7, 0xef, 0xfb, + 0xf5, 0xf5, 0x9b, 0xef, 0xfb, 0xba, 0x31, 0x84, 0xe7, 0x5c, 0x6b, 0xec, 0x36, 0x87, 0x23, 0xc7, + 0x75, 0x6e, 0xd0, 0xc7, 0x25, 0x7c, 0x24, 0x10, 0x54, 0x17, 0xaf, 0x41, 0x72, 0xdd, 0x59, 0xb3, + 0x27, 0x03, 0x72, 0x05, 0x62, 0x1d, 0xc7, 0x29, 0x28, 0xba, 0x5a, 0x9a, 0x2f, 0xe7, 0x97, 0x02, + 0xcc, 0x52, 0xbd, 0xd1, 0x30, 0x69, 0x5b, 0xf1, 0x26, 0x64, 0xd7, 0x9d, 0x1d, 0x6b, 0xec, 0xd6, + 0x7b, 0x56, 0xbf, 0x4d, 0xce, 0x40, 0xe2, 0x61, 0x6b, 0xd7, 0xea, 0xa3, 0x4d, 0xc6, 0x64, 0x05, + 0x42, 0x20, 0xbe, 0x73, 0x38, 0xb4, 0x0a, 0x2a, 0x56, 0xe2, 0x73, 0xf1, 0x0f, 0x8b, 0xb4, 0x1b, + 0x6a, 0x49, 0xae, 0x41, 0xfc, 0x41, 0xcf, 0x6e, 0xf3, 0x7e, 0xce, 0x89, 0xfd, 0x30, 0xc4, 0xd2, + 0x83, 0x8d, 0xad, 0xfb, 0x26, 0x82, 0x68, 0x0f, 0x3b, 0xad, 0xdd, 0x3e, 0x25, 0x53, 0x68, 0x0f, + 0x58, 0xa0, 0xb5, 0x8f, 0x5a, 0xa3, 0xd6, 0xa0, 0x10, 0xd3, 0x95, 0x52, 0xc2, 0x64, 0x05, 0xf2, + 0x1a, 0xcc, 0x99, 0xd6, 0x7b, 0x93, 0xde, 0xc8, 0x6a, 0xe3, 0xf0, 0x0a, 0x71, 0x5d, 0x2d, 0x65, + 0x67, 0xf5, 0x80, 0xcd, 0x66, 0x18, 0xcd, 0xcc, 0x87, 0x56, 0xcb, 0xf5, 0xcc, 0x13, 0x7a, 0xec, + 0x08, 0x73, 0x01, 0x4d, 0xcd, 0x1b, 0x43, 0xb7, 0xe7, 0xd8, 0xad, 0x3e, 0x33, 0x4f, 0xea, 0x8a, + 0xd4, 0x3c, 0x84, 0x26, 0x5f, 0x84, 0x7c, 0xbd, 0x79, 0xd7, 0x71, 0xfa, 0xcd, 0x11, 0x1f, 0x55, + 0x01, 0x74, 0xb5, 0x94, 0x36, 0xe7, 0xea, 0xb4, 0xd6, 0x1b, 0x2a, 0x29, 0x81, 0x56, 0x6f, 0x6e, + 0xd8, 0x6e, 0xa5, 0x1c, 0x00, 0xb3, 0xba, 0x5a, 0x4a, 0x98, 0xf3, 0x75, 0xac, 0x9e, 0x42, 0x56, + 0x8d, 0x00, 0x99, 0xd3, 0xd5, 0x52, 0x8c, 0x21, 0xab, 0x86, 0x8f, 0x7c, 0x05, 0x48, 0xbd, 0x59, + 0xef, 0x1d, 0x58, 0x6d, 0x91, 0x75, 0x4e, 0x57, 0x4b, 0x29, 0x53, 0xab, 0xf3, 0x86, 0x19, 0x68, + 0x91, 0x79, 0x5e, 0x57, 0x4b, 0x49, 0x0f, 0x2d, 0x70, 0x5f, 0x85, 0x85, 0x7a, 0xf3, 0xed, 0x5e, + 0x78, 0xc0, 0x79, 0x5d, 0x2d, 0xcd, 0x99, 0xf9, 0x3a, 0xab, 0x9f, 0xc6, 0x8a, 0xc4, 0x9a, 0xae, + 0x96, 0xe2, 0x1c, 0x2b, 0xf0, 0xe2, 0xec, 0xea, 0x7d, 0xa7, 0xe5, 0x06, 0xd0, 0x05, 0x5d, 0x2d, + 0xa9, 0xe6, 0x7c, 0x1d, 0xab, 0xc3, 0xac, 0xf7, 0x9d, 0xc9, 0x6e, 0xdf, 0x0a, 0xa0, 0x44, 0x57, + 0x4b, 0x8a, 0x99, 0xaf, 0xb3, 0xfa, 0x30, 0x76, 0xdb, 0x1d, 0xf5, 0xec, 0x6e, 0x80, 0x3d, 0x8d, + 0x3a, 0xce, 0xd7, 0x59, 0x7d, 0x78, 0x04, 0x77, 0x0f, 0x5d, 0x6b, 0x1c, 0x40, 0x2d, 0x5d, 0x2d, + 0xe5, 0xcc, 0xf9, 0x3a, 0x56, 0x47, 0x58, 0x23, 0x6b, 0xd0, 0xd1, 0xd5, 0xd2, 0x02, 0x65, 0x9d, + 0xb1, 0x06, 0xdb, 0x91, 0x35, 0xe8, 0xea, 0x6a, 0x89, 0x70, 0xac, 0xb0, 0x06, 0x4b, 0x70, 0xba, + 0xde, 0xdc, 0xee, 0x44, 0x37, 0x6e, 0x5f, 0x57, 0x4b, 0x79, 0x73, 0xa1, 0xee, 0xb5, 0xcc, 0xc2, + 0x8b, 0xec, 0x3d, 0x5d, 0x2d, 0x69, 0x3e, 0x5e, 0xe0, 0x17, 0x35, 0xc9, 0xa4, 0x5e, 0x38, 0xa3, + 0xc7, 0x04, 0x4d, 0xb2, 0xca, 0xb0, 0x26, 
0x39, 0xf0, 0x39, 0x3d, 0x26, 0x6a, 0x32, 0x82, 0xc4, + 0xee, 0x39, 0xf2, 0xac, 0x1e, 0x13, 0x35, 0xc9, 0x91, 0x11, 0x4d, 0x72, 0xec, 0x39, 0x3d, 0x16, + 0xd6, 0xe4, 0x14, 0x5a, 0x64, 0x2e, 0xe8, 0xb1, 0xb0, 0x26, 0x39, 0x3a, 0xac, 0x49, 0x0e, 0x3e, + 0xaf, 0xc7, 0x42, 0x9a, 0x8c, 0x62, 0x45, 0xe2, 0x45, 0x3d, 0x16, 0xd2, 0xa4, 0x38, 0x3b, 0x4f, + 0x93, 0x1c, 0x7a, 0x41, 0x8f, 0x89, 0x9a, 0x14, 0x59, 0x7d, 0x4d, 0x72, 0xe8, 0xf3, 0x7a, 0x2c, + 0xa4, 0x49, 0x11, 0xeb, 0x6b, 0x92, 0x63, 0x2f, 0xea, 0xb1, 0x90, 0x26, 0x39, 0xf6, 0x65, 0x51, + 0x93, 0x1c, 0xfa, 0x81, 0xa2, 0xc7, 0x44, 0x51, 0x72, 0xe8, 0xb5, 0x90, 0x28, 0x39, 0xf6, 0x43, + 0x8a, 0x15, 0x55, 0x19, 0x05, 0x8b, 0xab, 0xf0, 0x11, 0x05, 0x8b, 0xb2, 0xe4, 0xe0, 0x1b, 0x11, + 0x59, 0x72, 0xf8, 0xc7, 0x14, 0x1e, 0xd6, 0xe5, 0xb4, 0x81, 0xc8, 0xff, 0x09, 0x35, 0x08, 0x0b, + 0x93, 0x1b, 0x04, 0xc2, 0x74, 0xb8, 0x13, 0x2d, 0x5c, 0xd2, 0x15, 0x5f, 0x98, 0x9e, 0x67, 0x15, + 0x85, 0xe9, 0x03, 0x2f, 0x63, 0xc8, 0xe0, 0xc2, 0x9c, 0x42, 0x56, 0x8d, 0x00, 0xa9, 0xeb, 0x4a, + 0x20, 0x4c, 0x1f, 0x19, 0x12, 0xa6, 0x8f, 0xbd, 0xa2, 0x2b, 0xa2, 0x30, 0x67, 0xa0, 0x45, 0xe6, + 0xa2, 0xae, 0x88, 0xc2, 0xf4, 0xd1, 0xa2, 0x30, 0x7d, 0xf0, 0x17, 0x74, 0x45, 0x10, 0xe6, 0x34, + 0x56, 0x24, 0x7e, 0x41, 0x57, 0x04, 0x61, 0x86, 0x67, 0xc7, 0x84, 0xe9, 0x43, 0x5f, 0xd4, 0x95, + 0x40, 0x98, 0x61, 0x56, 0x2e, 0x4c, 0x1f, 0xfa, 0x45, 0x5d, 0x11, 0x84, 0x19, 0xc6, 0x72, 0x61, + 0xfa, 0xd8, 0x97, 0x30, 0x4e, 0x7b, 0xc2, 0xf4, 0xb1, 0x82, 0x30, 0x7d, 0xe8, 0xef, 0xd0, 0x98, + 0xee, 0x0b, 0xd3, 0x87, 0x8a, 0xc2, 0xf4, 0xb1, 0xbf, 0x4b, 0xb1, 0x81, 0x30, 0xa7, 0xc1, 0xe2, + 0x2a, 0xfc, 0x1e, 0x05, 0x07, 0xc2, 0xf4, 0xc1, 0x61, 0x61, 0xfa, 0xf0, 0xdf, 0xa7, 0x70, 0x51, + 0x98, 0xb3, 0x0c, 0x44, 0xfe, 0x3f, 0xa0, 0x06, 0xa2, 0x30, 0x7d, 0x83, 0x25, 0x9c, 0x26, 0x15, + 0x66, 0xdb, 0xea, 0xb4, 0x26, 0x7d, 0x2a, 0xe3, 0x12, 0x55, 0x66, 0x2d, 0xee, 0x8e, 0x26, 0x16, + 0x9d, 0xab, 0xe3, 0xf4, 0xef, 0x7b, 0x6d, 0x64, 0x89, 0x0e, 0x9f, 0x09, 0x34, 0x30, 0x78, 0x99, + 0x2a, 0xb4, 0xa6, 0x56, 0xca, 0x66, 0x9e, 0xa9, 0x74, 0x1a, 0x5f, 0x35, 0x04, 0xfc, 0x55, 0xaa, + 0xd3, 0x9a, 0x5a, 0x35, 0x18, 0xbe, 0x6a, 0x04, 0xf8, 0x0a, 0x9d, 0x80, 0x27, 0xd6, 0xc0, 0xe2, + 0x1a, 0x55, 0x6b, 0x2d, 0x56, 0x29, 0x2f, 0x9b, 0x0b, 0x9e, 0x64, 0x67, 0x19, 0x85, 0xba, 0x79, + 0x85, 0x8a, 0xb6, 0x16, 0xab, 0x1a, 0xbe, 0x91, 0xd8, 0x53, 0x99, 0x0a, 0x9d, 0x4b, 0x37, 0xb0, + 0xb9, 0x4e, 0xb5, 0x5b, 0x8b, 0x57, 0xca, 0xcb, 0xcb, 0xa6, 0xc6, 0x15, 0x3c, 0xc3, 0x26, 0xd4, + 0xcf, 0x12, 0xd5, 0x70, 0x2d, 0x5e, 0x35, 0x7c, 0x9b, 0x70, 0x3f, 0x0b, 0x9e, 0x94, 0x03, 0x93, + 0x1b, 0x54, 0xcb, 0xb5, 0x64, 0x65, 0xc5, 0x58, 0x59, 0xbd, 0x6d, 0xe6, 0x99, 0xa6, 0x03, 0x1b, + 0x83, 0xf6, 0xc3, 0x45, 0x1d, 0x18, 0x2d, 0x53, 0x55, 0xd7, 0x92, 0xe5, 0x9b, 0x2b, 0xb7, 0xca, + 0xb7, 0x4c, 0x8d, 0xab, 0x3b, 0xb0, 0x7a, 0x9d, 0x5a, 0x71, 0x79, 0x07, 0x56, 0x2b, 0x54, 0xdf, + 0x35, 0x6d, 0xdf, 0xea, 0xf7, 0x9d, 0x57, 0xf4, 0xe2, 0x53, 0x67, 0xd4, 0x6f, 0x5f, 0x29, 0x82, + 0xa9, 0x71, 0xc5, 0x8b, 0xbd, 0x2e, 0x78, 0x92, 0x0f, 0xcc, 0x7f, 0x95, 0x66, 0xac, 0xb9, 0x5a, + 0xea, 0x6e, 0xaf, 0x6b, 0x3b, 0x63, 0xcb, 0xcc, 0x33, 0xf1, 0x47, 0xd6, 0x64, 0x3b, 0xba, 0x8e, + 0x5f, 0xa5, 0x66, 0x0b, 0xb5, 0xd8, 0xf5, 0x4a, 0x99, 0xf6, 0x34, 0x6b, 0x1d, 0xb7, 0xa3, 0xeb, + 0xf8, 0x6b, 0xd4, 0x86, 0xd4, 0x62, 0xd7, 0xab, 0x06, 0xb7, 0x11, 0xd7, 0xb1, 0x0a, 0x67, 0x84, + 0x77, 0x21, 0xb0, 0xfa, 0x75, 0x6a, 0x95, 0x67, 0x3d, 0x11, 0xff, 0x8d, 0x98, 0x69, 0x17, 0xea, + 0xed, 0x37, 0xa8, 0x9d, 0xc6, 0x7a, 0x23, 0xfe, 0x8b, 0x11, 0xd8, 
0xdd, 0x84, 0xb3, 0x91, 0x5c, + 0xa2, 0x39, 0x6c, 0xed, 0x3d, 0xb1, 0xda, 0x85, 0x32, 0x4d, 0x29, 0xee, 0xaa, 0x9a, 0x62, 0x9e, + 0x0e, 0xa5, 0x15, 0x8f, 0xb0, 0x99, 0xdc, 0x86, 0x73, 0xd1, 0xe4, 0xc2, 0xb3, 0xac, 0xd0, 0x1c, + 0x03, 0x2d, 0xcf, 0x84, 0xf3, 0x8c, 0x88, 0xa9, 0x10, 0x54, 0x3c, 0x53, 0x83, 0x26, 0x1d, 0x81, + 0x69, 0x10, 0x5b, 0xb8, 0xe9, 0x6b, 0x70, 0x7e, 0x3a, 0xfd, 0xf0, 0x8c, 0x57, 0x69, 0x16, 0x82, + 0xc6, 0x67, 0xa3, 0x99, 0xc8, 0x94, 0xf9, 0x8c, 0xbe, 0xab, 0x34, 0x2d, 0x11, 0xcd, 0xa7, 0x7a, + 0x7f, 0x15, 0x0a, 0x53, 0x09, 0x8a, 0x67, 0x7d, 0x93, 0xe6, 0x29, 0x68, 0xfd, 0x5c, 0x24, 0x57, + 0x89, 0x1a, 0xcf, 0xe8, 0xfa, 0x16, 0x4d, 0x5c, 0x04, 0xe3, 0xa9, 0x9e, 0x71, 0xc9, 0xc2, 0x29, + 0x8c, 0x67, 0x7b, 0x9b, 0x66, 0x32, 0x7c, 0xc9, 0x42, 0xd9, 0x8c, 0xd8, 0x6f, 0x24, 0xa7, 0xf1, + 0x6c, 0x6b, 0x34, 0xb5, 0xe1, 0xfd, 0x86, 0xd3, 0x1b, 0x6e, 0xfc, 0x33, 0xd4, 0x78, 0x7b, 0xf6, + 0x8c, 0x7f, 0x14, 0xa3, 0x49, 0x09, 0xb7, 0xde, 0x9e, 0x35, 0x65, 0xdf, 0x7a, 0xc6, 0x94, 0x7f, + 0x4c, 0xad, 0x89, 0x60, 0x3d, 0x35, 0xe7, 0x37, 0x60, 0x71, 0x46, 0xbe, 0xe2, 0xd9, 0xff, 0x84, + 0xda, 0xe7, 0xd1, 0xfe, 0xdc, 0x54, 0xea, 0x32, 0xcd, 0x30, 0x63, 0x04, 0x3f, 0xa5, 0x0c, 0x5a, + 0x88, 0x61, 0x6a, 0x0c, 0x75, 0x98, 0xf3, 0xf2, 0xf1, 0xee, 0xc8, 0x99, 0x0c, 0x0b, 0x75, 0x5d, + 0x2d, 0x41, 0x59, 0x9f, 0x71, 0x3a, 0xf6, 0xd2, 0xf3, 0x75, 0x8a, 0x33, 0xc3, 0x66, 0x8c, 0x87, + 0x31, 0x33, 0x9e, 0x47, 0x7a, 0xec, 0x99, 0x3c, 0x0c, 0xe7, 0xf3, 0x08, 0x66, 0x94, 0xc7, 0x0b, + 0x77, 0x8c, 0xe7, 0xb1, 0xae, 0x3c, 0x83, 0xc7, 0x0b, 0x7e, 0x9c, 0x27, 0x64, 0xb6, 0xb8, 0x1a, + 0x9c, 0xc9, 0xb1, 0x9d, 0xbc, 0x10, 0x3d, 0xa4, 0xaf, 0xe3, 0xe9, 0x2a, 0x5c, 0xc9, 0xcc, 0x84, + 0xe1, 0x4d, 0x9b, 0xbd, 0xf5, 0x0c, 0xb3, 0xd0, 0x68, 0xa6, 0xcd, 0x7e, 0x7e, 0x86, 0x59, 0xf1, + 0x37, 0x15, 0x88, 0x3f, 0xd8, 0xd8, 0xba, 0x4f, 0xd2, 0x10, 0x7f, 0xa7, 0xb1, 0x71, 0x5f, 0x3b, + 0x45, 0x9f, 0xee, 0x36, 0x1a, 0x0f, 0x35, 0x85, 0x64, 0x20, 0x71, 0xf7, 0x4b, 0x3b, 0x6b, 0xdb, + 0x9a, 0x4a, 0xf2, 0x90, 0xad, 0x6f, 0x6c, 0xad, 0xaf, 0x99, 0x8f, 0xcc, 0x8d, 0xad, 0x1d, 0x2d, + 0x46, 0xdb, 0xea, 0x0f, 0x1b, 0x77, 0x76, 0xb4, 0x38, 0x49, 0x41, 0x8c, 0xd6, 0x25, 0x08, 0x40, + 0x72, 0x7b, 0xc7, 0xdc, 0xd8, 0x5a, 0xd7, 0x92, 0x94, 0x65, 0x67, 0x63, 0x73, 0x4d, 0x4b, 0x51, + 0xe4, 0xce, 0xdb, 0x8f, 0x1e, 0xae, 0x69, 0x69, 0xfa, 0x78, 0xc7, 0x34, 0xef, 0x7c, 0x49, 0xcb, + 0x50, 0xa3, 0xcd, 0x3b, 0x8f, 0x34, 0xc0, 0xe6, 0x3b, 0x77, 0x1f, 0xae, 0x69, 0x59, 0x92, 0x83, + 0x74, 0xfd, 0xed, 0xad, 0x7b, 0x3b, 0x1b, 0x8d, 0x2d, 0x2d, 0x57, 0xfc, 0x45, 0x28, 0xb0, 0x65, + 0x0e, 0xad, 0x22, 0xbb, 0x32, 0x78, 0x03, 0x12, 0x6c, 0x6f, 0x14, 0xd4, 0xca, 0xd5, 0xe9, 0xbd, + 0x99, 0x36, 0x5a, 0x62, 0xbb, 0xc4, 0x0c, 0x17, 0x2f, 0x42, 0x82, 0xad, 0xd3, 0x19, 0x48, 0xb0, + 0xf5, 0x51, 0xf1, 0x2a, 0x81, 0x15, 0x8a, 0xbf, 0xa5, 0x02, 0xac, 0x3b, 0xdb, 0x4f, 0x7a, 0x43, + 0xbc, 0xb8, 0xb9, 0x08, 0x30, 0x7e, 0xd2, 0x1b, 0x36, 0xf1, 0x0d, 0xe4, 0x97, 0x0e, 0x19, 0x5a, + 0x83, 0xbe, 0x97, 0x5c, 0x81, 0x1c, 0x36, 0xf3, 0x57, 0x04, 0xef, 0x1a, 0x52, 0x66, 0x96, 0xd6, + 0x71, 0x27, 0x19, 0x86, 0x54, 0x0d, 0xbc, 0x62, 0x48, 0x0a, 0x90, 0xaa, 0x41, 0x2e, 0x03, 0x16, + 0x9b, 0x63, 0x8c, 0xa6, 0x78, 0xad, 0x90, 0x31, 0xb1, 0x5f, 0x16, 0x5f, 0xc9, 0xeb, 0x80, 0x7d, + 0xb2, 0x99, 0xe7, 0x67, 0xbd, 0x25, 0xde, 0x80, 0x97, 0xe8, 0x03, 0x9b, 0x6f, 0x60, 0xb2, 0xd8, + 0x80, 0x8c, 0x5f, 0x4f, 0x7b, 0xc3, 0x5a, 0x3e, 0x27, 0x0d, 0xe7, 0x04, 0x58, 0xe5, 0x4f, 0x8a, + 0x01, 0xf8, 0x78, 0x16, 0x70, 0x3c, 0xcc, 0x88, 0x0d, 0xa8, 0x78, 0x11, 0xe6, 0xb6, 0x1c, 
0x9b, + 0xbd, 0xc7, 0xb8, 0x4e, 0x39, 0x50, 0x5a, 0x05, 0x05, 0xcf, 0xbf, 0x4a, 0xab, 0x78, 0x09, 0x40, + 0x68, 0xd3, 0x40, 0xd9, 0x65, 0x6d, 0xe8, 0x0f, 0x94, 0xdd, 0xe2, 0x35, 0x48, 0x6e, 0xb6, 0x0e, + 0x76, 0x5a, 0x5d, 0x72, 0x05, 0xa0, 0xdf, 0x1a, 0xbb, 0xcd, 0x0e, 0xee, 0xc4, 0xe7, 0x9f, 0x7f, + 0xfe, 0xb9, 0x82, 0xc9, 0x74, 0x86, 0xd6, 0xb2, 0x1d, 0x19, 0x03, 0x34, 0xfa, 0xed, 0x4d, 0x6b, + 0x3c, 0x6e, 0x75, 0x2d, 0xb2, 0x0a, 0x49, 0xdb, 0x1a, 0xd3, 0xe8, 0xab, 0xe0, 0x5d, 0xd3, 0x45, + 0x71, 0x1d, 0x02, 0xdc, 0xd2, 0x16, 0x82, 0x4c, 0x0e, 0x26, 0x1a, 0xc4, 0xec, 0xc9, 0x00, 0x6f, + 0xd4, 0x12, 0x26, 0x7d, 0x5c, 0x7c, 0x1e, 0x92, 0x0c, 0x43, 0x08, 0xc4, 0xed, 0xd6, 0xc0, 0x2a, + 0xb0, 0x9e, 0xf1, 0xb9, 0xf8, 0x55, 0x05, 0x60, 0xcb, 0x7a, 0x7a, 0xac, 0x5e, 0x03, 0x9c, 0xa4, + 0xd7, 0x18, 0xeb, 0xf5, 0x55, 0x59, 0xaf, 0x54, 0x6d, 0x1d, 0xc7, 0x69, 0x37, 0xd9, 0x46, 0xb3, + 0xeb, 0xbf, 0x0c, 0xad, 0xc1, 0x9d, 0x2b, 0x3e, 0x86, 0xdc, 0x86, 0x6d, 0x5b, 0x23, 0x6f, 0x54, + 0x04, 0xe2, 0xfb, 0xce, 0xd8, 0xe5, 0x37, 0x91, 0xf8, 0x4c, 0x0a, 0x10, 0x1f, 0x3a, 0x23, 0x97, + 0xcd, 0xb4, 0x16, 0x37, 0x96, 0x97, 0x97, 0x4d, 0xac, 0x21, 0xcf, 0x43, 0x66, 0xcf, 0xb1, 0x6d, + 0x6b, 0x8f, 0x4e, 0x23, 0x86, 0x47, 0xc7, 0xa0, 0xa2, 0xf8, 0xcb, 0x0a, 0xe4, 0x1a, 0xee, 0x7e, + 0x40, 0xae, 0x41, 0xec, 0x89, 0x75, 0x88, 0xc3, 0x8b, 0x99, 0xf4, 0x91, 0xbe, 0x30, 0x5f, 0x69, + 0xf5, 0x27, 0xec, 0x5e, 0x32, 0x67, 0xb2, 0x02, 0x39, 0x0b, 0xc9, 0xa7, 0x56, 0xaf, 0xbb, 0xef, + 0x22, 0xa7, 0x6a, 0xf2, 0x12, 0x59, 0x82, 0x44, 0x8f, 0x0e, 0xb6, 0x10, 0xc7, 0x15, 0x2b, 0x88, + 0x2b, 0x26, 0xce, 0xc2, 0x64, 0xb0, 0xab, 0xe9, 0x74, 0x5b, 0x7b, 0xff, 0xfd, 0xf7, 0xdf, 0x57, + 0x8b, 0xfb, 0x70, 0xc6, 0x7b, 0x89, 0x43, 0xd3, 0x7d, 0x04, 0x85, 0xbe, 0xe5, 0x34, 0x3b, 0x3d, + 0xbb, 0xd5, 0xef, 0x1f, 0x36, 0x9f, 0x3a, 0x76, 0xb3, 0x65, 0x37, 0x9d, 0xf1, 0x5e, 0x6b, 0x84, + 0x4b, 0x20, 0xeb, 0xe4, 0x4c, 0xdf, 0x72, 0xea, 0xcc, 0xf0, 0x5d, 0xc7, 0xbe, 0x63, 0x37, 0xa8, + 0x55, 0xf1, 0xb3, 0x38, 0x64, 0x36, 0x0f, 0x3d, 0xfe, 0x33, 0x90, 0xd8, 0x73, 0x26, 0x36, 0x5b, + 0xcf, 0x84, 0xc9, 0x0a, 0xfe, 0x3e, 0xa9, 0xc2, 0x3e, 0x9d, 0x81, 0xc4, 0x7b, 0x13, 0xc7, 0xb5, + 0x70, 0xca, 0x19, 0x93, 0x15, 0xe8, 0x8a, 0x0d, 0x2d, 0xb7, 0x10, 0xc7, 0x6b, 0x0a, 0xfa, 0x18, + 0xac, 0x41, 0xe2, 0x58, 0x6b, 0x40, 0x96, 0x21, 0xe9, 0xd0, 0x3d, 0x18, 0x17, 0x92, 0x78, 0x0f, + 0x1b, 0x32, 0x10, 0x77, 0xc7, 0xe4, 0x38, 0xf2, 0x00, 0x16, 0x9e, 0x5a, 0xcd, 0xc1, 0x64, 0xec, + 0x36, 0xbb, 0x4e, 0xb3, 0x6d, 0x59, 0x43, 0x6b, 0x54, 0x98, 0xc3, 0xde, 0x42, 0x1e, 0x62, 0xd6, + 0x82, 0x9a, 0xf3, 0x4f, 0xad, 0xcd, 0xc9, 0xd8, 0x5d, 0x77, 0xee, 0xa3, 0x1d, 0x59, 0x85, 0xcc, + 0xc8, 0xa2, 0x7e, 0x81, 0x0e, 0x39, 0x37, 0x3d, 0x82, 0x90, 0x71, 0x7a, 0x64, 0x0d, 0xb1, 0x82, + 0xdc, 0x84, 0xf4, 0x6e, 0xef, 0x89, 0x35, 0xde, 0xb7, 0xda, 0x85, 0x94, 0xae, 0x94, 0xe6, 0xcb, + 0x17, 0x44, 0x2b, 0x7f, 0x81, 0x97, 0xee, 0x39, 0x7d, 0x67, 0x64, 0xfa, 0x60, 0xf2, 0x1a, 0x64, + 0xc6, 0xce, 0xc0, 0x62, 0x6a, 0x4f, 0x63, 0xb0, 0xbd, 0x3c, 0xdb, 0x72, 0xdb, 0x19, 0x58, 0x9e, + 0x57, 0xf3, 0x2c, 0xc8, 0x05, 0x36, 0xdc, 0x5d, 0x7a, 0x98, 0x28, 0x00, 0x5e, 0xf8, 0xd0, 0x41, + 0xe1, 0xe1, 0x82, 0x2c, 0xd2, 0x41, 0x75, 0x3b, 0x34, 0x67, 0x2b, 0x64, 0xf1, 0x2c, 0xef, 0x97, + 0x17, 0x5f, 0x81, 0x8c, 0x4f, 0x18, 0xb8, 0x43, 0xe6, 0x82, 0x32, 0xe8, 0x21, 0x98, 0x3b, 0x64, + 0xfe, 0xe7, 0x45, 0x48, 0xe0, 0xc0, 0x69, 0xe4, 0x32, 0xd7, 0x68, 0xa0, 0xcc, 0x40, 0x62, 0xdd, + 0x5c, 0x5b, 0xdb, 0xd2, 0x14, 0x8c, 0x99, 0x0f, 0xdf, 0x5e, 0xd3, 0x54, 0x41, 0xbf, 0xbf, 0xad, + 0x42, 0x6c, 0xed, 
0x00, 0x95, 0xd3, 0x6e, 0xb9, 0x2d, 0xef, 0x0d, 0xa7, 0xcf, 0xa4, 0x06, 0x99, + 0x41, 0xcb, 0xeb, 0x4b, 0xc5, 0x25, 0x0e, 0xf9, 0x92, 0xb5, 0x03, 0x77, 0x69, 0xb3, 0xc5, 0x7a, + 0x5e, 0xb3, 0xdd, 0xd1, 0xa1, 0x99, 0x1e, 0xf0, 0xe2, 0xe2, 0xab, 0x30, 0x17, 0x6a, 0x12, 0x5f, + 0xd1, 0xc4, 0x8c, 0x57, 0x34, 0xc1, 0x5f, 0xd1, 0x9a, 0x7a, 0x4b, 0x29, 0xd7, 0x20, 0x3e, 0x70, + 0x46, 0x16, 0x79, 0x6e, 0xe6, 0x02, 0x17, 0xba, 0x28, 0x99, 0x7c, 0x64, 0x28, 0x26, 0xda, 0x94, + 0x5f, 0x86, 0xb8, 0x6b, 0x1d, 0xb8, 0xcf, 0xb2, 0xdd, 0x67, 0xf3, 0xa3, 0x90, 0xf2, 0x75, 0x48, + 0xda, 0x93, 0xc1, 0xae, 0x35, 0x7a, 0x16, 0xb8, 0x87, 0x03, 0xe3, 0xa0, 0xe2, 0x3b, 0xa0, 0xdd, + 0x73, 0x06, 0xc3, 0xbe, 0x75, 0xb0, 0x76, 0xe0, 0x5a, 0xf6, 0xb8, 0xe7, 0xd8, 0x74, 0x0e, 0x9d, + 0xde, 0x08, 0xdd, 0x1a, 0xce, 0x01, 0x0b, 0xd4, 0xcd, 0x8c, 0xad, 0x3d, 0xc7, 0x6e, 0xf3, 0xa9, + 0xf1, 0x12, 0x45, 0xbb, 0xfb, 0xbd, 0x11, 0xf5, 0x68, 0x34, 0xf8, 0xb0, 0x42, 0x71, 0x1d, 0xf2, + 0xfc, 0x18, 0x36, 0xe6, 0x1d, 0x17, 0xaf, 0x42, 0xce, 0xab, 0xc2, 0x5f, 0x7e, 0xd2, 0x10, 0x7f, + 0xbc, 0x66, 0x36, 0xb4, 0x53, 0x74, 0x5f, 0x1b, 0x5b, 0x6b, 0x9a, 0x42, 0x1f, 0x76, 0xde, 0x6d, + 0x84, 0xf6, 0xf2, 0x79, 0xc8, 0xf9, 0x63, 0xdf, 0xb6, 0x5c, 0x6c, 0xa1, 0x51, 0x2a, 0x55, 0x53, + 0xd3, 0x4a, 0x31, 0x05, 0x89, 0xb5, 0xc1, 0xd0, 0x3d, 0x2c, 0xfe, 0x12, 0x64, 0x39, 0xe8, 0x61, + 0x6f, 0xec, 0x92, 0xdb, 0x90, 0x1a, 0xf0, 0xf9, 0x2a, 0x98, 0x8b, 0x86, 0x65, 0x1d, 0x20, 0xbd, + 0x67, 0xd3, 0xc3, 0x2f, 0x56, 0x20, 0x25, 0xb8, 0x77, 0xee, 0x79, 0x54, 0xd1, 0xf3, 0x30, 0x1f, + 0x15, 0x13, 0x7c, 0x54, 0x71, 0x13, 0x52, 0x2c, 0x30, 0x8f, 0x31, 0xdd, 0x60, 0xe7, 0x77, 0xa6, + 0x31, 0x26, 0xbe, 0x2c, 0xab, 0x63, 0x39, 0xd4, 0x65, 0xc8, 0xe2, 0x3b, 0xe3, 0xab, 0x90, 0x7a, + 0x73, 0xc0, 0x2a, 0xa6, 0xf8, 0x3f, 0x4a, 0x40, 0xda, 0x5b, 0x2b, 0x72, 0x01, 0x92, 0xec, 0x10, + 0x8b, 0x54, 0xde, 0xa5, 0x4e, 0x02, 0x8f, 0xad, 0xe4, 0x02, 0xa4, 0xf8, 0x41, 0x95, 0x07, 0x1c, + 0xb5, 0x52, 0x36, 0x93, 0xec, 0x60, 0xea, 0x37, 0x56, 0x0d, 0xf4, 0x93, 0xec, 0xba, 0x26, 0xc9, + 0x8e, 0x9e, 0x44, 0x87, 0x8c, 0x7f, 0xd8, 0xc4, 0x10, 0xc1, 0xef, 0x66, 0xd2, 0xde, 0xe9, 0x52, + 0x40, 0x54, 0x0d, 0x74, 0xa0, 0xfc, 0x22, 0x26, 0x5d, 0x0f, 0xf2, 0xa6, 0xb4, 0x77, 0x64, 0xc4, + 0x5f, 0x9e, 0xbc, 0x5b, 0x97, 0x14, 0x3f, 0x24, 0x06, 0x80, 0xaa, 0x81, 0x9e, 0xc9, 0xbb, 0x62, + 0x49, 0xf1, 0x83, 0x20, 0xb9, 0x4c, 0x87, 0x88, 0x07, 0x3b, 0xf4, 0x3f, 0xc1, 0x7d, 0x4a, 0x92, + 0x1d, 0xf7, 0xc8, 0x15, 0xca, 0xc0, 0x4e, 0x6f, 0xe8, 0x1a, 0x82, 0xcb, 0x93, 0x14, 0x3f, 0xd4, + 0x91, 0x6b, 0x14, 0xc2, 0x96, 0xbf, 0x00, 0xcf, 0xb8, 0x29, 0x49, 0xf1, 0x9b, 0x12, 0xa2, 0xd3, + 0x0e, 0xd1, 0x43, 0xa1, 0x57, 0x12, 0x6e, 0x45, 0x92, 0xec, 0x56, 0x84, 0x5c, 0x42, 0x3a, 0x36, + 0xa9, 0x5c, 0x70, 0x03, 0x92, 0xe2, 0xa7, 0xc0, 0xa0, 0x1d, 0x73, 0x49, 0xff, 0xb6, 0x23, 0xc5, + 0xcf, 0x79, 0xe4, 0x16, 0xdd, 0x2f, 0xaa, 0xf0, 0xc2, 0x3c, 0xfa, 0xe2, 0x45, 0x51, 0x7a, 0xde, + 0xae, 0x32, 0x57, 0x5c, 0x63, 0x6e, 0xcc, 0x4c, 0xd4, 0xf1, 0x8d, 0x58, 0xa4, 0x96, 0x8f, 0x7a, + 0x76, 0xa7, 0x90, 0xc7, 0xb5, 0x88, 0xf5, 0xec, 0x8e, 0x99, 0xa8, 0xd3, 0x1a, 0xa6, 0x82, 0x2d, + 0xda, 0xa6, 0x61, 0x5b, 0xfc, 0x3a, 0x6b, 0xa4, 0x55, 0xa4, 0x00, 0x89, 0x7a, 0x73, 0xab, 0x65, + 0x17, 0x16, 0x98, 0x9d, 0xdd, 0xb2, 0xcd, 0x78, 0x7d, 0xab, 0x65, 0x93, 0x97, 0x21, 0x36, 0x9e, + 0xec, 0x16, 0xc8, 0xf4, 0xcf, 0x82, 0xdb, 0x93, 0x5d, 0x6f, 0x30, 0x26, 0xc5, 0x90, 0x0b, 0x90, + 0x1e, 0xbb, 0xa3, 0xe6, 0x2f, 0x58, 0x23, 0xa7, 0x70, 0x1a, 0x97, 0xf1, 0x94, 0x99, 0x1a, 0xbb, + 0xa3, 0xc7, 0xd6, 0xc8, 0x39, 0xa6, 0x0f, 
0x2e, 0x5e, 0x82, 0xac, 0xc0, 0x4b, 0xf2, 0xa0, 0xd8, + 0x2c, 0x81, 0xa9, 0x29, 0x37, 0x4d, 0xc5, 0x2e, 0xbe, 0x03, 0x39, 0xef, 0x88, 0x85, 0x33, 0x36, + 0xe8, 0xdb, 0xd4, 0x77, 0x46, 0xf8, 0x96, 0xce, 0x97, 0x2f, 0x85, 0x23, 0x66, 0x00, 0xe4, 0x91, + 0x8b, 0x81, 0x8b, 0x5a, 0x64, 0x30, 0x4a, 0xf1, 0x07, 0x0a, 0xe4, 0x36, 0x9d, 0x51, 0xf0, 0xfb, + 0xc5, 0x19, 0x48, 0xec, 0x3a, 0x4e, 0x7f, 0x8c, 0xc4, 0x69, 0x93, 0x15, 0xc8, 0x8b, 0x90, 0xc3, + 0x07, 0xef, 0x90, 0xac, 0xfa, 0xb7, 0x40, 0x59, 0xac, 0xe7, 0xe7, 0x62, 0x02, 0xf1, 0x9e, 0xed, + 0x8e, 0xb9, 0x47, 0xc3, 0x67, 0xf2, 0x05, 0xc8, 0xd2, 0xbf, 0x9e, 0x65, 0xdc, 0xcf, 0xa6, 0x81, + 0x56, 0x73, 0xc3, 0x97, 0x60, 0x0e, 0x35, 0xe0, 0xc3, 0x52, 0xfe, 0x8d, 0x4f, 0x8e, 0x35, 0x70, + 0x60, 0x01, 0x52, 0xcc, 0x21, 0x8c, 0xf1, 0x07, 0xdf, 0x8c, 0xe9, 0x15, 0xa9, 0x9b, 0xc5, 0x83, + 0x0a, 0xcb, 0x40, 0x52, 0x26, 0x2f, 0x15, 0xef, 0x41, 0x1a, 0xc3, 0x65, 0xa3, 0xdf, 0x26, 0x2f, + 0x80, 0xd2, 0x2d, 0x58, 0x18, 0xae, 0xcf, 0x86, 0x4e, 0x21, 0x1c, 0xb0, 0xb4, 0x6e, 0x2a, 0xdd, + 0xc5, 0x05, 0x50, 0xd6, 0xe9, 0xb1, 0xe0, 0x80, 0x3b, 0x6c, 0xe5, 0xa0, 0xf8, 0x16, 0x27, 0xd9, + 0xb2, 0x9e, 0xca, 0x49, 0xb6, 0xac, 0xa7, 0x8c, 0xe4, 0xf2, 0x14, 0x09, 0x2d, 0x1d, 0xf2, 0xdf, + 0xc0, 0x95, 0xc3, 0x62, 0x05, 0xe6, 0xf0, 0x45, 0xed, 0xd9, 0xdd, 0x47, 0x4e, 0xcf, 0xc6, 0x83, + 0x48, 0x07, 0x13, 0x38, 0xc5, 0x54, 0x3a, 0x74, 0x1f, 0xac, 0x83, 0xd6, 0x1e, 0x4b, 0x87, 0xd3, + 0x26, 0x2b, 0x14, 0xbf, 0x1f, 0x87, 0x79, 0xee, 0x64, 0xdf, 0xed, 0xb9, 0xfb, 0x9b, 0xad, 0x21, + 0xd9, 0x82, 0x1c, 0xf5, 0xaf, 0xcd, 0x41, 0x6b, 0x38, 0xa4, 0x2f, 0xb2, 0x82, 0xa1, 0xf9, 0xda, + 0x0c, 0xb7, 0xcd, 0x2d, 0x96, 0xb6, 0x5a, 0x03, 0x6b, 0x93, 0xa1, 0x59, 0xa0, 0xce, 0xda, 0x41, + 0x0d, 0x79, 0x00, 0xd9, 0xc1, 0xb8, 0xeb, 0xd3, 0xb1, 0x48, 0x7f, 0x55, 0x42, 0xb7, 0x39, 0xee, + 0x86, 0xd8, 0x60, 0xe0, 0x57, 0xd0, 0xc1, 0x51, 0xef, 0xec, 0xb3, 0xc5, 0x8e, 0x1c, 0x1c, 0x75, + 0x25, 0xe1, 0xc1, 0xed, 0x06, 0x35, 0xa4, 0x0e, 0x40, 0x5f, 0x35, 0xd7, 0xa1, 0x27, 0x3c, 0xd4, + 0x52, 0xb6, 0x5c, 0x92, 0xb0, 0x6d, 0xbb, 0xa3, 0x1d, 0x67, 0xdb, 0x1d, 0xf1, 0x84, 0x64, 0xcc, + 0x8b, 0x8b, 0xaf, 0x83, 0x16, 0x5d, 0x85, 0xa3, 0x72, 0x92, 0x8c, 0x90, 0x93, 0x2c, 0xfe, 0x1c, + 0xe4, 0x23, 0xd3, 0x16, 0xcd, 0x09, 0x33, 0xbf, 0x21, 0x9a, 0x67, 0xcb, 0xe7, 0x43, 0xdf, 0x68, + 0x88, 0x5b, 0x2f, 0x32, 0xbf, 0x0e, 0x5a, 0x74, 0x09, 0x44, 0xea, 0xb4, 0xe4, 0x40, 0x83, 0xf6, + 0xaf, 0xc2, 0x5c, 0x68, 0xd2, 0xa2, 0x71, 0xe6, 0x88, 0x69, 0x15, 0x7f, 0x25, 0x01, 0x89, 0x86, + 0x6d, 0x39, 0x1d, 0x72, 0x2e, 0x1c, 0x3b, 0xdf, 0x3c, 0xe5, 0xc5, 0xcd, 0xf3, 0x91, 0xb8, 0xf9, + 0xe6, 0x29, 0x3f, 0x6a, 0x9e, 0x8f, 0x44, 0x4d, 0xaf, 0xa9, 0x6a, 0x90, 0x8b, 0x53, 0x31, 0xf3, + 0xcd, 0x53, 0x42, 0xc0, 0xbc, 0x38, 0x15, 0x30, 0x83, 0xe6, 0xaa, 0x41, 0x1d, 0x6c, 0x38, 0x5a, + 0xbe, 0x79, 0x2a, 0x88, 0x94, 0x17, 0xa2, 0x91, 0xd2, 0x6f, 0xac, 0x1a, 0x6c, 0x48, 0x42, 0x94, + 0xc4, 0x21, 0xb1, 0xf8, 0x78, 0x21, 0x1a, 0x1f, 0xd1, 0x8e, 0x47, 0xc6, 0x0b, 0xd1, 0xc8, 0x88, + 0x8d, 0x3c, 0x12, 0x9e, 0x8f, 0x44, 0x42, 0x24, 0x65, 0x21, 0xf0, 0x42, 0x34, 0x04, 0x32, 0x3b, + 0x61, 0xa4, 0x62, 0xfc, 0xf3, 0x1b, 0xab, 0x06, 0x31, 0x22, 0xc1, 0x4f, 0x76, 0x10, 0xc1, 0xdd, + 0xc0, 0x30, 0x50, 0xa5, 0x0b, 0xe7, 0x25, 0xa8, 0x79, 0xe9, 0x27, 0x2c, 0xb8, 0xa2, 0x5e, 0x82, + 0x66, 0x40, 0xaa, 0xc3, 0xcf, 0xea, 0x1a, 0x7a, 0xb2, 0x90, 0x38, 0x51, 0x02, 0x4b, 0xf5, 0x26, + 0x7a, 0x34, 0x3a, 0xbb, 0x0e, 0x3b, 0x70, 0x94, 0x60, 0xae, 0xde, 0x7c, 0xd8, 0x1a, 0x75, 0x29, + 0x74, 0xa7, 0xd5, 0xf5, 0x6f, 0x3d, 0xa8, 0x0a, 0xb2, 0x75, 0xde, 
0xb2, 0xd3, 0xea, 0x92, 0xb3, + 0x9e, 0xc4, 0xda, 0xd8, 0xaa, 0x70, 0x91, 0x2d, 0x9e, 0xa3, 0x4b, 0xc7, 0xc8, 0xd0, 0x37, 0x2e, + 0x70, 0xdf, 0x78, 0x37, 0x05, 0x89, 0x89, 0xdd, 0x73, 0xec, 0xbb, 0x19, 0x48, 0xb9, 0xce, 0x68, + 0xd0, 0x72, 0x9d, 0xe2, 0x0f, 0x15, 0x80, 0x7b, 0xce, 0x60, 0x30, 0xb1, 0x7b, 0xef, 0x4d, 0x2c, + 0x72, 0x09, 0xb2, 0x83, 0xd6, 0x13, 0xab, 0x39, 0xb0, 0x9a, 0x7b, 0x23, 0xef, 0x6d, 0xc8, 0xd0, + 0xaa, 0x4d, 0xeb, 0xde, 0xe8, 0x90, 0x14, 0xbc, 0x04, 0x1e, 0x15, 0x84, 0xc2, 0xe4, 0x09, 0xfd, + 0x19, 0x9e, 0x8e, 0x26, 0xf9, 0x4e, 0x7a, 0x09, 0x29, 0x3b, 0xe4, 0xa4, 0xf8, 0x1e, 0xb2, 0x63, + 0xce, 0x39, 0x48, 0xba, 0xd6, 0x60, 0xd8, 0xdc, 0x43, 0xc1, 0x50, 0x51, 0x24, 0x68, 0xf9, 0x1e, + 0xb9, 0x01, 0xb1, 0x3d, 0xa7, 0x8f, 0x52, 0x39, 0x72, 0x77, 0x28, 0x92, 0xbc, 0x04, 0xb1, 0xc1, + 0x98, 0xc9, 0x27, 0x5b, 0x3e, 0x1d, 0xca, 0x20, 0x58, 0xc8, 0xa2, 0xc0, 0xc1, 0xb8, 0xeb, 0xcf, + 0xbd, 0xf8, 0xa9, 0x0a, 0x69, 0xba, 0x5f, 0x6f, 0xef, 0xd4, 0x6f, 0xe1, 0xb1, 0x61, 0xaf, 0xd5, + 0xc7, 0x1b, 0x02, 0xfa, 0x9a, 0xf2, 0x12, 0xad, 0xff, 0x8a, 0xb5, 0xe7, 0x3a, 0x23, 0x74, 0xcd, + 0x19, 0x93, 0x97, 0xe8, 0x92, 0xb3, 0xac, 0x38, 0xc6, 0x67, 0xc9, 0x8a, 0x98, 0xd1, 0xb7, 0x86, + 0x4d, 0xea, 0x03, 0x98, 0xbf, 0x0c, 0x9d, 0xae, 0xbd, 0xee, 0xe8, 0xd1, 0xed, 0x81, 0x75, 0xc8, + 0xfc, 0x64, 0x72, 0x80, 0x05, 0xf2, 0xb3, 0xec, 0xc8, 0xc7, 0x76, 0x92, 0x7d, 0x5f, 0x55, 0x7c, + 0x96, 0xf1, 0x3b, 0x14, 0x14, 0x9c, 0xfb, 0xb0, 0xb8, 0x78, 0x1b, 0xb2, 0x02, 0xef, 0x51, 0xae, + 0x28, 0x16, 0xf1, 0x63, 0x21, 0xd6, 0xa3, 0x6e, 0x75, 0x44, 0x3f, 0x46, 0x57, 0xd4, 0xa1, 0x1a, + 0xbe, 0x9a, 0x87, 0x58, 0xbd, 0xd1, 0xa0, 0x79, 0x56, 0xbd, 0xd1, 0x58, 0xd1, 0x94, 0xda, 0x0a, + 0xa4, 0xbb, 0x23, 0xcb, 0xa2, 0xae, 0xf7, 0x59, 0xe7, 0xbc, 0x2f, 0xe3, 0xb2, 0xfa, 0xb0, 0xda, + 0x5b, 0x90, 0xda, 0x63, 0x27, 0x3d, 0xf2, 0xcc, 0x5b, 0x8d, 0xc2, 0x1f, 0xb3, 0xdb, 0xb5, 0xe7, + 0x45, 0x40, 0xf4, 0x7c, 0x68, 0x7a, 0x3c, 0xb5, 0x1d, 0xc8, 0x8c, 0x9a, 0x47, 0x93, 0x7e, 0xc0, + 0x62, 0xb9, 0x9c, 0x34, 0x3d, 0xe2, 0x55, 0xb5, 0x75, 0x58, 0xb0, 0x1d, 0xef, 0x47, 0xbe, 0x66, + 0x9b, 0x7b, 0xb2, 0x59, 0x49, 0xb4, 0xd7, 0x81, 0xc5, 0x3e, 0x15, 0xb0, 0x1d, 0xde, 0xc0, 0xbc, + 0x5f, 0x6d, 0x0d, 0x34, 0x81, 0xa8, 0xc3, 0xdc, 0xa5, 0x8c, 0xa7, 0xc3, 0xbe, 0x4e, 0xf0, 0x79, + 0xd0, 0xc3, 0x46, 0x68, 0xb8, 0x0f, 0x94, 0xd1, 0x74, 0xd9, 0xc7, 0x1e, 0x3e, 0x0d, 0x86, 0x95, + 0x69, 0x1a, 0x1a, 0x11, 0x64, 0x34, 0xfb, 0xec, 0x4b, 0x10, 0x91, 0xa6, 0x6a, 0x44, 0x56, 0x67, + 0x72, 0x8c, 0xe1, 0xf4, 0xd8, 0xa7, 0x1c, 0x3e, 0x0f, 0x0b, 0x38, 0x33, 0x88, 0x8e, 0x1a, 0xd0, + 0x97, 0xd9, 0x77, 0x1e, 0x21, 0xa2, 0xa9, 0x11, 0x8d, 0x8f, 0x31, 0xa2, 0x27, 0xec, 0xb3, 0x0a, + 0x9f, 0x68, 0x7b, 0xd6, 0x88, 0xc6, 0xc7, 0x18, 0x51, 0x9f, 0x7d, 0x72, 0x11, 0x22, 0xaa, 0x1a, + 0xb5, 0x0d, 0x20, 0xe2, 0xc6, 0xf3, 0xe8, 0x2c, 0x65, 0x1a, 0xb0, 0x4f, 0x69, 0x82, 0xad, 0x67, + 0x46, 0xb3, 0xa8, 0x8e, 0x1a, 0x94, 0xcd, 0xbe, 0xb3, 0x09, 0x53, 0x55, 0x8d, 0xda, 0x03, 0x38, + 0x2d, 0x4e, 0xef, 0x58, 0xc3, 0x72, 0xd8, 0x47, 0x22, 0xc1, 0x04, 0xb9, 0xd5, 0x4c, 0xb2, 0xa3, + 0x06, 0x36, 0x64, 0x1f, 0x90, 0x44, 0xc8, 0xaa, 0x46, 0xed, 0x1e, 0xe4, 0x05, 0xb2, 0x5d, 0xbc, + 0x57, 0x90, 0x11, 0xbd, 0xc7, 0x3e, 0x7b, 0xf2, 0x89, 0x68, 0x46, 0x15, 0xdd, 0x3d, 0x96, 0x63, + 0x48, 0x69, 0x46, 0xec, 0xab, 0x9d, 0x60, 0x3c, 0x68, 0x13, 0x79, 0x51, 0x76, 0x59, 0x42, 0x22, + 0xe3, 0x19, 0xb3, 0x2f, 0x7a, 0x82, 0xe1, 0x50, 0x93, 0xda, 0x20, 0x34, 0x29, 0x8b, 0xa6, 0x19, + 0x52, 0x16, 0x17, 0x23, 0x62, 0x49, 0x02, 0x59, 0x12, 0xaf, 0xaf, 0x84, 0xe9, 0xd3, 0x62, 
0xed, + 0x01, 0xcc, 0x9f, 0xc4, 0x65, 0x7d, 0xa0, 0xb0, 0xbb, 0x8c, 0xca, 0xd2, 0x8a, 0xb1, 0xb2, 0x6a, + 0xce, 0xb5, 0x43, 0x9e, 0x6b, 0x1d, 0xe6, 0x4e, 0xe0, 0xb6, 0x3e, 0x54, 0xd8, 0x8d, 0x00, 0xe5, + 0x32, 0x73, 0xed, 0xb0, 0xef, 0x9a, 0x3b, 0x81, 0xe3, 0xfa, 0x48, 0x61, 0x57, 0x48, 0x46, 0xd9, + 0xa7, 0xf1, 0x7c, 0xd7, 0xdc, 0x09, 0x1c, 0xd7, 0xc7, 0xec, 0xc4, 0xaf, 0x1a, 0x15, 0x91, 0x06, + 0x3d, 0xc5, 0xfc, 0x49, 0x1c, 0xd7, 0x27, 0x0a, 0x5e, 0x29, 0xa9, 0x86, 0xe1, 0xaf, 0x8f, 0xef, + 0xbb, 0xe6, 0x4f, 0xe2, 0xb8, 0xbe, 0xa6, 0xe0, 0xd5, 0x93, 0x6a, 0xac, 0x86, 0x88, 0xc2, 0x23, + 0x3a, 0x8e, 0xe3, 0xfa, 0x54, 0xc1, 0xfb, 0x20, 0xd5, 0xa8, 0xfa, 0x44, 0xdb, 0x53, 0x23, 0x3a, + 0x8e, 0xe3, 0xfa, 0x3a, 0x9e, 0xaf, 0x6a, 0xaa, 0x71, 0x33, 0x44, 0x84, 0xbe, 0x2b, 0x7f, 0x22, + 0xc7, 0xf5, 0x0d, 0x05, 0xaf, 0xee, 0x54, 0xe3, 0x96, 0xe9, 0x8d, 0x20, 0xf0, 0x5d, 0xf9, 0x13, + 0x39, 0xae, 0x6f, 0x2a, 0x78, 0xc7, 0xa7, 0x1a, 0xb7, 0xc3, 0x54, 0xe8, 0xbb, 0xb4, 0x93, 0x39, + 0xae, 0xcf, 0x14, 0xfc, 0xa2, 0x47, 0x5d, 0x5d, 0x36, 0xbd, 0x41, 0x08, 0xbe, 0x4b, 0x3b, 0x99, + 0xe3, 0xfa, 0x96, 0x82, 0x9f, 0xf9, 0xa8, 0xab, 0x2b, 0x11, 0xb2, 0xaa, 0x51, 0x5b, 0x83, 0xdc, + 0xf1, 0x1d, 0xd7, 0xb7, 0xc5, 0x1b, 0xd4, 0x6c, 0x5b, 0xf0, 0x5e, 0x8f, 0x85, 0xfd, 0x3b, 0x86, + 0xeb, 0xfa, 0x0e, 0x26, 0x7f, 0xb5, 0xe7, 0xde, 0x64, 0xf7, 0x8c, 0xcc, 0xe4, 0x95, 0xb6, 0xd5, + 0x79, 0xad, 0xe3, 0x38, 0xc1, 0x96, 0x32, 0x87, 0xd6, 0x08, 0xde, 0x9e, 0x63, 0x78, 0xb3, 0xef, + 0x2a, 0x78, 0x2d, 0x99, 0xe3, 0xd4, 0x68, 0xe1, 0xbf, 0x47, 0xcc, 0xb5, 0xd9, 0xc1, 0x9c, 0x8f, + 0xf6, 0x6b, 0xdf, 0x53, 0x4e, 0xe6, 0xd8, 0x6a, 0xb1, 0xc6, 0xd6, 0x9a, 0xbf, 0x38, 0x58, 0xf3, + 0x06, 0xc4, 0x0f, 0xca, 0xcb, 0x2b, 0xe1, 0x14, 0x4f, 0xbc, 0x95, 0x67, 0xee, 0x2c, 0x5b, 0x5e, + 0x08, 0xfd, 0x7c, 0x31, 0x18, 0xba, 0x87, 0x26, 0x5a, 0x72, 0x86, 0xb2, 0x84, 0xe1, 0x43, 0x29, + 0x43, 0x99, 0x33, 0x54, 0x24, 0x0c, 0x1f, 0x49, 0x19, 0x2a, 0x9c, 0xc1, 0x90, 0x30, 0x7c, 0x2c, + 0x65, 0x30, 0x38, 0xc3, 0xaa, 0x84, 0xe1, 0x13, 0x29, 0xc3, 0x2a, 0x67, 0xa8, 0x4a, 0x18, 0xbe, + 0x26, 0x65, 0xa8, 0x72, 0x86, 0x9b, 0x12, 0x86, 0x4f, 0xa5, 0x0c, 0x37, 0x39, 0xc3, 0x2d, 0x09, + 0xc3, 0xd7, 0xa5, 0x0c, 0xb7, 0x38, 0xc3, 0x6d, 0x09, 0xc3, 0x37, 0xa4, 0x0c, 0xb7, 0x19, 0xc3, + 0xca, 0xb2, 0x84, 0xe1, 0x9b, 0x32, 0x86, 0x95, 0x65, 0xce, 0x20, 0xd3, 0xe4, 0x67, 0x52, 0x06, + 0xae, 0xc9, 0x15, 0x99, 0x26, 0xbf, 0x25, 0x65, 0xe0, 0x9a, 0x5c, 0x91, 0x69, 0xf2, 0xdb, 0x52, + 0x06, 0xae, 0xc9, 0x15, 0x99, 0x26, 0xbf, 0x23, 0x65, 0xe0, 0x9a, 0x5c, 0x91, 0x69, 0xf2, 0xbb, + 0x52, 0x06, 0xae, 0xc9, 0x15, 0x99, 0x26, 0xbf, 0x27, 0x65, 0xe0, 0x9a, 0x5c, 0x91, 0x69, 0xf2, + 0x4f, 0xa4, 0x0c, 0x5c, 0x93, 0x2b, 0x32, 0x4d, 0xfe, 0xa9, 0x94, 0x81, 0x6b, 0x72, 0x45, 0xa6, + 0xc9, 0x3f, 0x93, 0x32, 0x70, 0x4d, 0x96, 0x65, 0x9a, 0xfc, 0xbe, 0x8c, 0xa1, 0xcc, 0x35, 0x59, + 0x96, 0x69, 0xf2, 0xcf, 0xa5, 0x0c, 0x5c, 0x93, 0x65, 0x99, 0x26, 0xff, 0x42, 0xca, 0xc0, 0x35, + 0x59, 0x96, 0x69, 0xf2, 0x07, 0x52, 0x06, 0xae, 0xc9, 0xb2, 0x4c, 0x93, 0x7f, 0x29, 0x65, 0xe0, + 0x9a, 0x2c, 0xcb, 0x34, 0xf9, 0x57, 0x52, 0x06, 0xae, 0xc9, 0xb2, 0x4c, 0x93, 0x7f, 0x2d, 0x65, + 0xe0, 0x9a, 0x2c, 0xcb, 0x34, 0xf9, 0x37, 0x52, 0x06, 0xae, 0xc9, 0xb2, 0x4c, 0x93, 0x7f, 0x2b, + 0x65, 0xe0, 0x9a, 0x2c, 0xcb, 0x34, 0xf9, 0x77, 0x52, 0x06, 0xae, 0xc9, 0x8a, 0x4c, 0x93, 0x7f, + 0x2f, 0x63, 0xa8, 0x70, 0x4d, 0x56, 0x64, 0x9a, 0xfc, 0x07, 0x29, 0x03, 0xd7, 0x64, 0x45, 0xa6, + 0xc9, 0x7f, 0x94, 0x32, 0x70, 0x4d, 0x56, 0x64, 0x9a, 0xfc, 0x27, 0x29, 0x03, 0xd7, 0x64, 0x45, + 0xa6, 0xc9, 0x7f, 
0x96, 0x32, 0x70, 0x4d, 0x56, 0x64, 0x9a, 0xfc, 0x17, 0x29, 0x03, 0xd7, 0x64, + 0x45, 0xa6, 0xc9, 0x7f, 0x95, 0x32, 0x70, 0x4d, 0x56, 0x64, 0x9a, 0xfc, 0x37, 0x29, 0x03, 0xd7, + 0x64, 0x45, 0xa6, 0xc9, 0x1f, 0x4a, 0x19, 0xb8, 0x26, 0x2b, 0x32, 0x4d, 0xfe, 0xbb, 0x94, 0x81, + 0x6b, 0xd2, 0x90, 0x69, 0xf2, 0x3f, 0x64, 0x0c, 0x06, 0xd7, 0xa4, 0x21, 0xd3, 0xe4, 0x7f, 0x4a, + 0x19, 0xb8, 0x26, 0x0d, 0x99, 0x26, 0xff, 0x4b, 0xca, 0xc0, 0x35, 0x69, 0xc8, 0x34, 0xf9, 0xdf, + 0x52, 0x06, 0xae, 0x49, 0x43, 0xa6, 0xc9, 0xff, 0x91, 0x32, 0x70, 0x4d, 0x1a, 0x32, 0x4d, 0xfe, + 0xaf, 0x94, 0x81, 0x6b, 0xd2, 0x90, 0x69, 0xf2, 0x47, 0x52, 0x06, 0xae, 0x49, 0x43, 0xa6, 0xc9, + 0x1f, 0x4b, 0x19, 0xb8, 0x26, 0x0d, 0x99, 0x26, 0x7f, 0x22, 0x65, 0xe0, 0x9a, 0x34, 0x64, 0x9a, + 0xfc, 0xa9, 0x94, 0x81, 0x6b, 0x72, 0x55, 0xa6, 0xc9, 0xff, 0x93, 0x31, 0xac, 0x2e, 0xdf, 0xbd, + 0xfe, 0xf8, 0x5a, 0xb7, 0xe7, 0xee, 0x4f, 0x76, 0x97, 0xf6, 0x9c, 0xc1, 0x8d, 0xae, 0xd3, 0x6f, + 0xd9, 0xdd, 0x1b, 0x08, 0xdb, 0x9d, 0x74, 0x6e, 0x04, 0xff, 0xcc, 0xce, 0x4c, 0xff, 0x3f, 0x00, + 0x00, 0xff, 0xff, 0x8e, 0xb4, 0x0c, 0xbd, 0xe4, 0x3e, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.proto b/vendor/github.com/golang/protobuf/proto/test_proto/test.proto similarity index 94% rename from vendor/github.com/golang/protobuf/proto/testdata/test.proto rename to vendor/github.com/golang/protobuf/proto/test_proto/test.proto index 70e3cfc..f339e05 100644 --- a/vendor/github.com/golang/protobuf/proto/testdata/test.proto +++ b/vendor/github.com/golang/protobuf/proto/test_proto/test.proto @@ -33,7 +33,9 @@ syntax = "proto2"; -package testdata; +option go_package = "github.com/golang/protobuf/proto/test_proto"; + +package test_proto; enum FOO { FOO1 = 1; }; @@ -96,6 +98,8 @@ message GoTest { required bytes F_Bytes_required = 101; required sint32 F_Sint32_required = 102; required sint64 F_Sint64_required = 103; + required sfixed32 F_Sfixed32_required = 104; + required sfixed64 F_Sfixed64_required = 105; // Repeated fields of all basic types repeated bool F_Bool_repeated = 20; @@ -111,6 +115,8 @@ message GoTest { repeated bytes F_Bytes_repeated = 201; repeated sint32 F_Sint32_repeated = 202; repeated sint64 F_Sint64_repeated = 203; + repeated sfixed32 F_Sfixed32_repeated = 204; + repeated sfixed64 F_Sfixed64_repeated = 205; // Optional fields of all basic types optional bool F_Bool_optional = 30; @@ -126,6 +132,8 @@ message GoTest { optional bytes F_Bytes_optional = 301; optional sint32 F_Sint32_optional = 302; optional sint64 F_Sint64_optional = 303; + optional sfixed32 F_Sfixed32_optional = 304; + optional sfixed64 F_Sfixed64_optional = 305; // Default-valued fields of all basic types optional bool F_Bool_defaulted = 40 [default=true]; @@ -141,6 +149,8 @@ message GoTest { optional bytes F_Bytes_defaulted = 401 [default="Bignose"]; optional sint32 F_Sint32_defaulted = 402 [default = -32]; optional sint64 F_Sint64_defaulted = 403 [default = -64]; + optional sfixed32 F_Sfixed32_defaulted = 404 [default = -32]; + optional sfixed64 F_Sfixed64_defaulted = 405 [default = -64]; // Packed repeated fields (no string or bytes). 
repeated bool F_Bool_repeated_packed = 50 [packed=true]; @@ -154,6 +164,8 @@ message GoTest { repeated double F_Double_repeated_packed = 58 [packed=true]; repeated sint32 F_Sint32_repeated_packed = 502 [packed=true]; repeated sint64 F_Sint64_repeated_packed = 503 [packed=true]; + repeated sfixed32 F_Sfixed32_repeated_packed = 504 [packed=true]; + repeated sfixed64 F_Sfixed64_repeated_packed = 505 [packed=true]; // Required, repeated, and optional groups. required group RequiredGroup = 70 { @@ -285,10 +297,12 @@ message Ext { } optional string data = 1; + map<int32, int32> map_field = 2; } extend MyMessage { repeated string greeting = 106; + // leave field 200 unregistered for testing } message ComplexExtension { @@ -342,7 +356,7 @@ extend DefaultsMessage { optional sfixed32 default_sfixed32 = 211 [default = 50]; optional sfixed64 default_sfixed64 = 212 [default = 51]; optional bool default_bool = 213 [default = true]; - optional string default_string = 214 [default = "Hello, string"]; + optional string default_string = 214 [default = "Hello, string,def=foo"]; optional bytes default_bytes = 215 [default = "Hello, bytes"]; optional DefaultsMessage.DefaultsEnum default_enum = 216 [default = ONE]; } @@ -546,3 +560,11 @@ message Communique { Strings msg = 10; } } + +message TestUTF8 { + optional string scalar = 1; + repeated string vector = 2; + oneof oneof { string field = 3; } + map<string, int64> map_key = 4; + map<int64, string> map_value = 5; +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/Makefile b/vendor/github.com/golang/protobuf/proto/testdata/Makefile deleted file mode 100644 index fc28862..0000000 --- a/vendor/github.com/golang/protobuf/proto/testdata/Makefile +++ /dev/null @@ -1,50 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -include ../../Make.protobuf - -all: regenerate - -regenerate: - rm -f test.pb.go - make test.pb.go - -# The following rules are just aids to development. Not needed for typical testing.
- -diff: regenerate - git diff test.pb.go - -restore: - cp test.pb.go.golden test.pb.go - -preserve: - cp test.pb.go test.pb.go.golden diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go b/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go deleted file mode 100644 index e980d1a..0000000 --- a/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go +++ /dev/null @@ -1,4147 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: test.proto - -/* -Package testdata is a generated protocol buffer package. - -It is generated from these files: - test.proto - -It has these top-level messages: - GoEnum - GoTestField - GoTest - GoTestRequiredGroupField - GoSkipTest - NonPackedTest - PackedTest - MaxTag - OldMessage - NewMessage - InnerMessage - OtherMessage - RequiredInnerMessage - MyMessage - Ext - ComplexExtension - DefaultsMessage - MyMessageSet - Empty - MessageList - Strings - Defaults - SubDefaults - RepeatedEnum - MoreRepeated - GroupOld - GroupNew - FloatingPoint - MessageWithMap - Oneof - Communique -*/ -package testdata - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FOO int32 - -const ( - FOO_FOO1 FOO = 1 -) - -var FOO_name = map[int32]string{ - 1: "FOO1", -} -var FOO_value = map[string]int32{ - "FOO1": 1, -} - -func (x FOO) Enum() *FOO { - p := new(FOO) - *p = x - return p -} -func (x FOO) String() string { - return proto.EnumName(FOO_name, int32(x)) -} -func (x *FOO) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FOO_value, data, "FOO") - if err != nil { - return err - } - *x = FOO(value) - return nil -} -func (FOO) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -// An enum, for completeness. 
-type GoTest_KIND int32 - -const ( - GoTest_VOID GoTest_KIND = 0 - // Basic types - GoTest_BOOL GoTest_KIND = 1 - GoTest_BYTES GoTest_KIND = 2 - GoTest_FINGERPRINT GoTest_KIND = 3 - GoTest_FLOAT GoTest_KIND = 4 - GoTest_INT GoTest_KIND = 5 - GoTest_STRING GoTest_KIND = 6 - GoTest_TIME GoTest_KIND = 7 - // Groupings - GoTest_TUPLE GoTest_KIND = 8 - GoTest_ARRAY GoTest_KIND = 9 - GoTest_MAP GoTest_KIND = 10 - // Table types - GoTest_TABLE GoTest_KIND = 11 - // Functions - GoTest_FUNCTION GoTest_KIND = 12 -) - -var GoTest_KIND_name = map[int32]string{ - 0: "VOID", - 1: "BOOL", - 2: "BYTES", - 3: "FINGERPRINT", - 4: "FLOAT", - 5: "INT", - 6: "STRING", - 7: "TIME", - 8: "TUPLE", - 9: "ARRAY", - 10: "MAP", - 11: "TABLE", - 12: "FUNCTION", -} -var GoTest_KIND_value = map[string]int32{ - "VOID": 0, - "BOOL": 1, - "BYTES": 2, - "FINGERPRINT": 3, - "FLOAT": 4, - "INT": 5, - "STRING": 6, - "TIME": 7, - "TUPLE": 8, - "ARRAY": 9, - "MAP": 10, - "TABLE": 11, - "FUNCTION": 12, -} - -func (x GoTest_KIND) Enum() *GoTest_KIND { - p := new(GoTest_KIND) - *p = x - return p -} -func (x GoTest_KIND) String() string { - return proto.EnumName(GoTest_KIND_name, int32(x)) -} -func (x *GoTest_KIND) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(GoTest_KIND_value, data, "GoTest_KIND") - if err != nil { - return err - } - *x = GoTest_KIND(value) - return nil -} -func (GoTest_KIND) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } - -type MyMessage_Color int32 - -const ( - MyMessage_RED MyMessage_Color = 0 - MyMessage_GREEN MyMessage_Color = 1 - MyMessage_BLUE MyMessage_Color = 2 -) - -var MyMessage_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var MyMessage_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x MyMessage_Color) Enum() *MyMessage_Color { - p := new(MyMessage_Color) - *p = x - return p -} -func (x MyMessage_Color) String() string { - return proto.EnumName(MyMessage_Color_name, int32(x)) -} -func (x *MyMessage_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(MyMessage_Color_value, data, "MyMessage_Color") - if err != nil { - return err - } - *x = MyMessage_Color(value) - return nil -} -func (MyMessage_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } - -type DefaultsMessage_DefaultsEnum int32 - -const ( - DefaultsMessage_ZERO DefaultsMessage_DefaultsEnum = 0 - DefaultsMessage_ONE DefaultsMessage_DefaultsEnum = 1 - DefaultsMessage_TWO DefaultsMessage_DefaultsEnum = 2 -) - -var DefaultsMessage_DefaultsEnum_name = map[int32]string{ - 0: "ZERO", - 1: "ONE", - 2: "TWO", -} -var DefaultsMessage_DefaultsEnum_value = map[string]int32{ - "ZERO": 0, - "ONE": 1, - "TWO": 2, -} - -func (x DefaultsMessage_DefaultsEnum) Enum() *DefaultsMessage_DefaultsEnum { - p := new(DefaultsMessage_DefaultsEnum) - *p = x - return p -} -func (x DefaultsMessage_DefaultsEnum) String() string { - return proto.EnumName(DefaultsMessage_DefaultsEnum_name, int32(x)) -} -func (x *DefaultsMessage_DefaultsEnum) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(DefaultsMessage_DefaultsEnum_value, data, "DefaultsMessage_DefaultsEnum") - if err != nil { - return err - } - *x = DefaultsMessage_DefaultsEnum(value) - return nil -} -func (DefaultsMessage_DefaultsEnum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{16, 0} -} - -type Defaults_Color int32 - -const ( - Defaults_RED Defaults_Color = 0 - Defaults_GREEN Defaults_Color = 1 - 
Defaults_BLUE Defaults_Color = 2 -) - -var Defaults_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var Defaults_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x Defaults_Color) Enum() *Defaults_Color { - p := new(Defaults_Color) - *p = x - return p -} -func (x Defaults_Color) String() string { - return proto.EnumName(Defaults_Color_name, int32(x)) -} -func (x *Defaults_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Defaults_Color_value, data, "Defaults_Color") - if err != nil { - return err - } - *x = Defaults_Color(value) - return nil -} -func (Defaults_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{21, 0} } - -type RepeatedEnum_Color int32 - -const ( - RepeatedEnum_RED RepeatedEnum_Color = 1 -) - -var RepeatedEnum_Color_name = map[int32]string{ - 1: "RED", -} -var RepeatedEnum_Color_value = map[string]int32{ - "RED": 1, -} - -func (x RepeatedEnum_Color) Enum() *RepeatedEnum_Color { - p := new(RepeatedEnum_Color) - *p = x - return p -} -func (x RepeatedEnum_Color) String() string { - return proto.EnumName(RepeatedEnum_Color_name, int32(x)) -} -func (x *RepeatedEnum_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(RepeatedEnum_Color_value, data, "RepeatedEnum_Color") - if err != nil { - return err - } - *x = RepeatedEnum_Color(value) - return nil -} -func (RepeatedEnum_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{23, 0} } - -type GoEnum struct { - Foo *FOO `protobuf:"varint,1,req,name=foo,enum=testdata.FOO" json:"foo,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoEnum) Reset() { *m = GoEnum{} } -func (m *GoEnum) String() string { return proto.CompactTextString(m) } -func (*GoEnum) ProtoMessage() {} -func (*GoEnum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *GoEnum) GetFoo() FOO { - if m != nil && m.Foo != nil { - return *m.Foo - } - return FOO_FOO1 -} - -type GoTestField struct { - Label *string `protobuf:"bytes,1,req,name=Label" json:"Label,omitempty"` - Type *string `protobuf:"bytes,2,req,name=Type" json:"Type,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestField) Reset() { *m = GoTestField{} } -func (m *GoTestField) String() string { return proto.CompactTextString(m) } -func (*GoTestField) ProtoMessage() {} -func (*GoTestField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *GoTestField) GetLabel() string { - if m != nil && m.Label != nil { - return *m.Label - } - return "" -} - -func (m *GoTestField) GetType() string { - if m != nil && m.Type != nil { - return *m.Type - } - return "" -} - -type GoTest struct { - // Some typical parameters - Kind *GoTest_KIND `protobuf:"varint,1,req,name=Kind,enum=testdata.GoTest_KIND" json:"Kind,omitempty"` - Table *string `protobuf:"bytes,2,opt,name=Table" json:"Table,omitempty"` - Param *int32 `protobuf:"varint,3,opt,name=Param" json:"Param,omitempty"` - // Required, repeated and optional foreign fields. 
- RequiredField *GoTestField `protobuf:"bytes,4,req,name=RequiredField" json:"RequiredField,omitempty"` - RepeatedField []*GoTestField `protobuf:"bytes,5,rep,name=RepeatedField" json:"RepeatedField,omitempty"` - OptionalField *GoTestField `protobuf:"bytes,6,opt,name=OptionalField" json:"OptionalField,omitempty"` - // Required fields of all basic types - F_BoolRequired *bool `protobuf:"varint,10,req,name=F_Bool_required,json=FBoolRequired" json:"F_Bool_required,omitempty"` - F_Int32Required *int32 `protobuf:"varint,11,req,name=F_Int32_required,json=FInt32Required" json:"F_Int32_required,omitempty"` - F_Int64Required *int64 `protobuf:"varint,12,req,name=F_Int64_required,json=FInt64Required" json:"F_Int64_required,omitempty"` - F_Fixed32Required *uint32 `protobuf:"fixed32,13,req,name=F_Fixed32_required,json=FFixed32Required" json:"F_Fixed32_required,omitempty"` - F_Fixed64Required *uint64 `protobuf:"fixed64,14,req,name=F_Fixed64_required,json=FFixed64Required" json:"F_Fixed64_required,omitempty"` - F_Uint32Required *uint32 `protobuf:"varint,15,req,name=F_Uint32_required,json=FUint32Required" json:"F_Uint32_required,omitempty"` - F_Uint64Required *uint64 `protobuf:"varint,16,req,name=F_Uint64_required,json=FUint64Required" json:"F_Uint64_required,omitempty"` - F_FloatRequired *float32 `protobuf:"fixed32,17,req,name=F_Float_required,json=FFloatRequired" json:"F_Float_required,omitempty"` - F_DoubleRequired *float64 `protobuf:"fixed64,18,req,name=F_Double_required,json=FDoubleRequired" json:"F_Double_required,omitempty"` - F_StringRequired *string `protobuf:"bytes,19,req,name=F_String_required,json=FStringRequired" json:"F_String_required,omitempty"` - F_BytesRequired []byte `protobuf:"bytes,101,req,name=F_Bytes_required,json=FBytesRequired" json:"F_Bytes_required,omitempty"` - F_Sint32Required *int32 `protobuf:"zigzag32,102,req,name=F_Sint32_required,json=FSint32Required" json:"F_Sint32_required,omitempty"` - F_Sint64Required *int64 `protobuf:"zigzag64,103,req,name=F_Sint64_required,json=FSint64Required" json:"F_Sint64_required,omitempty"` - // Repeated fields of all basic types - F_BoolRepeated []bool `protobuf:"varint,20,rep,name=F_Bool_repeated,json=FBoolRepeated" json:"F_Bool_repeated,omitempty"` - F_Int32Repeated []int32 `protobuf:"varint,21,rep,name=F_Int32_repeated,json=FInt32Repeated" json:"F_Int32_repeated,omitempty"` - F_Int64Repeated []int64 `protobuf:"varint,22,rep,name=F_Int64_repeated,json=FInt64Repeated" json:"F_Int64_repeated,omitempty"` - F_Fixed32Repeated []uint32 `protobuf:"fixed32,23,rep,name=F_Fixed32_repeated,json=FFixed32Repeated" json:"F_Fixed32_repeated,omitempty"` - F_Fixed64Repeated []uint64 `protobuf:"fixed64,24,rep,name=F_Fixed64_repeated,json=FFixed64Repeated" json:"F_Fixed64_repeated,omitempty"` - F_Uint32Repeated []uint32 `protobuf:"varint,25,rep,name=F_Uint32_repeated,json=FUint32Repeated" json:"F_Uint32_repeated,omitempty"` - F_Uint64Repeated []uint64 `protobuf:"varint,26,rep,name=F_Uint64_repeated,json=FUint64Repeated" json:"F_Uint64_repeated,omitempty"` - F_FloatRepeated []float32 `protobuf:"fixed32,27,rep,name=F_Float_repeated,json=FFloatRepeated" json:"F_Float_repeated,omitempty"` - F_DoubleRepeated []float64 `protobuf:"fixed64,28,rep,name=F_Double_repeated,json=FDoubleRepeated" json:"F_Double_repeated,omitempty"` - F_StringRepeated []string `protobuf:"bytes,29,rep,name=F_String_repeated,json=FStringRepeated" json:"F_String_repeated,omitempty"` - F_BytesRepeated [][]byte `protobuf:"bytes,201,rep,name=F_Bytes_repeated,json=FBytesRepeated" 
json:"F_Bytes_repeated,omitempty"` - F_Sint32Repeated []int32 `protobuf:"zigzag32,202,rep,name=F_Sint32_repeated,json=FSint32Repeated" json:"F_Sint32_repeated,omitempty"` - F_Sint64Repeated []int64 `protobuf:"zigzag64,203,rep,name=F_Sint64_repeated,json=FSint64Repeated" json:"F_Sint64_repeated,omitempty"` - // Optional fields of all basic types - F_BoolOptional *bool `protobuf:"varint,30,opt,name=F_Bool_optional,json=FBoolOptional" json:"F_Bool_optional,omitempty"` - F_Int32Optional *int32 `protobuf:"varint,31,opt,name=F_Int32_optional,json=FInt32Optional" json:"F_Int32_optional,omitempty"` - F_Int64Optional *int64 `protobuf:"varint,32,opt,name=F_Int64_optional,json=FInt64Optional" json:"F_Int64_optional,omitempty"` - F_Fixed32Optional *uint32 `protobuf:"fixed32,33,opt,name=F_Fixed32_optional,json=FFixed32Optional" json:"F_Fixed32_optional,omitempty"` - F_Fixed64Optional *uint64 `protobuf:"fixed64,34,opt,name=F_Fixed64_optional,json=FFixed64Optional" json:"F_Fixed64_optional,omitempty"` - F_Uint32Optional *uint32 `protobuf:"varint,35,opt,name=F_Uint32_optional,json=FUint32Optional" json:"F_Uint32_optional,omitempty"` - F_Uint64Optional *uint64 `protobuf:"varint,36,opt,name=F_Uint64_optional,json=FUint64Optional" json:"F_Uint64_optional,omitempty"` - F_FloatOptional *float32 `protobuf:"fixed32,37,opt,name=F_Float_optional,json=FFloatOptional" json:"F_Float_optional,omitempty"` - F_DoubleOptional *float64 `protobuf:"fixed64,38,opt,name=F_Double_optional,json=FDoubleOptional" json:"F_Double_optional,omitempty"` - F_StringOptional *string `protobuf:"bytes,39,opt,name=F_String_optional,json=FStringOptional" json:"F_String_optional,omitempty"` - F_BytesOptional []byte `protobuf:"bytes,301,opt,name=F_Bytes_optional,json=FBytesOptional" json:"F_Bytes_optional,omitempty"` - F_Sint32Optional *int32 `protobuf:"zigzag32,302,opt,name=F_Sint32_optional,json=FSint32Optional" json:"F_Sint32_optional,omitempty"` - F_Sint64Optional *int64 `protobuf:"zigzag64,303,opt,name=F_Sint64_optional,json=FSint64Optional" json:"F_Sint64_optional,omitempty"` - // Default-valued fields of all basic types - F_BoolDefaulted *bool `protobuf:"varint,40,opt,name=F_Bool_defaulted,json=FBoolDefaulted,def=1" json:"F_Bool_defaulted,omitempty"` - F_Int32Defaulted *int32 `protobuf:"varint,41,opt,name=F_Int32_defaulted,json=FInt32Defaulted,def=32" json:"F_Int32_defaulted,omitempty"` - F_Int64Defaulted *int64 `protobuf:"varint,42,opt,name=F_Int64_defaulted,json=FInt64Defaulted,def=64" json:"F_Int64_defaulted,omitempty"` - F_Fixed32Defaulted *uint32 `protobuf:"fixed32,43,opt,name=F_Fixed32_defaulted,json=FFixed32Defaulted,def=320" json:"F_Fixed32_defaulted,omitempty"` - F_Fixed64Defaulted *uint64 `protobuf:"fixed64,44,opt,name=F_Fixed64_defaulted,json=FFixed64Defaulted,def=640" json:"F_Fixed64_defaulted,omitempty"` - F_Uint32Defaulted *uint32 `protobuf:"varint,45,opt,name=F_Uint32_defaulted,json=FUint32Defaulted,def=3200" json:"F_Uint32_defaulted,omitempty"` - F_Uint64Defaulted *uint64 `protobuf:"varint,46,opt,name=F_Uint64_defaulted,json=FUint64Defaulted,def=6400" json:"F_Uint64_defaulted,omitempty"` - F_FloatDefaulted *float32 `protobuf:"fixed32,47,opt,name=F_Float_defaulted,json=FFloatDefaulted,def=314159" json:"F_Float_defaulted,omitempty"` - F_DoubleDefaulted *float64 `protobuf:"fixed64,48,opt,name=F_Double_defaulted,json=FDoubleDefaulted,def=271828" json:"F_Double_defaulted,omitempty"` - F_StringDefaulted *string `protobuf:"bytes,49,opt,name=F_String_defaulted,json=FStringDefaulted,def=hello, \"world!\"\n" 
json:"F_String_defaulted,omitempty"` - F_BytesDefaulted []byte `protobuf:"bytes,401,opt,name=F_Bytes_defaulted,json=FBytesDefaulted,def=Bignose" json:"F_Bytes_defaulted,omitempty"` - F_Sint32Defaulted *int32 `protobuf:"zigzag32,402,opt,name=F_Sint32_defaulted,json=FSint32Defaulted,def=-32" json:"F_Sint32_defaulted,omitempty"` - F_Sint64Defaulted *int64 `protobuf:"zigzag64,403,opt,name=F_Sint64_defaulted,json=FSint64Defaulted,def=-64" json:"F_Sint64_defaulted,omitempty"` - // Packed repeated fields (no string or bytes). - F_BoolRepeatedPacked []bool `protobuf:"varint,50,rep,packed,name=F_Bool_repeated_packed,json=FBoolRepeatedPacked" json:"F_Bool_repeated_packed,omitempty"` - F_Int32RepeatedPacked []int32 `protobuf:"varint,51,rep,packed,name=F_Int32_repeated_packed,json=FInt32RepeatedPacked" json:"F_Int32_repeated_packed,omitempty"` - F_Int64RepeatedPacked []int64 `protobuf:"varint,52,rep,packed,name=F_Int64_repeated_packed,json=FInt64RepeatedPacked" json:"F_Int64_repeated_packed,omitempty"` - F_Fixed32RepeatedPacked []uint32 `protobuf:"fixed32,53,rep,packed,name=F_Fixed32_repeated_packed,json=FFixed32RepeatedPacked" json:"F_Fixed32_repeated_packed,omitempty"` - F_Fixed64RepeatedPacked []uint64 `protobuf:"fixed64,54,rep,packed,name=F_Fixed64_repeated_packed,json=FFixed64RepeatedPacked" json:"F_Fixed64_repeated_packed,omitempty"` - F_Uint32RepeatedPacked []uint32 `protobuf:"varint,55,rep,packed,name=F_Uint32_repeated_packed,json=FUint32RepeatedPacked" json:"F_Uint32_repeated_packed,omitempty"` - F_Uint64RepeatedPacked []uint64 `protobuf:"varint,56,rep,packed,name=F_Uint64_repeated_packed,json=FUint64RepeatedPacked" json:"F_Uint64_repeated_packed,omitempty"` - F_FloatRepeatedPacked []float32 `protobuf:"fixed32,57,rep,packed,name=F_Float_repeated_packed,json=FFloatRepeatedPacked" json:"F_Float_repeated_packed,omitempty"` - F_DoubleRepeatedPacked []float64 `protobuf:"fixed64,58,rep,packed,name=F_Double_repeated_packed,json=FDoubleRepeatedPacked" json:"F_Double_repeated_packed,omitempty"` - F_Sint32RepeatedPacked []int32 `protobuf:"zigzag32,502,rep,packed,name=F_Sint32_repeated_packed,json=FSint32RepeatedPacked" json:"F_Sint32_repeated_packed,omitempty"` - F_Sint64RepeatedPacked []int64 `protobuf:"zigzag64,503,rep,packed,name=F_Sint64_repeated_packed,json=FSint64RepeatedPacked" json:"F_Sint64_repeated_packed,omitempty"` - Requiredgroup *GoTest_RequiredGroup `protobuf:"group,70,req,name=RequiredGroup,json=requiredgroup" json:"requiredgroup,omitempty"` - Repeatedgroup []*GoTest_RepeatedGroup `protobuf:"group,80,rep,name=RepeatedGroup,json=repeatedgroup" json:"repeatedgroup,omitempty"` - Optionalgroup *GoTest_OptionalGroup `protobuf:"group,90,opt,name=OptionalGroup,json=optionalgroup" json:"optionalgroup,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest) Reset() { *m = GoTest{} } -func (m *GoTest) String() string { return proto.CompactTextString(m) } -func (*GoTest) ProtoMessage() {} -func (*GoTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -const Default_GoTest_F_BoolDefaulted bool = true -const Default_GoTest_F_Int32Defaulted int32 = 32 -const Default_GoTest_F_Int64Defaulted int64 = 64 -const Default_GoTest_F_Fixed32Defaulted uint32 = 320 -const Default_GoTest_F_Fixed64Defaulted uint64 = 640 -const Default_GoTest_F_Uint32Defaulted uint32 = 3200 -const Default_GoTest_F_Uint64Defaulted uint64 = 6400 -const Default_GoTest_F_FloatDefaulted float32 = 314159 -const Default_GoTest_F_DoubleDefaulted float64 = 271828 -const Default_GoTest_F_StringDefaulted 
string = "hello, \"world!\"\n" - -var Default_GoTest_F_BytesDefaulted []byte = []byte("Bignose") - -const Default_GoTest_F_Sint32Defaulted int32 = -32 -const Default_GoTest_F_Sint64Defaulted int64 = -64 - -func (m *GoTest) GetKind() GoTest_KIND { - if m != nil && m.Kind != nil { - return *m.Kind - } - return GoTest_VOID -} - -func (m *GoTest) GetTable() string { - if m != nil && m.Table != nil { - return *m.Table - } - return "" -} - -func (m *GoTest) GetParam() int32 { - if m != nil && m.Param != nil { - return *m.Param - } - return 0 -} - -func (m *GoTest) GetRequiredField() *GoTestField { - if m != nil { - return m.RequiredField - } - return nil -} - -func (m *GoTest) GetRepeatedField() []*GoTestField { - if m != nil { - return m.RepeatedField - } - return nil -} - -func (m *GoTest) GetOptionalField() *GoTestField { - if m != nil { - return m.OptionalField - } - return nil -} - -func (m *GoTest) GetF_BoolRequired() bool { - if m != nil && m.F_BoolRequired != nil { - return *m.F_BoolRequired - } - return false -} - -func (m *GoTest) GetF_Int32Required() int32 { - if m != nil && m.F_Int32Required != nil { - return *m.F_Int32Required - } - return 0 -} - -func (m *GoTest) GetF_Int64Required() int64 { - if m != nil && m.F_Int64Required != nil { - return *m.F_Int64Required - } - return 0 -} - -func (m *GoTest) GetF_Fixed32Required() uint32 { - if m != nil && m.F_Fixed32Required != nil { - return *m.F_Fixed32Required - } - return 0 -} - -func (m *GoTest) GetF_Fixed64Required() uint64 { - if m != nil && m.F_Fixed64Required != nil { - return *m.F_Fixed64Required - } - return 0 -} - -func (m *GoTest) GetF_Uint32Required() uint32 { - if m != nil && m.F_Uint32Required != nil { - return *m.F_Uint32Required - } - return 0 -} - -func (m *GoTest) GetF_Uint64Required() uint64 { - if m != nil && m.F_Uint64Required != nil { - return *m.F_Uint64Required - } - return 0 -} - -func (m *GoTest) GetF_FloatRequired() float32 { - if m != nil && m.F_FloatRequired != nil { - return *m.F_FloatRequired - } - return 0 -} - -func (m *GoTest) GetF_DoubleRequired() float64 { - if m != nil && m.F_DoubleRequired != nil { - return *m.F_DoubleRequired - } - return 0 -} - -func (m *GoTest) GetF_StringRequired() string { - if m != nil && m.F_StringRequired != nil { - return *m.F_StringRequired - } - return "" -} - -func (m *GoTest) GetF_BytesRequired() []byte { - if m != nil { - return m.F_BytesRequired - } - return nil -} - -func (m *GoTest) GetF_Sint32Required() int32 { - if m != nil && m.F_Sint32Required != nil { - return *m.F_Sint32Required - } - return 0 -} - -func (m *GoTest) GetF_Sint64Required() int64 { - if m != nil && m.F_Sint64Required != nil { - return *m.F_Sint64Required - } - return 0 -} - -func (m *GoTest) GetF_BoolRepeated() []bool { - if m != nil { - return m.F_BoolRepeated - } - return nil -} - -func (m *GoTest) GetF_Int32Repeated() []int32 { - if m != nil { - return m.F_Int32Repeated - } - return nil -} - -func (m *GoTest) GetF_Int64Repeated() []int64 { - if m != nil { - return m.F_Int64Repeated - } - return nil -} - -func (m *GoTest) GetF_Fixed32Repeated() []uint32 { - if m != nil { - return m.F_Fixed32Repeated - } - return nil -} - -func (m *GoTest) GetF_Fixed64Repeated() []uint64 { - if m != nil { - return m.F_Fixed64Repeated - } - return nil -} - -func (m *GoTest) GetF_Uint32Repeated() []uint32 { - if m != nil { - return m.F_Uint32Repeated - } - return nil -} - -func (m *GoTest) GetF_Uint64Repeated() []uint64 { - if m != nil { - return m.F_Uint64Repeated - } - return nil -} - -func (m *GoTest) 
GetF_FloatRepeated() []float32 { - if m != nil { - return m.F_FloatRepeated - } - return nil -} - -func (m *GoTest) GetF_DoubleRepeated() []float64 { - if m != nil { - return m.F_DoubleRepeated - } - return nil -} - -func (m *GoTest) GetF_StringRepeated() []string { - if m != nil { - return m.F_StringRepeated - } - return nil -} - -func (m *GoTest) GetF_BytesRepeated() [][]byte { - if m != nil { - return m.F_BytesRepeated - } - return nil -} - -func (m *GoTest) GetF_Sint32Repeated() []int32 { - if m != nil { - return m.F_Sint32Repeated - } - return nil -} - -func (m *GoTest) GetF_Sint64Repeated() []int64 { - if m != nil { - return m.F_Sint64Repeated - } - return nil -} - -func (m *GoTest) GetF_BoolOptional() bool { - if m != nil && m.F_BoolOptional != nil { - return *m.F_BoolOptional - } - return false -} - -func (m *GoTest) GetF_Int32Optional() int32 { - if m != nil && m.F_Int32Optional != nil { - return *m.F_Int32Optional - } - return 0 -} - -func (m *GoTest) GetF_Int64Optional() int64 { - if m != nil && m.F_Int64Optional != nil { - return *m.F_Int64Optional - } - return 0 -} - -func (m *GoTest) GetF_Fixed32Optional() uint32 { - if m != nil && m.F_Fixed32Optional != nil { - return *m.F_Fixed32Optional - } - return 0 -} - -func (m *GoTest) GetF_Fixed64Optional() uint64 { - if m != nil && m.F_Fixed64Optional != nil { - return *m.F_Fixed64Optional - } - return 0 -} - -func (m *GoTest) GetF_Uint32Optional() uint32 { - if m != nil && m.F_Uint32Optional != nil { - return *m.F_Uint32Optional - } - return 0 -} - -func (m *GoTest) GetF_Uint64Optional() uint64 { - if m != nil && m.F_Uint64Optional != nil { - return *m.F_Uint64Optional - } - return 0 -} - -func (m *GoTest) GetF_FloatOptional() float32 { - if m != nil && m.F_FloatOptional != nil { - return *m.F_FloatOptional - } - return 0 -} - -func (m *GoTest) GetF_DoubleOptional() float64 { - if m != nil && m.F_DoubleOptional != nil { - return *m.F_DoubleOptional - } - return 0 -} - -func (m *GoTest) GetF_StringOptional() string { - if m != nil && m.F_StringOptional != nil { - return *m.F_StringOptional - } - return "" -} - -func (m *GoTest) GetF_BytesOptional() []byte { - if m != nil { - return m.F_BytesOptional - } - return nil -} - -func (m *GoTest) GetF_Sint32Optional() int32 { - if m != nil && m.F_Sint32Optional != nil { - return *m.F_Sint32Optional - } - return 0 -} - -func (m *GoTest) GetF_Sint64Optional() int64 { - if m != nil && m.F_Sint64Optional != nil { - return *m.F_Sint64Optional - } - return 0 -} - -func (m *GoTest) GetF_BoolDefaulted() bool { - if m != nil && m.F_BoolDefaulted != nil { - return *m.F_BoolDefaulted - } - return Default_GoTest_F_BoolDefaulted -} - -func (m *GoTest) GetF_Int32Defaulted() int32 { - if m != nil && m.F_Int32Defaulted != nil { - return *m.F_Int32Defaulted - } - return Default_GoTest_F_Int32Defaulted -} - -func (m *GoTest) GetF_Int64Defaulted() int64 { - if m != nil && m.F_Int64Defaulted != nil { - return *m.F_Int64Defaulted - } - return Default_GoTest_F_Int64Defaulted -} - -func (m *GoTest) GetF_Fixed32Defaulted() uint32 { - if m != nil && m.F_Fixed32Defaulted != nil { - return *m.F_Fixed32Defaulted - } - return Default_GoTest_F_Fixed32Defaulted -} - -func (m *GoTest) GetF_Fixed64Defaulted() uint64 { - if m != nil && m.F_Fixed64Defaulted != nil { - return *m.F_Fixed64Defaulted - } - return Default_GoTest_F_Fixed64Defaulted -} - -func (m *GoTest) GetF_Uint32Defaulted() uint32 { - if m != nil && m.F_Uint32Defaulted != nil { - return *m.F_Uint32Defaulted - } - return Default_GoTest_F_Uint32Defaulted -} - 
-func (m *GoTest) GetF_Uint64Defaulted() uint64 { - if m != nil && m.F_Uint64Defaulted != nil { - return *m.F_Uint64Defaulted - } - return Default_GoTest_F_Uint64Defaulted -} - -func (m *GoTest) GetF_FloatDefaulted() float32 { - if m != nil && m.F_FloatDefaulted != nil { - return *m.F_FloatDefaulted - } - return Default_GoTest_F_FloatDefaulted -} - -func (m *GoTest) GetF_DoubleDefaulted() float64 { - if m != nil && m.F_DoubleDefaulted != nil { - return *m.F_DoubleDefaulted - } - return Default_GoTest_F_DoubleDefaulted -} - -func (m *GoTest) GetF_StringDefaulted() string { - if m != nil && m.F_StringDefaulted != nil { - return *m.F_StringDefaulted - } - return Default_GoTest_F_StringDefaulted -} - -func (m *GoTest) GetF_BytesDefaulted() []byte { - if m != nil && m.F_BytesDefaulted != nil { - return m.F_BytesDefaulted - } - return append([]byte(nil), Default_GoTest_F_BytesDefaulted...) -} - -func (m *GoTest) GetF_Sint32Defaulted() int32 { - if m != nil && m.F_Sint32Defaulted != nil { - return *m.F_Sint32Defaulted - } - return Default_GoTest_F_Sint32Defaulted -} - -func (m *GoTest) GetF_Sint64Defaulted() int64 { - if m != nil && m.F_Sint64Defaulted != nil { - return *m.F_Sint64Defaulted - } - return Default_GoTest_F_Sint64Defaulted -} - -func (m *GoTest) GetF_BoolRepeatedPacked() []bool { - if m != nil { - return m.F_BoolRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Int32RepeatedPacked() []int32 { - if m != nil { - return m.F_Int32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Int64RepeatedPacked() []int64 { - if m != nil { - return m.F_Int64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Fixed32RepeatedPacked() []uint32 { - if m != nil { - return m.F_Fixed32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Fixed64RepeatedPacked() []uint64 { - if m != nil { - return m.F_Fixed64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Uint32RepeatedPacked() []uint32 { - if m != nil { - return m.F_Uint32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Uint64RepeatedPacked() []uint64 { - if m != nil { - return m.F_Uint64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_FloatRepeatedPacked() []float32 { - if m != nil { - return m.F_FloatRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_DoubleRepeatedPacked() []float64 { - if m != nil { - return m.F_DoubleRepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Sint32RepeatedPacked() []int32 { - if m != nil { - return m.F_Sint32RepeatedPacked - } - return nil -} - -func (m *GoTest) GetF_Sint64RepeatedPacked() []int64 { - if m != nil { - return m.F_Sint64RepeatedPacked - } - return nil -} - -func (m *GoTest) GetRequiredgroup() *GoTest_RequiredGroup { - if m != nil { - return m.Requiredgroup - } - return nil -} - -func (m *GoTest) GetRepeatedgroup() []*GoTest_RepeatedGroup { - if m != nil { - return m.Repeatedgroup - } - return nil -} - -func (m *GoTest) GetOptionalgroup() *GoTest_OptionalGroup { - if m != nil { - return m.Optionalgroup - } - return nil -} - -// Required, repeated, and optional groups. 
-type GoTest_RequiredGroup struct { - RequiredField *string `protobuf:"bytes,71,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_RequiredGroup) Reset() { *m = GoTest_RequiredGroup{} } -func (m *GoTest_RequiredGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_RequiredGroup) ProtoMessage() {} -func (*GoTest_RequiredGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } - -func (m *GoTest_RequiredGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -type GoTest_RepeatedGroup struct { - RequiredField *string `protobuf:"bytes,81,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_RepeatedGroup) Reset() { *m = GoTest_RepeatedGroup{} } -func (m *GoTest_RepeatedGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_RepeatedGroup) ProtoMessage() {} -func (*GoTest_RepeatedGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 1} } - -func (m *GoTest_RepeatedGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -type GoTest_OptionalGroup struct { - RequiredField *string `protobuf:"bytes,91,req,name=RequiredField" json:"RequiredField,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTest_OptionalGroup) Reset() { *m = GoTest_OptionalGroup{} } -func (m *GoTest_OptionalGroup) String() string { return proto.CompactTextString(m) } -func (*GoTest_OptionalGroup) ProtoMessage() {} -func (*GoTest_OptionalGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 2} } - -func (m *GoTest_OptionalGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" -} - -// For testing a group containing a required field. -type GoTestRequiredGroupField struct { - Group *GoTestRequiredGroupField_Group `protobuf:"group,1,req,name=Group,json=group" json:"group,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestRequiredGroupField) Reset() { *m = GoTestRequiredGroupField{} } -func (m *GoTestRequiredGroupField) String() string { return proto.CompactTextString(m) } -func (*GoTestRequiredGroupField) ProtoMessage() {} -func (*GoTestRequiredGroupField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *GoTestRequiredGroupField) GetGroup() *GoTestRequiredGroupField_Group { - if m != nil { - return m.Group - } - return nil -} - -type GoTestRequiredGroupField_Group struct { - Field *int32 `protobuf:"varint,2,req,name=Field" json:"Field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoTestRequiredGroupField_Group) Reset() { *m = GoTestRequiredGroupField_Group{} } -func (m *GoTestRequiredGroupField_Group) String() string { return proto.CompactTextString(m) } -func (*GoTestRequiredGroupField_Group) ProtoMessage() {} -func (*GoTestRequiredGroupField_Group) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{3, 0} -} - -func (m *GoTestRequiredGroupField_Group) GetField() int32 { - if m != nil && m.Field != nil { - return *m.Field - } - return 0 -} - -// For testing skipping of unrecognized fields. -// Numbers are all big, larger than tag numbers in GoTestField, -// the message used in the corresponding test. 
-type GoSkipTest struct { - SkipInt32 *int32 `protobuf:"varint,11,req,name=skip_int32,json=skipInt32" json:"skip_int32,omitempty"` - SkipFixed32 *uint32 `protobuf:"fixed32,12,req,name=skip_fixed32,json=skipFixed32" json:"skip_fixed32,omitempty"` - SkipFixed64 *uint64 `protobuf:"fixed64,13,req,name=skip_fixed64,json=skipFixed64" json:"skip_fixed64,omitempty"` - SkipString *string `protobuf:"bytes,14,req,name=skip_string,json=skipString" json:"skip_string,omitempty"` - Skipgroup *GoSkipTest_SkipGroup `protobuf:"group,15,req,name=SkipGroup,json=skipgroup" json:"skipgroup,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoSkipTest) Reset() { *m = GoSkipTest{} } -func (m *GoSkipTest) String() string { return proto.CompactTextString(m) } -func (*GoSkipTest) ProtoMessage() {} -func (*GoSkipTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } - -func (m *GoSkipTest) GetSkipInt32() int32 { - if m != nil && m.SkipInt32 != nil { - return *m.SkipInt32 - } - return 0 -} - -func (m *GoSkipTest) GetSkipFixed32() uint32 { - if m != nil && m.SkipFixed32 != nil { - return *m.SkipFixed32 - } - return 0 -} - -func (m *GoSkipTest) GetSkipFixed64() uint64 { - if m != nil && m.SkipFixed64 != nil { - return *m.SkipFixed64 - } - return 0 -} - -func (m *GoSkipTest) GetSkipString() string { - if m != nil && m.SkipString != nil { - return *m.SkipString - } - return "" -} - -func (m *GoSkipTest) GetSkipgroup() *GoSkipTest_SkipGroup { - if m != nil { - return m.Skipgroup - } - return nil -} - -type GoSkipTest_SkipGroup struct { - GroupInt32 *int32 `protobuf:"varint,16,req,name=group_int32,json=groupInt32" json:"group_int32,omitempty"` - GroupString *string `protobuf:"bytes,17,req,name=group_string,json=groupString" json:"group_string,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GoSkipTest_SkipGroup) Reset() { *m = GoSkipTest_SkipGroup{} } -func (m *GoSkipTest_SkipGroup) String() string { return proto.CompactTextString(m) } -func (*GoSkipTest_SkipGroup) ProtoMessage() {} -func (*GoSkipTest_SkipGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} } - -func (m *GoSkipTest_SkipGroup) GetGroupInt32() int32 { - if m != nil && m.GroupInt32 != nil { - return *m.GroupInt32 - } - return 0 -} - -func (m *GoSkipTest_SkipGroup) GetGroupString() string { - if m != nil && m.GroupString != nil { - return *m.GroupString - } - return "" -} - -// For testing packed/non-packed decoder switching. -// A serialized instance of one should be deserializable as the other. 
-type NonPackedTest struct { - A []int32 `protobuf:"varint,1,rep,name=a" json:"a,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *NonPackedTest) Reset() { *m = NonPackedTest{} } -func (m *NonPackedTest) String() string { return proto.CompactTextString(m) } -func (*NonPackedTest) ProtoMessage() {} -func (*NonPackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } - -func (m *NonPackedTest) GetA() []int32 { - if m != nil { - return m.A - } - return nil -} - -type PackedTest struct { - B []int32 `protobuf:"varint,1,rep,packed,name=b" json:"b,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *PackedTest) Reset() { *m = PackedTest{} } -func (m *PackedTest) String() string { return proto.CompactTextString(m) } -func (*PackedTest) ProtoMessage() {} -func (*PackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } - -func (m *PackedTest) GetB() []int32 { - if m != nil { - return m.B - } - return nil -} - -type MaxTag struct { - // Maximum possible tag number. - LastField *string `protobuf:"bytes,536870911,opt,name=last_field,json=lastField" json:"last_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MaxTag) Reset() { *m = MaxTag{} } -func (m *MaxTag) String() string { return proto.CompactTextString(m) } -func (*MaxTag) ProtoMessage() {} -func (*MaxTag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } - -func (m *MaxTag) GetLastField() string { - if m != nil && m.LastField != nil { - return *m.LastField - } - return "" -} - -type OldMessage struct { - Nested *OldMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` - Num *int32 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OldMessage) Reset() { *m = OldMessage{} } -func (m *OldMessage) String() string { return proto.CompactTextString(m) } -func (*OldMessage) ProtoMessage() {} -func (*OldMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } - -func (m *OldMessage) GetNested() *OldMessage_Nested { - if m != nil { - return m.Nested - } - return nil -} - -func (m *OldMessage) GetNum() int32 { - if m != nil && m.Num != nil { - return *m.Num - } - return 0 -} - -type OldMessage_Nested struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OldMessage_Nested) Reset() { *m = OldMessage_Nested{} } -func (m *OldMessage_Nested) String() string { return proto.CompactTextString(m) } -func (*OldMessage_Nested) ProtoMessage() {} -func (*OldMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8, 0} } - -func (m *OldMessage_Nested) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -// NewMessage is wire compatible with OldMessage; -// imagine it as a future version. -type NewMessage struct { - Nested *NewMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"` - // This is an int32 in OldMessage. 
- Num *int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *NewMessage) Reset() { *m = NewMessage{} } -func (m *NewMessage) String() string { return proto.CompactTextString(m) } -func (*NewMessage) ProtoMessage() {} -func (*NewMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } - -func (m *NewMessage) GetNested() *NewMessage_Nested { - if m != nil { - return m.Nested - } - return nil -} - -func (m *NewMessage) GetNum() int64 { - if m != nil && m.Num != nil { - return *m.Num - } - return 0 -} - -type NewMessage_Nested struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - FoodGroup *string `protobuf:"bytes,2,opt,name=food_group,json=foodGroup" json:"food_group,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *NewMessage_Nested) Reset() { *m = NewMessage_Nested{} } -func (m *NewMessage_Nested) String() string { return proto.CompactTextString(m) } -func (*NewMessage_Nested) ProtoMessage() {} -func (*NewMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } - -func (m *NewMessage_Nested) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *NewMessage_Nested) GetFoodGroup() string { - if m != nil && m.FoodGroup != nil { - return *m.FoodGroup - } - return "" -} - -type InnerMessage struct { - Host *string `protobuf:"bytes,1,req,name=host" json:"host,omitempty"` - Port *int32 `protobuf:"varint,2,opt,name=port,def=4000" json:"port,omitempty"` - Connected *bool `protobuf:"varint,3,opt,name=connected" json:"connected,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *InnerMessage) Reset() { *m = InnerMessage{} } -func (m *InnerMessage) String() string { return proto.CompactTextString(m) } -func (*InnerMessage) ProtoMessage() {} -func (*InnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } - -const Default_InnerMessage_Port int32 = 4000 - -func (m *InnerMessage) GetHost() string { - if m != nil && m.Host != nil { - return *m.Host - } - return "" -} - -func (m *InnerMessage) GetPort() int32 { - if m != nil && m.Port != nil { - return *m.Port - } - return Default_InnerMessage_Port -} - -func (m *InnerMessage) GetConnected() bool { - if m != nil && m.Connected != nil { - return *m.Connected - } - return false -} - -type OtherMessage struct { - Key *int64 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - Value []byte `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` - Weight *float32 `protobuf:"fixed32,3,opt,name=weight" json:"weight,omitempty"` - Inner *InnerMessage `protobuf:"bytes,4,opt,name=inner" json:"inner,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherMessage) Reset() { *m = OtherMessage{} } -func (m *OtherMessage) String() string { return proto.CompactTextString(m) } -func (*OtherMessage) ProtoMessage() {} -func (*OtherMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } - -var extRange_OtherMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*OtherMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OtherMessage -} - -func (m *OtherMessage) GetKey() int64 { - if m != nil && m.Key != nil { - return *m.Key - } - return 0 -} - -func (m *OtherMessage) GetValue() []byte { - if m != nil { - return m.Value - } - return nil -} - -func (m *OtherMessage) GetWeight() float32 { - if m != nil && m.Weight != nil { - return *m.Weight 
- } - return 0 -} - -func (m *OtherMessage) GetInner() *InnerMessage { - if m != nil { - return m.Inner - } - return nil -} - -type RequiredInnerMessage struct { - LeoFinallyWonAnOscar *InnerMessage `protobuf:"bytes,1,req,name=leo_finally_won_an_oscar,json=leoFinallyWonAnOscar" json:"leo_finally_won_an_oscar,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *RequiredInnerMessage) Reset() { *m = RequiredInnerMessage{} } -func (m *RequiredInnerMessage) String() string { return proto.CompactTextString(m) } -func (*RequiredInnerMessage) ProtoMessage() {} -func (*RequiredInnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } - -func (m *RequiredInnerMessage) GetLeoFinallyWonAnOscar() *InnerMessage { - if m != nil { - return m.LeoFinallyWonAnOscar - } - return nil -} - -type MyMessage struct { - Count *int32 `protobuf:"varint,1,req,name=count" json:"count,omitempty"` - Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` - Quote *string `protobuf:"bytes,3,opt,name=quote" json:"quote,omitempty"` - Pet []string `protobuf:"bytes,4,rep,name=pet" json:"pet,omitempty"` - Inner *InnerMessage `protobuf:"bytes,5,opt,name=inner" json:"inner,omitempty"` - Others []*OtherMessage `protobuf:"bytes,6,rep,name=others" json:"others,omitempty"` - WeMustGoDeeper *RequiredInnerMessage `protobuf:"bytes,13,opt,name=we_must_go_deeper,json=weMustGoDeeper" json:"we_must_go_deeper,omitempty"` - RepInner []*InnerMessage `protobuf:"bytes,12,rep,name=rep_inner,json=repInner" json:"rep_inner,omitempty"` - Bikeshed *MyMessage_Color `protobuf:"varint,7,opt,name=bikeshed,enum=testdata.MyMessage_Color" json:"bikeshed,omitempty"` - Somegroup *MyMessage_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` - // This field becomes [][]byte in the generated code. 
- RepBytes [][]byte `protobuf:"bytes,10,rep,name=rep_bytes,json=repBytes" json:"rep_bytes,omitempty"` - Bigfloat *float64 `protobuf:"fixed64,11,opt,name=bigfloat" json:"bigfloat,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessage) Reset() { *m = MyMessage{} } -func (m *MyMessage) String() string { return proto.CompactTextString(m) } -func (*MyMessage) ProtoMessage() {} -func (*MyMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } - -var extRange_MyMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*MyMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MyMessage -} - -func (m *MyMessage) GetCount() int32 { - if m != nil && m.Count != nil { - return *m.Count - } - return 0 -} - -func (m *MyMessage) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *MyMessage) GetQuote() string { - if m != nil && m.Quote != nil { - return *m.Quote - } - return "" -} - -func (m *MyMessage) GetPet() []string { - if m != nil { - return m.Pet - } - return nil -} - -func (m *MyMessage) GetInner() *InnerMessage { - if m != nil { - return m.Inner - } - return nil -} - -func (m *MyMessage) GetOthers() []*OtherMessage { - if m != nil { - return m.Others - } - return nil -} - -func (m *MyMessage) GetWeMustGoDeeper() *RequiredInnerMessage { - if m != nil { - return m.WeMustGoDeeper - } - return nil -} - -func (m *MyMessage) GetRepInner() []*InnerMessage { - if m != nil { - return m.RepInner - } - return nil -} - -func (m *MyMessage) GetBikeshed() MyMessage_Color { - if m != nil && m.Bikeshed != nil { - return *m.Bikeshed - } - return MyMessage_RED -} - -func (m *MyMessage) GetSomegroup() *MyMessage_SomeGroup { - if m != nil { - return m.Somegroup - } - return nil -} - -func (m *MyMessage) GetRepBytes() [][]byte { - if m != nil { - return m.RepBytes - } - return nil -} - -func (m *MyMessage) GetBigfloat() float64 { - if m != nil && m.Bigfloat != nil { - return *m.Bigfloat - } - return 0 -} - -type MyMessage_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessage_SomeGroup) Reset() { *m = MyMessage_SomeGroup{} } -func (m *MyMessage_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*MyMessage_SomeGroup) ProtoMessage() {} -func (*MyMessage_SomeGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } - -func (m *MyMessage_SomeGroup) GetGroupField() int32 { - if m != nil && m.GroupField != nil { - return *m.GroupField - } - return 0 -} - -type Ext struct { - Data *string `protobuf:"bytes,1,opt,name=data" json:"data,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Ext) Reset() { *m = Ext{} } -func (m *Ext) String() string { return proto.CompactTextString(m) } -func (*Ext) ProtoMessage() {} -func (*Ext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } - -func (m *Ext) GetData() string { - if m != nil && m.Data != nil { - return *m.Data - } - return "" -} - -var E_Ext_More = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*Ext)(nil), - Field: 103, - Name: "testdata.Ext.more", - Tag: "bytes,103,opt,name=more", - Filename: "test.proto", -} - -var E_Ext_Text = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*string)(nil), - Field: 104, - Name: "testdata.Ext.text", - Tag: "bytes,104,opt,name=text", - 
Filename: "test.proto", -} - -var E_Ext_Number = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 105, - Name: "testdata.Ext.number", - Tag: "varint,105,opt,name=number", - Filename: "test.proto", -} - -type ComplexExtension struct { - First *int32 `protobuf:"varint,1,opt,name=first" json:"first,omitempty"` - Second *int32 `protobuf:"varint,2,opt,name=second" json:"second,omitempty"` - Third []int32 `protobuf:"varint,3,rep,name=third" json:"third,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *ComplexExtension) Reset() { *m = ComplexExtension{} } -func (m *ComplexExtension) String() string { return proto.CompactTextString(m) } -func (*ComplexExtension) ProtoMessage() {} -func (*ComplexExtension) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } - -func (m *ComplexExtension) GetFirst() int32 { - if m != nil && m.First != nil { - return *m.First - } - return 0 -} - -func (m *ComplexExtension) GetSecond() int32 { - if m != nil && m.Second != nil { - return *m.Second - } - return 0 -} - -func (m *ComplexExtension) GetThird() []int32 { - if m != nil { - return m.Third - } - return nil -} - -type DefaultsMessage struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *DefaultsMessage) Reset() { *m = DefaultsMessage{} } -func (m *DefaultsMessage) String() string { return proto.CompactTextString(m) } -func (*DefaultsMessage) ProtoMessage() {} -func (*DefaultsMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } - -var extRange_DefaultsMessage = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*DefaultsMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_DefaultsMessage -} - -type MyMessageSet struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MyMessageSet) Reset() { *m = MyMessageSet{} } -func (m *MyMessageSet) String() string { return proto.CompactTextString(m) } -func (*MyMessageSet) ProtoMessage() {} -func (*MyMessageSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } - -func (m *MyMessageSet) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *MyMessageSet) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} -func (m *MyMessageSet) MarshalJSON() ([]byte, error) { - return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) -} -func (m *MyMessageSet) UnmarshalJSON(buf []byte) error { - return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) -} - -// ensure MyMessageSet satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*MyMessageSet)(nil) -var _ proto.Unmarshaler = (*MyMessageSet)(nil) - -var extRange_MyMessageSet = []proto.ExtensionRange{ - {100, 2147483646}, -} - -func (*MyMessageSet) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MyMessageSet -} - -type Empty struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *Empty) Reset() { *m = Empty{} } -func (m *Empty) String() string { return proto.CompactTextString(m) } -func (*Empty) ProtoMessage() {} -func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } - -type MessageList struct { - Message []*MessageList_Message `protobuf:"group,1,rep,name=Message,json=message" json:"message,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageList) Reset() { *m = MessageList{} } -func (m *MessageList) String() 
string { return proto.CompactTextString(m) } -func (*MessageList) ProtoMessage() {} -func (*MessageList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } - -func (m *MessageList) GetMessage() []*MessageList_Message { - if m != nil { - return m.Message - } - return nil -} - -type MessageList_Message struct { - Name *string `protobuf:"bytes,2,req,name=name" json:"name,omitempty"` - Count *int32 `protobuf:"varint,3,req,name=count" json:"count,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageList_Message) Reset() { *m = MessageList_Message{} } -func (m *MessageList_Message) String() string { return proto.CompactTextString(m) } -func (*MessageList_Message) ProtoMessage() {} -func (*MessageList_Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} } - -func (m *MessageList_Message) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *MessageList_Message) GetCount() int32 { - if m != nil && m.Count != nil { - return *m.Count - } - return 0 -} - -type Strings struct { - StringField *string `protobuf:"bytes,1,opt,name=string_field,json=stringField" json:"string_field,omitempty"` - BytesField []byte `protobuf:"bytes,2,opt,name=bytes_field,json=bytesField" json:"bytes_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Strings) Reset() { *m = Strings{} } -func (m *Strings) String() string { return proto.CompactTextString(m) } -func (*Strings) ProtoMessage() {} -func (*Strings) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } - -func (m *Strings) GetStringField() string { - if m != nil && m.StringField != nil { - return *m.StringField - } - return "" -} - -func (m *Strings) GetBytesField() []byte { - if m != nil { - return m.BytesField - } - return nil -} - -type Defaults struct { - // Default-valued fields of all basic types. - // Same as GoTest, but copied here to make testing easier. - F_Bool *bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,def=1" json:"F_Bool,omitempty"` - F_Int32 *int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,def=32" json:"F_Int32,omitempty"` - F_Int64 *int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,def=64" json:"F_Int64,omitempty"` - F_Fixed32 *uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,def=320" json:"F_Fixed32,omitempty"` - F_Fixed64 *uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,def=640" json:"F_Fixed64,omitempty"` - F_Uint32 *uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,def=3200" json:"F_Uint32,omitempty"` - F_Uint64 *uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,def=6400" json:"F_Uint64,omitempty"` - F_Float *float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,def=314159" json:"F_Float,omitempty"` - F_Double *float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,def=271828" json:"F_Double,omitempty"` - F_String *string `protobuf:"bytes,10,opt,name=F_String,json=FString,def=hello, \"world!\"\n" json:"F_String,omitempty"` - F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,def=Bignose" json:"F_Bytes,omitempty"` - F_Sint32 *int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,def=-32" json:"F_Sint32,omitempty"` - F_Sint64 *int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,def=-64" json:"F_Sint64,omitempty"` - F_Enum *Defaults_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.Defaults_Color,def=1" json:"F_Enum,omitempty"` - // More fields with crazy defaults. 
- F_Pinf *float32 `protobuf:"fixed32,15,opt,name=F_Pinf,json=FPinf,def=inf" json:"F_Pinf,omitempty"` - F_Ninf *float32 `protobuf:"fixed32,16,opt,name=F_Ninf,json=FNinf,def=-inf" json:"F_Ninf,omitempty"` - F_Nan *float32 `protobuf:"fixed32,17,opt,name=F_Nan,json=FNan,def=nan" json:"F_Nan,omitempty"` - // Sub-message. - Sub *SubDefaults `protobuf:"bytes,18,opt,name=sub" json:"sub,omitempty"` - // Redundant but explicit defaults. - StrZero *string `protobuf:"bytes,19,opt,name=str_zero,json=strZero,def=" json:"str_zero,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Defaults) Reset() { *m = Defaults{} } -func (m *Defaults) String() string { return proto.CompactTextString(m) } -func (*Defaults) ProtoMessage() {} -func (*Defaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } - -const Default_Defaults_F_Bool bool = true -const Default_Defaults_F_Int32 int32 = 32 -const Default_Defaults_F_Int64 int64 = 64 -const Default_Defaults_F_Fixed32 uint32 = 320 -const Default_Defaults_F_Fixed64 uint64 = 640 -const Default_Defaults_F_Uint32 uint32 = 3200 -const Default_Defaults_F_Uint64 uint64 = 6400 -const Default_Defaults_F_Float float32 = 314159 -const Default_Defaults_F_Double float64 = 271828 -const Default_Defaults_F_String string = "hello, \"world!\"\n" - -var Default_Defaults_F_Bytes []byte = []byte("Bignose") - -const Default_Defaults_F_Sint32 int32 = -32 -const Default_Defaults_F_Sint64 int64 = -64 -const Default_Defaults_F_Enum Defaults_Color = Defaults_GREEN - -var Default_Defaults_F_Pinf float32 = float32(math.Inf(1)) -var Default_Defaults_F_Ninf float32 = float32(math.Inf(-1)) -var Default_Defaults_F_Nan float32 = float32(math.NaN()) - -func (m *Defaults) GetF_Bool() bool { - if m != nil && m.F_Bool != nil { - return *m.F_Bool - } - return Default_Defaults_F_Bool -} - -func (m *Defaults) GetF_Int32() int32 { - if m != nil && m.F_Int32 != nil { - return *m.F_Int32 - } - return Default_Defaults_F_Int32 -} - -func (m *Defaults) GetF_Int64() int64 { - if m != nil && m.F_Int64 != nil { - return *m.F_Int64 - } - return Default_Defaults_F_Int64 -} - -func (m *Defaults) GetF_Fixed32() uint32 { - if m != nil && m.F_Fixed32 != nil { - return *m.F_Fixed32 - } - return Default_Defaults_F_Fixed32 -} - -func (m *Defaults) GetF_Fixed64() uint64 { - if m != nil && m.F_Fixed64 != nil { - return *m.F_Fixed64 - } - return Default_Defaults_F_Fixed64 -} - -func (m *Defaults) GetF_Uint32() uint32 { - if m != nil && m.F_Uint32 != nil { - return *m.F_Uint32 - } - return Default_Defaults_F_Uint32 -} - -func (m *Defaults) GetF_Uint64() uint64 { - if m != nil && m.F_Uint64 != nil { - return *m.F_Uint64 - } - return Default_Defaults_F_Uint64 -} - -func (m *Defaults) GetF_Float() float32 { - if m != nil && m.F_Float != nil { - return *m.F_Float - } - return Default_Defaults_F_Float -} - -func (m *Defaults) GetF_Double() float64 { - if m != nil && m.F_Double != nil { - return *m.F_Double - } - return Default_Defaults_F_Double -} - -func (m *Defaults) GetF_String() string { - if m != nil && m.F_String != nil { - return *m.F_String - } - return Default_Defaults_F_String -} - -func (m *Defaults) GetF_Bytes() []byte { - if m != nil && m.F_Bytes != nil { - return m.F_Bytes - } - return append([]byte(nil), Default_Defaults_F_Bytes...) 
-} - -func (m *Defaults) GetF_Sint32() int32 { - if m != nil && m.F_Sint32 != nil { - return *m.F_Sint32 - } - return Default_Defaults_F_Sint32 -} - -func (m *Defaults) GetF_Sint64() int64 { - if m != nil && m.F_Sint64 != nil { - return *m.F_Sint64 - } - return Default_Defaults_F_Sint64 -} - -func (m *Defaults) GetF_Enum() Defaults_Color { - if m != nil && m.F_Enum != nil { - return *m.F_Enum - } - return Default_Defaults_F_Enum -} - -func (m *Defaults) GetF_Pinf() float32 { - if m != nil && m.F_Pinf != nil { - return *m.F_Pinf - } - return Default_Defaults_F_Pinf -} - -func (m *Defaults) GetF_Ninf() float32 { - if m != nil && m.F_Ninf != nil { - return *m.F_Ninf - } - return Default_Defaults_F_Ninf -} - -func (m *Defaults) GetF_Nan() float32 { - if m != nil && m.F_Nan != nil { - return *m.F_Nan - } - return Default_Defaults_F_Nan -} - -func (m *Defaults) GetSub() *SubDefaults { - if m != nil { - return m.Sub - } - return nil -} - -func (m *Defaults) GetStrZero() string { - if m != nil && m.StrZero != nil { - return *m.StrZero - } - return "" -} - -type SubDefaults struct { - N *int64 `protobuf:"varint,1,opt,name=n,def=7" json:"n,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *SubDefaults) Reset() { *m = SubDefaults{} } -func (m *SubDefaults) String() string { return proto.CompactTextString(m) } -func (*SubDefaults) ProtoMessage() {} -func (*SubDefaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } - -const Default_SubDefaults_N int64 = 7 - -func (m *SubDefaults) GetN() int64 { - if m != nil && m.N != nil { - return *m.N - } - return Default_SubDefaults_N -} - -type RepeatedEnum struct { - Color []RepeatedEnum_Color `protobuf:"varint,1,rep,name=color,enum=testdata.RepeatedEnum_Color" json:"color,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *RepeatedEnum) Reset() { *m = RepeatedEnum{} } -func (m *RepeatedEnum) String() string { return proto.CompactTextString(m) } -func (*RepeatedEnum) ProtoMessage() {} -func (*RepeatedEnum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } - -func (m *RepeatedEnum) GetColor() []RepeatedEnum_Color { - if m != nil { - return m.Color - } - return nil -} - -type MoreRepeated struct { - Bools []bool `protobuf:"varint,1,rep,name=bools" json:"bools,omitempty"` - BoolsPacked []bool `protobuf:"varint,2,rep,packed,name=bools_packed,json=boolsPacked" json:"bools_packed,omitempty"` - Ints []int32 `protobuf:"varint,3,rep,name=ints" json:"ints,omitempty"` - IntsPacked []int32 `protobuf:"varint,4,rep,packed,name=ints_packed,json=intsPacked" json:"ints_packed,omitempty"` - Int64SPacked []int64 `protobuf:"varint,7,rep,packed,name=int64s_packed,json=int64sPacked" json:"int64s_packed,omitempty"` - Strings []string `protobuf:"bytes,5,rep,name=strings" json:"strings,omitempty"` - Fixeds []uint32 `protobuf:"fixed32,6,rep,name=fixeds" json:"fixeds,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MoreRepeated) Reset() { *m = MoreRepeated{} } -func (m *MoreRepeated) String() string { return proto.CompactTextString(m) } -func (*MoreRepeated) ProtoMessage() {} -func (*MoreRepeated) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } - -func (m *MoreRepeated) GetBools() []bool { - if m != nil { - return m.Bools - } - return nil -} - -func (m *MoreRepeated) GetBoolsPacked() []bool { - if m != nil { - return m.BoolsPacked - } - return nil -} - -func (m *MoreRepeated) GetInts() []int32 { - if m != nil { - return m.Ints - } - return nil -} - -func (m *MoreRepeated) GetIntsPacked() 
[]int32 { - if m != nil { - return m.IntsPacked - } - return nil -} - -func (m *MoreRepeated) GetInt64SPacked() []int64 { - if m != nil { - return m.Int64SPacked - } - return nil -} - -func (m *MoreRepeated) GetStrings() []string { - if m != nil { - return m.Strings - } - return nil -} - -func (m *MoreRepeated) GetFixeds() []uint32 { - if m != nil { - return m.Fixeds - } - return nil -} - -type GroupOld struct { - G *GroupOld_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupOld) Reset() { *m = GroupOld{} } -func (m *GroupOld) String() string { return proto.CompactTextString(m) } -func (*GroupOld) ProtoMessage() {} -func (*GroupOld) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } - -func (m *GroupOld) GetG() *GroupOld_G { - if m != nil { - return m.G - } - return nil -} - -type GroupOld_G struct { - X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupOld_G) Reset() { *m = GroupOld_G{} } -func (m *GroupOld_G) String() string { return proto.CompactTextString(m) } -func (*GroupOld_G) ProtoMessage() {} -func (*GroupOld_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25, 0} } - -func (m *GroupOld_G) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -type GroupNew struct { - G *GroupNew_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupNew) Reset() { *m = GroupNew{} } -func (m *GroupNew) String() string { return proto.CompactTextString(m) } -func (*GroupNew) ProtoMessage() {} -func (*GroupNew) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } - -func (m *GroupNew) GetG() *GroupNew_G { - if m != nil { - return m.G - } - return nil -} - -type GroupNew_G struct { - X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` - Y *int32 `protobuf:"varint,3,opt,name=y" json:"y,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GroupNew_G) Reset() { *m = GroupNew_G{} } -func (m *GroupNew_G) String() string { return proto.CompactTextString(m) } -func (*GroupNew_G) ProtoMessage() {} -func (*GroupNew_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26, 0} } - -func (m *GroupNew_G) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -func (m *GroupNew_G) GetY() int32 { - if m != nil && m.Y != nil { - return *m.Y - } - return 0 -} - -type FloatingPoint struct { - F *float64 `protobuf:"fixed64,1,req,name=f" json:"f,omitempty"` - Exact *bool `protobuf:"varint,2,opt,name=exact" json:"exact,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FloatingPoint) Reset() { *m = FloatingPoint{} } -func (m *FloatingPoint) String() string { return proto.CompactTextString(m) } -func (*FloatingPoint) ProtoMessage() {} -func (*FloatingPoint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } - -func (m *FloatingPoint) GetF() float64 { - if m != nil && m.F != nil { - return *m.F - } - return 0 -} - -func (m *FloatingPoint) GetExact() bool { - if m != nil && m.Exact != nil { - return *m.Exact - } - return false -} - -type MessageWithMap struct { - NameMapping map[int32]string `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - MsgMapping map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" 
protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - ByteMapping map[bool][]byte `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - StrToStr map[string]string `protobuf:"bytes,4,rep,name=str_to_str,json=strToStr" json:"str_to_str,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } -func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } -func (*MessageWithMap) ProtoMessage() {} -func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } - -func (m *MessageWithMap) GetNameMapping() map[int32]string { - if m != nil { - return m.NameMapping - } - return nil -} - -func (m *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint { - if m != nil { - return m.MsgMapping - } - return nil -} - -func (m *MessageWithMap) GetByteMapping() map[bool][]byte { - if m != nil { - return m.ByteMapping - } - return nil -} - -func (m *MessageWithMap) GetStrToStr() map[string]string { - if m != nil { - return m.StrToStr - } - return nil -} - -type Oneof struct { - // Types that are valid to be assigned to Union: - // *Oneof_F_Bool - // *Oneof_F_Int32 - // *Oneof_F_Int64 - // *Oneof_F_Fixed32 - // *Oneof_F_Fixed64 - // *Oneof_F_Uint32 - // *Oneof_F_Uint64 - // *Oneof_F_Float - // *Oneof_F_Double - // *Oneof_F_String - // *Oneof_F_Bytes - // *Oneof_F_Sint32 - // *Oneof_F_Sint64 - // *Oneof_F_Enum - // *Oneof_F_Message - // *Oneof_FGroup - // *Oneof_F_Largest_Tag - Union isOneof_Union `protobuf_oneof:"union"` - // Types that are valid to be assigned to Tormato: - // *Oneof_Value - Tormato isOneof_Tormato `protobuf_oneof:"tormato"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Oneof) Reset() { *m = Oneof{} } -func (m *Oneof) String() string { return proto.CompactTextString(m) } -func (*Oneof) ProtoMessage() {} -func (*Oneof) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } - -type isOneof_Union interface { - isOneof_Union() -} -type isOneof_Tormato interface { - isOneof_Tormato() -} - -type Oneof_F_Bool struct { - F_Bool bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,oneof"` -} -type Oneof_F_Int32 struct { - F_Int32 int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,oneof"` -} -type Oneof_F_Int64 struct { - F_Int64 int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,oneof"` -} -type Oneof_F_Fixed32 struct { - F_Fixed32 uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,oneof"` -} -type Oneof_F_Fixed64 struct { - F_Fixed64 uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,oneof"` -} -type Oneof_F_Uint32 struct { - F_Uint32 uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,oneof"` -} -type Oneof_F_Uint64 struct { - F_Uint64 uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,oneof"` -} -type Oneof_F_Float struct { - F_Float float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,oneof"` -} -type Oneof_F_Double struct { - F_Double float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,oneof"` -} -type Oneof_F_String struct { - F_String string `protobuf:"bytes,10,opt,name=F_String,json=FString,oneof"` -} -type Oneof_F_Bytes struct { - F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,oneof"` -} -type Oneof_F_Sint32 struct { - F_Sint32 int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,oneof"` 
-} -type Oneof_F_Sint64 struct { - F_Sint64 int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,oneof"` -} -type Oneof_F_Enum struct { - F_Enum MyMessage_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.MyMessage_Color,oneof"` -} -type Oneof_F_Message struct { - F_Message *GoTestField `protobuf:"bytes,15,opt,name=F_Message,json=FMessage,oneof"` -} -type Oneof_FGroup struct { - FGroup *Oneof_F_Group `protobuf:"group,16,opt,name=F_Group,json=fGroup,oneof"` -} -type Oneof_F_Largest_Tag struct { - F_Largest_Tag int32 `protobuf:"varint,536870911,opt,name=F_Largest_Tag,json=FLargestTag,oneof"` -} -type Oneof_Value struct { - Value int32 `protobuf:"varint,100,opt,name=value,oneof"` -} - -func (*Oneof_F_Bool) isOneof_Union() {} -func (*Oneof_F_Int32) isOneof_Union() {} -func (*Oneof_F_Int64) isOneof_Union() {} -func (*Oneof_F_Fixed32) isOneof_Union() {} -func (*Oneof_F_Fixed64) isOneof_Union() {} -func (*Oneof_F_Uint32) isOneof_Union() {} -func (*Oneof_F_Uint64) isOneof_Union() {} -func (*Oneof_F_Float) isOneof_Union() {} -func (*Oneof_F_Double) isOneof_Union() {} -func (*Oneof_F_String) isOneof_Union() {} -func (*Oneof_F_Bytes) isOneof_Union() {} -func (*Oneof_F_Sint32) isOneof_Union() {} -func (*Oneof_F_Sint64) isOneof_Union() {} -func (*Oneof_F_Enum) isOneof_Union() {} -func (*Oneof_F_Message) isOneof_Union() {} -func (*Oneof_FGroup) isOneof_Union() {} -func (*Oneof_F_Largest_Tag) isOneof_Union() {} -func (*Oneof_Value) isOneof_Tormato() {} - -func (m *Oneof) GetUnion() isOneof_Union { - if m != nil { - return m.Union - } - return nil -} -func (m *Oneof) GetTormato() isOneof_Tormato { - if m != nil { - return m.Tormato - } - return nil -} - -func (m *Oneof) GetF_Bool() bool { - if x, ok := m.GetUnion().(*Oneof_F_Bool); ok { - return x.F_Bool - } - return false -} - -func (m *Oneof) GetF_Int32() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Int32); ok { - return x.F_Int32 - } - return 0 -} - -func (m *Oneof) GetF_Int64() int64 { - if x, ok := m.GetUnion().(*Oneof_F_Int64); ok { - return x.F_Int64 - } - return 0 -} - -func (m *Oneof) GetF_Fixed32() uint32 { - if x, ok := m.GetUnion().(*Oneof_F_Fixed32); ok { - return x.F_Fixed32 - } - return 0 -} - -func (m *Oneof) GetF_Fixed64() uint64 { - if x, ok := m.GetUnion().(*Oneof_F_Fixed64); ok { - return x.F_Fixed64 - } - return 0 -} - -func (m *Oneof) GetF_Uint32() uint32 { - if x, ok := m.GetUnion().(*Oneof_F_Uint32); ok { - return x.F_Uint32 - } - return 0 -} - -func (m *Oneof) GetF_Uint64() uint64 { - if x, ok := m.GetUnion().(*Oneof_F_Uint64); ok { - return x.F_Uint64 - } - return 0 -} - -func (m *Oneof) GetF_Float() float32 { - if x, ok := m.GetUnion().(*Oneof_F_Float); ok { - return x.F_Float - } - return 0 -} - -func (m *Oneof) GetF_Double() float64 { - if x, ok := m.GetUnion().(*Oneof_F_Double); ok { - return x.F_Double - } - return 0 -} - -func (m *Oneof) GetF_String() string { - if x, ok := m.GetUnion().(*Oneof_F_String); ok { - return x.F_String - } - return "" -} - -func (m *Oneof) GetF_Bytes() []byte { - if x, ok := m.GetUnion().(*Oneof_F_Bytes); ok { - return x.F_Bytes - } - return nil -} - -func (m *Oneof) GetF_Sint32() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Sint32); ok { - return x.F_Sint32 - } - return 0 -} - -func (m *Oneof) GetF_Sint64() int64 { - if x, ok := m.GetUnion().(*Oneof_F_Sint64); ok { - return x.F_Sint64 - } - return 0 -} - -func (m *Oneof) GetF_Enum() MyMessage_Color { - if x, ok := m.GetUnion().(*Oneof_F_Enum); ok { - return x.F_Enum - } - return MyMessage_RED -} - -func (m *Oneof) 
GetF_Message() *GoTestField { - if x, ok := m.GetUnion().(*Oneof_F_Message); ok { - return x.F_Message - } - return nil -} - -func (m *Oneof) GetFGroup() *Oneof_F_Group { - if x, ok := m.GetUnion().(*Oneof_FGroup); ok { - return x.FGroup - } - return nil -} - -func (m *Oneof) GetF_Largest_Tag() int32 { - if x, ok := m.GetUnion().(*Oneof_F_Largest_Tag); ok { - return x.F_Largest_Tag - } - return 0 -} - -func (m *Oneof) GetValue() int32 { - if x, ok := m.GetTormato().(*Oneof_Value); ok { - return x.Value - } - return 0 -} - -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Oneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Oneof_OneofMarshaler, _Oneof_OneofUnmarshaler, _Oneof_OneofSizer, []interface{}{ - (*Oneof_F_Bool)(nil), - (*Oneof_F_Int32)(nil), - (*Oneof_F_Int64)(nil), - (*Oneof_F_Fixed32)(nil), - (*Oneof_F_Fixed64)(nil), - (*Oneof_F_Uint32)(nil), - (*Oneof_F_Uint64)(nil), - (*Oneof_F_Float)(nil), - (*Oneof_F_Double)(nil), - (*Oneof_F_String)(nil), - (*Oneof_F_Bytes)(nil), - (*Oneof_F_Sint32)(nil), - (*Oneof_F_Sint64)(nil), - (*Oneof_F_Enum)(nil), - (*Oneof_F_Message)(nil), - (*Oneof_FGroup)(nil), - (*Oneof_F_Largest_Tag)(nil), - (*Oneof_Value)(nil), - } -} - -func _Oneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Oneof) - // union - switch x := m.Union.(type) { - case *Oneof_F_Bool: - t := uint64(0) - if x.F_Bool { - t = 1 - } - b.EncodeVarint(1<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Oneof_F_Int32: - b.EncodeVarint(2<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Int32)) - case *Oneof_F_Int64: - b.EncodeVarint(3<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Int64)) - case *Oneof_F_Fixed32: - b.EncodeVarint(4<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(x.F_Fixed32)) - case *Oneof_F_Fixed64: - b.EncodeVarint(5<<3 | proto.WireFixed64) - b.EncodeFixed64(uint64(x.F_Fixed64)) - case *Oneof_F_Uint32: - b.EncodeVarint(6<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Uint32)) - case *Oneof_F_Uint64: - b.EncodeVarint(7<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Uint64)) - case *Oneof_F_Float: - b.EncodeVarint(8<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.F_Float))) - case *Oneof_F_Double: - b.EncodeVarint(9<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.F_Double)) - case *Oneof_F_String: - b.EncodeVarint(10<<3 | proto.WireBytes) - b.EncodeStringBytes(x.F_String) - case *Oneof_F_Bytes: - b.EncodeVarint(11<<3 | proto.WireBytes) - b.EncodeRawBytes(x.F_Bytes) - case *Oneof_F_Sint32: - b.EncodeVarint(12<<3 | proto.WireVarint) - b.EncodeZigzag32(uint64(x.F_Sint32)) - case *Oneof_F_Sint64: - b.EncodeVarint(13<<3 | proto.WireVarint) - b.EncodeZigzag64(uint64(x.F_Sint64)) - case *Oneof_F_Enum: - b.EncodeVarint(14<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Enum)) - case *Oneof_F_Message: - b.EncodeVarint(15<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.F_Message); err != nil { - return err - } - case *Oneof_FGroup: - b.EncodeVarint(16<<3 | proto.WireStartGroup) - if err := b.Marshal(x.FGroup); err != nil { - return err - } - b.EncodeVarint(16<<3 | proto.WireEndGroup) - case *Oneof_F_Largest_Tag: - b.EncodeVarint(536870911<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.F_Largest_Tag)) - case nil: - default: - return fmt.Errorf("Oneof.Union has unexpected type %T", x) - } - // tormato - switch x := m.Tormato.(type) 
{ - case *Oneof_Value: - b.EncodeVarint(100<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Value)) - case nil: - default: - return fmt.Errorf("Oneof.Tormato has unexpected type %T", x) - } - return nil -} - -func _Oneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Oneof) - switch tag { - case 1: // union.F_Bool - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Bool{x != 0} - return true, err - case 2: // union.F_Int32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Int32{int32(x)} - return true, err - case 3: // union.F_Int64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Int64{int64(x)} - return true, err - case 4: // union.F_Fixed32 - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Oneof_F_Fixed32{uint32(x)} - return true, err - case 5: // union.F_Fixed64 - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Oneof_F_Fixed64{x} - return true, err - case 6: // union.F_Uint32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Uint32{uint32(x)} - return true, err - case 7: // union.F_Uint64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Uint64{x} - return true, err - case 8: // union.F_Float - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Oneof_F_Float{math.Float32frombits(uint32(x))} - return true, err - case 9: // union.F_Double - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Oneof_F_Double{math.Float64frombits(x)} - return true, err - case 10: // union.F_String - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Oneof_F_String{x} - return true, err - case 11: // union.F_Bytes - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Oneof_F_Bytes{x} - return true, err - case 12: // union.F_Sint32 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag32() - m.Union = &Oneof_F_Sint32{int32(x)} - return true, err - case 13: // union.F_Sint64 - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag64() - m.Union = &Oneof_F_Sint64{int64(x)} - return true, err - case 14: // union.F_Enum - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Enum{MyMessage_Color(x)} - return true, err - case 15: // union.F_Message - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(GoTestField) - err := b.DecodeMessage(msg) - m.Union = &Oneof_F_Message{msg} - return true, err - case 16: // union.f_group - if wire != proto.WireStartGroup { - return true, proto.ErrInternalBadWireType - } - msg := new(Oneof_F_Group) - err := b.DecodeGroup(msg) - m.Union = &Oneof_FGroup{msg} - return true, err - case 536870911: // 
union.F_Largest_Tag - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Oneof_F_Largest_Tag{int32(x)} - return true, err - case 100: // tormato.value - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Tormato = &Oneof_Value{int32(x)} - return true, err - default: - return false, nil - } -} - -func _Oneof_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Oneof) - // union - switch x := m.Union.(type) { - case *Oneof_F_Bool: - n += proto.SizeVarint(1<<3 | proto.WireVarint) - n += 1 - case *Oneof_F_Int32: - n += proto.SizeVarint(2<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Int32)) - case *Oneof_F_Int64: - n += proto.SizeVarint(3<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Int64)) - case *Oneof_F_Fixed32: - n += proto.SizeVarint(4<<3 | proto.WireFixed32) - n += 4 - case *Oneof_F_Fixed64: - n += proto.SizeVarint(5<<3 | proto.WireFixed64) - n += 8 - case *Oneof_F_Uint32: - n += proto.SizeVarint(6<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Uint32)) - case *Oneof_F_Uint64: - n += proto.SizeVarint(7<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Uint64)) - case *Oneof_F_Float: - n += proto.SizeVarint(8<<3 | proto.WireFixed32) - n += 4 - case *Oneof_F_Double: - n += proto.SizeVarint(9<<3 | proto.WireFixed64) - n += 8 - case *Oneof_F_String: - n += proto.SizeVarint(10<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.F_String))) - n += len(x.F_String) - case *Oneof_F_Bytes: - n += proto.SizeVarint(11<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.F_Bytes))) - n += len(x.F_Bytes) - case *Oneof_F_Sint32: - n += proto.SizeVarint(12<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64((uint32(x.F_Sint32) << 1) ^ uint32((int32(x.F_Sint32) >> 31)))) - case *Oneof_F_Sint64: - n += proto.SizeVarint(13<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(uint64(x.F_Sint64<<1) ^ uint64((int64(x.F_Sint64) >> 63)))) - case *Oneof_F_Enum: - n += proto.SizeVarint(14<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Enum)) - case *Oneof_F_Message: - s := proto.Size(x.F_Message) - n += proto.SizeVarint(15<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Oneof_FGroup: - n += proto.SizeVarint(16<<3 | proto.WireStartGroup) - n += proto.Size(x.FGroup) - n += proto.SizeVarint(16<<3 | proto.WireEndGroup) - case *Oneof_F_Largest_Tag: - n += proto.SizeVarint(536870911<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.F_Largest_Tag)) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - // tormato - switch x := m.Tormato.(type) { - case *Oneof_Value: - n += proto.SizeVarint(100<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Value)) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type Oneof_F_Group struct { - X *int32 `protobuf:"varint,17,opt,name=x" json:"x,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Oneof_F_Group) Reset() { *m = Oneof_F_Group{} } -func (m *Oneof_F_Group) String() string { return proto.CompactTextString(m) } -func (*Oneof_F_Group) ProtoMessage() {} -func (*Oneof_F_Group) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29, 0} } - -func (m *Oneof_F_Group) GetX() int32 { - if m != nil && m.X != nil { - return *m.X - } - return 0 -} - -type Communique struct { - MakeMeCry *bool 
`protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` - // This is a oneof, called "union". - // - // Types that are valid to be assigned to Union: - // *Communique_Number - // *Communique_Name - // *Communique_Data - // *Communique_TempC - // *Communique_Col - // *Communique_Msg - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique) Reset() { *m = Communique{} } -func (m *Communique) String() string { return proto.CompactTextString(m) } -func (*Communique) ProtoMessage() {} -func (*Communique) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } - -type isCommunique_Union interface { - isCommunique_Union() -} - -type Communique_Number struct { - Number int32 `protobuf:"varint,5,opt,name=number,oneof"` -} -type Communique_Name struct { - Name string `protobuf:"bytes,6,opt,name=name,oneof"` -} -type Communique_Data struct { - Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` -} -type Communique_TempC struct { - TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` -} -type Communique_Col struct { - Col MyMessage_Color `protobuf:"varint,9,opt,name=col,enum=testdata.MyMessage_Color,oneof"` -} -type Communique_Msg struct { - Msg *Strings `protobuf:"bytes,10,opt,name=msg,oneof"` -} - -func (*Communique_Number) isCommunique_Union() {} -func (*Communique_Name) isCommunique_Union() {} -func (*Communique_Data) isCommunique_Union() {} -func (*Communique_TempC) isCommunique_Union() {} -func (*Communique_Col) isCommunique_Union() {} -func (*Communique_Msg) isCommunique_Union() {} - -func (m *Communique) GetUnion() isCommunique_Union { - if m != nil { - return m.Union - } - return nil -} - -func (m *Communique) GetMakeMeCry() bool { - if m != nil && m.MakeMeCry != nil { - return *m.MakeMeCry - } - return false -} - -func (m *Communique) GetNumber() int32 { - if x, ok := m.GetUnion().(*Communique_Number); ok { - return x.Number - } - return 0 -} - -func (m *Communique) GetName() string { - if x, ok := m.GetUnion().(*Communique_Name); ok { - return x.Name - } - return "" -} - -func (m *Communique) GetData() []byte { - if x, ok := m.GetUnion().(*Communique_Data); ok { - return x.Data - } - return nil -} - -func (m *Communique) GetTempC() float64 { - if x, ok := m.GetUnion().(*Communique_TempC); ok { - return x.TempC - } - return 0 -} - -func (m *Communique) GetCol() MyMessage_Color { - if x, ok := m.GetUnion().(*Communique_Col); ok { - return x.Col - } - return MyMessage_RED -} - -func (m *Communique) GetMsg() *Strings { - if x, ok := m.GetUnion().(*Communique_Msg); ok { - return x.Msg - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
-func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ - (*Communique_Number)(nil), - (*Communique_Name)(nil), - (*Communique_Data)(nil), - (*Communique_TempC)(nil), - (*Communique_Col)(nil), - (*Communique_Msg)(nil), - } -} - -func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - b.EncodeVarint(5<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Number)) - case *Communique_Name: - b.EncodeVarint(6<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Name) - case *Communique_Data: - b.EncodeVarint(7<<3 | proto.WireBytes) - b.EncodeRawBytes(x.Data) - case *Communique_TempC: - b.EncodeVarint(8<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.TempC)) - case *Communique_Col: - b.EncodeVarint(9<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Col)) - case *Communique_Msg: - b.EncodeVarint(10<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Msg); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Communique.Union has unexpected type %T", x) - } - return nil -} - -func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Communique) - switch tag { - case 5: // union.number - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Number{int32(x)} - return true, err - case 6: // union.name - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Communique_Name{x} - return true, err - case 7: // union.data - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Communique_Data{x} - return true, err - case 8: // union.temp_c - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Communique_TempC{math.Float64frombits(x)} - return true, err - case 9: // union.col - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Col{MyMessage_Color(x)} - return true, err - case 10: // union.msg - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Strings) - err := b.DecodeMessage(msg) - m.Union = &Communique_Msg{msg} - return true, err - default: - return false, nil - } -} - -func _Communique_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - n += proto.SizeVarint(5<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Number)) - case *Communique_Name: - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Name))) - n += len(x.Name) - case *Communique_Data: - n += proto.SizeVarint(7<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Data))) - n += len(x.Data) - case *Communique_TempC: - n += proto.SizeVarint(8<<3 | proto.WireFixed64) - n += 8 - case *Communique_Col: - n += proto.SizeVarint(9<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Col)) - case *Communique_Msg: - s := proto.Size(x.Msg) - n += proto.SizeVarint(10<<3 | 
proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -var E_Greeting = &proto.ExtensionDesc{ - ExtendedType: (*MyMessage)(nil), - ExtensionType: ([]string)(nil), - Field: 106, - Name: "testdata.greeting", - Tag: "bytes,106,rep,name=greeting", - Filename: "test.proto", -} - -var E_Complex = &proto.ExtensionDesc{ - ExtendedType: (*OtherMessage)(nil), - ExtensionType: (*ComplexExtension)(nil), - Field: 200, - Name: "testdata.complex", - Tag: "bytes,200,opt,name=complex", - Filename: "test.proto", -} - -var E_RComplex = &proto.ExtensionDesc{ - ExtendedType: (*OtherMessage)(nil), - ExtensionType: ([]*ComplexExtension)(nil), - Field: 201, - Name: "testdata.r_complex", - Tag: "bytes,201,rep,name=r_complex,json=rComplex", - Filename: "test.proto", -} - -var E_NoDefaultDouble = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*float64)(nil), - Field: 101, - Name: "testdata.no_default_double", - Tag: "fixed64,101,opt,name=no_default_double,json=noDefaultDouble", - Filename: "test.proto", -} - -var E_NoDefaultFloat = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*float32)(nil), - Field: 102, - Name: "testdata.no_default_float", - Tag: "fixed32,102,opt,name=no_default_float,json=noDefaultFloat", - Filename: "test.proto", -} - -var E_NoDefaultInt32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 103, - Name: "testdata.no_default_int32", - Tag: "varint,103,opt,name=no_default_int32,json=noDefaultInt32", - Filename: "test.proto", -} - -var E_NoDefaultInt64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 104, - Name: "testdata.no_default_int64", - Tag: "varint,104,opt,name=no_default_int64,json=noDefaultInt64", - Filename: "test.proto", -} - -var E_NoDefaultUint32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint32)(nil), - Field: 105, - Name: "testdata.no_default_uint32", - Tag: "varint,105,opt,name=no_default_uint32,json=noDefaultUint32", - Filename: "test.proto", -} - -var E_NoDefaultUint64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint64)(nil), - Field: 106, - Name: "testdata.no_default_uint64", - Tag: "varint,106,opt,name=no_default_uint64,json=noDefaultUint64", - Filename: "test.proto", -} - -var E_NoDefaultSint32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 107, - Name: "testdata.no_default_sint32", - Tag: "zigzag32,107,opt,name=no_default_sint32,json=noDefaultSint32", - Filename: "test.proto", -} - -var E_NoDefaultSint64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 108, - Name: "testdata.no_default_sint64", - Tag: "zigzag64,108,opt,name=no_default_sint64,json=noDefaultSint64", - Filename: "test.proto", -} - -var E_NoDefaultFixed32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint32)(nil), - Field: 109, - Name: "testdata.no_default_fixed32", - Tag: "fixed32,109,opt,name=no_default_fixed32,json=noDefaultFixed32", - Filename: "test.proto", -} - -var E_NoDefaultFixed64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint64)(nil), - Field: 110, - Name: "testdata.no_default_fixed64", - Tag: 
"fixed64,110,opt,name=no_default_fixed64,json=noDefaultFixed64", - Filename: "test.proto", -} - -var E_NoDefaultSfixed32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 111, - Name: "testdata.no_default_sfixed32", - Tag: "fixed32,111,opt,name=no_default_sfixed32,json=noDefaultSfixed32", - Filename: "test.proto", -} - -var E_NoDefaultSfixed64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 112, - Name: "testdata.no_default_sfixed64", - Tag: "fixed64,112,opt,name=no_default_sfixed64,json=noDefaultSfixed64", - Filename: "test.proto", -} - -var E_NoDefaultBool = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*bool)(nil), - Field: 113, - Name: "testdata.no_default_bool", - Tag: "varint,113,opt,name=no_default_bool,json=noDefaultBool", - Filename: "test.proto", -} - -var E_NoDefaultString = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*string)(nil), - Field: 114, - Name: "testdata.no_default_string", - Tag: "bytes,114,opt,name=no_default_string,json=noDefaultString", - Filename: "test.proto", -} - -var E_NoDefaultBytes = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: ([]byte)(nil), - Field: 115, - Name: "testdata.no_default_bytes", - Tag: "bytes,115,opt,name=no_default_bytes,json=noDefaultBytes", - Filename: "test.proto", -} - -var E_NoDefaultEnum = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), - Field: 116, - Name: "testdata.no_default_enum", - Tag: "varint,116,opt,name=no_default_enum,json=noDefaultEnum,enum=testdata.DefaultsMessage_DefaultsEnum", - Filename: "test.proto", -} - -var E_DefaultDouble = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*float64)(nil), - Field: 201, - Name: "testdata.default_double", - Tag: "fixed64,201,opt,name=default_double,json=defaultDouble,def=3.1415", - Filename: "test.proto", -} - -var E_DefaultFloat = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*float32)(nil), - Field: 202, - Name: "testdata.default_float", - Tag: "fixed32,202,opt,name=default_float,json=defaultFloat,def=3.14", - Filename: "test.proto", -} - -var E_DefaultInt32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 203, - Name: "testdata.default_int32", - Tag: "varint,203,opt,name=default_int32,json=defaultInt32,def=42", - Filename: "test.proto", -} - -var E_DefaultInt64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 204, - Name: "testdata.default_int64", - Tag: "varint,204,opt,name=default_int64,json=defaultInt64,def=43", - Filename: "test.proto", -} - -var E_DefaultUint32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint32)(nil), - Field: 205, - Name: "testdata.default_uint32", - Tag: "varint,205,opt,name=default_uint32,json=defaultUint32,def=44", - Filename: "test.proto", -} - -var E_DefaultUint64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint64)(nil), - Field: 206, - Name: "testdata.default_uint64", - Tag: "varint,206,opt,name=default_uint64,json=defaultUint64,def=45", - Filename: "test.proto", -} - -var E_DefaultSint32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 207, - Name: 
"testdata.default_sint32", - Tag: "zigzag32,207,opt,name=default_sint32,json=defaultSint32,def=46", - Filename: "test.proto", -} - -var E_DefaultSint64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 208, - Name: "testdata.default_sint64", - Tag: "zigzag64,208,opt,name=default_sint64,json=defaultSint64,def=47", - Filename: "test.proto", -} - -var E_DefaultFixed32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint32)(nil), - Field: 209, - Name: "testdata.default_fixed32", - Tag: "fixed32,209,opt,name=default_fixed32,json=defaultFixed32,def=48", - Filename: "test.proto", -} - -var E_DefaultFixed64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*uint64)(nil), - Field: 210, - Name: "testdata.default_fixed64", - Tag: "fixed64,210,opt,name=default_fixed64,json=defaultFixed64,def=49", - Filename: "test.proto", -} - -var E_DefaultSfixed32 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int32)(nil), - Field: 211, - Name: "testdata.default_sfixed32", - Tag: "fixed32,211,opt,name=default_sfixed32,json=defaultSfixed32,def=50", - Filename: "test.proto", -} - -var E_DefaultSfixed64 = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*int64)(nil), - Field: 212, - Name: "testdata.default_sfixed64", - Tag: "fixed64,212,opt,name=default_sfixed64,json=defaultSfixed64,def=51", - Filename: "test.proto", -} - -var E_DefaultBool = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*bool)(nil), - Field: 213, - Name: "testdata.default_bool", - Tag: "varint,213,opt,name=default_bool,json=defaultBool,def=1", - Filename: "test.proto", -} - -var E_DefaultString = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*string)(nil), - Field: 214, - Name: "testdata.default_string", - Tag: "bytes,214,opt,name=default_string,json=defaultString,def=Hello, string", - Filename: "test.proto", -} - -var E_DefaultBytes = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: ([]byte)(nil), - Field: 215, - Name: "testdata.default_bytes", - Tag: "bytes,215,opt,name=default_bytes,json=defaultBytes,def=Hello, bytes", - Filename: "test.proto", -} - -var E_DefaultEnum = &proto.ExtensionDesc{ - ExtendedType: (*DefaultsMessage)(nil), - ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), - Field: 216, - Name: "testdata.default_enum", - Tag: "varint,216,opt,name=default_enum,json=defaultEnum,enum=testdata.DefaultsMessage_DefaultsEnum,def=1", - Filename: "test.proto", -} - -var E_X201 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 201, - Name: "testdata.x201", - Tag: "bytes,201,opt,name=x201", - Filename: "test.proto", -} - -var E_X202 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 202, - Name: "testdata.x202", - Tag: "bytes,202,opt,name=x202", - Filename: "test.proto", -} - -var E_X203 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 203, - Name: "testdata.x203", - Tag: "bytes,203,opt,name=x203", - Filename: "test.proto", -} - -var E_X204 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 204, - Name: "testdata.x204", - Tag: "bytes,204,opt,name=x204", - Filename: "test.proto", -} - -var E_X205 = &proto.ExtensionDesc{ - ExtendedType: 
(*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 205, - Name: "testdata.x205", - Tag: "bytes,205,opt,name=x205", - Filename: "test.proto", -} - -var E_X206 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 206, - Name: "testdata.x206", - Tag: "bytes,206,opt,name=x206", - Filename: "test.proto", -} - -var E_X207 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 207, - Name: "testdata.x207", - Tag: "bytes,207,opt,name=x207", - Filename: "test.proto", -} - -var E_X208 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 208, - Name: "testdata.x208", - Tag: "bytes,208,opt,name=x208", - Filename: "test.proto", -} - -var E_X209 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 209, - Name: "testdata.x209", - Tag: "bytes,209,opt,name=x209", - Filename: "test.proto", -} - -var E_X210 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 210, - Name: "testdata.x210", - Tag: "bytes,210,opt,name=x210", - Filename: "test.proto", -} - -var E_X211 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 211, - Name: "testdata.x211", - Tag: "bytes,211,opt,name=x211", - Filename: "test.proto", -} - -var E_X212 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 212, - Name: "testdata.x212", - Tag: "bytes,212,opt,name=x212", - Filename: "test.proto", -} - -var E_X213 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 213, - Name: "testdata.x213", - Tag: "bytes,213,opt,name=x213", - Filename: "test.proto", -} - -var E_X214 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 214, - Name: "testdata.x214", - Tag: "bytes,214,opt,name=x214", - Filename: "test.proto", -} - -var E_X215 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 215, - Name: "testdata.x215", - Tag: "bytes,215,opt,name=x215", - Filename: "test.proto", -} - -var E_X216 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 216, - Name: "testdata.x216", - Tag: "bytes,216,opt,name=x216", - Filename: "test.proto", -} - -var E_X217 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 217, - Name: "testdata.x217", - Tag: "bytes,217,opt,name=x217", - Filename: "test.proto", -} - -var E_X218 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 218, - Name: "testdata.x218", - Tag: "bytes,218,opt,name=x218", - Filename: "test.proto", -} - -var E_X219 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 219, - Name: "testdata.x219", - Tag: "bytes,219,opt,name=x219", - Filename: "test.proto", -} - -var E_X220 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 220, - Name: "testdata.x220", - Tag: "bytes,220,opt,name=x220", - Filename: "test.proto", -} - -var E_X221 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 221, - Name: "testdata.x221", - Tag: "bytes,221,opt,name=x221", - Filename: "test.proto", -} - -var E_X222 = &proto.ExtensionDesc{ 
- ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 222, - Name: "testdata.x222", - Tag: "bytes,222,opt,name=x222", - Filename: "test.proto", -} - -var E_X223 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 223, - Name: "testdata.x223", - Tag: "bytes,223,opt,name=x223", - Filename: "test.proto", -} - -var E_X224 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 224, - Name: "testdata.x224", - Tag: "bytes,224,opt,name=x224", - Filename: "test.proto", -} - -var E_X225 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 225, - Name: "testdata.x225", - Tag: "bytes,225,opt,name=x225", - Filename: "test.proto", -} - -var E_X226 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 226, - Name: "testdata.x226", - Tag: "bytes,226,opt,name=x226", - Filename: "test.proto", -} - -var E_X227 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 227, - Name: "testdata.x227", - Tag: "bytes,227,opt,name=x227", - Filename: "test.proto", -} - -var E_X228 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 228, - Name: "testdata.x228", - Tag: "bytes,228,opt,name=x228", - Filename: "test.proto", -} - -var E_X229 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 229, - Name: "testdata.x229", - Tag: "bytes,229,opt,name=x229", - Filename: "test.proto", -} - -var E_X230 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 230, - Name: "testdata.x230", - Tag: "bytes,230,opt,name=x230", - Filename: "test.proto", -} - -var E_X231 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 231, - Name: "testdata.x231", - Tag: "bytes,231,opt,name=x231", - Filename: "test.proto", -} - -var E_X232 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 232, - Name: "testdata.x232", - Tag: "bytes,232,opt,name=x232", - Filename: "test.proto", -} - -var E_X233 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 233, - Name: "testdata.x233", - Tag: "bytes,233,opt,name=x233", - Filename: "test.proto", -} - -var E_X234 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 234, - Name: "testdata.x234", - Tag: "bytes,234,opt,name=x234", - Filename: "test.proto", -} - -var E_X235 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 235, - Name: "testdata.x235", - Tag: "bytes,235,opt,name=x235", - Filename: "test.proto", -} - -var E_X236 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 236, - Name: "testdata.x236", - Tag: "bytes,236,opt,name=x236", - Filename: "test.proto", -} - -var E_X237 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 237, - Name: "testdata.x237", - Tag: "bytes,237,opt,name=x237", - Filename: "test.proto", -} - -var E_X238 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 238, - Name: "testdata.x238", - Tag: "bytes,238,opt,name=x238", - Filename: "test.proto", -} - -var E_X239 = 
&proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 239, - Name: "testdata.x239", - Tag: "bytes,239,opt,name=x239", - Filename: "test.proto", -} - -var E_X240 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 240, - Name: "testdata.x240", - Tag: "bytes,240,opt,name=x240", - Filename: "test.proto", -} - -var E_X241 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 241, - Name: "testdata.x241", - Tag: "bytes,241,opt,name=x241", - Filename: "test.proto", -} - -var E_X242 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 242, - Name: "testdata.x242", - Tag: "bytes,242,opt,name=x242", - Filename: "test.proto", -} - -var E_X243 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 243, - Name: "testdata.x243", - Tag: "bytes,243,opt,name=x243", - Filename: "test.proto", -} - -var E_X244 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 244, - Name: "testdata.x244", - Tag: "bytes,244,opt,name=x244", - Filename: "test.proto", -} - -var E_X245 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 245, - Name: "testdata.x245", - Tag: "bytes,245,opt,name=x245", - Filename: "test.proto", -} - -var E_X246 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 246, - Name: "testdata.x246", - Tag: "bytes,246,opt,name=x246", - Filename: "test.proto", -} - -var E_X247 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 247, - Name: "testdata.x247", - Tag: "bytes,247,opt,name=x247", - Filename: "test.proto", -} - -var E_X248 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 248, - Name: "testdata.x248", - Tag: "bytes,248,opt,name=x248", - Filename: "test.proto", -} - -var E_X249 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 249, - Name: "testdata.x249", - Tag: "bytes,249,opt,name=x249", - Filename: "test.proto", -} - -var E_X250 = &proto.ExtensionDesc{ - ExtendedType: (*MyMessageSet)(nil), - ExtensionType: (*Empty)(nil), - Field: 250, - Name: "testdata.x250", - Tag: "bytes,250,opt,name=x250", - Filename: "test.proto", -} - -func init() { - proto.RegisterType((*GoEnum)(nil), "testdata.GoEnum") - proto.RegisterType((*GoTestField)(nil), "testdata.GoTestField") - proto.RegisterType((*GoTest)(nil), "testdata.GoTest") - proto.RegisterType((*GoTest_RequiredGroup)(nil), "testdata.GoTest.RequiredGroup") - proto.RegisterType((*GoTest_RepeatedGroup)(nil), "testdata.GoTest.RepeatedGroup") - proto.RegisterType((*GoTest_OptionalGroup)(nil), "testdata.GoTest.OptionalGroup") - proto.RegisterType((*GoTestRequiredGroupField)(nil), "testdata.GoTestRequiredGroupField") - proto.RegisterType((*GoTestRequiredGroupField_Group)(nil), "testdata.GoTestRequiredGroupField.Group") - proto.RegisterType((*GoSkipTest)(nil), "testdata.GoSkipTest") - proto.RegisterType((*GoSkipTest_SkipGroup)(nil), "testdata.GoSkipTest.SkipGroup") - proto.RegisterType((*NonPackedTest)(nil), "testdata.NonPackedTest") - proto.RegisterType((*PackedTest)(nil), "testdata.PackedTest") - proto.RegisterType((*MaxTag)(nil), "testdata.MaxTag") - proto.RegisterType((*OldMessage)(nil), "testdata.OldMessage") - 
proto.RegisterType((*OldMessage_Nested)(nil), "testdata.OldMessage.Nested") - proto.RegisterType((*NewMessage)(nil), "testdata.NewMessage") - proto.RegisterType((*NewMessage_Nested)(nil), "testdata.NewMessage.Nested") - proto.RegisterType((*InnerMessage)(nil), "testdata.InnerMessage") - proto.RegisterType((*OtherMessage)(nil), "testdata.OtherMessage") - proto.RegisterType((*RequiredInnerMessage)(nil), "testdata.RequiredInnerMessage") - proto.RegisterType((*MyMessage)(nil), "testdata.MyMessage") - proto.RegisterType((*MyMessage_SomeGroup)(nil), "testdata.MyMessage.SomeGroup") - proto.RegisterType((*Ext)(nil), "testdata.Ext") - proto.RegisterType((*ComplexExtension)(nil), "testdata.ComplexExtension") - proto.RegisterType((*DefaultsMessage)(nil), "testdata.DefaultsMessage") - proto.RegisterType((*MyMessageSet)(nil), "testdata.MyMessageSet") - proto.RegisterType((*Empty)(nil), "testdata.Empty") - proto.RegisterType((*MessageList)(nil), "testdata.MessageList") - proto.RegisterType((*MessageList_Message)(nil), "testdata.MessageList.Message") - proto.RegisterType((*Strings)(nil), "testdata.Strings") - proto.RegisterType((*Defaults)(nil), "testdata.Defaults") - proto.RegisterType((*SubDefaults)(nil), "testdata.SubDefaults") - proto.RegisterType((*RepeatedEnum)(nil), "testdata.RepeatedEnum") - proto.RegisterType((*MoreRepeated)(nil), "testdata.MoreRepeated") - proto.RegisterType((*GroupOld)(nil), "testdata.GroupOld") - proto.RegisterType((*GroupOld_G)(nil), "testdata.GroupOld.G") - proto.RegisterType((*GroupNew)(nil), "testdata.GroupNew") - proto.RegisterType((*GroupNew_G)(nil), "testdata.GroupNew.G") - proto.RegisterType((*FloatingPoint)(nil), "testdata.FloatingPoint") - proto.RegisterType((*MessageWithMap)(nil), "testdata.MessageWithMap") - proto.RegisterType((*Oneof)(nil), "testdata.Oneof") - proto.RegisterType((*Oneof_F_Group)(nil), "testdata.Oneof.F_Group") - proto.RegisterType((*Communique)(nil), "testdata.Communique") - proto.RegisterEnum("testdata.FOO", FOO_name, FOO_value) - proto.RegisterEnum("testdata.GoTest_KIND", GoTest_KIND_name, GoTest_KIND_value) - proto.RegisterEnum("testdata.MyMessage_Color", MyMessage_Color_name, MyMessage_Color_value) - proto.RegisterEnum("testdata.DefaultsMessage_DefaultsEnum", DefaultsMessage_DefaultsEnum_name, DefaultsMessage_DefaultsEnum_value) - proto.RegisterEnum("testdata.Defaults_Color", Defaults_Color_name, Defaults_Color_value) - proto.RegisterEnum("testdata.RepeatedEnum_Color", RepeatedEnum_Color_name, RepeatedEnum_Color_value) - proto.RegisterExtension(E_Ext_More) - proto.RegisterExtension(E_Ext_Text) - proto.RegisterExtension(E_Ext_Number) - proto.RegisterExtension(E_Greeting) - proto.RegisterExtension(E_Complex) - proto.RegisterExtension(E_RComplex) - proto.RegisterExtension(E_NoDefaultDouble) - proto.RegisterExtension(E_NoDefaultFloat) - proto.RegisterExtension(E_NoDefaultInt32) - proto.RegisterExtension(E_NoDefaultInt64) - proto.RegisterExtension(E_NoDefaultUint32) - proto.RegisterExtension(E_NoDefaultUint64) - proto.RegisterExtension(E_NoDefaultSint32) - proto.RegisterExtension(E_NoDefaultSint64) - proto.RegisterExtension(E_NoDefaultFixed32) - proto.RegisterExtension(E_NoDefaultFixed64) - proto.RegisterExtension(E_NoDefaultSfixed32) - proto.RegisterExtension(E_NoDefaultSfixed64) - proto.RegisterExtension(E_NoDefaultBool) - proto.RegisterExtension(E_NoDefaultString) - proto.RegisterExtension(E_NoDefaultBytes) - proto.RegisterExtension(E_NoDefaultEnum) - proto.RegisterExtension(E_DefaultDouble) - proto.RegisterExtension(E_DefaultFloat) - 
proto.RegisterExtension(E_DefaultInt32) - proto.RegisterExtension(E_DefaultInt64) - proto.RegisterExtension(E_DefaultUint32) - proto.RegisterExtension(E_DefaultUint64) - proto.RegisterExtension(E_DefaultSint32) - proto.RegisterExtension(E_DefaultSint64) - proto.RegisterExtension(E_DefaultFixed32) - proto.RegisterExtension(E_DefaultFixed64) - proto.RegisterExtension(E_DefaultSfixed32) - proto.RegisterExtension(E_DefaultSfixed64) - proto.RegisterExtension(E_DefaultBool) - proto.RegisterExtension(E_DefaultString) - proto.RegisterExtension(E_DefaultBytes) - proto.RegisterExtension(E_DefaultEnum) - proto.RegisterExtension(E_X201) - proto.RegisterExtension(E_X202) - proto.RegisterExtension(E_X203) - proto.RegisterExtension(E_X204) - proto.RegisterExtension(E_X205) - proto.RegisterExtension(E_X206) - proto.RegisterExtension(E_X207) - proto.RegisterExtension(E_X208) - proto.RegisterExtension(E_X209) - proto.RegisterExtension(E_X210) - proto.RegisterExtension(E_X211) - proto.RegisterExtension(E_X212) - proto.RegisterExtension(E_X213) - proto.RegisterExtension(E_X214) - proto.RegisterExtension(E_X215) - proto.RegisterExtension(E_X216) - proto.RegisterExtension(E_X217) - proto.RegisterExtension(E_X218) - proto.RegisterExtension(E_X219) - proto.RegisterExtension(E_X220) - proto.RegisterExtension(E_X221) - proto.RegisterExtension(E_X222) - proto.RegisterExtension(E_X223) - proto.RegisterExtension(E_X224) - proto.RegisterExtension(E_X225) - proto.RegisterExtension(E_X226) - proto.RegisterExtension(E_X227) - proto.RegisterExtension(E_X228) - proto.RegisterExtension(E_X229) - proto.RegisterExtension(E_X230) - proto.RegisterExtension(E_X231) - proto.RegisterExtension(E_X232) - proto.RegisterExtension(E_X233) - proto.RegisterExtension(E_X234) - proto.RegisterExtension(E_X235) - proto.RegisterExtension(E_X236) - proto.RegisterExtension(E_X237) - proto.RegisterExtension(E_X238) - proto.RegisterExtension(E_X239) - proto.RegisterExtension(E_X240) - proto.RegisterExtension(E_X241) - proto.RegisterExtension(E_X242) - proto.RegisterExtension(E_X243) - proto.RegisterExtension(E_X244) - proto.RegisterExtension(E_X245) - proto.RegisterExtension(E_X246) - proto.RegisterExtension(E_X247) - proto.RegisterExtension(E_X248) - proto.RegisterExtension(E_X249) - proto.RegisterExtension(E_X250) -} - -func init() { proto.RegisterFile("test.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 4453 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5a, 0xc9, 0x77, 0xdb, 0x48, - 0x7a, 0x37, 0xc0, 0xfd, 0x23, 0x25, 0x42, 0x65, 0xb5, 0x9b, 0x96, 0xbc, 0xc0, 0x9c, 0xe9, 0x6e, - 0x7a, 0xd3, 0x48, 0x20, 0x44, 0xdb, 0x74, 0xa7, 0xdf, 0xf3, 0x42, 0xca, 0x7a, 0x63, 0x89, 0x0a, - 0xa4, 0xee, 0x7e, 0xd3, 0x39, 0xf0, 0x51, 0x22, 0x44, 0xb3, 0x4d, 0x02, 0x34, 0x09, 0xc5, 0x52, - 0x72, 0xe9, 0x4b, 0x72, 0xcd, 0x76, 0xc9, 0x35, 0xa7, 0x9c, 0x92, 0xbc, 0x97, 0x7f, 0x22, 0xe9, - 0xee, 0x59, 0x7b, 0xd6, 0xac, 0x93, 0x7d, 0x99, 0xec, 0xdb, 0x4c, 0x92, 0x4b, 0xcf, 0xab, 0xaf, - 0x0a, 0x40, 0x01, 0x24, 0x20, 0xf9, 0x24, 0x56, 0xd5, 0xef, 0xf7, 0xd5, 0xf6, 0xab, 0xef, 0xab, - 0xaf, 0x20, 0x00, 0xc7, 0x9c, 0x38, 0x2b, 0xa3, 0xb1, 0xed, 0xd8, 0x24, 0x4b, 0x7f, 0x77, 0x3b, - 0x4e, 0xa7, 0x7c, 0x1d, 0xd2, 0x1b, 0x76, 0xc3, 0x3a, 0x1a, 0x92, 0xab, 0x90, 0x38, 0xb4, 0xed, - 0x92, 0xa4, 0xca, 0x95, 0x79, 0x6d, 0x6e, 0xc5, 0x45, 0xac, 0x34, 0x5b, 0x2d, 0x83, 0xb6, 0x94, - 0xef, 0x40, 0x7e, 0xc3, 0xde, 0x33, 0x27, 0x4e, 0xb3, 0x6f, 0x0e, 0xba, 0x64, 0x11, 0x52, 0x4f, - 0x3b, 0xfb, 0xe6, 
0x00, 0x19, 0x39, 0x83, 0x15, 0x08, 0x81, 0xe4, 0xde, 0xc9, 0xc8, 0x2c, 0xc9, - 0x58, 0x89, 0xbf, 0xcb, 0xbf, 0x72, 0x85, 0x76, 0x42, 0x99, 0xe4, 0x3a, 0x24, 0xbf, 0xdc, 0xb7, - 0xba, 0xbc, 0x97, 0xd7, 0xfc, 0x5e, 0x58, 0xfb, 0xca, 0x97, 0x37, 0xb7, 0x1f, 0x1b, 0x08, 0xa1, - 0xf6, 0xf7, 0x3a, 0xfb, 0x03, 0x6a, 0x4a, 0xa2, 0xf6, 0xb1, 0x40, 0x6b, 0x77, 0x3a, 0xe3, 0xce, - 0xb0, 0x94, 0x50, 0xa5, 0x4a, 0xca, 0x60, 0x05, 0x72, 0x1f, 0xe6, 0x0c, 0xf3, 0xc5, 0x51, 0x7f, - 0x6c, 0x76, 0x71, 0x70, 0xa5, 0xa4, 0x2a, 0x57, 0xf2, 0xd3, 0xf6, 0xb1, 0xd1, 0x08, 0x62, 0x19, - 0x79, 0x64, 0x76, 0x1c, 0x97, 0x9c, 0x52, 0x13, 0xb1, 0x64, 0x01, 0x4b, 0xc9, 0xad, 0x91, 0xd3, - 0xb7, 0xad, 0xce, 0x80, 0x91, 0xd3, 0xaa, 0x14, 0x43, 0x0e, 0x60, 0xc9, 0x9b, 0x50, 0x6c, 0xb6, - 0x1f, 0xda, 0xf6, 0xa0, 0x3d, 0xe6, 0x23, 0x2a, 0x81, 0x2a, 0x57, 0xb2, 0xc6, 0x5c, 0x93, 0xd6, - 0xba, 0xc3, 0x24, 0x15, 0x50, 0x9a, 0xed, 0x4d, 0xcb, 0xa9, 0x6a, 0x3e, 0x30, 0xaf, 0xca, 0x95, - 0x94, 0x31, 0xdf, 0xc4, 0xea, 0x29, 0x64, 0x4d, 0xf7, 0x91, 0x05, 0x55, 0xae, 0x24, 0x18, 0xb2, - 0xa6, 0x7b, 0xc8, 0x5b, 0x40, 0x9a, 0xed, 0x66, 0xff, 0xd8, 0xec, 0x8a, 0x56, 0xe7, 0x54, 0xb9, - 0x92, 0x31, 0x94, 0x26, 0x6f, 0x98, 0x81, 0x16, 0x2d, 0xcf, 0xab, 0x72, 0x25, 0xed, 0xa2, 0x05, - 0xdb, 0x37, 0x60, 0xa1, 0xd9, 0x7e, 0xb7, 0x1f, 0x1c, 0x70, 0x51, 0x95, 0x2b, 0x73, 0x46, 0xb1, - 0xc9, 0xea, 0xa7, 0xb1, 0xa2, 0x61, 0x45, 0x95, 0x2b, 0x49, 0x8e, 0x15, 0xec, 0xe2, 0xec, 0x9a, - 0x03, 0xbb, 0xe3, 0xf8, 0xd0, 0x05, 0x55, 0xae, 0xc8, 0xc6, 0x7c, 0x13, 0xab, 0x83, 0x56, 0x1f, - 0xdb, 0x47, 0xfb, 0x03, 0xd3, 0x87, 0x12, 0x55, 0xae, 0x48, 0x46, 0xb1, 0xc9, 0xea, 0x83, 0xd8, - 0x5d, 0x67, 0xdc, 0xb7, 0x7a, 0x3e, 0xf6, 0x3c, 0xea, 0xb7, 0xd8, 0x64, 0xf5, 0xc1, 0x11, 0x3c, - 0x3c, 0x71, 0xcc, 0x89, 0x0f, 0x35, 0x55, 0xb9, 0x52, 0x30, 0xe6, 0x9b, 0x58, 0x1d, 0xb2, 0x1a, - 0x5a, 0x83, 0x43, 0x55, 0xae, 0x2c, 0x50, 0xab, 0x33, 0xd6, 0x60, 0x37, 0xb4, 0x06, 0x3d, 0x55, - 0xae, 0x10, 0x8e, 0x15, 0xd6, 0x40, 0xd4, 0x0c, 0x13, 0x62, 0x69, 0x51, 0x4d, 0x08, 0x9a, 0x61, - 0x95, 0x41, 0xcd, 0x70, 0xe0, 0x6b, 0x6a, 0x42, 0xd4, 0x4c, 0x08, 0x89, 0x9d, 0x73, 0xe4, 0x05, - 0x35, 0x21, 0x6a, 0x86, 0x23, 0x43, 0x9a, 0xe1, 0xd8, 0xd7, 0xd5, 0x44, 0x50, 0x33, 0x53, 0x68, - 0xd1, 0x72, 0x49, 0x4d, 0x04, 0x35, 0xc3, 0xd1, 0x41, 0xcd, 0x70, 0xf0, 0x45, 0x35, 0x11, 0xd0, - 0x4c, 0x18, 0x2b, 0x1a, 0x5e, 0x52, 0x13, 0x01, 0xcd, 0x88, 0xb3, 0x73, 0x35, 0xc3, 0xa1, 0xcb, - 0x6a, 0x42, 0xd4, 0x8c, 0x68, 0xd5, 0xd3, 0x0c, 0x87, 0x5e, 0x52, 0x13, 0x01, 0xcd, 0x88, 0x58, - 0x4f, 0x33, 0x1c, 0x7b, 0x59, 0x4d, 0x04, 0x34, 0xc3, 0xb1, 0xd7, 0x45, 0xcd, 0x70, 0xe8, 0xc7, - 0x92, 0x9a, 0x10, 0x45, 0xc3, 0xa1, 0x37, 0x03, 0xa2, 0xe1, 0xd8, 0x4f, 0x28, 0x56, 0x54, 0x4d, - 0x18, 0x2c, 0xae, 0xc2, 0xa7, 0x14, 0x2c, 0xca, 0x86, 0x83, 0x7d, 0xd9, 0xd8, 0xdc, 0x05, 0x95, - 0xae, 0xa8, 0x92, 0x27, 0x1b, 0xd7, 0x2f, 0x89, 0xb2, 0xf1, 0x80, 0x57, 0xd1, 0xd5, 0x72, 0xd9, - 0x4c, 0x21, 0x6b, 0xba, 0x8f, 0x54, 0x55, 0xc9, 0x97, 0x8d, 0x87, 0x0c, 0xc8, 0xc6, 0xc3, 0x5e, - 0x53, 0x25, 0x51, 0x36, 0x33, 0xd0, 0xa2, 0xe5, 0xb2, 0x2a, 0x89, 0xb2, 0xf1, 0xd0, 0xa2, 0x6c, - 0x3c, 0xf0, 0x17, 0x54, 0x49, 0x90, 0xcd, 0x34, 0x56, 0x34, 0xfc, 0x45, 0x55, 0x12, 0x64, 0x13, - 0x9c, 0x1d, 0x93, 0x8d, 0x07, 0x7d, 0x43, 0x95, 0x7c, 0xd9, 0x04, 0xad, 0x72, 0xd9, 0x78, 0xd0, - 0x37, 0x55, 0x49, 0x90, 0x4d, 0x10, 0xcb, 0x65, 0xe3, 0x61, 0xdf, 0xc2, 0xf8, 0xe6, 0xca, 0xc6, - 0xc3, 0x0a, 0xb2, 0xf1, 0xa0, 0xbf, 0x43, 0x63, 0xa1, 0x27, 0x1b, 0x0f, 0x2a, 0xca, 0xc6, 0xc3, - 0xfe, 0x2e, 0xc5, 0xfa, 0xb2, 0x99, 0x06, 
0x8b, 0xab, 0xf0, 0x7b, 0x14, 0xec, 0xcb, 0xc6, 0x03, - 0xaf, 0xe0, 0x20, 0xa8, 0x6c, 0xba, 0xe6, 0x61, 0xe7, 0x68, 0x40, 0x25, 0x56, 0xa1, 0xba, 0xa9, - 0x27, 0x9d, 0xf1, 0x91, 0x49, 0x47, 0x62, 0xdb, 0x83, 0xc7, 0x6e, 0x1b, 0x59, 0xa1, 0xc6, 0x99, - 0x7c, 0x7c, 0xc2, 0x75, 0xaa, 0x9f, 0xba, 0x5c, 0xd5, 0x8c, 0x22, 0xd3, 0xd0, 0x34, 0xbe, 0xa6, - 0x0b, 0xf8, 0x1b, 0x54, 0x45, 0x75, 0xb9, 0xa6, 0x33, 0x7c, 0x4d, 0xf7, 0xf1, 0x55, 0x38, 0xef, - 0x4b, 0xc9, 0x67, 0xdc, 0xa4, 0x5a, 0xaa, 0x27, 0xaa, 0xda, 0xaa, 0xb1, 0xe0, 0x0a, 0x6a, 0x16, - 0x29, 0xd0, 0xcd, 0x2d, 0x2a, 0xa9, 0x7a, 0xa2, 0xa6, 0x7b, 0x24, 0xb1, 0x27, 0x8d, 0xca, 0x90, - 0x0b, 0xcb, 0xe7, 0xdc, 0xa6, 0xca, 0xaa, 0x27, 0xab, 0xda, 0xea, 0xaa, 0xa1, 0x70, 0x7d, 0xcd, - 0xe0, 0x04, 0xfa, 0x59, 0xa1, 0x0a, 0xab, 0x27, 0x6b, 0xba, 0xc7, 0x09, 0xf6, 0xb3, 0xe0, 0x0a, - 0xcd, 0xa7, 0x7c, 0x89, 0x2a, 0xad, 0x9e, 0xae, 0xae, 0xe9, 0x6b, 0xeb, 0xf7, 0x8c, 0x22, 0x53, - 0x9c, 0xcf, 0xd1, 0x69, 0x3f, 0x5c, 0x72, 0x3e, 0x69, 0x95, 0x6a, 0xae, 0x9e, 0xd6, 0xee, 0xac, - 0xdd, 0xd5, 0xee, 0x1a, 0x0a, 0xd7, 0x9e, 0xcf, 0x7a, 0x87, 0xb2, 0xb8, 0xf8, 0x7c, 0xd6, 0x1a, - 0x55, 0x5f, 0x5d, 0x79, 0x66, 0x0e, 0x06, 0xf6, 0x2d, 0xb5, 0xfc, 0xd2, 0x1e, 0x0f, 0xba, 0xd7, - 0xca, 0x60, 0x28, 0x5c, 0x8f, 0x62, 0xaf, 0x0b, 0xae, 0x20, 0x7d, 0xfa, 0xaf, 0xd1, 0x7b, 0x58, - 0xa1, 0x9e, 0x79, 0xd8, 0xef, 0x59, 0xf6, 0xc4, 0x34, 0x8a, 0x4c, 0x9a, 0xa1, 0x35, 0xd9, 0x0d, - 0xaf, 0xe3, 0xaf, 0x53, 0xda, 0x42, 0x3d, 0x71, 0xbb, 0xaa, 0xd1, 0x9e, 0x66, 0xad, 0xe3, 0x6e, - 0x78, 0x1d, 0x7f, 0x83, 0x72, 0x48, 0x3d, 0x71, 0xbb, 0xa6, 0x73, 0x8e, 0xb8, 0x8e, 0x77, 0xe0, - 0x42, 0x28, 0x2e, 0xb6, 0x47, 0x9d, 0x83, 0xe7, 0x66, 0xb7, 0xa4, 0xd1, 0xf0, 0xf8, 0x50, 0x56, - 0x24, 0xe3, 0x7c, 0x20, 0x44, 0xee, 0x60, 0x33, 0xb9, 0x07, 0xaf, 0x87, 0x03, 0xa5, 0xcb, 0xac, - 0xd2, 0x78, 0x89, 0xcc, 0xc5, 0x60, 0xcc, 0x0c, 0x51, 0x05, 0x07, 0xec, 0x52, 0x75, 0x1a, 0x40, - 0x7d, 0xaa, 0xef, 0x89, 0x39, 0xf5, 0x67, 0xe0, 0xe2, 0x74, 0x28, 0x75, 0xc9, 0xeb, 0x34, 0xa2, - 0x22, 0xf9, 0x42, 0x38, 0xaa, 0x4e, 0xd1, 0x67, 0xf4, 0x5d, 0xa3, 0x21, 0x56, 0xa4, 0x4f, 0xf5, - 0x7e, 0x1f, 0x4a, 0x53, 0xc1, 0xd6, 0x65, 0xdf, 0xa1, 0x31, 0x17, 0xd9, 0xaf, 0x85, 0xe2, 0x6e, - 0x98, 0x3c, 0xa3, 0xeb, 0xbb, 0x34, 0x08, 0x0b, 0xe4, 0xa9, 0x9e, 0x71, 0xc9, 0x82, 0xe1, 0xd8, - 0xe5, 0xde, 0xa3, 0x51, 0x99, 0x2f, 0x59, 0x20, 0x32, 0x8b, 0xfd, 0x86, 0xe2, 0xb3, 0xcb, 0xad, - 0xd3, 0x30, 0xcd, 0xfb, 0x0d, 0x86, 0x6a, 0x4e, 0x7e, 0x9b, 0x92, 0x77, 0x67, 0xcf, 0xf8, 0xc7, - 0x09, 0x1a, 0x60, 0x39, 0x7b, 0x77, 0xd6, 0x94, 0x3d, 0xf6, 0x8c, 0x29, 0xff, 0x84, 0xb2, 0x89, - 0xc0, 0x9e, 0x9a, 0xf3, 0x63, 0x98, 0x73, 0x6f, 0x75, 0xbd, 0xb1, 0x7d, 0x34, 0x2a, 0x35, 0x55, - 0xb9, 0x02, 0xda, 0x95, 0xa9, 0xec, 0xc7, 0xbd, 0xe4, 0x6d, 0x50, 0x94, 0x11, 0x24, 0x31, 0x2b, - 0xcc, 0x2e, 0xb3, 0xb2, 0xa3, 0x26, 0x22, 0xac, 0x30, 0x94, 0x67, 0x45, 0x20, 0x51, 0x2b, 0xae, - 0xd3, 0x67, 0x56, 0x3e, 0x50, 0xa5, 0x99, 0x56, 0xdc, 0x10, 0xc0, 0xad, 0x04, 0x48, 0x4b, 0xeb, - 0x7e, 0xbe, 0x85, 0xed, 0xe4, 0x8b, 0xe1, 0x04, 0x6c, 0x03, 0xef, 0xcf, 0xc1, 0x4a, 0x46, 0x13, - 0x06, 0x37, 0x4d, 0xfb, 0xd9, 0x08, 0x5a, 0x60, 0x34, 0xd3, 0xb4, 0x9f, 0x9b, 0x41, 0x2b, 0xff, - 0xa6, 0x04, 0x49, 0x9a, 0x4f, 0x92, 0x2c, 0x24, 0xdf, 0x6b, 0x6d, 0x3e, 0x56, 0xce, 0xd1, 0x5f, - 0x0f, 0x5b, 0xad, 0xa7, 0x8a, 0x44, 0x72, 0x90, 0x7a, 0xf8, 0x95, 0xbd, 0xc6, 0xae, 0x22, 0x93, - 0x22, 0xe4, 0x9b, 0x9b, 0xdb, 0x1b, 0x0d, 0x63, 0xc7, 0xd8, 0xdc, 0xde, 0x53, 0x12, 0xb4, 0xad, - 0xf9, 0xb4, 0xf5, 0x60, 0x4f, 0x49, 0x92, 0x0c, 0x24, 0x68, 0x5d, 
0x8a, 0x00, 0xa4, 0x77, 0xf7, - 0x8c, 0xcd, 0xed, 0x0d, 0x25, 0x4d, 0xad, 0xec, 0x6d, 0x6e, 0x35, 0x94, 0x0c, 0x45, 0xee, 0xbd, - 0xbb, 0xf3, 0xb4, 0xa1, 0x64, 0xe9, 0xcf, 0x07, 0x86, 0xf1, 0xe0, 0x2b, 0x4a, 0x8e, 0x92, 0xb6, - 0x1e, 0xec, 0x28, 0x80, 0xcd, 0x0f, 0x1e, 0x3e, 0x6d, 0x28, 0x79, 0x52, 0x80, 0x6c, 0xf3, 0xdd, - 0xed, 0x47, 0x7b, 0x9b, 0xad, 0x6d, 0xa5, 0x50, 0x3e, 0x81, 0x12, 0x5b, 0xe6, 0xc0, 0x2a, 0xb2, - 0xa4, 0xf0, 0x1d, 0x48, 0xb1, 0x9d, 0x91, 0x50, 0x25, 0x95, 0xf0, 0xce, 0x4c, 0x53, 0x56, 0xd8, - 0x1e, 0x31, 0xda, 0xd2, 0x65, 0x48, 0xb1, 0x55, 0x5a, 0x84, 0x14, 0x5b, 0x1d, 0x19, 0x53, 0x45, - 0x56, 0x28, 0xff, 0x96, 0x0c, 0xb0, 0x61, 0xef, 0x3e, 0xef, 0x8f, 0x30, 0x21, 0xbf, 0x0c, 0x30, - 0x79, 0xde, 0x1f, 0xb5, 0x51, 0xf5, 0x3c, 0xa9, 0xcc, 0xd1, 0x1a, 0xf4, 0x77, 0xe4, 0x1a, 0x14, - 0xb0, 0xf9, 0x90, 0x79, 0x21, 0xcc, 0x25, 0x33, 0x46, 0x9e, 0xd6, 0x71, 0xc7, 0x14, 0x84, 0xd4, - 0x74, 0x4c, 0x21, 0xd3, 0x02, 0xa4, 0xa6, 0x93, 0xab, 0x80, 0xc5, 0xf6, 0x04, 0x23, 0x0a, 0xa6, - 0x8d, 0x39, 0x03, 0xfb, 0x65, 0x31, 0x86, 0xbc, 0x0d, 0xd8, 0x27, 0x9b, 0x77, 0x71, 0xfa, 0x74, - 0xb8, 0xc3, 0x5d, 0xa1, 0x3f, 0xd8, 0x6c, 0x7d, 0xc2, 0x52, 0x0b, 0x72, 0x5e, 0x3d, 0xed, 0x0b, - 0x6b, 0xf9, 0x8c, 0x14, 0x9c, 0x11, 0x60, 0x95, 0x37, 0x25, 0x06, 0xe0, 0xa3, 0x59, 0xc0, 0xd1, - 0x30, 0x12, 0x1b, 0x4e, 0xf9, 0x32, 0xcc, 0x6d, 0xdb, 0x16, 0x3b, 0xbd, 0xb8, 0x4a, 0x05, 0x90, - 0x3a, 0x25, 0x09, 0xb3, 0x27, 0xa9, 0x53, 0xbe, 0x02, 0x20, 0xb4, 0x29, 0x20, 0xed, 0xb3, 0x36, - 0xf4, 0x01, 0xd2, 0x7e, 0xf9, 0x26, 0xa4, 0xb7, 0x3a, 0xc7, 0x7b, 0x9d, 0x1e, 0xb9, 0x06, 0x30, - 0xe8, 0x4c, 0x9c, 0xf6, 0x21, 0xee, 0xc3, 0xe7, 0x9f, 0x7f, 0xfe, 0xb9, 0x84, 0x97, 0xbd, 0x1c, - 0xad, 0x65, 0xfb, 0xf1, 0x02, 0xa0, 0x35, 0xe8, 0x6e, 0x99, 0x93, 0x49, 0xa7, 0x67, 0x92, 0x2a, - 0xa4, 0x2d, 0x73, 0x42, 0xa3, 0x9d, 0x84, 0xef, 0x08, 0xcb, 0xfe, 0x2a, 0xf8, 0xa8, 0x95, 0x6d, - 0x84, 0x18, 0x1c, 0x4a, 0x14, 0x48, 0x58, 0x47, 0x43, 0x7c, 0x27, 0x49, 0x19, 0xf4, 0xe7, 0xd2, - 0x25, 0x48, 0x33, 0x0c, 0x21, 0x90, 0xb4, 0x3a, 0x43, 0xb3, 0xc4, 0xfa, 0xc5, 0xdf, 0xe5, 0x5f, - 0x95, 0x00, 0xb6, 0xcd, 0x97, 0x67, 0xe8, 0xd3, 0x47, 0xc5, 0xf4, 0x99, 0x60, 0x7d, 0xde, 0x8f, - 0xeb, 0x93, 0xea, 0xec, 0xd0, 0xb6, 0xbb, 0x6d, 0xb6, 0xc5, 0xec, 0x49, 0x27, 0x47, 0x6b, 0x70, - 0xd7, 0xca, 0x1f, 0x40, 0x61, 0xd3, 0xb2, 0xcc, 0xb1, 0x3b, 0x26, 0x02, 0xc9, 0x67, 0xf6, 0xc4, - 0xe1, 0x6f, 0x4b, 0xf8, 0x9b, 0x94, 0x20, 0x39, 0xb2, 0xc7, 0x0e, 0x9b, 0x67, 0x3d, 0xa9, 0xaf, - 0xae, 0xae, 0x1a, 0x58, 0x43, 0x2e, 0x41, 0xee, 0xc0, 0xb6, 0x2c, 0xf3, 0x80, 0x4e, 0x22, 0x81, - 0x69, 0x8d, 0x5f, 0x51, 0xfe, 0x65, 0x09, 0x0a, 0x2d, 0xe7, 0x99, 0x6f, 0x5c, 0x81, 0xc4, 0x73, - 0xf3, 0x04, 0x87, 0x97, 0x30, 0xe8, 0x4f, 0x7a, 0x54, 0x7e, 0xbe, 0x33, 0x38, 0x62, 0x6f, 0x4d, - 0x05, 0x83, 0x15, 0xc8, 0x05, 0x48, 0xbf, 0x34, 0xfb, 0xbd, 0x67, 0x0e, 0xda, 0x94, 0x0d, 0x5e, - 0x22, 0xb7, 0x20, 0xd5, 0xa7, 0x83, 0x2d, 0x25, 0x71, 0xbd, 0x2e, 0xf8, 0xeb, 0x25, 0xce, 0xc1, - 0x60, 0xa0, 0x1b, 0xd9, 0x6c, 0x57, 0xf9, 0xe8, 0xa3, 0x8f, 0x3e, 0x92, 0xcb, 0x87, 0xb0, 0xe8, - 0x1e, 0xde, 0xc0, 0x64, 0xb7, 0xa1, 0x34, 0x30, 0xed, 0xf6, 0x61, 0xdf, 0xea, 0x0c, 0x06, 0x27, - 0xed, 0x97, 0xb6, 0xd5, 0xee, 0x58, 0x6d, 0x7b, 0x72, 0xd0, 0x19, 0xe3, 0x02, 0x44, 0x77, 0xb1, - 0x38, 0x30, 0xed, 0x26, 0xa3, 0xbd, 0x6f, 0x5b, 0x0f, 0xac, 0x16, 0xe5, 0x94, 0xff, 0x20, 0x09, - 0xb9, 0xad, 0x13, 0xd7, 0xfa, 0x22, 0xa4, 0x0e, 0xec, 0x23, 0x8b, 0xad, 0x65, 0xca, 0x60, 0x05, - 0x6f, 0x8f, 0x64, 0x61, 0x8f, 0x16, 0x21, 0xf5, 0xe2, 0xc8, 0x76, 0x4c, 0x9c, 0x6e, 0xce, 
0x60, - 0x05, 0xba, 0x5a, 0x23, 0xd3, 0x29, 0x25, 0x31, 0xb9, 0xa5, 0x3f, 0xfd, 0xf9, 0xa7, 0xce, 0x30, - 0x7f, 0xb2, 0x02, 0x69, 0x9b, 0xae, 0xfe, 0xa4, 0x94, 0xc6, 0x77, 0x35, 0x01, 0x2e, 0xee, 0x8a, - 0xc1, 0x51, 0x64, 0x13, 0x16, 0x5e, 0x9a, 0xed, 0xe1, 0xd1, 0xc4, 0x69, 0xf7, 0xec, 0x76, 0xd7, - 0x34, 0x47, 0xe6, 0xb8, 0x34, 0x87, 0x3d, 0x09, 0x3e, 0x61, 0xd6, 0x42, 0x1a, 0xf3, 0x2f, 0xcd, - 0xad, 0xa3, 0x89, 0xb3, 0x61, 0x3f, 0x46, 0x16, 0xa9, 0x42, 0x6e, 0x6c, 0x52, 0x4f, 0x40, 0x07, - 0x5b, 0x08, 0xf7, 0x1e, 0xa0, 0x66, 0xc7, 0xe6, 0x08, 0x2b, 0xc8, 0x3a, 0x64, 0xf7, 0xfb, 0xcf, - 0xcd, 0xc9, 0x33, 0xb3, 0x5b, 0xca, 0xa8, 0x52, 0x65, 0x5e, 0xbb, 0xe8, 0x73, 0xbc, 0x65, 0x5d, - 0x79, 0x64, 0x0f, 0xec, 0xb1, 0xe1, 0x41, 0xc9, 0x7d, 0xc8, 0x4d, 0xec, 0xa1, 0xc9, 0xf4, 0x9d, - 0xc5, 0xa0, 0x7a, 0x79, 0x16, 0x6f, 0xd7, 0x1e, 0x9a, 0xae, 0x07, 0x73, 0xf1, 0x64, 0x99, 0x0d, - 0x74, 0x9f, 0x5e, 0x9d, 0x4b, 0x80, 0x4f, 0x03, 0x74, 0x40, 0x78, 0x95, 0x26, 0x4b, 0x74, 0x40, - 0xbd, 0x43, 0x7a, 0x23, 0x2a, 0xe5, 0x31, 0xaf, 0xf4, 0xca, 0x4b, 0xb7, 0x20, 0xe7, 0x19, 0xf4, - 0x5d, 0x1f, 0x73, 0x37, 0x39, 0xf4, 0x07, 0xcc, 0xf5, 0x31, 0x5f, 0xf3, 0x06, 0xa4, 0x70, 0xd8, - 0x34, 0x42, 0x19, 0x0d, 0x1a, 0x10, 0x73, 0x90, 0xda, 0x30, 0x1a, 0x8d, 0x6d, 0x45, 0xc2, 0xd8, - 0xf8, 0xf4, 0xdd, 0x86, 0x22, 0x0b, 0x8a, 0xfd, 0x6d, 0x09, 0x12, 0x8d, 0x63, 0x54, 0x0b, 0x9d, - 0x86, 0x7b, 0xa2, 0xe9, 0x6f, 0xad, 0x06, 0xc9, 0xa1, 0x3d, 0x36, 0xc9, 0xf9, 0x19, 0xb3, 0x2c, - 0xf5, 0x70, 0xbf, 0x84, 0x57, 0xe4, 0xc6, 0xb1, 0x63, 0x20, 0x5e, 0x7b, 0x0b, 0x92, 0x8e, 0x79, - 0xec, 0xcc, 0xe6, 0x3d, 0x63, 0x1d, 0x50, 0x80, 0x76, 0x13, 0xd2, 0xd6, 0xd1, 0x70, 0xdf, 0x1c, - 0xcf, 0x86, 0xf6, 0x71, 0x7a, 0x1c, 0x52, 0x7e, 0x0f, 0x94, 0x47, 0xf6, 0x70, 0x34, 0x30, 0x8f, - 0x1b, 0xc7, 0x8e, 0x69, 0x4d, 0xfa, 0xb6, 0x45, 0xf5, 0x7c, 0xd8, 0x1f, 0xa3, 0x17, 0xc1, 0xb7, - 0x62, 0x2c, 0xd0, 0x53, 0x3d, 0x31, 0x0f, 0x6c, 0xab, 0xcb, 0x1d, 0x26, 0x2f, 0x51, 0xb4, 0xf3, - 0xac, 0x3f, 0xa6, 0x0e, 0x84, 0xfa, 0x79, 0x56, 0x28, 0x6f, 0x40, 0x91, 0xe7, 0x18, 0x13, 0xde, - 0x71, 0xf9, 0x06, 0x14, 0xdc, 0x2a, 0x7c, 0x38, 0xcf, 0x42, 0xf2, 0x83, 0x86, 0xd1, 0x52, 0xce, - 0xd1, 0x65, 0x6d, 0x6d, 0x37, 0x14, 0x89, 0xfe, 0xd8, 0x7b, 0xbf, 0x15, 0x58, 0xca, 0x4b, 0x50, - 0xf0, 0xc6, 0xbe, 0x6b, 0x3a, 0xd8, 0x42, 0x03, 0x42, 0xa6, 0x2e, 0x67, 0xa5, 0x72, 0x06, 0x52, - 0x8d, 0xe1, 0xc8, 0x39, 0x29, 0xff, 0x22, 0xe4, 0x39, 0xe8, 0x69, 0x7f, 0xe2, 0x90, 0x3b, 0x90, - 0x19, 0xf2, 0xf9, 0x4a, 0x78, 0xdd, 0x13, 0x35, 0xe5, 0xe3, 0xdc, 0xdf, 0x86, 0x8b, 0x5e, 0xaa, - 0x42, 0x46, 0xf0, 0xa5, 0xfc, 0xa8, 0xcb, 0xe2, 0x51, 0x67, 0x4e, 0x21, 0x21, 0x38, 0x85, 0xf2, - 0x16, 0x64, 0x58, 0x04, 0x9c, 0x60, 0x54, 0x67, 0xa9, 0x22, 0x13, 0x13, 0xdb, 0xf9, 0x3c, 0xab, - 0x63, 0x17, 0x95, 0xab, 0x90, 0x47, 0xc1, 0x72, 0x04, 0x73, 0x9d, 0x80, 0x55, 0x4c, 0x6e, 0xbf, - 0x9f, 0x82, 0xac, 0xbb, 0x52, 0x64, 0x19, 0xd2, 0x2c, 0x3f, 0x43, 0x53, 0xee, 0xfb, 0x41, 0x0a, - 0x33, 0x32, 0xb2, 0x0c, 0x19, 0x9e, 0x83, 0x71, 0xef, 0x2e, 0x57, 0x35, 0x23, 0xcd, 0x72, 0x2e, - 0xaf, 0xb1, 0xa6, 0xa3, 0x63, 0x62, 0x2f, 0x03, 0x69, 0x96, 0x55, 0x11, 0x15, 0x72, 0x5e, 0x1e, - 0x85, 0xfe, 0x98, 0x3f, 0x03, 0x64, 0xdd, 0xc4, 0x49, 0x40, 0xd4, 0x74, 0xf4, 0x58, 0x3c, 0xe7, - 0xcf, 0x36, 0xfd, 0xeb, 0x49, 0xd6, 0xcd, 0x86, 0xf0, 0xf9, 0xde, 0x4d, 0xf0, 0x33, 0x3c, 0xff, - 0xf1, 0x01, 0x35, 0x1d, 0x5d, 0x82, 0x9b, 0xcd, 0x67, 0x78, 0x8e, 0x43, 0xae, 0xd2, 0x21, 0x62, - 0xce, 0x82, 0x47, 0xdf, 0x4f, 0xdd, 0xd3, 0x2c, 0x93, 0x21, 0xd7, 0xa8, 0x05, 0x96, 0x98, 0xe0, - 0xb9, 0xf4, 0xf3, 
0xf4, 0x0c, 0xcf, 0x57, 0xc8, 0x4d, 0x0a, 0x61, 0xcb, 0x5f, 0x82, 0x88, 0xa4, - 0x3c, 0xc3, 0x93, 0x72, 0xa2, 0xd2, 0x0e, 0xd1, 0x3d, 0xa0, 0x4b, 0x10, 0x12, 0xf0, 0x34, 0x4b, - 0xc0, 0xc9, 0x15, 0x34, 0xc7, 0x26, 0x55, 0xf0, 0x93, 0xed, 0x0c, 0x4f, 0x70, 0xfc, 0x76, 0xbc, - 0xb2, 0x79, 0x89, 0x75, 0x86, 0xa7, 0x30, 0xa4, 0x46, 0xf7, 0x8b, 0xea, 0xbb, 0x34, 0x8f, 0x4e, - 0xb0, 0xe4, 0x0b, 0xcf, 0xdd, 0x53, 0xe6, 0x03, 0xeb, 0xcc, 0x83, 0x18, 0xa9, 0x26, 0x9e, 0x86, - 0x25, 0xca, 0xdb, 0xe9, 0x5b, 0x87, 0xa5, 0x22, 0xae, 0x44, 0xa2, 0x6f, 0x1d, 0x1a, 0xa9, 0x26, - 0xad, 0x61, 0x1a, 0xd8, 0xa6, 0x6d, 0x0a, 0xb6, 0x25, 0x6f, 0xb3, 0x46, 0x5a, 0x45, 0x4a, 0x90, - 0x6a, 0xb6, 0xb7, 0x3b, 0x56, 0x69, 0x81, 0xf1, 0xac, 0x8e, 0x65, 0x24, 0x9b, 0xdb, 0x1d, 0x8b, - 0xbc, 0x05, 0x89, 0xc9, 0xd1, 0x7e, 0x89, 0x84, 0xbf, 0xac, 0xec, 0x1e, 0xed, 0xbb, 0x43, 0x31, - 0x28, 0x82, 0x2c, 0x43, 0x76, 0xe2, 0x8c, 0xdb, 0xbf, 0x60, 0x8e, 0xed, 0xd2, 0x79, 0x5c, 0xc2, - 0x73, 0x46, 0x66, 0xe2, 0x8c, 0x3f, 0x30, 0xc7, 0xf6, 0x19, 0x9d, 0x5f, 0xf9, 0x0a, 0xe4, 0x05, - 0xbb, 0xa4, 0x08, 0x92, 0xc5, 0x6e, 0x0a, 0x75, 0xe9, 0x8e, 0x21, 0x59, 0xe5, 0x3d, 0x28, 0xb8, - 0x39, 0x0c, 0xce, 0x57, 0xa3, 0x27, 0x69, 0x60, 0x8f, 0xf1, 0x7c, 0xce, 0x6b, 0x97, 0xc4, 0x10, - 0xe5, 0xc3, 0x78, 0xb8, 0x60, 0xd0, 0xb2, 0x12, 0x1a, 0x8a, 0x54, 0xfe, 0xa1, 0x04, 0x85, 0x2d, - 0x7b, 0xec, 0x3f, 0x30, 0x2f, 0x42, 0x6a, 0xdf, 0xb6, 0x07, 0x13, 0x34, 0x9b, 0x35, 0x58, 0x81, - 0xbc, 0x01, 0x05, 0xfc, 0xe1, 0xe6, 0x9e, 0xb2, 0xf7, 0xb4, 0x91, 0xc7, 0x7a, 0x9e, 0x70, 0x12, - 0x48, 0xf6, 0x2d, 0x67, 0xc2, 0x3d, 0x19, 0xfe, 0x26, 0x5f, 0x80, 0x3c, 0xfd, 0xeb, 0x32, 0x93, - 0xde, 0x85, 0x15, 0x68, 0x35, 0x27, 0xbe, 0x05, 0x73, 0xb8, 0xfb, 0x1e, 0x2c, 0xe3, 0x3d, 0x63, - 0x14, 0x58, 0x03, 0x07, 0x96, 0x20, 0xc3, 0x5c, 0xc1, 0x04, 0xbf, 0x96, 0xe5, 0x0c, 0xb7, 0x48, - 0xdd, 0x2b, 0x66, 0x02, 0x2c, 0xdc, 0x67, 0x0c, 0x5e, 0x2a, 0x3f, 0x80, 0x2c, 0x46, 0xa9, 0xd6, - 0xa0, 0x4b, 0xca, 0x20, 0xf5, 0x4a, 0x26, 0xc6, 0xc8, 0x45, 0xe1, 0x9a, 0xcf, 0x9b, 0x57, 0x36, - 0x0c, 0xa9, 0xb7, 0xb4, 0x00, 0xd2, 0x06, 0xbd, 0x77, 0x1f, 0x73, 0x37, 0x2d, 0x1d, 0x97, 0x5b, - 0xdc, 0xc4, 0xb6, 0xf9, 0x32, 0xce, 0xc4, 0xb6, 0xf9, 0x92, 0x99, 0xb8, 0x3a, 0x65, 0x82, 0x96, - 0x4e, 0xf8, 0xa7, 0x43, 0xe9, 0xa4, 0x5c, 0x85, 0x39, 0x3c, 0x9e, 0x7d, 0xab, 0xb7, 0x63, 0xf7, - 0x2d, 0xbc, 0xe7, 0x1f, 0xe2, 0x3d, 0x49, 0x32, 0xa4, 0x43, 0xba, 0x07, 0xe6, 0x71, 0xe7, 0x80, - 0xdd, 0x38, 0xb3, 0x06, 0x2b, 0x94, 0x3f, 0x4b, 0xc2, 0x3c, 0x77, 0xad, 0xef, 0xf7, 0x9d, 0x67, - 0x5b, 0x9d, 0x11, 0x79, 0x0a, 0x05, 0xea, 0x55, 0xdb, 0xc3, 0xce, 0x68, 0x44, 0x8f, 0xaf, 0x84, - 0x57, 0x8d, 0xeb, 0x53, 0xae, 0x9a, 0xe3, 0x57, 0xb6, 0x3b, 0x43, 0x73, 0x8b, 0x61, 0x1b, 0x96, - 0x33, 0x3e, 0x31, 0xf2, 0x96, 0x5f, 0x43, 0x36, 0x21, 0x3f, 0x9c, 0xf4, 0x3c, 0x63, 0x32, 0x1a, - 0xab, 0x44, 0x1a, 0xdb, 0x9a, 0xf4, 0x02, 0xb6, 0x60, 0xe8, 0x55, 0xd0, 0x81, 0x51, 0x7f, 0xec, - 0xd9, 0x4a, 0x9c, 0x32, 0x30, 0xea, 0x3a, 0x82, 0x03, 0xdb, 0xf7, 0x6b, 0xc8, 0x63, 0x00, 0x7a, - 0xbc, 0x1c, 0x9b, 0xa6, 0x4e, 0xa8, 0xa0, 0xbc, 0xf6, 0x66, 0xa4, 0xad, 0x5d, 0x67, 0xbc, 0x67, - 0xef, 0x3a, 0x63, 0x66, 0x88, 0x1e, 0x4c, 0x2c, 0x2e, 0xbd, 0x03, 0x4a, 0x78, 0xfe, 0xe2, 0x8d, - 0x3c, 0x35, 0xe3, 0x46, 0x9e, 0xe3, 0x37, 0xf2, 0xba, 0x7c, 0x57, 0x5a, 0x7a, 0x0f, 0x8a, 0xa1, - 0x29, 0x8b, 0x74, 0xc2, 0xe8, 0xb7, 0x45, 0x7a, 0x5e, 0x7b, 0x5d, 0xf8, 0x9c, 0x2d, 0x6e, 0xb8, - 0x68, 0xf7, 0x1d, 0x50, 0xc2, 0xd3, 0x17, 0x0d, 0x67, 0x63, 0x32, 0x05, 0xe4, 0xdf, 0x87, 0xb9, - 0xc0, 0x94, 0x45, 0x72, 0xee, 0x94, 0x49, 
0x95, 0x7f, 0x29, 0x05, 0xa9, 0x96, 0x65, 0xda, 0x87, - 0xe4, 0xf5, 0x60, 0x9c, 0x7c, 0x72, 0xce, 0x8d, 0x91, 0x17, 0x43, 0x31, 0xf2, 0xc9, 0x39, 0x2f, - 0x42, 0x5e, 0x0c, 0x45, 0x48, 0xb7, 0xa9, 0xa6, 0x93, 0xcb, 0x53, 0xf1, 0xf1, 0xc9, 0x39, 0x21, - 0x38, 0x5e, 0x9e, 0x0a, 0x8e, 0x7e, 0x73, 0x4d, 0xa7, 0x0e, 0x35, 0x18, 0x19, 0x9f, 0x9c, 0xf3, - 0xa3, 0xe2, 0x72, 0x38, 0x2a, 0x7a, 0x8d, 0x35, 0x9d, 0x0d, 0x49, 0x88, 0x88, 0x38, 0x24, 0x16, - 0x0b, 0x97, 0xc3, 0xb1, 0x10, 0x79, 0x3c, 0x0a, 0x2e, 0x87, 0xa3, 0x20, 0x36, 0xf2, 0xa8, 0x77, - 0x31, 0x14, 0xf5, 0xd0, 0x28, 0x0b, 0x77, 0xcb, 0xe1, 0x70, 0xc7, 0x78, 0xc2, 0x48, 0xc5, 0x58, - 0xe7, 0x35, 0xd6, 0x74, 0xa2, 0x85, 0x02, 0x5d, 0xf4, 0x6d, 0x1f, 0xf7, 0x02, 0x9d, 0xbe, 0x4e, - 0x97, 0xcd, 0xbd, 0x88, 0x16, 0x63, 0xbe, 0xf8, 0xe3, 0x6a, 0xba, 0x17, 0x31, 0x0d, 0x32, 0x87, - 0x3c, 0x01, 0x56, 0xd0, 0x73, 0x09, 0xb2, 0xc4, 0xcd, 0x5f, 0x69, 0xb6, 0xd1, 0x83, 0xd1, 0x79, - 0x1d, 0xb2, 0x3b, 0x7d, 0x05, 0xe6, 0x9a, 0xed, 0xa7, 0x9d, 0x71, 0xcf, 0x9c, 0x38, 0xed, 0xbd, - 0x4e, 0xcf, 0x7b, 0x44, 0xa0, 0xfb, 0x9f, 0x6f, 0xf2, 0x96, 0xbd, 0x4e, 0x8f, 0x5c, 0x70, 0xc5, - 0xd5, 0xc5, 0x56, 0x89, 0xcb, 0x6b, 0xe9, 0x75, 0xba, 0x68, 0xcc, 0x18, 0xfa, 0xc2, 0x05, 0xee, - 0x0b, 0x1f, 0x66, 0x20, 0x75, 0x64, 0xf5, 0x6d, 0xeb, 0x61, 0x0e, 0x32, 0x8e, 0x3d, 0x1e, 0x76, - 0x1c, 0xbb, 0xfc, 0x23, 0x09, 0xe0, 0x91, 0x3d, 0x1c, 0x1e, 0x59, 0xfd, 0x17, 0x47, 0x26, 0xb9, - 0x02, 0xf9, 0x61, 0xe7, 0xb9, 0xd9, 0x1e, 0x9a, 0xed, 0x83, 0xb1, 0x7b, 0x0e, 0x72, 0xb4, 0x6a, - 0xcb, 0x7c, 0x34, 0x3e, 0x21, 0x25, 0xf7, 0x8a, 0x8e, 0xda, 0x41, 0x49, 0xf2, 0x2b, 0xfb, 0x22, - 0xbf, 0x74, 0xa6, 0xf9, 0x1e, 0xba, 0xd7, 0x4e, 0x96, 0x47, 0x64, 0xf8, 0xee, 0x61, 0x89, 0x4a, - 0xde, 0x31, 0x87, 0xa3, 0xf6, 0x01, 0x4a, 0x85, 0xca, 0x21, 0x45, 0xcb, 0x8f, 0xc8, 0x6d, 0x48, - 0x1c, 0xd8, 0x03, 0x14, 0xc9, 0x29, 0xfb, 0x42, 0x71, 0xe4, 0x0d, 0x48, 0x0c, 0x27, 0x4c, 0x36, - 0x79, 0x6d, 0x41, 0xb8, 0x27, 0xb0, 0xd0, 0x44, 0x61, 0xc3, 0x49, 0xcf, 0x9b, 0xf7, 0x8d, 0x22, - 0x24, 0x9a, 0xad, 0x16, 0x8d, 0xfd, 0xcd, 0x56, 0x6b, 0x4d, 0x91, 0xea, 0x5f, 0x82, 0x6c, 0x6f, - 0x6c, 0x9a, 0xd4, 0x3d, 0xcc, 0xce, 0x39, 0x3e, 0xc4, 0x58, 0xe7, 0x81, 0xea, 0x5b, 0x90, 0x39, - 0x60, 0x59, 0x07, 0x89, 0x48, 0x6b, 0x4b, 0x7f, 0xc8, 0x1e, 0x55, 0x96, 0xfc, 0xe6, 0x70, 0x9e, - 0x62, 0xb8, 0x36, 0xea, 0x3b, 0x90, 0x1b, 0xb7, 0x4f, 0x33, 0xf8, 0x31, 0x8b, 0x2e, 0x71, 0x06, - 0xb3, 0x63, 0x5e, 0x55, 0x6f, 0xc0, 0x82, 0x65, 0xbb, 0xdf, 0x50, 0xda, 0x5d, 0x76, 0xc6, 0x2e, - 0x4e, 0x5f, 0xe5, 0x5c, 0xe3, 0x26, 0xfb, 0x6e, 0x69, 0xd9, 0xbc, 0x81, 0x9d, 0xca, 0xfa, 0x23, - 0x50, 0x04, 0x33, 0x98, 0x7a, 0xc6, 0x59, 0x39, 0x64, 0x1f, 0x4a, 0x3d, 0x2b, 0x78, 0xee, 0x43, - 0x46, 0xd8, 0xc9, 0x8c, 0x31, 0xd2, 0x63, 0x5f, 0x9d, 0x3d, 0x23, 0xe8, 0xea, 0xa6, 0x8d, 0x50, - 0x5f, 0x13, 0x6d, 0xe4, 0x19, 0xfb, 0x20, 0x2d, 0x1a, 0xa9, 0xe9, 0xa1, 0x55, 0x39, 0x3a, 0x75, - 0x28, 0x7d, 0xf6, 0x3d, 0xd9, 0xb3, 0xc2, 0x1c, 0xe0, 0x0c, 0x33, 0xf1, 0x83, 0xf9, 0x90, 0x7d, - 0x6a, 0x0e, 0x98, 0x99, 0x1a, 0xcd, 0xe4, 0xd4, 0xd1, 0x3c, 0x67, 0xdf, 0x75, 0x3d, 0x33, 0xbb, - 0xb3, 0x46, 0x33, 0x39, 0x75, 0x34, 0x03, 0xf6, 0xc5, 0x37, 0x60, 0xa6, 0xa6, 0xd7, 0x37, 0x80, - 0x88, 0x5b, 0xcd, 0xe3, 0x44, 0x8c, 0x9d, 0x21, 0xfb, 0x8e, 0xef, 0x6f, 0x36, 0xa3, 0xcc, 0x32, - 0x14, 0x3f, 0x20, 0x8b, 0x7d, 0xe2, 0x0f, 0x1a, 0xaa, 0xe9, 0xf5, 0x4d, 0x38, 0x2f, 0x4e, 0xec, - 0x0c, 0x43, 0xb2, 0x55, 0xa9, 0x52, 0x34, 0x16, 0xfc, 0xa9, 0x71, 0xce, 0x4c, 0x53, 0xf1, 0x83, - 0x1a, 0xa9, 0x52, 0x45, 0x99, 0x32, 0x55, 0xd3, 0xeb, 0x0f, 0xa0, 
0x28, 0x98, 0xda, 0xc7, 0x08, - 0x1d, 0x6d, 0xe6, 0x05, 0xfb, 0x5f, 0x0b, 0xcf, 0x0c, 0x8d, 0xe8, 0xe1, 0x1d, 0xe3, 0x31, 0x2e, - 0xda, 0xc8, 0x98, 0xfd, 0xa3, 0x80, 0x3f, 0x16, 0x64, 0x84, 0x8e, 0x04, 0xe6, 0xdf, 0x71, 0x56, - 0x26, 0xec, 0x5f, 0x08, 0xfc, 0xa1, 0x50, 0x42, 0xbd, 0x1f, 0x98, 0x8e, 0x49, 0x83, 0x5c, 0x8c, - 0x0d, 0x07, 0x3d, 0xf2, 0x9b, 0x91, 0x80, 0x15, 0xf1, 0x81, 0x44, 0x98, 0x36, 0x2d, 0xd6, 0x37, - 0x61, 0xfe, 0xec, 0x0e, 0xe9, 0x63, 0x89, 0x65, 0xcb, 0xd5, 0x15, 0x9a, 0x50, 0x1b, 0x73, 0xdd, - 0x80, 0x5f, 0x6a, 0xc0, 0xdc, 0x99, 0x9d, 0xd2, 0x27, 0x12, 0xcb, 0x39, 0xa9, 0x25, 0xa3, 0xd0, - 0x0d, 0x7a, 0xa6, 0xb9, 0x33, 0xbb, 0xa5, 0x4f, 0x25, 0xf6, 0x40, 0xa1, 0x6b, 0x9e, 0x11, 0xd7, - 0x33, 0xcd, 0x9d, 0xd9, 0x2d, 0x7d, 0x95, 0x65, 0x94, 0xb2, 0x5e, 0x15, 0x8d, 0xa0, 0x2f, 0x98, - 0x3f, 0xbb, 0x5b, 0xfa, 0x9a, 0x84, 0x8f, 0x15, 0xb2, 0xae, 0x7b, 0xeb, 0xe2, 0x79, 0xa6, 0xf9, - 0xb3, 0xbb, 0xa5, 0xaf, 0x4b, 0xf8, 0xa4, 0x21, 0xeb, 0xeb, 0x01, 0x33, 0xc1, 0xd1, 0x9c, 0xee, - 0x96, 0xbe, 0x21, 0xe1, 0x2b, 0x83, 0xac, 0xd7, 0x3c, 0x33, 0xbb, 0x53, 0xa3, 0x39, 0xdd, 0x2d, - 0x7d, 0x13, 0x6f, 0xf1, 0x75, 0x59, 0xbf, 0x13, 0x30, 0x83, 0x9e, 0xa9, 0xf8, 0x0a, 0x6e, 0xe9, - 0x5b, 0x12, 0x3e, 0x06, 0xc9, 0xfa, 0x5d, 0xc3, 0xed, 0xdd, 0xf7, 0x4c, 0xc5, 0x57, 0x70, 0x4b, - 0x9f, 0x49, 0xf8, 0x66, 0x24, 0xeb, 0xf7, 0x82, 0x86, 0xd0, 0x33, 0x29, 0xaf, 0xe2, 0x96, 0xbe, - 0x4d, 0x2d, 0x15, 0xeb, 0xf2, 0xfa, 0xaa, 0xe1, 0x0e, 0x40, 0xf0, 0x4c, 0xca, 0xab, 0xb8, 0xa5, - 0xef, 0x50, 0x53, 0x4a, 0x5d, 0x5e, 0x5f, 0x0b, 0x99, 0xaa, 0xe9, 0xf5, 0x47, 0x50, 0x38, 0xab, - 0x5b, 0xfa, 0xae, 0xf8, 0x16, 0x97, 0xef, 0x0a, 0xbe, 0x69, 0x47, 0xd8, 0xb3, 0x53, 0x1d, 0xd3, - 0xf7, 0x30, 0xc7, 0xa9, 0xcf, 0x3d, 0x61, 0xef, 0x55, 0x8c, 0xe0, 0x6f, 0x1f, 0x73, 0x53, 0x5b, - 0xfe, 0xf9, 0x38, 0xd5, 0x47, 0x7d, 0x5f, 0xc2, 0x47, 0xad, 0x02, 0x37, 0x88, 0x78, 0xef, 0xa4, - 0x30, 0x87, 0xf5, 0xa1, 0x3f, 0xcb, 0xd3, 0xbc, 0xd5, 0x0f, 0xa4, 0x57, 0x71, 0x57, 0xf5, 0x44, - 0x6b, 0xbb, 0xe1, 0x2d, 0x06, 0xd6, 0xbc, 0x0d, 0xc9, 0x63, 0x6d, 0x75, 0x4d, 0xbc, 0x92, 0x89, - 0x6f, 0xb9, 0xcc, 0x49, 0xe5, 0xb5, 0xa2, 0xf0, 0xdc, 0x3d, 0x1c, 0x39, 0x27, 0x06, 0xb2, 0x38, - 0x5b, 0x8b, 0x64, 0x7f, 0x12, 0xc3, 0xd6, 0x38, 0xbb, 0x1a, 0xc9, 0xfe, 0x34, 0x86, 0x5d, 0xe5, - 0x6c, 0x3d, 0x92, 0xfd, 0xd5, 0x18, 0xb6, 0xce, 0xd9, 0xeb, 0x91, 0xec, 0xaf, 0xc5, 0xb0, 0xd7, - 0x39, 0xbb, 0x16, 0xc9, 0xfe, 0x7a, 0x0c, 0xbb, 0xc6, 0xd9, 0x77, 0x22, 0xd9, 0xdf, 0x88, 0x61, - 0xdf, 0xe1, 0xec, 0xbb, 0x91, 0xec, 0x6f, 0xc6, 0xb0, 0xef, 0x72, 0xf6, 0xbd, 0x48, 0xf6, 0xb7, - 0x62, 0xd8, 0xf7, 0x18, 0x7b, 0x6d, 0x35, 0x92, 0xfd, 0x59, 0x34, 0x7b, 0x6d, 0x95, 0xb3, 0xa3, - 0xb5, 0xf6, 0xed, 0x18, 0x36, 0xd7, 0xda, 0x5a, 0xb4, 0xd6, 0xbe, 0x13, 0xc3, 0xe6, 0x5a, 0x5b, - 0x8b, 0xd6, 0xda, 0x77, 0x63, 0xd8, 0x5c, 0x6b, 0x6b, 0xd1, 0x5a, 0xfb, 0x5e, 0x0c, 0x9b, 0x6b, - 0x6d, 0x2d, 0x5a, 0x6b, 0xdf, 0x8f, 0x61, 0x73, 0xad, 0xad, 0x45, 0x6b, 0xed, 0x07, 0x31, 0x6c, - 0xae, 0xb5, 0xb5, 0x68, 0xad, 0xfd, 0x51, 0x0c, 0x9b, 0x6b, 0x6d, 0x2d, 0x5a, 0x6b, 0x7f, 0x1c, - 0xc3, 0xe6, 0x5a, 0x5b, 0x8b, 0xd6, 0xda, 0x9f, 0xc4, 0xb0, 0xb9, 0xd6, 0xb4, 0x68, 0xad, 0xfd, - 0x69, 0x34, 0x5b, 0xe3, 0x5a, 0xd3, 0xa2, 0xb5, 0xf6, 0x67, 0x31, 0x6c, 0xae, 0x35, 0x2d, 0x5a, - 0x6b, 0x7f, 0x1e, 0xc3, 0xe6, 0x5a, 0xd3, 0xa2, 0xb5, 0xf6, 0xc3, 0x18, 0x36, 0xd7, 0x9a, 0x16, - 0xad, 0xb5, 0xbf, 0x88, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xcb, 0x18, 0x36, 0xd7, 0x9a, - 0x16, 0xad, 0xb5, 0xbf, 0x8a, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xeb, 0x18, 0x36, 
0xd7, - 0x9a, 0x16, 0xad, 0xb5, 0xbf, 0x89, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xdb, 0x18, 0x36, - 0xd7, 0x5a, 0x35, 0x5a, 0x6b, 0x7f, 0x17, 0xcd, 0xae, 0x72, 0xad, 0x55, 0xa3, 0xb5, 0xf6, 0xf7, - 0x31, 0x6c, 0xae, 0xb5, 0x6a, 0xb4, 0xd6, 0xfe, 0x21, 0x86, 0xcd, 0xb5, 0x56, 0x8d, 0xd6, 0xda, - 0x3f, 0xc6, 0xb0, 0xb9, 0xd6, 0xaa, 0xd1, 0x5a, 0xfb, 0x51, 0x0c, 0x9b, 0x6b, 0xad, 0x1a, 0xad, - 0xb5, 0x7f, 0x8a, 0x61, 0x73, 0xad, 0x55, 0xa3, 0xb5, 0xf6, 0xcf, 0x31, 0x6c, 0xae, 0xb5, 0x6a, - 0xb4, 0xd6, 0xfe, 0x25, 0x86, 0xcd, 0xb5, 0x56, 0x8d, 0xd6, 0xda, 0xbf, 0xc6, 0xb0, 0xb9, 0xd6, - 0xaa, 0xd1, 0x5a, 0xfb, 0xb7, 0x18, 0x36, 0xd7, 0x9a, 0x1e, 0xad, 0xb5, 0x7f, 0x8f, 0x66, 0xeb, - 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x23, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, 0x3f, 0x63, - 0xd8, 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x2b, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, 0xbf, - 0x63, 0xd8, 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x27, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, - 0xc7, 0x31, 0x6c, 0xae, 0x35, 0x3d, 0x5a, 0x6b, 0x3f, 0x89, 0x61, 0x73, 0xad, 0xe9, 0xd1, 0x5a, - 0xfb, 0xdf, 0x18, 0x36, 0xd7, 0x9a, 0x1e, 0xad, 0xb5, 0xff, 0x8b, 0x61, 0x73, 0xad, 0xad, 0x47, - 0x6b, 0xed, 0xff, 0xa3, 0xd9, 0xeb, 0xab, 0x3f, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xaa, 0x00, 0xcd, - 0x32, 0x57, 0x39, 0x00, 0x00, -} diff --git a/vendor/github.com/golang/protobuf/proto/text.go b/vendor/github.com/golang/protobuf/proto/text.go index 965876b..1aaee72 100644 --- a/vendor/github.com/golang/protobuf/proto/text.go +++ b/vendor/github.com/golang/protobuf/proto/text.go @@ -50,7 +50,6 @@ import ( var ( newline = []byte("\n") spaces = []byte(" ") - gtNewline = []byte(">\n") endBraceNewline = []byte("}\n") backslashN = []byte{'\\', 'n'} backslashR = []byte{'\\', 'r'} @@ -170,11 +169,6 @@ func writeName(w *textWriter, props *Properties) error { return nil } -// raw is the interface satisfied by RawMessage. -type raw interface { - Bytes() []byte -} - func requiresQuotes(u string) bool { // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. for _, ch := range u { @@ -269,6 +263,10 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { props := sprops.Prop[i] name := st.Field(i).Name + if name == "XXX_NoUnkeyedLiteral" { + continue + } + if strings.HasPrefix(name, "XXX_") { // There are two XXX_ fields: // XXX_unrecognized []byte @@ -355,7 +353,7 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { return err } } - if err := tm.writeAny(w, key, props.mkeyprop); err != nil { + if err := tm.writeAny(w, key, props.MapKeyProp); err != nil { return err } if err := w.WriteByte('\n'); err != nil { @@ -372,7 +370,7 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { return err } } - if err := tm.writeAny(w, val, props.mvalprop); err != nil { + if err := tm.writeAny(w, val, props.MapValProp); err != nil { return err } if err := w.WriteByte('\n'); err != nil { @@ -436,12 +434,6 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { return err } } - if b, ok := fv.Interface().(raw); ok { - if err := writeRaw(w, b.Bytes()); err != nil { - return err - } - continue - } // Enums have a String method, so writeAny will work fine. if err := tm.writeAny(w, fv, props); err != nil { @@ -455,7 +447,7 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { // Extensions (the XXX_extensions field). 
pv := sv.Addr() - if _, ok := extendable(pv.Interface()); ok { + if _, err := extendable(pv.Interface()); err == nil { if err := tm.writeExtensions(w, pv); err != nil { return err } @@ -464,27 +456,6 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { return nil } -// writeRaw writes an uninterpreted raw message. -func writeRaw(w *textWriter, b []byte) error { - if err := w.WriteByte('<'); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - if err := writeUnknownStruct(w, b); err != nil { - return err - } - w.unindent() - if err := w.WriteByte('>'); err != nil { - return err - } - return nil -} - // writeAny writes an arbitrary field. func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error { v = reflect.Indirect(v) @@ -535,6 +506,19 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Propert } } w.indent() + if v.CanAddr() { + // Calling v.Interface on a struct causes the reflect package to + // copy the entire struct. This is racy with the new Marshaler + // since we atomically update the XXX_sizecache. + // + // Thus, we retrieve a pointer to the struct if possible to avoid + // a race since v.Interface on the pointer doesn't copy the struct. + // + // If v is not addressable, then we are not worried about a race + // since it implies that the binary Marshaler cannot possibly be + // mutating this value. + v = v.Addr() + } if etm, ok := v.Interface().(encoding.TextMarshaler); ok { text, err := etm.MarshalText() if err != nil { @@ -543,8 +527,13 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Propert if _, err = w.Write(text); err != nil { return err } - } else if err := tm.writeStruct(w, v); err != nil { - return err + } else { + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + if err := tm.writeStruct(w, v); err != nil { + return err + } } w.unindent() if err := w.WriteByte(ket); err != nil { diff --git a/vendor/github.com/golang/protobuf/proto/text_parser.go b/vendor/github.com/golang/protobuf/proto/text_parser.go index 5e14513..bb55a3a 100644 --- a/vendor/github.com/golang/protobuf/proto/text_parser.go +++ b/vendor/github.com/golang/protobuf/proto/text_parser.go @@ -206,7 +206,6 @@ func (p *textParser) advance() { var ( errBadUTF8 = errors.New("proto: bad UTF-8") - errBadHex = errors.New("proto: bad hexadecimal") ) func unquoteC(s string, quote rune) (string, error) { @@ -277,60 +276,47 @@ func unescape(s string) (ch string, tail string, err error) { return "?", s, nil // trigraph workaround case '\'', '"', '\\': return string(r), s, nil - case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X': + case '0', '1', '2', '3', '4', '5', '6', '7': if len(s) < 2 { return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) } - base := 8 - ss := s[:2] + ss := string(r) + s[:2] s = s[2:] - if r == 'x' || r == 'X' { - base = 16 - } else { - ss = string(r) + ss - } - i, err := strconv.ParseUint(ss, base, 8) + i, err := strconv.ParseUint(ss, 8, 8) if err != nil { - return "", "", err + return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss) } return string([]byte{byte(i)}), s, nil - case 'u', 'U': - n := 4 - if r == 'U' { + case 'x', 'X', 'u', 'U': + var n int + switch r { + case 'x', 'X': + n = 2 + case 'u': + n = 4 + case 'U': n = 8 } if len(s) < n { - return "", "", fmt.Errorf(`\%c requires %d digits`, r, n) - } - - bs := make([]byte, n/2) - for i := 0; i < n; i += 2 { - a, ok1 := unhex(s[i]) - 
b, ok2 := unhex(s[i+1]) - if !ok1 || !ok2 { - return "", "", errBadHex - } - bs[i/2] = a<<4 | b + return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n) } + ss := s[:n] s = s[n:] - return string(bs), s, nil + i, err := strconv.ParseUint(ss, 16, 64) + if err != nil { + return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss) + } + if r == 'x' || r == 'X' { + return string([]byte{byte(i)}), s, nil + } + if i > utf8.MaxRune { + return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) + } + return string(i), s, nil } return "", "", fmt.Errorf(`unknown escape \%c`, r) } -// Adapted from src/pkg/strconv/quote.go. -func unhex(b byte) (v byte, ok bool) { - switch { - case '0' <= b && b <= '9': - return b - '0', true - case 'a' <= b && b <= 'f': - return b - 'a' + 10, true - case 'A' <= b && b <= 'F': - return b - 'A' + 10, true - } - return 0, false -} - // Back off the parser by one token. Can only be done between calls to next(). // It makes the next advance() a no-op. func (p *textParser) back() { p.backed = true } @@ -644,17 +630,17 @@ func (p *textParser) readStruct(sv reflect.Value, terminator string) error { if err := p.consumeToken(":"); err != nil { return err } - if err := p.readAny(key, props.mkeyprop); err != nil { + if err := p.readAny(key, props.MapKeyProp); err != nil { return err } if err := p.consumeOptionalSeparator(); err != nil { return err } case "value": - if err := p.checkForColon(props.mvalprop, dst.Type().Elem()); err != nil { + if err := p.checkForColon(props.MapValProp, dst.Type().Elem()); err != nil { return err } - if err := p.readAny(val, props.mvalprop); err != nil { + if err := p.readAny(val, props.MapValProp); err != nil { return err } if err := p.consumeOptionalSeparator(); err != nil { @@ -728,6 +714,9 @@ func (p *textParser) consumeExtName() (string, error) { if tok.err != nil { return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) } + if p.done && tok.value != "]" { + return "", p.errorf("unclosed type_url or extension name") + } } return strings.Join(parts, ""), nil } @@ -865,7 +854,7 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error { return p.readStruct(fv, terminator) case reflect.Uint32: if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { - fv.SetUint(x) + fv.SetUint(uint64(x)) return nil } case reflect.Uint64: @@ -883,13 +872,9 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error { // UnmarshalText returns *RequiredNotSetError. func UnmarshalText(s string, pb Message) error { if um, ok := pb.(encoding.TextUnmarshaler); ok { - err := um.UnmarshalText([]byte(s)) - return err + return um.UnmarshalText([]byte(s)) } pb.Reset() v := reflect.ValueOf(pb) - if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil { - return pe - } - return nil + return newTextParser(s).readStruct(v.Elem(), "") } diff --git a/vendor/github.com/golang/protobuf/proto/text_parser_test.go b/vendor/github.com/golang/protobuf/proto/text_parser_test.go index 8f7cb4d..a819808 100644 --- a/vendor/github.com/golang/protobuf/proto/text_parser_test.go +++ b/vendor/github.com/golang/protobuf/proto/text_parser_test.go @@ -32,13 +32,13 @@ package proto_test import ( + "fmt" "math" - "reflect" "testing" . "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - . "github.com/golang/protobuf/proto/testdata" + . 
"github.com/golang/protobuf/proto/test_proto" ) type UnmarshalTextTest struct { @@ -167,10 +167,19 @@ var unMarshalTextTests = []UnmarshalTextTest{ // Quoted string with UTF-8 bytes. { - in: "count:42 name: '\303\277\302\201\xAB'", + in: "count:42 name: '\303\277\302\201\x00\xAB\xCD\xEF'", out: &MyMessage{ Count: Int32(42), - Name: String("\303\277\302\201\xAB"), + Name: String("\303\277\302\201\x00\xAB\xCD\xEF"), + }, + }, + + // Quoted string with unicode escapes. + { + in: `count: 42 name: "\u0047\U00000047\uffff\U0010ffff"`, + out: &MyMessage{ + Count: Int32(42), + Name: String("GG\uffff\U0010ffff"), }, }, @@ -180,6 +189,24 @@ var unMarshalTextTests = []UnmarshalTextTest{ err: `line 1.15: invalid quoted string "\0": \0 requires 2 following digits`, }, + // Bad \u escape + { + in: `count: 42 name: "\u000"`, + err: `line 1.16: invalid quoted string "\u000": \u requires 4 following digits`, + }, + + // Bad \U escape + { + in: `count: 42 name: "\U0000000"`, + err: `line 1.16: invalid quoted string "\U0000000": \U requires 8 following digits`, + }, + + // Bad \U escape + { + in: `count: 42 name: "\xxx"`, + err: `line 1.16: invalid quoted string "\xxx": \xxx contains non-hexadecimal digits`, + }, + // Number too large for int64 { in: "count: 1 others { key: 123456789012345678901 }", @@ -263,6 +290,12 @@ var unMarshalTextTests = []UnmarshalTextTest{ err: `line 1.17: invalid float32: "17.4"`, }, + // unclosed bracket doesn't cause infinite loop + { + in: `[`, + err: `line 1.0: unclosed type_url or extension name`, + }, + // Enum { in: `count:42 bikeshed: BLUE`, @@ -330,7 +363,7 @@ var unMarshalTextTests = []UnmarshalTextTest{ // Missing required field { in: `name: "Pawel"`, - err: `proto: required field "testdata.MyMessage.count" not set`, + err: fmt.Sprintf(`proto: required field "%T.count" not set`, MyMessage{}), out: &MyMessage{ Name: String("Pawel"), }, @@ -339,7 +372,7 @@ var unMarshalTextTests = []UnmarshalTextTest{ // Missing required field in a required submessage { in: `count: 42 we_must_go_deeper < leo_finally_won_an_oscar <> >`, - err: `proto: required field "testdata.InnerMessage.host" not set`, + err: fmt.Sprintf(`proto: required field "%T.host" not set`, InnerMessage{}), out: &MyMessage{ Count: Int32(42), WeMustGoDeeper: &RequiredInnerMessage{LeoFinallyWonAnOscar: &InnerMessage{}}, @@ -470,10 +503,10 @@ var unMarshalTextTests = []UnmarshalTextTest{ }, // Extension - buildExtStructTest(`count: 42 [testdata.Ext.more]:`), - buildExtStructTest(`count: 42 [testdata.Ext.more] {data:"Hello, world!"}`), - buildExtDataTest(`count: 42 [testdata.Ext.text]:"Hello, world!" [testdata.Ext.number]:1729`), - buildExtRepStringTest(`count: 42 [testdata.greeting]:"bula" [testdata.greeting]:"hola"`), + buildExtStructTest(`count: 42 [test_proto.Ext.more]:`), + buildExtStructTest(`count: 42 [test_proto.Ext.more] {data:"Hello, world!"}`), + buildExtDataTest(`count: 42 [test_proto.Ext.text]:"Hello, world!" [test_proto.Ext.number]:1729`), + buildExtRepStringTest(`count: 42 [test_proto.greeting]:"bula" [test_proto.greeting]:"hola"`), // Big all-in-one { @@ -534,7 +567,7 @@ func TestUnmarshalText(t *testing.T) { // We don't expect failure. 
if err != nil { t.Errorf("Test %d: Unexpected error: %v", i, err) - } else if !reflect.DeepEqual(pb, test.out) { + } else if !Equal(pb, test.out) { t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", i, pb, test.out) } @@ -545,7 +578,7 @@ func TestUnmarshalText(t *testing.T) { } else if err.Error() != test.err { t.Errorf("Test %d: Incorrect error.\nHave: %v\nWant: %v", i, err.Error(), test.err) - } else if _, ok := err.(*RequiredNotSetError); ok && test.out != nil && !reflect.DeepEqual(pb, test.out) { + } else if _, ok := err.(*RequiredNotSetError); ok && test.out != nil && !Equal(pb, test.out) { t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", i, pb, test.out) } diff --git a/vendor/github.com/golang/protobuf/proto/text_test.go b/vendor/github.com/golang/protobuf/proto/text_test.go index 3eabaca..3c8b033 100644 --- a/vendor/github.com/golang/protobuf/proto/text_test.go +++ b/vendor/github.com/golang/protobuf/proto/text_test.go @@ -37,12 +37,14 @@ import ( "io/ioutil" "math" "strings" + "sync" "testing" "github.com/golang/protobuf/proto" proto3pb "github.com/golang/protobuf/proto/proto3_proto" - pb "github.com/golang/protobuf/proto/testdata" + pb "github.com/golang/protobuf/proto/test_proto" + anypb "github.com/golang/protobuf/ptypes/any" ) // textMessage implements the methods that allow it to marshal and unmarshal @@ -151,12 +153,12 @@ SomeGroup { } /* 2 unknown bytes */ 13: 4 -[testdata.Ext.more]: < +[test_proto.Ext.more]: < data: "Big gobs for big rats" > -[testdata.greeting]: "adg" -[testdata.greeting]: "easy" -[testdata.greeting]: "cow" +[test_proto.greeting]: "adg" +[test_proto.greeting]: "easy" +[test_proto.greeting]: "cow" /* 13 unknown bytes */ 201: "\t3G skiing" /* 3 unknown bytes */ @@ -472,3 +474,45 @@ func TestProto3Text(t *testing.T) { } } } + +func TestRacyMarshal(t *testing.T) { + // This test should be run with the race detector. + + any := &pb.MyMessage{Count: proto.Int32(47), Name: proto.String("David")} + proto.SetExtension(any, pb.E_Ext_Text, proto.String("bar")) + b, err := proto.Marshal(any) + if err != nil { + panic(err) + } + m := &proto3pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(any), Value: b}, + } + + wantText := proto.MarshalTextString(m) + wantBytes, err := proto.Marshal(m) + if err != nil { + t.Fatalf("proto.Marshal error: %v", err) + } + + var wg sync.WaitGroup + defer wg.Wait() + wg.Add(20) + for i := 0; i < 10; i++ { + go func() { + defer wg.Done() + got := proto.MarshalTextString(m) + if got != wantText { + t.Errorf("proto.MarshalTextString = %q, want %q", got, wantText) + } + }() + go func() { + defer wg.Done() + got, err := proto.Marshal(m) + if !bytes.Equal(got, wantBytes) || err != nil { + t.Errorf("proto.Marshal = (%x, %v), want (%x, nil)", got, err, wantBytes) + } + }() + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile deleted file mode 100644 index a42cc37..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. 
-# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -test: - cd testdata && make test diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile deleted file mode 100644 index 41a2d04..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -# Not stored here, but descriptor.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/descriptor.proto -regenerate: - @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=../../../../.. -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go index 63cf2c8..1ded05b 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go @@ -1,39 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/descriptor.proto -/* -Package descriptor is a generated protocol buffer package. - -It is generated from these files: - google/protobuf/descriptor.proto - -It has these top-level messages: - FileDescriptorSet - FileDescriptorProto - DescriptorProto - FieldDescriptorProto - OneofDescriptorProto - EnumDescriptorProto - EnumValueDescriptorProto - ServiceDescriptorProto - MethodDescriptorProto - FileOptions - MessageOptions - FieldOptions - OneofOptions - EnumOptions - EnumValueOptions - ServiceOptions - MethodOptions - UninterpretedOption - SourceCodeInfo - GeneratedCodeInfo -*/ package descriptor -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -44,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type FieldDescriptorProto_Type int32 @@ -100,6 +74,7 @@ var FieldDescriptorProto_Type_name = map[int32]string{ 17: "TYPE_SINT32", 18: "TYPE_SINT64", } + var FieldDescriptorProto_Type_value = map[string]int32{ "TYPE_DOUBLE": 1, "TYPE_FLOAT": 2, @@ -126,9 +101,11 @@ func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { *p = x return p } + func (x FieldDescriptorProto_Type) String() string { return proto.EnumName(FieldDescriptorProto_Type_name, int32(x)) } + func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type") if err != nil { @@ -137,7 +114,10 @@ func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { *x = FieldDescriptorProto_Type(value) return nil } -func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{3, 0} } + +func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{4, 0} +} type FieldDescriptorProto_Label int32 @@ -153,6 +133,7 @@ var FieldDescriptorProto_Label_name = map[int32]string{ 2: "LABEL_REQUIRED", 3: "LABEL_REPEATED", } + var FieldDescriptorProto_Label_value = map[string]int32{ "LABEL_OPTIONAL": 1, "LABEL_REQUIRED": 2, @@ -164,9 +145,11 @@ func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { *p = x return p } + func (x FieldDescriptorProto_Label) String() string { return proto.EnumName(FieldDescriptorProto_Label_name, int32(x)) } + func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label") if err != nil { @@ -175,8 +158,9 @@ func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { *x = FieldDescriptorProto_Label(value) return nil } + func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{3, 1} + return fileDescriptor_e5baabe45344a177, []int{4, 1} } // Generated classes can be optimized for speed or code size. 
@@ -194,6 +178,7 @@ var FileOptions_OptimizeMode_name = map[int32]string{ 2: "CODE_SIZE", 3: "LITE_RUNTIME", } + var FileOptions_OptimizeMode_value = map[string]int32{ "SPEED": 1, "CODE_SIZE": 2, @@ -205,9 +190,11 @@ func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { *p = x return p } + func (x FileOptions_OptimizeMode) String() string { return proto.EnumName(FileOptions_OptimizeMode_name, int32(x)) } + func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode") if err != nil { @@ -216,7 +203,10 @@ func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { *x = FileOptions_OptimizeMode(value) return nil } -func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } + +func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{10, 0} +} type FieldOptions_CType int32 @@ -232,6 +222,7 @@ var FieldOptions_CType_name = map[int32]string{ 1: "CORD", 2: "STRING_PIECE", } + var FieldOptions_CType_value = map[string]int32{ "STRING": 0, "CORD": 1, @@ -243,9 +234,11 @@ func (x FieldOptions_CType) Enum() *FieldOptions_CType { *p = x return p } + func (x FieldOptions_CType) String() string { return proto.EnumName(FieldOptions_CType_name, int32(x)) } + func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType") if err != nil { @@ -254,7 +247,10 @@ func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { *x = FieldOptions_CType(value) return nil } -func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 0} } + +func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{12, 0} +} type FieldOptions_JSType int32 @@ -272,6 +268,7 @@ var FieldOptions_JSType_name = map[int32]string{ 1: "JS_STRING", 2: "JS_NUMBER", } + var FieldOptions_JSType_value = map[string]int32{ "JS_NORMAL": 0, "JS_STRING": 1, @@ -283,9 +280,11 @@ func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { *p = x return p } + func (x FieldOptions_JSType) String() string { return proto.EnumName(FieldOptions_JSType_name, int32(x)) } + func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType") if err != nil { @@ -294,7 +293,10 @@ func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { *x = FieldOptions_JSType(value) return nil } -func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 1} } + +func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{12, 1} +} // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, // or neither? 
HTTP based RPC implementation may choose GET verb for safe @@ -312,6 +314,7 @@ var MethodOptions_IdempotencyLevel_name = map[int32]string{ 1: "NO_SIDE_EFFECTS", 2: "IDEMPOTENT", } + var MethodOptions_IdempotencyLevel_value = map[string]int32{ "IDEMPOTENCY_UNKNOWN": 0, "NO_SIDE_EFFECTS": 1, @@ -323,9 +326,11 @@ func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel { *p = x return p } + func (x MethodOptions_IdempotencyLevel) String() string { return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x)) } + func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel") if err != nil { @@ -334,21 +339,44 @@ func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error { *x = MethodOptions_IdempotencyLevel(value) return nil } + func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{16, 0} + return fileDescriptor_e5baabe45344a177, []int{17, 0} } // The protocol compiler can output a FileDescriptorSet containing the .proto // files it parses. type FileDescriptorSet struct { - File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` + File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } +func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorSet) ProtoMessage() {} +func (*FileDescriptorSet) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{0} +} + +func (m *FileDescriptorSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileDescriptorSet.Unmarshal(m, b) +} +func (m *FileDescriptorSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileDescriptorSet.Marshal(b, m, deterministic) +} +func (m *FileDescriptorSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileDescriptorSet.Merge(m, src) +} +func (m *FileDescriptorSet) XXX_Size() int { + return xxx_messageInfo_FileDescriptorSet.Size(m) +} +func (m *FileDescriptorSet) XXX_DiscardUnknown() { + xxx_messageInfo_FileDescriptorSet.DiscardUnknown(m) } -func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } -func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorSet) ProtoMessage() {} -func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +var xxx_messageInfo_FileDescriptorSet proto.InternalMessageInfo func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto { if m != nil { @@ -381,14 +409,36 @@ type FileDescriptorProto struct { SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` // The syntax of the proto file. // The supported values are "proto2" and "proto3". 
- Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` - XXX_unrecognized []byte `json:"-"` + Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } +func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorProto) ProtoMessage() {} +func (*FileDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{1} +} + +func (m *FileDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileDescriptorProto.Unmarshal(m, b) +} +func (m *FileDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileDescriptorProto.Marshal(b, m, deterministic) +} +func (m *FileDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileDescriptorProto.Merge(m, src) +} +func (m *FileDescriptorProto) XXX_Size() int { + return xxx_messageInfo_FileDescriptorProto.Size(m) +} +func (m *FileDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_FileDescriptorProto.DiscardUnknown(m) } -func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } -func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorProto) ProtoMessage() {} -func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +var xxx_messageInfo_FileDescriptorProto proto.InternalMessageInfo func (m *FileDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -487,14 +537,36 @@ type DescriptorProto struct { ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved field names, which may not be used by fields in the same message. // A given name may only be reserved once. 
- ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` - XXX_unrecognized []byte `json:"-"` + ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } +func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto) ProtoMessage() {} +func (*DescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{2} +} + +func (m *DescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto.Unmarshal(m, b) +} +func (m *DescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto.Marshal(b, m, deterministic) +} +func (m *DescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto.Merge(m, src) +} +func (m *DescriptorProto) XXX_Size() int { + return xxx_messageInfo_DescriptorProto.Size(m) +} +func (m *DescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto.DiscardUnknown(m) } -func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } -func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } -func (*DescriptorProto) ProtoMessage() {} -func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +var xxx_messageInfo_DescriptorProto proto.InternalMessageInfo func (m *DescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -567,17 +639,38 @@ func (m *DescriptorProto) GetReservedName() []string { } type DescriptorProto_ExtensionRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ExtensionRange) ProtoMessage() {} func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 0} + return fileDescriptor_e5baabe45344a177, []int{2, 0} +} + +func (m *DescriptorProto_ExtensionRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Unmarshal(m, b) } +func (m *DescriptorProto_ExtensionRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Marshal(b, m, deterministic) +} +func (m *DescriptorProto_ExtensionRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto_ExtensionRange.Merge(m, src) +} +func (m *DescriptorProto_ExtensionRange) XXX_Size() int { + return xxx_messageInfo_DescriptorProto_ExtensionRange.Size(m) +} +func (m *DescriptorProto_ExtensionRange) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto_ExtensionRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DescriptorProto_ExtensionRange 
proto.InternalMessageInfo func (m *DescriptorProto_ExtensionRange) GetStart() int32 { if m != nil && m.Start != nil { @@ -593,21 +686,48 @@ func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { return 0 } +func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { + if m != nil { + return m.Options + } + return nil +} + // Range of reserved tag numbers. Reserved tag numbers may not be used by // fields or extension ranges in the same message. Reserved ranges may // not overlap. type DescriptorProto_ReservedRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ReservedRange) ProtoMessage() {} func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 1} + return fileDescriptor_e5baabe45344a177, []int{2, 1} +} + +func (m *DescriptorProto_ReservedRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DescriptorProto_ReservedRange.Unmarshal(m, b) +} +func (m *DescriptorProto_ReservedRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DescriptorProto_ReservedRange.Marshal(b, m, deterministic) +} +func (m *DescriptorProto_ReservedRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DescriptorProto_ReservedRange.Merge(m, src) } +func (m *DescriptorProto_ReservedRange) XXX_Size() int { + return xxx_messageInfo_DescriptorProto_ReservedRange.Size(m) +} +func (m *DescriptorProto_ReservedRange) XXX_DiscardUnknown() { + xxx_messageInfo_DescriptorProto_ReservedRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DescriptorProto_ReservedRange proto.InternalMessageInfo func (m *DescriptorProto_ReservedRange) GetStart() int32 { if m != nil && m.Start != nil { @@ -623,6 +743,55 @@ func (m *DescriptorProto_ReservedRange) GetEnd() int32 { return 0 } +type ExtensionRangeOptions struct { + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionRangeOptions) Reset() { *m = ExtensionRangeOptions{} } +func (m *ExtensionRangeOptions) String() string { return proto.CompactTextString(m) } +func (*ExtensionRangeOptions) ProtoMessage() {} +func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{3} +} + +var extRange_ExtensionRangeOptions = []proto.ExtensionRange{ + {Start: 1000, End: 536870911}, +} + +func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ExtensionRangeOptions +} + +func (m *ExtensionRangeOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionRangeOptions.Unmarshal(m, b) +} +func (m *ExtensionRangeOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionRangeOptions.Marshal(b, m, deterministic) +} +func (m *ExtensionRangeOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionRangeOptions.Merge(m, src) +} +func (m *ExtensionRangeOptions) XXX_Size() int { + return xxx_messageInfo_ExtensionRangeOptions.Size(m) +} +func (m *ExtensionRangeOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionRangeOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionRangeOptions proto.InternalMessageInfo + +func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + // Describes a field within a message. type FieldDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` @@ -653,15 +822,37 @@ type FieldDescriptorProto struct { // user has set a "json_name" option on this field, that option's value // will be used. Otherwise, it's deduced from the field's name by converting // it to camelCase. 
- JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` - Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` + Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } +func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FieldDescriptorProto) ProtoMessage() {} +func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{4} } -func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } -func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FieldDescriptorProto) ProtoMessage() {} -func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (m *FieldDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldDescriptorProto.Unmarshal(m, b) +} +func (m *FieldDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldDescriptorProto.Marshal(b, m, deterministic) +} +func (m *FieldDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldDescriptorProto.Merge(m, src) +} +func (m *FieldDescriptorProto) XXX_Size() int { + return xxx_messageInfo_FieldDescriptorProto.Size(m) +} +func (m *FieldDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_FieldDescriptorProto.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldDescriptorProto proto.InternalMessageInfo func (m *FieldDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -735,15 +926,37 @@ func (m *FieldDescriptorProto) GetOptions() *FieldOptions { // Describes a oneof. 
type OneofDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } +func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*OneofDescriptorProto) ProtoMessage() {} +func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{5} +} + +func (m *OneofDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OneofDescriptorProto.Unmarshal(m, b) +} +func (m *OneofDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OneofDescriptorProto.Marshal(b, m, deterministic) +} +func (m *OneofDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_OneofDescriptorProto.Merge(m, src) +} +func (m *OneofDescriptorProto) XXX_Size() int { + return xxx_messageInfo_OneofDescriptorProto.Size(m) +} +func (m *OneofDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_OneofDescriptorProto.DiscardUnknown(m) } -func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } -func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*OneofDescriptorProto) ProtoMessage() {} -func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +var xxx_messageInfo_OneofDescriptorProto proto.InternalMessageInfo func (m *OneofDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -761,16 +974,45 @@ func (m *OneofDescriptorProto) GetOptions() *OneofOptions { // Describes an enum type. type EnumDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` - Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` + Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + ReservedRange []*EnumDescriptorProto_EnumReservedRange `protobuf:"bytes,4,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. 
+ ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } +func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto) ProtoMessage() {} +func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{6} +} + +func (m *EnumDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumDescriptorProto.Unmarshal(m, b) +} +func (m *EnumDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumDescriptorProto.Marshal(b, m, deterministic) +} +func (m *EnumDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumDescriptorProto.Merge(m, src) +} +func (m *EnumDescriptorProto) XXX_Size() int { + return xxx_messageInfo_EnumDescriptorProto.Size(m) +} +func (m *EnumDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_EnumDescriptorProto.DiscardUnknown(m) } -func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } -func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumDescriptorProto) ProtoMessage() {} -func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } +var xxx_messageInfo_EnumDescriptorProto proto.InternalMessageInfo func (m *EnumDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -793,18 +1035,107 @@ func (m *EnumDescriptorProto) GetOptions() *EnumOptions { return nil } +func (m *EnumDescriptorProto) GetReservedRange() []*EnumDescriptorProto_EnumReservedRange { + if m != nil { + return m.ReservedRange + } + return nil +} + +func (m *EnumDescriptorProto) GetReservedName() []string { + if m != nil { + return m.ReservedName + } + return nil +} + +// Range of reserved numeric values. Reserved values may not be used by +// entries in the same enum. Reserved ranges may not overlap. +// +// Note that this is distinct from DescriptorProto.ReservedRange in that it +// is inclusive such that it can appropriately represent the entire int32 +// domain. 
+type EnumDescriptorProto_EnumReservedRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumDescriptorProto_EnumReservedRange) Reset() { *m = EnumDescriptorProto_EnumReservedRange{} } +func (m *EnumDescriptorProto_EnumReservedRange) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto_EnumReservedRange) ProtoMessage() {} +func (*EnumDescriptorProto_EnumReservedRange) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{6, 0} +} + +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Unmarshal(m, b) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Marshal(b, m, deterministic) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Merge(m, src) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_Size() int { + return xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.Size(m) +} +func (m *EnumDescriptorProto_EnumReservedRange) XXX_DiscardUnknown() { + xxx_messageInfo_EnumDescriptorProto_EnumReservedRange.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumDescriptorProto_EnumReservedRange proto.InternalMessageInfo + +func (m *EnumDescriptorProto_EnumReservedRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *EnumDescriptorProto_EnumReservedRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + // Describes a value within an enum. 
type EnumValueDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` - Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` + Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } +func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumValueDescriptorProto) ProtoMessage() {} +func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{7} +} + +func (m *EnumValueDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValueDescriptorProto.Unmarshal(m, b) +} +func (m *EnumValueDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValueDescriptorProto.Marshal(b, m, deterministic) +} +func (m *EnumValueDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValueDescriptorProto.Merge(m, src) +} +func (m *EnumValueDescriptorProto) XXX_Size() int { + return xxx_messageInfo_EnumValueDescriptorProto.Size(m) +} +func (m *EnumValueDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValueDescriptorProto.DiscardUnknown(m) } -func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } -func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumValueDescriptorProto) ProtoMessage() {} -func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } +var xxx_messageInfo_EnumValueDescriptorProto proto.InternalMessageInfo func (m *EnumValueDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -829,16 +1160,38 @@ func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { // Describes a service. 
type ServiceDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` - Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` + Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } +func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*ServiceDescriptorProto) ProtoMessage() {} +func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{8} +} + +func (m *ServiceDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceDescriptorProto.Unmarshal(m, b) +} +func (m *ServiceDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceDescriptorProto.Marshal(b, m, deterministic) +} +func (m *ServiceDescriptorProto) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceDescriptorProto.Merge(m, src) +} +func (m *ServiceDescriptorProto) XXX_Size() int { + return xxx_messageInfo_ServiceDescriptorProto.Size(m) +} +func (m *ServiceDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceDescriptorProto.DiscardUnknown(m) } -func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } -func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*ServiceDescriptorProto) ProtoMessage() {} -func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } +var xxx_messageInfo_ServiceDescriptorProto proto.InternalMessageInfo func (m *ServiceDescriptorProto) GetName() string { if m != nil && m.Name != nil { @@ -872,14 +1225,36 @@ type MethodDescriptorProto struct { // Identifies if client streams multiple client messages ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` // Identifies if server streams multiple server messages - ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` - XXX_unrecognized []byte `json:"-"` + ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } +func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*MethodDescriptorProto) ProtoMessage() {} +func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{9} +} + +func (m *MethodDescriptorProto) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MethodDescriptorProto.Unmarshal(m, b) +} +func (m *MethodDescriptorProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MethodDescriptorProto.Marshal(b, m, deterministic) +} +func (m *MethodDescriptorProto) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_MethodDescriptorProto.Merge(m, src) +} +func (m *MethodDescriptorProto) XXX_Size() int { + return xxx_messageInfo_MethodDescriptorProto.Size(m) +} +func (m *MethodDescriptorProto) XXX_DiscardUnknown() { + xxx_messageInfo_MethodDescriptorProto.DiscardUnknown(m) } -func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } -func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*MethodDescriptorProto) ProtoMessage() {} -func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } +var xxx_messageInfo_MethodDescriptorProto proto.InternalMessageInfo const Default_MethodDescriptorProto_ClientStreaming bool = false const Default_MethodDescriptorProto_ServerStreaming bool = false @@ -946,7 +1321,7 @@ type FileOptions struct { // top-level extensions defined in the file. JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` // This option does nothing. - JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` + JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` // Deprecated: Do not use. // If set true, then the Java2 code generator will generate code that // throws an exception whenever an attempt is made to assign a non-UTF-8 // byte sequence to a string field. @@ -974,6 +1349,7 @@ type FileOptions struct { CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` + PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` // Is this file deprecated? // Depending on the target platform, this can emit Deprecated annotations // for everything in the file, or it will be completely ignored; in the very @@ -995,31 +1371,67 @@ type FileOptions struct { // Sets the php class prefix which is prepended to all php generated classes // from this .proto. Default is empty. PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` - // The parser stores options it doesn't recognize here. See above. + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be used + // for determining the namespace. + PhpMetadataNamespace *string `protobuf:"bytes,44,opt,name=php_metadata_namespace,json=phpMetadataNamespace" json:"php_metadata_namespace,omitempty"` + // Use this option to change the package of ruby generated classes. Default + // is empty. 
When this option is not set, the package name will be used for + // determining the ruby package. + RubyPackage *string `protobuf:"bytes,45,opt,name=ruby_package,json=rubyPackage" json:"ruby_package,omitempty"` + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FileOptions) Reset() { *m = FileOptions{} } -func (m *FileOptions) String() string { return proto.CompactTextString(m) } -func (*FileOptions) ProtoMessage() {} -func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } +func (m *FileOptions) Reset() { *m = FileOptions{} } +func (m *FileOptions) String() string { return proto.CompactTextString(m) } +func (*FileOptions) ProtoMessage() {} +func (*FileOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{10} +} var extRange_FileOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FileOptions } +func (m *FileOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileOptions.Unmarshal(m, b) +} +func (m *FileOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileOptions.Marshal(b, m, deterministic) +} +func (m *FileOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileOptions.Merge(m, src) +} +func (m *FileOptions) XXX_Size() int { + return xxx_messageInfo_FileOptions.Size(m) +} +func (m *FileOptions) XXX_DiscardUnknown() { + xxx_messageInfo_FileOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_FileOptions proto.InternalMessageInfo + const Default_FileOptions_JavaMultipleFiles bool = false const Default_FileOptions_JavaStringCheckUtf8 bool = false const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED const Default_FileOptions_CcGenericServices bool = false const Default_FileOptions_JavaGenericServices bool = false const Default_FileOptions_PyGenericServices bool = false +const Default_FileOptions_PhpGenericServices bool = false const Default_FileOptions_Deprecated bool = false const Default_FileOptions_CcEnableArenas bool = false @@ -1044,6 +1456,7 @@ func (m *FileOptions) GetJavaMultipleFiles() bool { return Default_FileOptions_JavaMultipleFiles } +// Deprecated: Do not use. 
func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { if m != nil && m.JavaGenerateEqualsAndHash != nil { return *m.JavaGenerateEqualsAndHash @@ -1093,6 +1506,13 @@ func (m *FileOptions) GetPyGenericServices() bool { return Default_FileOptions_PyGenericServices } +func (m *FileOptions) GetPhpGenericServices() bool { + if m != nil && m.PhpGenericServices != nil { + return *m.PhpGenericServices + } + return Default_FileOptions_PhpGenericServices +} + func (m *FileOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated @@ -1135,6 +1555,27 @@ func (m *FileOptions) GetPhpClassPrefix() string { return "" } +func (m *FileOptions) GetPhpNamespace() string { + if m != nil && m.PhpNamespace != nil { + return *m.PhpNamespace + } + return "" +} + +func (m *FileOptions) GetPhpMetadataNamespace() string { + if m != nil && m.PhpMetadataNamespace != nil { + return *m.PhpMetadataNamespace + } + return "" +} + +func (m *FileOptions) GetRubyPackage() string { + if m != nil && m.RubyPackage != nil { + return *m.RubyPackage + } + return "" +} + func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption @@ -1195,23 +1636,45 @@ type MessageOptions struct { MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MessageOptions) Reset() { *m = MessageOptions{} } -func (m *MessageOptions) String() string { return proto.CompactTextString(m) } -func (*MessageOptions) ProtoMessage() {} -func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } +func (m *MessageOptions) Reset() { *m = MessageOptions{} } +func (m *MessageOptions) String() string { return proto.CompactTextString(m) } +func (*MessageOptions) ProtoMessage() {} +func (*MessageOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{11} +} var extRange_MessageOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MessageOptions } +func (m *MessageOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageOptions.Unmarshal(m, b) +} +func (m *MessageOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageOptions.Marshal(b, m, deterministic) +} +func (m *MessageOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageOptions.Merge(m, src) +} +func (m *MessageOptions) XXX_Size() int { + return xxx_messageInfo_MessageOptions.Size(m) +} +func (m *MessageOptions) XXX_DiscardUnknown() { + xxx_messageInfo_MessageOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageOptions proto.InternalMessageInfo + const Default_MessageOptions_MessageSetWireFormat bool = false const Default_MessageOptions_NoStandardDescriptorAccessor bool = false const Default_MessageOptions_Deprecated bool = false @@ -1265,13 +1728,15 @@ type FieldOptions struct { Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` // The jstype option determines the JavaScript type used for values of the // field. 
The option is permitted only for 64 bit integral and fixed types - // (int64, uint64, sint64, fixed64, sfixed64). By default these types are - // represented as JavaScript strings. This avoids loss of precision that can - // happen when a large value is converted to a floating point JavaScript - // numbers. Specifying JS_NUMBER for the jstype causes the generated - // JavaScript code to use the JavaScript "number" type instead of strings. - // This option is an enum to permit additional types to be added, - // e.g. goog.math.Integer. + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` // Should this field be parsed lazily? Lazy applies only to message-type // fields. It means that when the outer message is initially parsed, the @@ -1311,23 +1776,45 @@ type FieldOptions struct { Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FieldOptions) Reset() { *m = FieldOptions{} } -func (m *FieldOptions) String() string { return proto.CompactTextString(m) } -func (*FieldOptions) ProtoMessage() {} -func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } +func (m *FieldOptions) Reset() { *m = FieldOptions{} } +func (m *FieldOptions) String() string { return proto.CompactTextString(m) } +func (*FieldOptions) ProtoMessage() {} +func (*FieldOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{12} +} var extRange_FieldOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FieldOptions } +func (m *FieldOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOptions.Unmarshal(m, b) +} +func (m *FieldOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOptions.Marshal(b, m, deterministic) +} +func (m *FieldOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOptions.Merge(m, src) +} +func (m *FieldOptions) XXX_Size() int { + return xxx_messageInfo_FieldOptions.Size(m) +} +func (m *FieldOptions) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOptions proto.InternalMessageInfo + const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL const Default_FieldOptions_Lazy bool = false @@ -1386,23 +1873,45 @@ func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { type OneofOptions 
struct { // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *OneofOptions) Reset() { *m = OneofOptions{} } -func (m *OneofOptions) String() string { return proto.CompactTextString(m) } -func (*OneofOptions) ProtoMessage() {} -func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } +func (m *OneofOptions) Reset() { *m = OneofOptions{} } +func (m *OneofOptions) String() string { return proto.CompactTextString(m) } +func (*OneofOptions) ProtoMessage() {} +func (*OneofOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{13} +} var extRange_OneofOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OneofOptions } +func (m *OneofOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OneofOptions.Unmarshal(m, b) +} +func (m *OneofOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OneofOptions.Marshal(b, m, deterministic) +} +func (m *OneofOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_OneofOptions.Merge(m, src) +} +func (m *OneofOptions) XXX_Size() int { + return xxx_messageInfo_OneofOptions.Size(m) +} +func (m *OneofOptions) XXX_DiscardUnknown() { + xxx_messageInfo_OneofOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_OneofOptions proto.InternalMessageInfo + func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption @@ -1421,23 +1930,45 @@ type EnumOptions struct { Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *EnumOptions) Reset() { *m = EnumOptions{} } -func (m *EnumOptions) String() string { return proto.CompactTextString(m) } -func (*EnumOptions) ProtoMessage() {} -func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } +func (m *EnumOptions) Reset() { *m = EnumOptions{} } +func (m *EnumOptions) String() string { return proto.CompactTextString(m) } +func (*EnumOptions) ProtoMessage() {} +func (*EnumOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{14} +} var extRange_EnumOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumOptions } +func (m *EnumOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumOptions.Unmarshal(m, b) +} +func (m *EnumOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumOptions.Marshal(b, m, deterministic) +} +func (m *EnumOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumOptions.Merge(m, src) +} +func (m *EnumOptions) XXX_Size() int { + return xxx_messageInfo_EnumOptions.Size(m) +} +func (m *EnumOptions) XXX_DiscardUnknown() { + xxx_messageInfo_EnumOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumOptions proto.InternalMessageInfo + const Default_EnumOptions_Deprecated bool = false func (m *EnumOptions) GetAllowAlias() bool { @@ -1469,23 +2000,45 @@ type EnumValueOptions struct { Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } -func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } -func (*EnumValueOptions) ProtoMessage() {} -func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } +func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } +func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } +func (*EnumValueOptions) ProtoMessage() {} +func (*EnumValueOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{15} +} var extRange_EnumValueOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumValueOptions } +func (m *EnumValueOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValueOptions.Unmarshal(m, b) +} +func (m *EnumValueOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValueOptions.Marshal(b, m, deterministic) +} +func (m *EnumValueOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValueOptions.Merge(m, src) +} +func (m *EnumValueOptions) XXX_Size() int { + return xxx_messageInfo_EnumValueOptions.Size(m) +} +func (m *EnumValueOptions) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValueOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValueOptions proto.InternalMessageInfo + const Default_EnumValueOptions_Deprecated bool = false func (m *EnumValueOptions) GetDeprecated() bool { @@ -1510,23 +2063,45 @@ type ServiceOptions struct { Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } -func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } -func (*ServiceOptions) ProtoMessage() {} -func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } +func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } +func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } +func (*ServiceOptions) ProtoMessage() {} +func (*ServiceOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{16} +} var extRange_ServiceOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_ServiceOptions } +func (m *ServiceOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceOptions.Unmarshal(m, b) +} +func (m *ServiceOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceOptions.Marshal(b, m, deterministic) +} +func (m *ServiceOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceOptions.Merge(m, src) +} +func (m *ServiceOptions) XXX_Size() int { + return xxx_messageInfo_ServiceOptions.Size(m) +} +func (m *ServiceOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceOptions proto.InternalMessageInfo + const Default_ServiceOptions_Deprecated bool = false func (m *ServiceOptions) GetDeprecated() bool { @@ -1552,23 +2127,45 @@ type MethodOptions struct { IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *MethodOptions) Reset() { *m = MethodOptions{} } -func (m *MethodOptions) String() string { return proto.CompactTextString(m) } -func (*MethodOptions) ProtoMessage() {} -func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } +func (m *MethodOptions) Reset() { *m = MethodOptions{} } +func (m *MethodOptions) String() string { return proto.CompactTextString(m) } +func (*MethodOptions) ProtoMessage() {} +func (*MethodOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{17} +} var extRange_MethodOptions = []proto.ExtensionRange{ - {1000, 536870911}, + {Start: 1000, End: 536870911}, } func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MethodOptions } +func (m *MethodOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MethodOptions.Unmarshal(m, b) +} +func (m *MethodOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MethodOptions.Marshal(b, m, deterministic) +} +func (m *MethodOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_MethodOptions.Merge(m, src) +} +func (m *MethodOptions) XXX_Size() int { + return xxx_messageInfo_MethodOptions.Size(m) +} +func (m *MethodOptions) XXX_DiscardUnknown() { + xxx_messageInfo_MethodOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_MethodOptions proto.InternalMessageInfo + const Default_MethodOptions_Deprecated bool = false const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN @@ -1603,19 +2200,41 @@ type UninterpretedOption struct { Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` // The value of the uninterpreted option, in whatever type the tokenizer // identified it as during parsing. Exactly one of these should be set. 
- IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` - PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` - NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` - DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` - StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` - AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` - XXX_unrecognized []byte `json:"-"` + IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` + PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` + NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } +func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption) ProtoMessage() {} +func (*UninterpretedOption) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{18} +} + +func (m *UninterpretedOption) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UninterpretedOption.Unmarshal(m, b) +} +func (m *UninterpretedOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UninterpretedOption.Marshal(b, m, deterministic) +} +func (m *UninterpretedOption) XXX_Merge(src proto.Message) { + xxx_messageInfo_UninterpretedOption.Merge(m, src) +} +func (m *UninterpretedOption) XXX_Size() int { + return xxx_messageInfo_UninterpretedOption.Size(m) +} +func (m *UninterpretedOption) XXX_DiscardUnknown() { + xxx_messageInfo_UninterpretedOption.DiscardUnknown(m) } -func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } -func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } -func (*UninterpretedOption) ProtoMessage() {} -func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } +var xxx_messageInfo_UninterpretedOption proto.InternalMessageInfo func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { if m != nil { @@ -1672,18 +2291,38 @@ func (m *UninterpretedOption) GetAggregateValue() string { // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents // "foo.(bar.baz).qux". 
type UninterpretedOption_NamePart struct { - NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` - IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` - XXX_unrecognized []byte `json:"-"` + NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` + IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } func (*UninterpretedOption_NamePart) ProtoMessage() {} func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{17, 0} + return fileDescriptor_e5baabe45344a177, []int{18, 0} } +func (m *UninterpretedOption_NamePart) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UninterpretedOption_NamePart.Unmarshal(m, b) +} +func (m *UninterpretedOption_NamePart) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UninterpretedOption_NamePart.Marshal(b, m, deterministic) +} +func (m *UninterpretedOption_NamePart) XXX_Merge(src proto.Message) { + xxx_messageInfo_UninterpretedOption_NamePart.Merge(m, src) +} +func (m *UninterpretedOption_NamePart) XXX_Size() int { + return xxx_messageInfo_UninterpretedOption_NamePart.Size(m) +} +func (m *UninterpretedOption_NamePart) XXX_DiscardUnknown() { + xxx_messageInfo_UninterpretedOption_NamePart.DiscardUnknown(m) +} + +var xxx_messageInfo_UninterpretedOption_NamePart proto.InternalMessageInfo + func (m *UninterpretedOption_NamePart) GetNamePart() string { if m != nil && m.NamePart != nil { return *m.NamePart @@ -1744,14 +2383,36 @@ type SourceCodeInfo struct { // - Code which tries to interpret locations should probably be designed to // ignore those that it doesn't understand, as more types of locations could // be recorded in the future. 
- Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` - XXX_unrecognized []byte `json:"-"` + Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } +func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo) ProtoMessage() {} +func (*SourceCodeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{19} +} + +func (m *SourceCodeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceCodeInfo.Unmarshal(m, b) +} +func (m *SourceCodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceCodeInfo.Marshal(b, m, deterministic) +} +func (m *SourceCodeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceCodeInfo.Merge(m, src) +} +func (m *SourceCodeInfo) XXX_Size() int { + return xxx_messageInfo_SourceCodeInfo.Size(m) +} +func (m *SourceCodeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceCodeInfo.DiscardUnknown(m) } -func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } -func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo) ProtoMessage() {} -func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } +var xxx_messageInfo_SourceCodeInfo proto.InternalMessageInfo func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { if m != nil { @@ -1841,13 +2502,35 @@ type SourceCodeInfo_Location struct { LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } +func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo_Location) ProtoMessage() {} +func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{19, 0} +} + +func (m *SourceCodeInfo_Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceCodeInfo_Location.Unmarshal(m, b) +} +func (m *SourceCodeInfo_Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceCodeInfo_Location.Marshal(b, m, deterministic) +} +func (m *SourceCodeInfo_Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceCodeInfo_Location.Merge(m, src) +} +func (m *SourceCodeInfo_Location) XXX_Size() int { + return xxx_messageInfo_SourceCodeInfo_Location.Size(m) +} +func (m *SourceCodeInfo_Location) XXX_DiscardUnknown() { + xxx_messageInfo_SourceCodeInfo_Location.DiscardUnknown(m) } -func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } -func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo_Location) ProtoMessage() {} -func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return 
fileDescriptor0, []int{18, 0} } +var xxx_messageInfo_SourceCodeInfo_Location proto.InternalMessageInfo func (m *SourceCodeInfo_Location) GetPath() []int32 { if m != nil { @@ -1890,14 +2573,36 @@ func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { type GeneratedCodeInfo struct { // An Annotation connects some span of text in generated code to an element // of its generating .proto file. - Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` - XXX_unrecognized []byte `json:"-"` + Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } +func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo) ProtoMessage() {} +func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_e5baabe45344a177, []int{20} +} + +func (m *GeneratedCodeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeneratedCodeInfo.Unmarshal(m, b) +} +func (m *GeneratedCodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeneratedCodeInfo.Marshal(b, m, deterministic) +} +func (m *GeneratedCodeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeneratedCodeInfo.Merge(m, src) +} +func (m *GeneratedCodeInfo) XXX_Size() int { + return xxx_messageInfo_GeneratedCodeInfo.Size(m) +} +func (m *GeneratedCodeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GeneratedCodeInfo.DiscardUnknown(m) } -func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } -func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } -func (*GeneratedCodeInfo) ProtoMessage() {} -func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } +var xxx_messageInfo_GeneratedCodeInfo proto.InternalMessageInfo func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { if m != nil { @@ -1918,16 +2623,36 @@ type GeneratedCodeInfo_Annotation struct { // Identifies the ending offset in bytes in the generated code that // relates to the identified offset. The end offset should be one past // the last relevant byte (so the length of the text = end - begin). 
- End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` + End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{19, 0} + return fileDescriptor_e5baabe45344a177, []int{20, 0} +} + +func (m *GeneratedCodeInfo_Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Unmarshal(m, b) +} +func (m *GeneratedCodeInfo_Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Marshal(b, m, deterministic) +} +func (m *GeneratedCodeInfo_Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeneratedCodeInfo_Annotation.Merge(m, src) } +func (m *GeneratedCodeInfo_Annotation) XXX_Size() int { + return xxx_messageInfo_GeneratedCodeInfo_Annotation.Size(m) +} +func (m *GeneratedCodeInfo_Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_GeneratedCodeInfo_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_GeneratedCodeInfo_Annotation proto.InternalMessageInfo func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { if m != nil { @@ -1958,14 +2683,22 @@ func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 { } func init() { + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, FieldDescriptorProto_Label_value) + proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) + proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) + proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) + proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet") proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto") proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") + proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions") proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") + proto.RegisterType((*EnumDescriptorProto_EnumReservedRange)(nil), "google.protobuf.EnumDescriptorProto.EnumReservedRange") proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") proto.RegisterType((*ServiceDescriptorProto)(nil), 
"google.protobuf.ServiceDescriptorProto") proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") @@ -1983,170 +2716,172 @@ func init() { proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location") proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo") proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation") - proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) - proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, FieldDescriptorProto_Label_value) - proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) - proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) - proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) - proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) } -func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 2460 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0x5b, 0x6f, 0xdb, 0xc8, - 0x15, 0x5e, 0x5d, 0x2d, 0x1d, 0xc9, 0xf2, 0x78, 0xec, 0x4d, 0x18, 0xef, 0x25, 0x8e, 0xf6, 0x12, - 0x6f, 0xd2, 0xc8, 0x0b, 0xe7, 0xb2, 0x59, 0xa7, 0x48, 0x21, 0x4b, 0x8c, 0x57, 0xa9, 0x2c, 0xa9, - 0x94, 0xdc, 0x4d, 0xf6, 0x85, 0x18, 0x93, 0x23, 0x99, 0x09, 0x45, 0x72, 0x49, 0x2a, 0x89, 0xf7, - 0x29, 0x40, 0x9f, 0x0a, 0xf4, 0x07, 0x14, 0x45, 0xd1, 0x87, 0x7d, 0x59, 0xa0, 0x3f, 0xa0, 0xcf, - 0xfd, 0x05, 0x05, 0xf6, 0xb9, 0x2f, 0x45, 0x51, 0xa0, 0xfd, 0x07, 0x7d, 0x2d, 0x66, 0x86, 0xa4, - 0x48, 0x5d, 0x12, 0x77, 0x81, 0xec, 0x3e, 0xd9, 0x73, 0xce, 0x77, 0x0e, 0xcf, 0x9c, 0xf9, 0x66, - 0xce, 0x99, 0x11, 0x6c, 0x8f, 0x6c, 0x7b, 0x64, 0xd2, 0x5d, 0xc7, 0xb5, 0x7d, 0xfb, 0x64, 0x32, - 0xdc, 0xd5, 0xa9, 0xa7, 0xb9, 0x86, 0xe3, 0xdb, 0x6e, 0x8d, 0xcb, 0xf0, 0x9a, 0x40, 0xd4, 0x42, - 0x44, 0xf5, 0x08, 0xd6, 0x1f, 0x18, 0x26, 0x6d, 0x46, 0xc0, 0x3e, 0xf5, 0xf1, 0x5d, 0xc8, 0x0e, - 0x0d, 0x93, 0x4a, 0xa9, 0xed, 0xcc, 0x4e, 0x69, 0xef, 0xc3, 0xda, 0x8c, 0x51, 0x2d, 0x69, 0xd1, - 0x63, 0x62, 0x85, 0x5b, 0x54, 0xff, 0x95, 0x85, 0x8d, 0x05, 0x5a, 0x8c, 0x21, 0x6b, 0x91, 0x31, - 0xf3, 0x98, 0xda, 0x29, 0x2a, 0xfc, 0x7f, 0x2c, 0xc1, 0x8a, 0x43, 0xb4, 0xa7, 0x64, 0x44, 0xa5, - 0x34, 0x17, 0x87, 0x43, 0xfc, 0x3e, 0x80, 0x4e, 0x1d, 0x6a, 0xe9, 0xd4, 0xd2, 0xce, 0xa4, 0xcc, - 0x76, 0x66, 0xa7, 0xa8, 0xc4, 0x24, 0xf8, 0x3a, 0xac, 0x3b, 0x93, 0x13, 0xd3, 0xd0, 0xd4, 0x18, - 0x0c, 0xb6, 0x33, 0x3b, 0x39, 0x05, 0x09, 0x45, 0x73, 0x0a, 0xbe, 0x0a, 0x6b, 0xcf, 0x29, 0x79, - 0x1a, 0x87, 0x96, 0x38, 0xb4, 0xc2, 0xc4, 0x31, 0x60, 0x03, 0xca, 0x63, 0xea, 0x79, 0x64, 0x44, - 0x55, 0xff, 0xcc, 0xa1, 0x52, 0x96, 0xcf, 0x7e, 0x7b, 0x6e, 0xf6, 0xb3, 0x33, 0x2f, 0x05, 0x56, - 0x83, 0x33, 0x87, 0xe2, 0x3a, 0x14, 0xa9, 0x35, 0x19, 0x0b, 0x0f, 0xb9, 0x25, 0xf9, 0x93, 0xad, - 0xc9, 0x78, 0xd6, 0x4b, 0x81, 0x99, 0x05, 0x2e, 0x56, 0x3c, 0xea, 0x3e, 0x33, 0x34, 0x2a, 0xe5, - 0xb9, 0x83, 0xab, 0x73, 0x0e, 0xfa, 0x42, 0x3f, 0xeb, 0x23, 0xb4, 0xc3, 0x0d, 0x28, 0xd2, 0x17, - 0x3e, 0xb5, 0x3c, 0xc3, 0xb6, 0xa4, 0x15, 0xee, 0xe4, 0xa3, 0x05, 0xab, 0x48, 0x4d, 0x7d, 0xd6, - 0xc5, 
0xd4, 0x0e, 0xdf, 0x81, 0x15, 0xdb, 0xf1, 0x0d, 0xdb, 0xf2, 0xa4, 0xc2, 0x76, 0x6a, 0xa7, - 0xb4, 0xf7, 0xee, 0x42, 0x22, 0x74, 0x05, 0x46, 0x09, 0xc1, 0xb8, 0x05, 0xc8, 0xb3, 0x27, 0xae, - 0x46, 0x55, 0xcd, 0xd6, 0xa9, 0x6a, 0x58, 0x43, 0x5b, 0x2a, 0x72, 0x07, 0x97, 0xe7, 0x27, 0xc2, - 0x81, 0x0d, 0x5b, 0xa7, 0x2d, 0x6b, 0x68, 0x2b, 0x15, 0x2f, 0x31, 0xc6, 0x17, 0x20, 0xef, 0x9d, - 0x59, 0x3e, 0x79, 0x21, 0x95, 0x39, 0x43, 0x82, 0x51, 0xf5, 0xbf, 0x39, 0x58, 0x3b, 0x0f, 0xc5, - 0xee, 0x41, 0x6e, 0xc8, 0x66, 0x29, 0xa5, 0xff, 0x9f, 0x1c, 0x08, 0x9b, 0x64, 0x12, 0xf3, 0x3f, - 0x30, 0x89, 0x75, 0x28, 0x59, 0xd4, 0xf3, 0xa9, 0x2e, 0x18, 0x91, 0x39, 0x27, 0xa7, 0x40, 0x18, - 0xcd, 0x53, 0x2a, 0xfb, 0x83, 0x28, 0xf5, 0x08, 0xd6, 0xa2, 0x90, 0x54, 0x97, 0x58, 0xa3, 0x90, - 0x9b, 0xbb, 0xaf, 0x8b, 0xa4, 0x26, 0x87, 0x76, 0x0a, 0x33, 0x53, 0x2a, 0x34, 0x31, 0xc6, 0x4d, - 0x00, 0xdb, 0xa2, 0xf6, 0x50, 0xd5, 0xa9, 0x66, 0x4a, 0x85, 0x25, 0x59, 0xea, 0x32, 0xc8, 0x5c, - 0x96, 0x6c, 0x21, 0xd5, 0x4c, 0xfc, 0xf9, 0x94, 0x6a, 0x2b, 0x4b, 0x98, 0x72, 0x24, 0x36, 0xd9, - 0x1c, 0xdb, 0x8e, 0xa1, 0xe2, 0x52, 0xc6, 0x7b, 0xaa, 0x07, 0x33, 0x2b, 0xf2, 0x20, 0x6a, 0xaf, - 0x9d, 0x99, 0x12, 0x98, 0x89, 0x89, 0xad, 0xba, 0xf1, 0x21, 0xfe, 0x00, 0x22, 0x81, 0xca, 0x69, - 0x05, 0xfc, 0x14, 0x2a, 0x87, 0xc2, 0x0e, 0x19, 0xd3, 0xad, 0xbb, 0x50, 0x49, 0xa6, 0x07, 0x6f, - 0x42, 0xce, 0xf3, 0x89, 0xeb, 0x73, 0x16, 0xe6, 0x14, 0x31, 0xc0, 0x08, 0x32, 0xd4, 0xd2, 0xf9, - 0x29, 0x97, 0x53, 0xd8, 0xbf, 0x5b, 0x9f, 0xc1, 0x6a, 0xe2, 0xf3, 0xe7, 0x35, 0xac, 0xfe, 0x3e, - 0x0f, 0x9b, 0x8b, 0x38, 0xb7, 0x90, 0xfe, 0x17, 0x20, 0x6f, 0x4d, 0xc6, 0x27, 0xd4, 0x95, 0x32, - 0xdc, 0x43, 0x30, 0xc2, 0x75, 0xc8, 0x99, 0xe4, 0x84, 0x9a, 0x52, 0x76, 0x3b, 0xb5, 0x53, 0xd9, - 0xbb, 0x7e, 0x2e, 0x56, 0xd7, 0xda, 0xcc, 0x44, 0x11, 0x96, 0xf8, 0x3e, 0x64, 0x83, 0x23, 0x8e, - 0x79, 0xb8, 0x76, 0x3e, 0x0f, 0x8c, 0x8b, 0x0a, 0xb7, 0xc3, 0xef, 0x40, 0x91, 0xfd, 0x15, 0xb9, - 0xcd, 0xf3, 0x98, 0x0b, 0x4c, 0xc0, 0xf2, 0x8a, 0xb7, 0xa0, 0xc0, 0x69, 0xa6, 0xd3, 0xb0, 0x34, - 0x44, 0x63, 0xb6, 0x30, 0x3a, 0x1d, 0x92, 0x89, 0xe9, 0xab, 0xcf, 0x88, 0x39, 0xa1, 0x9c, 0x30, - 0x45, 0xa5, 0x1c, 0x08, 0x7f, 0xcd, 0x64, 0xf8, 0x32, 0x94, 0x04, 0x2b, 0x0d, 0x4b, 0xa7, 0x2f, - 0xf8, 0xe9, 0x93, 0x53, 0x04, 0x51, 0x5b, 0x4c, 0xc2, 0x3e, 0xff, 0xc4, 0xb3, 0xad, 0x70, 0x69, - 0xf9, 0x27, 0x98, 0x80, 0x7f, 0xfe, 0xb3, 0xd9, 0x83, 0xef, 0xbd, 0xc5, 0xd3, 0x9b, 0xe5, 0x62, - 0xf5, 0x2f, 0x69, 0xc8, 0xf2, 0xfd, 0xb6, 0x06, 0xa5, 0xc1, 0xe3, 0x9e, 0xac, 0x36, 0xbb, 0xc7, - 0x07, 0x6d, 0x19, 0xa5, 0x70, 0x05, 0x80, 0x0b, 0x1e, 0xb4, 0xbb, 0xf5, 0x01, 0x4a, 0x47, 0xe3, - 0x56, 0x67, 0x70, 0xe7, 0x16, 0xca, 0x44, 0x06, 0xc7, 0x42, 0x90, 0x8d, 0x03, 0x6e, 0xee, 0xa1, - 0x1c, 0x46, 0x50, 0x16, 0x0e, 0x5a, 0x8f, 0xe4, 0xe6, 0x9d, 0x5b, 0x28, 0x9f, 0x94, 0xdc, 0xdc, - 0x43, 0x2b, 0x78, 0x15, 0x8a, 0x5c, 0x72, 0xd0, 0xed, 0xb6, 0x51, 0x21, 0xf2, 0xd9, 0x1f, 0x28, - 0xad, 0xce, 0x21, 0x2a, 0x46, 0x3e, 0x0f, 0x95, 0xee, 0x71, 0x0f, 0x41, 0xe4, 0xe1, 0x48, 0xee, - 0xf7, 0xeb, 0x87, 0x32, 0x2a, 0x45, 0x88, 0x83, 0xc7, 0x03, 0xb9, 0x8f, 0xca, 0x89, 0xb0, 0x6e, - 0xee, 0xa1, 0xd5, 0xe8, 0x13, 0x72, 0xe7, 0xf8, 0x08, 0x55, 0xf0, 0x3a, 0xac, 0x8a, 0x4f, 0x84, - 0x41, 0xac, 0xcd, 0x88, 0xee, 0xdc, 0x42, 0x68, 0x1a, 0x88, 0xf0, 0xb2, 0x9e, 0x10, 0xdc, 0xb9, - 0x85, 0x70, 0xb5, 0x01, 0x39, 0xce, 0x2e, 0x8c, 0xa1, 0xd2, 0xae, 0x1f, 0xc8, 0x6d, 0xb5, 0xdb, - 0x1b, 0xb4, 0xba, 0x9d, 0x7a, 0x1b, 0xa5, 0xa6, 0x32, 0x45, 0xfe, 0xd5, 0x71, 0x4b, 0x91, 0x9b, - 0x28, 0x1d, 0x97, 0xf5, 0xe4, 
0xfa, 0x40, 0x6e, 0xa2, 0x4c, 0x55, 0x83, 0xcd, 0x45, 0xe7, 0xcc, - 0xc2, 0x9d, 0x11, 0x5b, 0xe2, 0xf4, 0x92, 0x25, 0xe6, 0xbe, 0xe6, 0x96, 0xf8, 0xdb, 0x14, 0x6c, - 0x2c, 0x38, 0x6b, 0x17, 0x7e, 0xe4, 0x17, 0x90, 0x13, 0x14, 0x15, 0xd5, 0xe7, 0x93, 0x85, 0x87, - 0x36, 0x27, 0xec, 0x5c, 0x05, 0xe2, 0x76, 0xf1, 0x0a, 0x9c, 0x59, 0x52, 0x81, 0x99, 0x8b, 0xb9, - 0x20, 0x7f, 0x93, 0x02, 0x69, 0x99, 0xef, 0xd7, 0x1c, 0x14, 0xe9, 0xc4, 0x41, 0x71, 0x6f, 0x36, - 0x80, 0x2b, 0xcb, 0xe7, 0x30, 0x17, 0xc5, 0x77, 0x29, 0xb8, 0xb0, 0xb8, 0x51, 0x59, 0x18, 0xc3, - 0x7d, 0xc8, 0x8f, 0xa9, 0x7f, 0x6a, 0x87, 0xc5, 0xfa, 0xe3, 0x05, 0x25, 0x80, 0xa9, 0x67, 0x73, - 0x15, 0x58, 0xc5, 0x6b, 0x48, 0x66, 0x59, 0xb7, 0x21, 0xa2, 0x99, 0x8b, 0xf4, 0xb7, 0x69, 0x78, - 0x7b, 0xa1, 0xf3, 0x85, 0x81, 0xbe, 0x07, 0x60, 0x58, 0xce, 0xc4, 0x17, 0x05, 0x59, 0x9c, 0x4f, - 0x45, 0x2e, 0xe1, 0x7b, 0x9f, 0x9d, 0x3d, 0x13, 0x3f, 0xd2, 0x67, 0xb8, 0x1e, 0x84, 0x88, 0x03, - 0xee, 0x4e, 0x03, 0xcd, 0xf2, 0x40, 0xdf, 0x5f, 0x32, 0xd3, 0xb9, 0x5a, 0xf7, 0x29, 0x20, 0xcd, - 0x34, 0xa8, 0xe5, 0xab, 0x9e, 0xef, 0x52, 0x32, 0x36, 0xac, 0x11, 0x3f, 0x80, 0x0b, 0xfb, 0xb9, - 0x21, 0x31, 0x3d, 0xaa, 0xac, 0x09, 0x75, 0x3f, 0xd4, 0x32, 0x0b, 0x5e, 0x65, 0xdc, 0x98, 0x45, - 0x3e, 0x61, 0x21, 0xd4, 0x91, 0x45, 0xf5, 0xef, 0x2b, 0x50, 0x8a, 0xb5, 0x75, 0xf8, 0x0a, 0x94, - 0x9f, 0x90, 0x67, 0x44, 0x0d, 0x5b, 0x75, 0x91, 0x89, 0x12, 0x93, 0xf5, 0x82, 0x76, 0xfd, 0x53, - 0xd8, 0xe4, 0x10, 0x7b, 0xe2, 0x53, 0x57, 0xd5, 0x4c, 0xe2, 0x79, 0x3c, 0x69, 0x05, 0x0e, 0xc5, - 0x4c, 0xd7, 0x65, 0xaa, 0x46, 0xa8, 0xc1, 0xb7, 0x61, 0x83, 0x5b, 0x8c, 0x27, 0xa6, 0x6f, 0x38, - 0x26, 0x55, 0xd9, 0xe5, 0xc1, 0xe3, 0x07, 0x71, 0x14, 0xd9, 0x3a, 0x43, 0x1c, 0x05, 0x00, 0x16, - 0x91, 0x87, 0x9b, 0xf0, 0x1e, 0x37, 0x1b, 0x51, 0x8b, 0xba, 0xc4, 0xa7, 0x2a, 0xfd, 0x7a, 0x42, - 0x4c, 0x4f, 0x25, 0x96, 0xae, 0x9e, 0x12, 0xef, 0x54, 0xda, 0x64, 0x0e, 0x0e, 0xd2, 0x52, 0x4a, - 0xb9, 0xc4, 0x80, 0x87, 0x01, 0x4e, 0xe6, 0xb0, 0xba, 0xa5, 0x7f, 0x41, 0xbc, 0x53, 0xbc, 0x0f, - 0x17, 0xb8, 0x17, 0xcf, 0x77, 0x0d, 0x6b, 0xa4, 0x6a, 0xa7, 0x54, 0x7b, 0xaa, 0x4e, 0xfc, 0xe1, - 0x5d, 0xe9, 0x9d, 0xf8, 0xf7, 0x79, 0x84, 0x7d, 0x8e, 0x69, 0x30, 0xc8, 0xb1, 0x3f, 0xbc, 0x8b, - 0xfb, 0x50, 0x66, 0x8b, 0x31, 0x36, 0xbe, 0xa1, 0xea, 0xd0, 0x76, 0x79, 0x65, 0xa9, 0x2c, 0xd8, - 0xd9, 0xb1, 0x0c, 0xd6, 0xba, 0x81, 0xc1, 0x91, 0xad, 0xd3, 0xfd, 0x5c, 0xbf, 0x27, 0xcb, 0x4d, - 0xa5, 0x14, 0x7a, 0x79, 0x60, 0xbb, 0x8c, 0x50, 0x23, 0x3b, 0x4a, 0x70, 0x49, 0x10, 0x6a, 0x64, - 0x87, 0xe9, 0xbd, 0x0d, 0x1b, 0x9a, 0x26, 0xe6, 0x6c, 0x68, 0x6a, 0xd0, 0xe2, 0x7b, 0x12, 0x4a, - 0x24, 0x4b, 0xd3, 0x0e, 0x05, 0x20, 0xe0, 0xb8, 0x87, 0x3f, 0x87, 0xb7, 0xa7, 0xc9, 0x8a, 0x1b, - 0xae, 0xcf, 0xcd, 0x72, 0xd6, 0xf4, 0x36, 0x6c, 0x38, 0x67, 0xf3, 0x86, 0x38, 0xf1, 0x45, 0xe7, - 0x6c, 0xd6, 0xec, 0x23, 0x7e, 0x6d, 0x73, 0xa9, 0x46, 0x7c, 0xaa, 0x4b, 0x17, 0xe3, 0xe8, 0x98, - 0x02, 0xef, 0x02, 0xd2, 0x34, 0x95, 0x5a, 0xe4, 0xc4, 0xa4, 0x2a, 0x71, 0xa9, 0x45, 0x3c, 0xe9, - 0x72, 0x1c, 0x5c, 0xd1, 0x34, 0x99, 0x6b, 0xeb, 0x5c, 0x89, 0xaf, 0xc1, 0xba, 0x7d, 0xf2, 0x44, - 0x13, 0xcc, 0x52, 0x1d, 0x97, 0x0e, 0x8d, 0x17, 0xd2, 0x87, 0x3c, 0x4d, 0x6b, 0x4c, 0xc1, 0x79, - 0xd5, 0xe3, 0x62, 0xfc, 0x09, 0x20, 0xcd, 0x3b, 0x25, 0xae, 0xc3, 0x4b, 0xbb, 0xe7, 0x10, 0x8d, - 0x4a, 0x1f, 0x09, 0xa8, 0x90, 0x77, 0x42, 0x31, 0x63, 0xb6, 0xf7, 0xdc, 0x18, 0xfa, 0xa1, 0xc7, - 0xab, 0x82, 0xd9, 0x5c, 0x16, 0x78, 0xdb, 0x01, 0xe4, 0x9c, 0x3a, 0xc9, 0x0f, 0xef, 0x70, 0x58, - 0xc5, 0x39, 0x75, 0xe2, 0xdf, 0x7d, 0x04, 0x9b, 0x13, 
0xcb, 0xb0, 0x7c, 0xea, 0x3a, 0x2e, 0x65, - 0xed, 0xbe, 0xd8, 0xb3, 0xd2, 0xbf, 0x57, 0x96, 0x34, 0xec, 0xc7, 0x71, 0xb4, 0xa0, 0x8a, 0xb2, - 0x31, 0x99, 0x17, 0x56, 0xf7, 0xa1, 0x1c, 0x67, 0x10, 0x2e, 0x82, 0xe0, 0x10, 0x4a, 0xb1, 0x6a, - 0xdc, 0xe8, 0x36, 0x59, 0x1d, 0xfd, 0x4a, 0x46, 0x69, 0x56, 0xcf, 0xdb, 0xad, 0x81, 0xac, 0x2a, - 0xc7, 0x9d, 0x41, 0xeb, 0x48, 0x46, 0x99, 0x6b, 0xc5, 0xc2, 0x7f, 0x56, 0xd0, 0xcb, 0x97, 0x2f, - 0x5f, 0xa6, 0x1f, 0x66, 0x0b, 0x1f, 0xa3, 0xab, 0xd5, 0xef, 0xd3, 0x50, 0x49, 0x76, 0xd2, 0xf8, - 0xe7, 0x70, 0x31, 0xbc, 0xf6, 0x7a, 0xd4, 0x57, 0x9f, 0x1b, 0x2e, 0xa7, 0xf6, 0x98, 0x88, 0x5e, - 0x34, 0x5a, 0x95, 0xcd, 0x00, 0xd5, 0xa7, 0xfe, 0x97, 0x86, 0xcb, 0x88, 0x3b, 0x26, 0x3e, 0x6e, - 0xc3, 0x65, 0xcb, 0x56, 0x3d, 0x9f, 0x58, 0x3a, 0x71, 0x75, 0x75, 0xfa, 0xe0, 0xa0, 0x12, 0x4d, - 0xa3, 0x9e, 0x67, 0x8b, 0x92, 0x12, 0x79, 0x79, 0xd7, 0xb2, 0xfb, 0x01, 0x78, 0x7a, 0xd6, 0xd6, - 0x03, 0xe8, 0x0c, 0x83, 0x32, 0xcb, 0x18, 0xf4, 0x0e, 0x14, 0xc7, 0xc4, 0x51, 0xa9, 0xe5, 0xbb, - 0x67, 0xbc, 0xff, 0x2b, 0x28, 0x85, 0x31, 0x71, 0x64, 0x36, 0x7e, 0x73, 0x2b, 0x91, 0xcc, 0x66, - 0x01, 0x15, 0x1f, 0x66, 0x0b, 0x45, 0x04, 0xd5, 0x7f, 0x66, 0xa0, 0x1c, 0xef, 0x07, 0x59, 0x7b, - 0xad, 0xf1, 0xb3, 0x3f, 0xc5, 0x4f, 0x87, 0x0f, 0x5e, 0xd9, 0x3d, 0xd6, 0x1a, 0xac, 0x28, 0xec, - 0xe7, 0x45, 0x97, 0xa6, 0x08, 0x4b, 0x56, 0x90, 0xd9, 0x79, 0x40, 0x45, 0xef, 0x5f, 0x50, 0x82, - 0x11, 0x3e, 0x84, 0xfc, 0x13, 0x8f, 0xfb, 0xce, 0x73, 0xdf, 0x1f, 0xbe, 0xda, 0xf7, 0xc3, 0x3e, - 0x77, 0x5e, 0x7c, 0xd8, 0x57, 0x3b, 0x5d, 0xe5, 0xa8, 0xde, 0x56, 0x02, 0x73, 0x7c, 0x09, 0xb2, - 0x26, 0xf9, 0xe6, 0x2c, 0x59, 0x3e, 0xb8, 0xe8, 0xbc, 0x8b, 0x70, 0x09, 0xb2, 0xcf, 0x29, 0x79, - 0x9a, 0x3c, 0xb4, 0xb9, 0xe8, 0x0d, 0x6e, 0x86, 0x5d, 0xc8, 0xf1, 0x7c, 0x61, 0x80, 0x20, 0x63, - 0xe8, 0x2d, 0x5c, 0x80, 0x6c, 0xa3, 0xab, 0xb0, 0x0d, 0x81, 0xa0, 0x2c, 0xa4, 0x6a, 0xaf, 0x25, - 0x37, 0x64, 0x94, 0xae, 0xde, 0x86, 0xbc, 0x48, 0x02, 0xdb, 0x2c, 0x51, 0x1a, 0xd0, 0x5b, 0xc1, - 0x30, 0xf0, 0x91, 0x0a, 0xb5, 0xc7, 0x47, 0x07, 0xb2, 0x82, 0xd2, 0xc9, 0xa5, 0xce, 0xa2, 0x5c, - 0xd5, 0x83, 0x72, 0xbc, 0x21, 0xfc, 0x51, 0x58, 0x56, 0xfd, 0x6b, 0x0a, 0x4a, 0xb1, 0x06, 0x8f, - 0xb5, 0x16, 0xc4, 0x34, 0xed, 0xe7, 0x2a, 0x31, 0x0d, 0xe2, 0x05, 0xd4, 0x00, 0x2e, 0xaa, 0x33, - 0xc9, 0x79, 0x97, 0xee, 0x47, 0xda, 0x22, 0x39, 0x94, 0xaf, 0xfe, 0x29, 0x05, 0x68, 0xb6, 0x45, - 0x9c, 0x09, 0x33, 0xf5, 0x53, 0x86, 0x59, 0xfd, 0x63, 0x0a, 0x2a, 0xc9, 0xbe, 0x70, 0x26, 0xbc, - 0x2b, 0x3f, 0x69, 0x78, 0xff, 0x48, 0xc3, 0x6a, 0xa2, 0x1b, 0x3c, 0x6f, 0x74, 0x5f, 0xc3, 0xba, - 0xa1, 0xd3, 0xb1, 0x63, 0xfb, 0xd4, 0xd2, 0xce, 0x54, 0x93, 0x3e, 0xa3, 0xa6, 0x54, 0xe5, 0x87, - 0xc6, 0xee, 0xab, 0xfb, 0xcd, 0x5a, 0x6b, 0x6a, 0xd7, 0x66, 0x66, 0xfb, 0x1b, 0xad, 0xa6, 0x7c, - 0xd4, 0xeb, 0x0e, 0xe4, 0x4e, 0xe3, 0xb1, 0x7a, 0xdc, 0xf9, 0x65, 0xa7, 0xfb, 0x65, 0x47, 0x41, - 0xc6, 0x0c, 0xec, 0x0d, 0x6e, 0xfb, 0x1e, 0xa0, 0xd9, 0xa0, 0xf0, 0x45, 0x58, 0x14, 0x16, 0x7a, - 0x0b, 0x6f, 0xc0, 0x5a, 0xa7, 0xab, 0xf6, 0x5b, 0x4d, 0x59, 0x95, 0x1f, 0x3c, 0x90, 0x1b, 0x83, - 0xbe, 0xb8, 0x80, 0x47, 0xe8, 0x41, 0x62, 0x83, 0x57, 0xff, 0x90, 0x81, 0x8d, 0x05, 0x91, 0xe0, - 0x7a, 0xd0, 0xfb, 0x8b, 0xeb, 0xc8, 0x8d, 0xf3, 0x44, 0x5f, 0x63, 0xdd, 0x45, 0x8f, 0xb8, 0x7e, - 0x70, 0x55, 0xf8, 0x04, 0x58, 0x96, 0x2c, 0xdf, 0x18, 0x1a, 0xd4, 0x0d, 0xde, 0x2b, 0xc4, 0x85, - 0x60, 0x6d, 0x2a, 0x17, 0x4f, 0x16, 0x3f, 0x03, 0xec, 0xd8, 0x9e, 0xe1, 0x1b, 0xcf, 0xa8, 0x6a, - 0x58, 0xe1, 0xe3, 0x06, 0xbb, 0x20, 0x64, 0x15, 0x14, 0x6a, 0x5a, 0x96, 0x1f, 
0xa1, 0x2d, 0x3a, - 0x22, 0x33, 0x68, 0x76, 0x98, 0x67, 0x14, 0x14, 0x6a, 0x22, 0xf4, 0x15, 0x28, 0xeb, 0xf6, 0x84, - 0xb5, 0x5b, 0x02, 0xc7, 0x6a, 0x47, 0x4a, 0x29, 0x09, 0x59, 0x04, 0x09, 0xfa, 0xe1, 0xe9, 0xab, - 0x4a, 0x59, 0x29, 0x09, 0x99, 0x80, 0x5c, 0x85, 0x35, 0x32, 0x1a, 0xb9, 0xcc, 0x79, 0xe8, 0x48, - 0x74, 0xf8, 0x95, 0x48, 0xcc, 0x81, 0x5b, 0x0f, 0xa1, 0x10, 0xe6, 0x81, 0x95, 0x6a, 0x96, 0x09, - 0xd5, 0x11, 0x6f, 0x5b, 0xe9, 0x9d, 0xa2, 0x52, 0xb0, 0x42, 0xe5, 0x15, 0x28, 0x1b, 0x9e, 0x3a, - 0x7d, 0x64, 0x4d, 0x6f, 0xa7, 0x77, 0x0a, 0x4a, 0xc9, 0xf0, 0xa2, 0x57, 0xb5, 0xea, 0x77, 0x69, - 0xa8, 0x24, 0x1f, 0x89, 0x71, 0x13, 0x0a, 0xa6, 0xad, 0x11, 0x4e, 0x2d, 0xf1, 0x0b, 0xc5, 0xce, - 0x6b, 0xde, 0x95, 0x6b, 0xed, 0x00, 0xaf, 0x44, 0x96, 0x5b, 0x7f, 0x4b, 0x41, 0x21, 0x14, 0xe3, - 0x0b, 0x90, 0x75, 0x88, 0x7f, 0xca, 0xdd, 0xe5, 0x0e, 0xd2, 0x28, 0xa5, 0xf0, 0x31, 0x93, 0x7b, - 0x0e, 0xb1, 0x38, 0x05, 0x02, 0x39, 0x1b, 0xb3, 0x75, 0x35, 0x29, 0xd1, 0xf9, 0xf5, 0xc1, 0x1e, - 0x8f, 0xa9, 0xe5, 0x7b, 0xe1, 0xba, 0x06, 0xf2, 0x46, 0x20, 0xc6, 0xd7, 0x61, 0xdd, 0x77, 0x89, - 0x61, 0x26, 0xb0, 0x59, 0x8e, 0x45, 0xa1, 0x22, 0x02, 0xef, 0xc3, 0xa5, 0xd0, 0xaf, 0x4e, 0x7d, - 0xa2, 0x9d, 0x52, 0x7d, 0x6a, 0x94, 0xe7, 0x2f, 0x90, 0x17, 0x03, 0x40, 0x33, 0xd0, 0x87, 0xb6, - 0xd5, 0xef, 0x53, 0xb0, 0x1e, 0x5e, 0x78, 0xf4, 0x28, 0x59, 0x47, 0x00, 0xc4, 0xb2, 0x6c, 0x3f, - 0x9e, 0xae, 0x79, 0x2a, 0xcf, 0xd9, 0xd5, 0xea, 0x91, 0x91, 0x12, 0x73, 0xb0, 0x35, 0x06, 0x98, - 0x6a, 0x96, 0xa6, 0xed, 0x32, 0x94, 0x82, 0x5f, 0x00, 0xf8, 0xcf, 0x48, 0xe2, 0x8a, 0x0c, 0x42, - 0xc4, 0x6e, 0x46, 0x78, 0x13, 0x72, 0x27, 0x74, 0x64, 0x58, 0xc1, 0xbb, 0xa4, 0x18, 0x84, 0xaf, - 0x9d, 0xd9, 0xe8, 0xb5, 0xf3, 0xe0, 0x77, 0x29, 0xd8, 0xd0, 0xec, 0xf1, 0x6c, 0xbc, 0x07, 0x68, - 0xe6, 0x9e, 0xee, 0x7d, 0x91, 0xfa, 0xea, 0xfe, 0xc8, 0xf0, 0x4f, 0x27, 0x27, 0x35, 0xcd, 0x1e, - 0xef, 0x8e, 0x6c, 0x93, 0x58, 0xa3, 0xe9, 0xef, 0x60, 0xfc, 0x1f, 0xed, 0xc6, 0x88, 0x5a, 0x37, - 0x46, 0x76, 0xec, 0x57, 0xb1, 0x7b, 0xd3, 0x7f, 0xbf, 0x4d, 0x67, 0x0e, 0x7b, 0x07, 0x7f, 0x4e, - 0x6f, 0x1d, 0x8a, 0x6f, 0xf5, 0xc2, 0xdc, 0x28, 0x74, 0x68, 0x52, 0x8d, 0xcd, 0xf7, 0x7f, 0x01, - 0x00, 0x00, 0xff, 0xff, 0x8e, 0x54, 0xe7, 0xef, 0x60, 0x1b, 0x00, 0x00, +func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor_e5baabe45344a177) } + +var fileDescriptor_e5baabe45344a177 = []byte{ + // 2589 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xdd, 0x8e, 0xdb, 0xc6, + 0x15, 0x0e, 0xf5, 0xb7, 0xd2, 0x91, 0x56, 0x3b, 0x3b, 0xbb, 0xb1, 0xe9, 0xcd, 0x8f, 0xd7, 0xca, + 0x8f, 0xd7, 0x4e, 0xac, 0x0d, 0x1c, 0xdb, 0x71, 0xd6, 0x45, 0x5a, 0xad, 0x44, 0x6f, 0xe4, 0xee, + 0x4a, 0x2a, 0xa5, 0x6d, 0x7e, 0x80, 0x82, 0x98, 0x25, 0x47, 0x12, 0x6d, 0x8a, 0x64, 0x48, 0xca, + 0xf6, 0x06, 0xbd, 0x30, 0xd0, 0xab, 0x5e, 0x15, 0xe8, 0x55, 0x51, 0x14, 0xbd, 0xe8, 0x4d, 0x80, + 0x3e, 0x40, 0x81, 0xde, 0xf5, 0x09, 0x0a, 0xe4, 0x0d, 0x8a, 0xb6, 0x40, 0xfb, 0x08, 0xbd, 0x2c, + 0x66, 0x86, 0xa4, 0x48, 0x49, 0x1b, 0x6f, 0x02, 0xc4, 0xb9, 0x92, 0xe6, 0x3b, 0xdf, 0x39, 0x73, + 0xe6, 0xcc, 0x99, 0x99, 0x33, 0x43, 0xd8, 0x1e, 0x39, 0xce, 0xc8, 0xa2, 0xbb, 0xae, 0xe7, 0x04, + 0xce, 0xc9, 0x74, 0xb8, 0x6b, 0x50, 0x5f, 0xf7, 0x4c, 0x37, 0x70, 0xbc, 0x3a, 0xc7, 0xf0, 0x9a, + 0x60, 0xd4, 0x23, 0x46, 0xed, 0x08, 0xd6, 0xef, 0x9b, 0x16, 0x6d, 0xc5, 0xc4, 0x3e, 0x0d, 0xf0, + 0x5d, 0xc8, 0x0d, 0x4d, 0x8b, 0xca, 0xd2, 0x76, 0x76, 0xa7, 0x7c, 0xf3, 0xcd, 0xfa, 0x9c, 0x52, + 0x3d, 0xad, 0xd1, 0x63, 
0xb0, 0xca, 0x35, 0x6a, 0xff, 0xce, 0xc1, 0xc6, 0x12, 0x29, 0xc6, 0x90, + 0xb3, 0xc9, 0x84, 0x59, 0x94, 0x76, 0x4a, 0x2a, 0xff, 0x8f, 0x65, 0x58, 0x71, 0x89, 0xfe, 0x88, + 0x8c, 0xa8, 0x9c, 0xe1, 0x70, 0xd4, 0xc4, 0xaf, 0x03, 0x18, 0xd4, 0xa5, 0xb6, 0x41, 0x6d, 0xfd, + 0x54, 0xce, 0x6e, 0x67, 0x77, 0x4a, 0x6a, 0x02, 0xc1, 0xef, 0xc0, 0xba, 0x3b, 0x3d, 0xb1, 0x4c, + 0x5d, 0x4b, 0xd0, 0x60, 0x3b, 0xbb, 0x93, 0x57, 0x91, 0x10, 0xb4, 0x66, 0xe4, 0xab, 0xb0, 0xf6, + 0x84, 0x92, 0x47, 0x49, 0x6a, 0x99, 0x53, 0xab, 0x0c, 0x4e, 0x10, 0x9b, 0x50, 0x99, 0x50, 0xdf, + 0x27, 0x23, 0xaa, 0x05, 0xa7, 0x2e, 0x95, 0x73, 0x7c, 0xf4, 0xdb, 0x0b, 0xa3, 0x9f, 0x1f, 0x79, + 0x39, 0xd4, 0x1a, 0x9c, 0xba, 0x14, 0x37, 0xa0, 0x44, 0xed, 0xe9, 0x44, 0x58, 0xc8, 0x9f, 0x11, + 0x3f, 0xc5, 0x9e, 0x4e, 0xe6, 0xad, 0x14, 0x99, 0x5a, 0x68, 0x62, 0xc5, 0xa7, 0xde, 0x63, 0x53, + 0xa7, 0x72, 0x81, 0x1b, 0xb8, 0xba, 0x60, 0xa0, 0x2f, 0xe4, 0xf3, 0x36, 0x22, 0x3d, 0xdc, 0x84, + 0x12, 0x7d, 0x1a, 0x50, 0xdb, 0x37, 0x1d, 0x5b, 0x5e, 0xe1, 0x46, 0xde, 0x5a, 0x32, 0x8b, 0xd4, + 0x32, 0xe6, 0x4d, 0xcc, 0xf4, 0xf0, 0x1d, 0x58, 0x71, 0xdc, 0xc0, 0x74, 0x6c, 0x5f, 0x2e, 0x6e, + 0x4b, 0x3b, 0xe5, 0x9b, 0xaf, 0x2e, 0x4d, 0x84, 0xae, 0xe0, 0xa8, 0x11, 0x19, 0xb7, 0x01, 0xf9, + 0xce, 0xd4, 0xd3, 0xa9, 0xa6, 0x3b, 0x06, 0xd5, 0x4c, 0x7b, 0xe8, 0xc8, 0x25, 0x6e, 0xe0, 0xf2, + 0xe2, 0x40, 0x38, 0xb1, 0xe9, 0x18, 0xb4, 0x6d, 0x0f, 0x1d, 0xb5, 0xea, 0xa7, 0xda, 0xf8, 0x02, + 0x14, 0xfc, 0x53, 0x3b, 0x20, 0x4f, 0xe5, 0x0a, 0xcf, 0x90, 0xb0, 0x55, 0xfb, 0x6b, 0x01, 0xd6, + 0xce, 0x93, 0x62, 0xf7, 0x20, 0x3f, 0x64, 0xa3, 0x94, 0x33, 0xdf, 0x26, 0x06, 0x42, 0x27, 0x1d, + 0xc4, 0xc2, 0x77, 0x0c, 0x62, 0x03, 0xca, 0x36, 0xf5, 0x03, 0x6a, 0x88, 0x8c, 0xc8, 0x9e, 0x33, + 0xa7, 0x40, 0x28, 0x2d, 0xa6, 0x54, 0xee, 0x3b, 0xa5, 0xd4, 0xa7, 0xb0, 0x16, 0xbb, 0xa4, 0x79, + 0xc4, 0x1e, 0x45, 0xb9, 0xb9, 0xfb, 0x3c, 0x4f, 0xea, 0x4a, 0xa4, 0xa7, 0x32, 0x35, 0xb5, 0x4a, + 0x53, 0x6d, 0xdc, 0x02, 0x70, 0x6c, 0xea, 0x0c, 0x35, 0x83, 0xea, 0x96, 0x5c, 0x3c, 0x23, 0x4a, + 0x5d, 0x46, 0x59, 0x88, 0x92, 0x23, 0x50, 0xdd, 0xc2, 0x1f, 0xce, 0x52, 0x6d, 0xe5, 0x8c, 0x4c, + 0x39, 0x12, 0x8b, 0x6c, 0x21, 0xdb, 0x8e, 0xa1, 0xea, 0x51, 0x96, 0xf7, 0xd4, 0x08, 0x47, 0x56, + 0xe2, 0x4e, 0xd4, 0x9f, 0x3b, 0x32, 0x35, 0x54, 0x13, 0x03, 0x5b, 0xf5, 0x92, 0x4d, 0xfc, 0x06, + 0xc4, 0x80, 0xc6, 0xd3, 0x0a, 0xf8, 0x2e, 0x54, 0x89, 0xc0, 0x0e, 0x99, 0xd0, 0xad, 0x2f, 0xa1, + 0x9a, 0x0e, 0x0f, 0xde, 0x84, 0xbc, 0x1f, 0x10, 0x2f, 0xe0, 0x59, 0x98, 0x57, 0x45, 0x03, 0x23, + 0xc8, 0x52, 0xdb, 0xe0, 0xbb, 0x5c, 0x5e, 0x65, 0x7f, 0xf1, 0x4f, 0x66, 0x03, 0xce, 0xf2, 0x01, + 0xbf, 0xbd, 0x38, 0xa3, 0x29, 0xcb, 0xf3, 0xe3, 0xde, 0xfa, 0x00, 0x56, 0x53, 0x03, 0x38, 0x6f, + 0xd7, 0xb5, 0x5f, 0xc2, 0xcb, 0x4b, 0x4d, 0xe3, 0x4f, 0x61, 0x73, 0x6a, 0x9b, 0x76, 0x40, 0x3d, + 0xd7, 0xa3, 0x2c, 0x63, 0x45, 0x57, 0xf2, 0x7f, 0x56, 0xce, 0xc8, 0xb9, 0xe3, 0x24, 0x5b, 0x58, + 0x51, 0x37, 0xa6, 0x8b, 0xe0, 0xf5, 0x52, 0xf1, 0xbf, 0x2b, 0xe8, 0xd9, 0xb3, 0x67, 0xcf, 0x32, + 0xb5, 0xdf, 0x15, 0x60, 0x73, 0xd9, 0x9a, 0x59, 0xba, 0x7c, 0x2f, 0x40, 0xc1, 0x9e, 0x4e, 0x4e, + 0xa8, 0xc7, 0x83, 0x94, 0x57, 0xc3, 0x16, 0x6e, 0x40, 0xde, 0x22, 0x27, 0xd4, 0x92, 0x73, 0xdb, + 0xd2, 0x4e, 0xf5, 0xe6, 0x3b, 0xe7, 0x5a, 0x95, 0xf5, 0x43, 0xa6, 0xa2, 0x0a, 0x4d, 0xfc, 0x11, + 0xe4, 0xc2, 0x2d, 0x9a, 0x59, 0xb8, 0x7e, 0x3e, 0x0b, 0x6c, 0x2d, 0xa9, 0x5c, 0x0f, 0xbf, 0x02, + 0x25, 0xf6, 0x2b, 0x72, 0xa3, 0xc0, 0x7d, 0x2e, 0x32, 0x80, 0xe5, 0x05, 0xde, 0x82, 0x22, 0x5f, + 0x26, 0x06, 0x8d, 0x8e, 0xb6, 0xb8, 0xcd, 0x12, 
0xcb, 0xa0, 0x43, 0x32, 0xb5, 0x02, 0xed, 0x31, + 0xb1, 0xa6, 0x94, 0x27, 0x7c, 0x49, 0xad, 0x84, 0xe0, 0xcf, 0x19, 0x86, 0x2f, 0x43, 0x59, 0xac, + 0x2a, 0xd3, 0x36, 0xe8, 0x53, 0xbe, 0x7b, 0xe6, 0x55, 0xb1, 0xd0, 0xda, 0x0c, 0x61, 0xdd, 0x3f, + 0xf4, 0x1d, 0x3b, 0x4a, 0x4d, 0xde, 0x05, 0x03, 0x78, 0xf7, 0x1f, 0xcc, 0x6f, 0xdc, 0xaf, 0x2d, + 0x1f, 0xde, 0x7c, 0x4e, 0xd5, 0xfe, 0x92, 0x81, 0x1c, 0xdf, 0x2f, 0xd6, 0xa0, 0x3c, 0xf8, 0xac, + 0xa7, 0x68, 0xad, 0xee, 0xf1, 0xfe, 0xa1, 0x82, 0x24, 0x5c, 0x05, 0xe0, 0xc0, 0xfd, 0xc3, 0x6e, + 0x63, 0x80, 0x32, 0x71, 0xbb, 0xdd, 0x19, 0xdc, 0xb9, 0x85, 0xb2, 0xb1, 0xc2, 0xb1, 0x00, 0x72, + 0x49, 0xc2, 0xfb, 0x37, 0x51, 0x1e, 0x23, 0xa8, 0x08, 0x03, 0xed, 0x4f, 0x95, 0xd6, 0x9d, 0x5b, + 0xa8, 0x90, 0x46, 0xde, 0xbf, 0x89, 0x56, 0xf0, 0x2a, 0x94, 0x38, 0xb2, 0xdf, 0xed, 0x1e, 0xa2, + 0x62, 0x6c, 0xb3, 0x3f, 0x50, 0xdb, 0x9d, 0x03, 0x54, 0x8a, 0x6d, 0x1e, 0xa8, 0xdd, 0xe3, 0x1e, + 0x82, 0xd8, 0xc2, 0x91, 0xd2, 0xef, 0x37, 0x0e, 0x14, 0x54, 0x8e, 0x19, 0xfb, 0x9f, 0x0d, 0x94, + 0x3e, 0xaa, 0xa4, 0xdc, 0x7a, 0xff, 0x26, 0x5a, 0x8d, 0xbb, 0x50, 0x3a, 0xc7, 0x47, 0xa8, 0x8a, + 0xd7, 0x61, 0x55, 0x74, 0x11, 0x39, 0xb1, 0x36, 0x07, 0xdd, 0xb9, 0x85, 0xd0, 0xcc, 0x11, 0x61, + 0x65, 0x3d, 0x05, 0xdc, 0xb9, 0x85, 0x70, 0xad, 0x09, 0x79, 0x9e, 0x5d, 0x18, 0x43, 0xf5, 0xb0, + 0xb1, 0xaf, 0x1c, 0x6a, 0xdd, 0xde, 0xa0, 0xdd, 0xed, 0x34, 0x0e, 0x91, 0x34, 0xc3, 0x54, 0xe5, + 0x67, 0xc7, 0x6d, 0x55, 0x69, 0xa1, 0x4c, 0x12, 0xeb, 0x29, 0x8d, 0x81, 0xd2, 0x42, 0xd9, 0x9a, + 0x0e, 0x9b, 0xcb, 0xf6, 0xc9, 0xa5, 0x2b, 0x23, 0x31, 0xc5, 0x99, 0x33, 0xa6, 0x98, 0xdb, 0x5a, + 0x98, 0xe2, 0x7f, 0x65, 0x60, 0x63, 0xc9, 0x59, 0xb1, 0xb4, 0x93, 0x1f, 0x43, 0x5e, 0xa4, 0xa8, + 0x38, 0x3d, 0xaf, 0x2d, 0x3d, 0x74, 0x78, 0xc2, 0x2e, 0x9c, 0xa0, 0x5c, 0x2f, 0x59, 0x41, 0x64, + 0xcf, 0xa8, 0x20, 0x98, 0x89, 0x85, 0x3d, 0xfd, 0x17, 0x0b, 0x7b, 0xba, 0x38, 0xf6, 0xee, 0x9c, + 0xe7, 0xd8, 0xe3, 0xd8, 0xb7, 0xdb, 0xdb, 0xf3, 0x4b, 0xf6, 0xf6, 0x7b, 0xb0, 0xbe, 0x60, 0xe8, + 0xdc, 0x7b, 0xec, 0xaf, 0x24, 0x90, 0xcf, 0x0a, 0xce, 0x73, 0x76, 0xba, 0x4c, 0x6a, 0xa7, 0xbb, + 0x37, 0x1f, 0xc1, 0x2b, 0x67, 0x4f, 0xc2, 0xc2, 0x5c, 0x7f, 0x25, 0xc1, 0x85, 0xe5, 0x95, 0xe2, + 0x52, 0x1f, 0x3e, 0x82, 0xc2, 0x84, 0x06, 0x63, 0x27, 0xaa, 0x96, 0xde, 0x5e, 0x72, 0x06, 0x33, + 0xf1, 0xfc, 0x64, 0x87, 0x5a, 0xc9, 0x43, 0x3c, 0x7b, 0x56, 0xb9, 0x27, 0xbc, 0x59, 0xf0, 0xf4, + 0xd7, 0x19, 0x78, 0x79, 0xa9, 0xf1, 0xa5, 0x8e, 0xbe, 0x06, 0x60, 0xda, 0xee, 0x34, 0x10, 0x15, + 0x91, 0xd8, 0x60, 0x4b, 0x1c, 0xe1, 0x9b, 0x17, 0xdb, 0x3c, 0xa7, 0x41, 0x2c, 0xcf, 0x72, 0x39, + 0x08, 0x88, 0x13, 0xee, 0xce, 0x1c, 0xcd, 0x71, 0x47, 0x5f, 0x3f, 0x63, 0xa4, 0x0b, 0x89, 0xf9, + 0x1e, 0x20, 0xdd, 0x32, 0xa9, 0x1d, 0x68, 0x7e, 0xe0, 0x51, 0x32, 0x31, 0xed, 0x11, 0x3f, 0x41, + 0x8a, 0x7b, 0xf9, 0x21, 0xb1, 0x7c, 0xaa, 0xae, 0x09, 0x71, 0x3f, 0x92, 0x32, 0x0d, 0x9e, 0x40, + 0x5e, 0x42, 0xa3, 0x90, 0xd2, 0x10, 0xe2, 0x58, 0xa3, 0xf6, 0xdb, 0x12, 0x94, 0x13, 0x75, 0x35, + 0xbe, 0x02, 0x95, 0x87, 0xe4, 0x31, 0xd1, 0xa2, 0xbb, 0x92, 0x88, 0x44, 0x99, 0x61, 0xbd, 0xf0, + 0xbe, 0xf4, 0x1e, 0x6c, 0x72, 0x8a, 0x33, 0x0d, 0xa8, 0xa7, 0xe9, 0x16, 0xf1, 0x7d, 0x1e, 0xb4, + 0x22, 0xa7, 0x62, 0x26, 0xeb, 0x32, 0x51, 0x33, 0x92, 0xe0, 0xdb, 0xb0, 0xc1, 0x35, 0x26, 0x53, + 0x2b, 0x30, 0x5d, 0x8b, 0x6a, 0xec, 0xf6, 0xe6, 0xf3, 0x93, 0x24, 0xf6, 0x6c, 0x9d, 0x31, 0x8e, + 0x42, 0x02, 0xf3, 0xc8, 0xc7, 0x2d, 0x78, 0x8d, 0xab, 0x8d, 0xa8, 0x4d, 0x3d, 0x12, 0x50, 0x8d, + 0x7e, 0x31, 0x25, 0x96, 0xaf, 0x11, 0xdb, 0xd0, 0xc6, 0xc4, 0x1f, 0xcb, 
0x9b, 0xcc, 0xc0, 0x7e, + 0x46, 0x96, 0xd4, 0x4b, 0x8c, 0x78, 0x10, 0xf2, 0x14, 0x4e, 0x6b, 0xd8, 0xc6, 0xc7, 0xc4, 0x1f, + 0xe3, 0x3d, 0xb8, 0xc0, 0xad, 0xf8, 0x81, 0x67, 0xda, 0x23, 0x4d, 0x1f, 0x53, 0xfd, 0x91, 0x36, + 0x0d, 0x86, 0x77, 0xe5, 0x57, 0x92, 0xfd, 0x73, 0x0f, 0xfb, 0x9c, 0xd3, 0x64, 0x94, 0xe3, 0x60, + 0x78, 0x17, 0xf7, 0xa1, 0xc2, 0x26, 0x63, 0x62, 0x7e, 0x49, 0xb5, 0xa1, 0xe3, 0xf1, 0xa3, 0xb1, + 0xba, 0x64, 0x6b, 0x4a, 0x44, 0xb0, 0xde, 0x0d, 0x15, 0x8e, 0x1c, 0x83, 0xee, 0xe5, 0xfb, 0x3d, + 0x45, 0x69, 0xa9, 0xe5, 0xc8, 0xca, 0x7d, 0xc7, 0x63, 0x09, 0x35, 0x72, 0xe2, 0x00, 0x97, 0x45, + 0x42, 0x8d, 0x9c, 0x28, 0xbc, 0xb7, 0x61, 0x43, 0xd7, 0xc5, 0x98, 0x4d, 0x5d, 0x0b, 0xef, 0x58, + 0xbe, 0x8c, 0x52, 0xc1, 0xd2, 0xf5, 0x03, 0x41, 0x08, 0x73, 0xdc, 0xc7, 0x1f, 0xc2, 0xcb, 0xb3, + 0x60, 0x25, 0x15, 0xd7, 0x17, 0x46, 0x39, 0xaf, 0x7a, 0x1b, 0x36, 0xdc, 0xd3, 0x45, 0x45, 0x9c, + 0xea, 0xd1, 0x3d, 0x9d, 0x57, 0xfb, 0x00, 0x36, 0xdd, 0xb1, 0xbb, 0xa8, 0x77, 0x3d, 0xa9, 0x87, + 0xdd, 0xb1, 0x3b, 0xaf, 0xf8, 0x16, 0xbf, 0x70, 0x7b, 0x54, 0x27, 0x01, 0x35, 0xe4, 0x8b, 0x49, + 0x7a, 0x42, 0x80, 0x77, 0x01, 0xe9, 0xba, 0x46, 0x6d, 0x72, 0x62, 0x51, 0x8d, 0x78, 0xd4, 0x26, + 0xbe, 0x7c, 0x39, 0x49, 0xae, 0xea, 0xba, 0xc2, 0xa5, 0x0d, 0x2e, 0xc4, 0xd7, 0x61, 0xdd, 0x39, + 0x79, 0xa8, 0x8b, 0x94, 0xd4, 0x5c, 0x8f, 0x0e, 0xcd, 0xa7, 0xf2, 0x9b, 0x3c, 0xbe, 0x6b, 0x4c, + 0xc0, 0x13, 0xb2, 0xc7, 0x61, 0x7c, 0x0d, 0x90, 0xee, 0x8f, 0x89, 0xe7, 0xf2, 0x3d, 0xd9, 0x77, + 0x89, 0x4e, 0xe5, 0xb7, 0x04, 0x55, 0xe0, 0x9d, 0x08, 0x66, 0x4b, 0xc2, 0x7f, 0x62, 0x0e, 0x83, + 0xc8, 0xe2, 0x55, 0xb1, 0x24, 0x38, 0x16, 0x5a, 0xdb, 0x01, 0xc4, 0x42, 0x91, 0xea, 0x78, 0x87, + 0xd3, 0xaa, 0xee, 0xd8, 0x4d, 0xf6, 0xfb, 0x06, 0xac, 0x32, 0xe6, 0xac, 0xd3, 0x6b, 0xa2, 0x20, + 0x73, 0xc7, 0x89, 0x1e, 0x6f, 0xc1, 0x05, 0x46, 0x9a, 0xd0, 0x80, 0x18, 0x24, 0x20, 0x09, 0xf6, + 0xbb, 0x9c, 0xcd, 0xe2, 0x7e, 0x14, 0x0a, 0x53, 0x7e, 0x7a, 0xd3, 0x93, 0xd3, 0x38, 0xb3, 0x6e, + 0x08, 0x3f, 0x19, 0x16, 0xe5, 0xd6, 0xf7, 0x56, 0x74, 0xd7, 0xf6, 0xa0, 0x92, 0x4c, 0x7c, 0x5c, + 0x02, 0x91, 0xfa, 0x48, 0x62, 0x55, 0x50, 0xb3, 0xdb, 0x62, 0xf5, 0xcb, 0xe7, 0x0a, 0xca, 0xb0, + 0x3a, 0xea, 0xb0, 0x3d, 0x50, 0x34, 0xf5, 0xb8, 0x33, 0x68, 0x1f, 0x29, 0x28, 0x9b, 0x28, 0xd8, + 0x1f, 0xe4, 0x8a, 0x6f, 0xa3, 0xab, 0xb5, 0xaf, 0x33, 0x50, 0x4d, 0xdf, 0xc0, 0xf0, 0x8f, 0xe0, + 0x62, 0xf4, 0x5c, 0xe2, 0xd3, 0x40, 0x7b, 0x62, 0x7a, 0x7c, 0x45, 0x4e, 0x88, 0x38, 0x1d, 0xe3, + 0x9c, 0xd8, 0x0c, 0x59, 0x7d, 0x1a, 0x7c, 0x62, 0x7a, 0x6c, 0xbd, 0x4d, 0x48, 0x80, 0x0f, 0xe1, + 0xb2, 0xed, 0x68, 0x7e, 0x40, 0x6c, 0x83, 0x78, 0x86, 0x36, 0x7b, 0xa8, 0xd2, 0x88, 0xae, 0x53, + 0xdf, 0x77, 0xc4, 0x49, 0x18, 0x5b, 0x79, 0xd5, 0x76, 0xfa, 0x21, 0x79, 0x76, 0x44, 0x34, 0x42, + 0xea, 0x5c, 0xfe, 0x66, 0xcf, 0xca, 0xdf, 0x57, 0xa0, 0x34, 0x21, 0xae, 0x46, 0xed, 0xc0, 0x3b, + 0xe5, 0x75, 0x77, 0x51, 0x2d, 0x4e, 0x88, 0xab, 0xb0, 0xf6, 0x0b, 0xb9, 0xfe, 0x3c, 0xc8, 0x15, + 0x8b, 0xa8, 0xf4, 0x20, 0x57, 0x2c, 0x21, 0xa8, 0xfd, 0x33, 0x0b, 0x95, 0x64, 0x1d, 0xce, 0xae, + 0x35, 0x3a, 0x3f, 0xb2, 0x24, 0xbe, 0xa9, 0xbd, 0xf1, 0x8d, 0x55, 0x7b, 0xbd, 0xc9, 0xce, 0xb2, + 0xbd, 0x82, 0xa8, 0x8e, 0x55, 0xa1, 0xc9, 0xea, 0x08, 0x96, 0x6c, 0x54, 0x54, 0x23, 0x45, 0x35, + 0x6c, 0xe1, 0x03, 0x28, 0x3c, 0xf4, 0xb9, 0xed, 0x02, 0xb7, 0xfd, 0xe6, 0x37, 0xdb, 0x7e, 0xd0, + 0xe7, 0xc6, 0x4b, 0x0f, 0xfa, 0x5a, 0xa7, 0xab, 0x1e, 0x35, 0x0e, 0xd5, 0x50, 0x1d, 0x5f, 0x82, + 0x9c, 0x45, 0xbe, 0x3c, 0x4d, 0x9f, 0x7a, 0x1c, 0x3a, 0xef, 0x24, 0x5c, 0x82, 0xdc, 0x13, 0x4a, + 
0x1e, 0xa5, 0xcf, 0x1a, 0x0e, 0x7d, 0x8f, 0x8b, 0x61, 0x17, 0xf2, 0x3c, 0x5e, 0x18, 0x20, 0x8c, + 0x18, 0x7a, 0x09, 0x17, 0x21, 0xd7, 0xec, 0xaa, 0x6c, 0x41, 0x20, 0xa8, 0x08, 0x54, 0xeb, 0xb5, + 0x95, 0xa6, 0x82, 0x32, 0xb5, 0xdb, 0x50, 0x10, 0x41, 0x60, 0x8b, 0x25, 0x0e, 0x03, 0x7a, 0x29, + 0x6c, 0x86, 0x36, 0xa4, 0x48, 0x7a, 0x7c, 0xb4, 0xaf, 0xa8, 0x28, 0x93, 0x9e, 0xea, 0x1c, 0xca, + 0xd7, 0x7c, 0xa8, 0x24, 0x0b, 0xf1, 0x17, 0x73, 0xc9, 0xfe, 0x9b, 0x04, 0xe5, 0x44, 0x61, 0xcd, + 0x2a, 0x22, 0x62, 0x59, 0xce, 0x13, 0x8d, 0x58, 0x26, 0xf1, 0xc3, 0xd4, 0x00, 0x0e, 0x35, 0x18, + 0x72, 0xde, 0xa9, 0x7b, 0x41, 0x4b, 0x24, 0x8f, 0x0a, 0xb5, 0x3f, 0x4a, 0x80, 0xe6, 0x2b, 0xdb, + 0x39, 0x37, 0xa5, 0x1f, 0xd2, 0xcd, 0xda, 0x1f, 0x24, 0xa8, 0xa6, 0xcb, 0xd9, 0x39, 0xf7, 0xae, + 0xfc, 0xa0, 0xee, 0xfd, 0x23, 0x03, 0xab, 0xa9, 0x22, 0xf6, 0xbc, 0xde, 0x7d, 0x01, 0xeb, 0xa6, + 0x41, 0x27, 0xae, 0x13, 0x50, 0x5b, 0x3f, 0xd5, 0x2c, 0xfa, 0x98, 0x5a, 0x72, 0x8d, 0x6f, 0x1a, + 0xbb, 0xdf, 0x5c, 0x26, 0xd7, 0xdb, 0x33, 0xbd, 0x43, 0xa6, 0xb6, 0xb7, 0xd1, 0x6e, 0x29, 0x47, + 0xbd, 0xee, 0x40, 0xe9, 0x34, 0x3f, 0xd3, 0x8e, 0x3b, 0x3f, 0xed, 0x74, 0x3f, 0xe9, 0xa8, 0xc8, + 0x9c, 0xa3, 0x7d, 0x8f, 0xcb, 0xbe, 0x07, 0x68, 0xde, 0x29, 0x7c, 0x11, 0x96, 0xb9, 0x85, 0x5e, + 0xc2, 0x1b, 0xb0, 0xd6, 0xe9, 0x6a, 0xfd, 0x76, 0x4b, 0xd1, 0x94, 0xfb, 0xf7, 0x95, 0xe6, 0xa0, + 0x2f, 0x1e, 0x3e, 0x62, 0xf6, 0x20, 0xb5, 0xc0, 0x6b, 0xbf, 0xcf, 0xc2, 0xc6, 0x12, 0x4f, 0x70, + 0x23, 0xbc, 0xb2, 0x88, 0x5b, 0xd4, 0x8d, 0xf3, 0x78, 0x5f, 0x67, 0x35, 0x43, 0x8f, 0x78, 0x41, + 0x78, 0xc3, 0xb9, 0x06, 0x2c, 0x4a, 0x76, 0x60, 0x0e, 0x4d, 0xea, 0x85, 0xef, 0x44, 0xe2, 0x1e, + 0xb3, 0x36, 0xc3, 0xc5, 0x53, 0xd1, 0xbb, 0x80, 0x5d, 0xc7, 0x37, 0x03, 0xf3, 0x31, 0xd5, 0x4c, + 0x3b, 0x7a, 0x54, 0x62, 0xf7, 0x9a, 0x9c, 0x8a, 0x22, 0x49, 0xdb, 0x0e, 0x62, 0xb6, 0x4d, 0x47, + 0x64, 0x8e, 0xcd, 0x36, 0xf3, 0xac, 0x8a, 0x22, 0x49, 0xcc, 0xbe, 0x02, 0x15, 0xc3, 0x99, 0xb2, + 0x62, 0x4f, 0xf0, 0xd8, 0xd9, 0x21, 0xa9, 0x65, 0x81, 0xc5, 0x94, 0xb0, 0x8c, 0x9f, 0xbd, 0x66, + 0x55, 0xd4, 0xb2, 0xc0, 0x04, 0xe5, 0x2a, 0xac, 0x91, 0xd1, 0xc8, 0x63, 0xc6, 0x23, 0x43, 0xe2, + 0x62, 0x52, 0x8d, 0x61, 0x4e, 0xdc, 0x7a, 0x00, 0xc5, 0x28, 0x0e, 0xec, 0xa8, 0x66, 0x91, 0xd0, + 0x5c, 0x71, 0xdb, 0xce, 0xec, 0x94, 0xd4, 0xa2, 0x1d, 0x09, 0xaf, 0x40, 0xc5, 0xf4, 0xb5, 0xd9, + 0xe3, 0x7c, 0x66, 0x3b, 0xb3, 0x53, 0x54, 0xcb, 0xa6, 0x1f, 0x3f, 0x6c, 0xd6, 0xbe, 0xca, 0x40, + 0x35, 0xfd, 0x71, 0x01, 0xb7, 0xa0, 0x68, 0x39, 0x3a, 0xe1, 0xa9, 0x25, 0xbe, 0x6c, 0xed, 0x3c, + 0xe7, 0x7b, 0x44, 0xfd, 0x30, 0xe4, 0xab, 0xb1, 0xe6, 0xd6, 0xdf, 0x25, 0x28, 0x46, 0x30, 0xbe, + 0x00, 0x39, 0x97, 0x04, 0x63, 0x6e, 0x2e, 0xbf, 0x9f, 0x41, 0x92, 0xca, 0xdb, 0x0c, 0xf7, 0x5d, + 0x62, 0xf3, 0x14, 0x08, 0x71, 0xd6, 0x66, 0xf3, 0x6a, 0x51, 0x62, 0xf0, 0x5b, 0x8f, 0x33, 0x99, + 0x50, 0x3b, 0xf0, 0xa3, 0x79, 0x0d, 0xf1, 0x66, 0x08, 0xe3, 0x77, 0x60, 0x3d, 0xf0, 0x88, 0x69, + 0xa5, 0xb8, 0x39, 0xce, 0x45, 0x91, 0x20, 0x26, 0xef, 0xc1, 0xa5, 0xc8, 0xae, 0x41, 0x03, 0xa2, + 0x8f, 0xa9, 0x31, 0x53, 0x2a, 0xf0, 0xd7, 0x8d, 0x8b, 0x21, 0xa1, 0x15, 0xca, 0x23, 0xdd, 0xda, + 0xd7, 0x12, 0xac, 0x47, 0xf7, 0x34, 0x23, 0x0e, 0xd6, 0x11, 0x00, 0xb1, 0x6d, 0x27, 0x48, 0x86, + 0x6b, 0x31, 0x95, 0x17, 0xf4, 0xea, 0x8d, 0x58, 0x49, 0x4d, 0x18, 0xd8, 0x9a, 0x00, 0xcc, 0x24, + 0x67, 0x86, 0xed, 0x32, 0x94, 0xc3, 0x2f, 0x47, 0xfc, 0xf3, 0xa3, 0xb8, 0xd9, 0x83, 0x80, 0xd8, + 0x85, 0x0e, 0x6f, 0x42, 0xfe, 0x84, 0x8e, 0x4c, 0x3b, 0x7c, 0x0f, 0x16, 0x8d, 0xe8, 0xfd, 0x25, + 0x17, 0xbf, 0xbf, 0xec, 
0xff, 0x46, 0x82, 0x0d, 0xdd, 0x99, 0xcc, 0xfb, 0xbb, 0x8f, 0xe6, 0x9e, + 0x17, 0xfc, 0x8f, 0xa5, 0xcf, 0x3f, 0x1a, 0x99, 0xc1, 0x78, 0x7a, 0x52, 0xd7, 0x9d, 0xc9, 0xee, + 0xc8, 0xb1, 0x88, 0x3d, 0x9a, 0x7d, 0x3f, 0xe5, 0x7f, 0xf4, 0x1b, 0x23, 0x6a, 0xdf, 0x18, 0x39, + 0x89, 0xaf, 0xa9, 0xf7, 0x66, 0x7f, 0xff, 0x27, 0x49, 0x7f, 0xca, 0x64, 0x0f, 0x7a, 0xfb, 0x7f, + 0xce, 0x6c, 0x1d, 0x88, 0xee, 0x7a, 0x51, 0x78, 0x54, 0x3a, 0xb4, 0xa8, 0xce, 0x86, 0xfc, 0xff, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x3e, 0xe8, 0xef, 0xc4, 0x9b, 0x1d, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto new file mode 100644 index 0000000..ed08fcb --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto @@ -0,0 +1,883 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package google.protobuf; +option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. 
+message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; + optional int32 end = 2; + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. 
+ TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + }; + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + }; + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. 
These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default=false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default=false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default=false]; + + // This option does nothing. 
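For orientation, here is a minimal Go sketch (not part of the vendored file) of how the FieldDescriptorProto message defined above can be built and inspected through the Go types that protoc-gen-go generates from this descriptor.proto. The field it models, `optional string name = 1;`, is purely illustrative.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	// A hypothetical field, `optional string name = 1;`, expressed as the
	// FieldDescriptorProto message described above.
	f := &descriptor.FieldDescriptorProto{
		Name:   proto.String("name"),
		Number: proto.Int32(1),
		Label:  descriptor.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
		Type:   descriptor.FieldDescriptorProto_TYPE_STRING.Enum(),
	}
	// Generated getters return zero values when an optional pointer is nil,
	// so they are safe to call without explicit nil checks.
	fmt.Println(f.GetName(), f.GetNumber(), f.GetType()) // name 1 TYPE_STRING
}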
+ optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default=false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default=SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default=false]; + optional bool java_generic_services = 17 [default=false]; + optional bool py_generic_services = 18 [default=false]; + optional bool php_generic_services = 42 [default=false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default=false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default=false]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. 
+ optional string php_namespace = 41; + + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be used + // for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default=false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default=false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default=false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementions still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. 
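A small, hedged Go sketch of how a consumer of these descriptors can recognize the auto-generated MapFieldEntry messages described in the map_entry comment above; the helper name and the sample descriptor are illustrative only.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
)

// isMapEntry reports whether a message descriptor is one of the synthesized
// map-entry types (option map_entry = true) that the compiler produces for
// map<K, V> fields, as described above.
func isMapEntry(d *descriptor.DescriptorProto) bool {
	return d.GetOptions().GetMapEntry()
}

func main() {
	entry := &descriptor.DescriptorProto{
		Name:    proto.String("MapFieldEntry"),
		Options: &descriptor.MessageOptions{MapEntry: proto.Bool(true)},
	}
	fmt.Println(isMapEntry(entry)) // true
}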
+ extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. 
+ optional bool lazy = 5 [default=false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default=false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default=false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default=false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. 
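As a rough illustration (the helper below is assumed, not part of the vendored sources), the various *Options messages above are reached through the GetOptions accessors of their parent descriptors, and getters on a nil options pointer return the declared defaults.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
)

// describeField summarizes a few of the FieldOptions defined above.
// It works even when no options were set, because nil-receiver getters
// fall back to the field defaults.
func describeField(f *descriptor.FieldDescriptorProto) string {
	opts := f.GetOptions()
	return fmt.Sprintf("%s: packed=%v lazy=%v deprecated=%v",
		f.GetName(), opts.GetPacked(), opts.GetLazy(), opts.GetDeprecated())
}

func main() {
	f := &descriptor.FieldDescriptorProto{
		Name:    proto.String("values"),
		Options: &descriptor.FieldOptions{Packed: proto.Bool(true)},
	}
	fmt.Println(describeField(f)) // values: packed=true lazy=false deprecated=false
}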
+ optional bool deprecated = 33 [default=false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = + 34 [default=IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. 
This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed=true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed=true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. 
+ // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed=true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile deleted file mode 100644 index b5715c3..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -include $(GOROOT)/src/Make.inc - -TARG=github.com/golang/protobuf/compiler/generator -GOFILES=\ - generator.go\ - -DEPS=../descriptor ../plugin ../../proto - -include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go index 211ab5d..6f4a902 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go @@ -40,19 +40,25 @@ import ( "bufio" "bytes" "compress/gzip" + "crypto/sha256" + "encoding/hex" "fmt" + "go/ast" + "go/build" "go/parser" "go/printer" "go/token" "log" "os" "path" + "sort" "strconv" "strings" "unicode" "unicode/utf8" "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/generator/internal/remap" "github.com/golang/protobuf/protoc-gen-go/descriptor" plugin "github.com/golang/protobuf/protoc-gen-go/plugin" @@ -62,7 +68,7 @@ import ( // It is incremented whenever an incompatibility between the generated code and // proto package is introduced; the generated code references // a constant, proto.ProtoPackageIsVersionN (where N is generatedCodeVersion). -const generatedCodeVersion = 2 +const generatedCodeVersion = 3 // A Plugin provides functionality to add to the output during Go code generation, // such as to produce RPC stubs. @@ -88,6 +94,14 @@ func RegisterPlugin(p Plugin) { plugins = append(plugins, p) } +// A GoImportPath is the import path of a Go package. e.g., "google.golang.org/genproto/protobuf". +type GoImportPath string + +func (p GoImportPath) String() string { return strconv.Quote(string(p)) } + +// A GoPackageName is the name of a Go package. e.g., "protobuf". +type GoPackageName string + // Each type we import as a protocol buffer (other than FileDescriptorProto) needs // a pointer to the FileDescriptorProto that represents it. These types achieve that // wrapping by placing each Proto inside a struct with the pointer to its File. The @@ -96,19 +110,21 @@ func RegisterPlugin(p Plugin) { // The file and package name method are common to messages and enums. type common struct { - file *descriptor.FileDescriptorProto // File this object comes from. + file *FileDescriptor // File this object comes from. } -// PackageName is name in the package clause in the generated file. -func (c *common) PackageName() string { return uniquePackageOf(c.file) } +// GoImportPath is the import path of the Go package containing the type. 
+func (c *common) GoImportPath() GoImportPath { + return c.file.importPath +} -func (c *common) File() *descriptor.FileDescriptorProto { return c.file } +func (c *common) File() *FileDescriptor { return c.file } func fileIsProto3(file *descriptor.FileDescriptorProto) bool { return file.GetSyntax() == "proto3" } -func (c *common) proto3() bool { return fileIsProto3(c.file) } +func (c *common) proto3() bool { return fileIsProto3(c.file.FileDescriptorProto) } // Descriptor represents a protocol buffer message. type Descriptor struct { @@ -134,7 +150,7 @@ func (d *Descriptor) TypeName() []string { for parent := d; parent != nil; parent = parent.parent { n++ } - s := make([]string, n, n) + s := make([]string, n) for parent := d; parent != nil; parent = parent.parent { n-- s[n] = parent.GetName() @@ -256,77 +272,60 @@ type FileDescriptor struct { // This is used for supporting public imports. exported map[Object][]symbol - index int // The index of this file in the list of files to generate code for + importPath GoImportPath // Import path of this file's package. + packageName GoPackageName // Name of this file's Go package. proto3 bool // whether to generate proto3 code for this file } -// PackageName is the package name we'll use in the generated code to refer to this file. -func (d *FileDescriptor) PackageName() string { return uniquePackageOf(d.FileDescriptorProto) } - // VarName is the variable name we'll use in the generated code to refer // to the compressed bytes of this descriptor. It is not exported, so // it is only valid inside the generated package. -func (d *FileDescriptor) VarName() string { return fmt.Sprintf("fileDescriptor%d", d.index) } +func (d *FileDescriptor) VarName() string { + h := sha256.Sum256([]byte(d.GetName())) + return fmt.Sprintf("fileDescriptor_%s", hex.EncodeToString(h[:8])) +} // goPackageOption interprets the file's go_package option. // If there is no go_package, it returns ("", "", false). // If there's a simple name, it returns ("", pkg, true). // If the option implies an import path, it returns (impPath, pkg, true). -func (d *FileDescriptor) goPackageOption() (impPath, pkg string, ok bool) { - pkg = d.GetOptions().GetGoPackage() - if pkg == "" { - return - } - ok = true - // The presence of a slash implies there's an import path. - slash := strings.LastIndex(pkg, "/") - if slash < 0 { - return - } - impPath, pkg = pkg, pkg[slash+1:] - // A semicolon-delimited suffix overrides the package name. - sc := strings.IndexByte(impPath, ';') - if sc < 0 { - return +func (d *FileDescriptor) goPackageOption() (impPath GoImportPath, pkg GoPackageName, ok bool) { + opt := d.GetOptions().GetGoPackage() + if opt == "" { + return "", "", false } - impPath, pkg = impPath[:sc], impPath[sc+1:] - return -} - -// goPackageName returns the Go package name to use in the -// generated Go file. The result explicit reports whether the name -// came from an option go_package statement. If explicit is false, -// the name was derived from the protocol buffer's package statement -// or the input file name. -func (d *FileDescriptor) goPackageName() (name string, explicit bool) { - // Does the file have a "go_package" option? - if _, pkg, ok := d.goPackageOption(); ok { - return pkg, true + // A semicolon-delimited suffix delimits the import path and package name. + sc := strings.Index(opt, ";") + if sc >= 0 { + return GoImportPath(opt[:sc]), cleanPackageName(opt[sc+1:]), true } - - // Does the file have a package clause? 
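The new VarName above derives a stable identifier from the proto file name instead of a per-run index, so the descriptor variable name no longer depends on the order in which files are generated. A standalone sketch of the same hashing scheme (the file name is just an example input):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	// First 8 bytes of the SHA-256 of the file name become part of the
	// generated variable name, mirroring VarName above.
	h := sha256.Sum256([]byte("foo/bar.proto"))
	fmt.Printf("fileDescriptor_%s\n", hex.EncodeToString(h[:8]))
}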
- if pkg := d.GetPackage(); pkg != "" { - return pkg, false + // The presence of a slash implies there's an import path. + slash := strings.LastIndex(opt, "/") + if slash >= 0 { + return GoImportPath(opt), cleanPackageName(opt[slash+1:]), true } - // Use the file base name. - return baseName(d.GetName()), false + return "", cleanPackageName(opt), true } // goFileName returns the output name for the generated Go file. -func (d *FileDescriptor) goFileName() string { +func (d *FileDescriptor) goFileName(pathType pathType) string { name := *d.Name if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" { name = name[:len(name)-len(ext)] } name += ".pb.go" + if pathType == pathTypeSourceRelative { + return name + } + // Does the file have a "go_package" option? // If it does, it may override the filename. if impPath, _, ok := d.goPackageOption(); ok && impPath != "" { // Replace the existing dirname with the declared import path. _, name = path.Split(name) - name = path.Join(impPath, name) + name = path.Join(string(impPath), name) return name } @@ -341,14 +340,13 @@ func (d *FileDescriptor) addExport(obj Object, sym symbol) { type symbol interface { // GenerateAlias should generate an appropriate alias // for the symbol from the named package. - GenerateAlias(g *Generator, pkg string) + GenerateAlias(g *Generator, filename string, pkg GoPackageName) } type messageSymbol struct { sym string hasExtensions, isMessageSet bool - hasOneof bool - getters []getterSymbol + oneofTypes []string } type getterSymbol struct { @@ -358,144 +356,12 @@ type getterSymbol struct { genType bool // whether typ contains a generated type (message/group/enum) } -func (ms *messageSymbol) GenerateAlias(g *Generator, pkg string) { - remoteSym := pkg + "." + ms.sym - - g.P("type ", ms.sym, " ", remoteSym) - g.P("func (m *", ms.sym, ") Reset() { (*", remoteSym, ")(m).Reset() }") - g.P("func (m *", ms.sym, ") String() string { return (*", remoteSym, ")(m).String() }") - g.P("func (*", ms.sym, ") ProtoMessage() {}") - if ms.hasExtensions { - g.P("func (*", ms.sym, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange ", - "{ return (*", remoteSym, ")(nil).ExtensionRangeArray() }") - if ms.isMessageSet { - g.P("func (m *", ms.sym, ") Marshal() ([]byte, error) ", - "{ return (*", remoteSym, ")(m).Marshal() }") - g.P("func (m *", ms.sym, ") Unmarshal(buf []byte) error ", - "{ return (*", remoteSym, ")(m).Unmarshal(buf) }") - } - } - if ms.hasOneof { - // Oneofs and public imports do not mix well. - // We can make them work okay for the binary format, - // but they're going to break weirdly for text/JSON. 
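To make the go_package parsing above concrete, here is a simplified standalone sketch of how the three recognized forms are split; it deliberately omits cleanPackageName and is not the vendored implementation.

package main

import (
	"fmt"
	"strings"
)

// splitGoPackage mimics the three go_package forms handled above:
//   "example.com/foo/bar;baz" -> import path plus explicit package name
//   "example.com/foo/bar"     -> import path, package name from last element
//   "baz"                     -> package name only
func splitGoPackage(opt string) (impPath, pkg string) {
	if sc := strings.Index(opt, ";"); sc >= 0 {
		return opt[:sc], opt[sc+1:]
	}
	if slash := strings.LastIndex(opt, "/"); slash >= 0 {
		return opt, opt[slash+1:]
	}
	return "", opt
}

func main() {
	fmt.Println(splitGoPackage("example.com/foo/bar;baz")) // example.com/foo/bar baz
	fmt.Println(splitGoPackage("example.com/foo/bar"))     // example.com/foo/bar bar
	fmt.Println(splitGoPackage("baz"))                     // "" baz
}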
- enc := "_" + ms.sym + "_OneofMarshaler" - dec := "_" + ms.sym + "_OneofUnmarshaler" - size := "_" + ms.sym + "_OneofSizer" - encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" - decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" - sizeSig := "(msg " + g.Pkg["proto"] + ".Message) int" - g.P("func (m *", ms.sym, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") - g.P("return ", enc, ", ", dec, ", ", size, ", nil") - g.P("}") - - g.P("func ", enc, encSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("enc, _, _, _ := m0.XXX_OneofFuncs()") - g.P("return enc(m0, b)") - g.P("}") - - g.P("func ", dec, decSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, dec, _, _ := m0.XXX_OneofFuncs()") - g.P("return dec(m0, tag, wire, b)") - g.P("}") - - g.P("func ", size, sizeSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, _, size, _ := m0.XXX_OneofFuncs()") - g.P("return size(m0)") - g.P("}") - } - for _, get := range ms.getters { - - if get.typeName != "" { - g.RecordTypeUse(get.typeName) - } - typ := get.typ - val := "(*" + remoteSym + ")(m)." + get.name + "()" - if get.genType { - // typ will be "*pkg.T" (message/group) or "pkg.T" (enum) - // or "map[t]*pkg.T" (map to message/enum). - // The first two of those might have a "[]" prefix if it is repeated. - // Drop any package qualifier since we have hoisted the type into this package. - rep := strings.HasPrefix(typ, "[]") - if rep { - typ = typ[2:] - } - isMap := strings.HasPrefix(typ, "map[") - star := typ[0] == '*' - if !isMap { // map types handled lower down - typ = typ[strings.Index(typ, ".")+1:] - } - if star { - typ = "*" + typ - } - if rep { - // Go does not permit conversion between slice types where both - // element types are named. That means we need to generate a bit - // of code in this situation. - // typ is the element type. - // val is the expression to get the slice from the imported type. - - ctyp := typ // conversion type expression; "Foo" or "(*Foo)" - if star { - ctyp = "(" + typ + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() []", typ, " {") - g.In() - g.P("o := ", val) - g.P("if o == nil {") - g.In() - g.P("return nil") - g.Out() - g.P("}") - g.P("s := make([]", typ, ", len(o))") - g.P("for i, x := range o {") - g.In() - g.P("s[i] = ", ctyp, "(x)") - g.Out() - g.P("}") - g.P("return s") - g.Out() - g.P("}") - continue - } - if isMap { - // Split map[keyTyp]valTyp. - bra, ket := strings.Index(typ, "["), strings.Index(typ, "]") - keyTyp, valTyp := typ[bra+1:ket], typ[ket+1:] - // Drop any package qualifier. - // Only the value type may be foreign. - star := valTyp[0] == '*' - valTyp = valTyp[strings.Index(valTyp, ".")+1:] - if star { - valTyp = "*" + valTyp - } - - typ := "map[" + keyTyp + "]" + valTyp - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " {") - g.P("o := ", val) - g.P("if o == nil { return nil }") - g.P("s := make(", typ, ", len(o))") - g.P("for k, v := range o {") - g.P("s[k] = (", valTyp, ")(v)") - g.P("}") - g.P("return s") - g.P("}") - continue - } - // Convert imported type into the forwarding type. 
- val = "(" + typ + ")(" + val + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " { return ", val, " }") +func (ms *messageSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) { + g.P("// ", ms.sym, " from public import ", filename) + g.P("type ", ms.sym, " = ", pkg, ".", ms.sym) + for _, name := range ms.oneofTypes { + g.P("type ", name, " = ", pkg, ".", name) } - } type enumSymbol struct { @@ -503,16 +369,12 @@ type enumSymbol struct { proto3 bool // Whether this came from a proto3 file. } -func (es enumSymbol) GenerateAlias(g *Generator, pkg string) { +func (es enumSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) { s := es.name - g.P("type ", s, " ", pkg, ".", s) + g.P("// ", s, " from public import ", filename) + g.P("type ", s, " = ", pkg, ".", s) g.P("var ", s, "_name = ", pkg, ".", s, "_name") g.P("var ", s, "_value = ", pkg, ".", s, "_value") - g.P("func (x ", s, ") String() string { return (", pkg, ".", s, ")(x).String() }") - if !es.proto3 { - g.P("func (x ", s, ") Enum() *", s, "{ return (*", s, ")((", pkg, ".", s, ")(x).Enum()) }") - g.P("func (x *", s, ") UnmarshalJSON(data []byte) error { return (*", pkg, ".", s, ")(x).UnmarshalJSON(data) }") - } } type constOrVarSymbol struct { @@ -521,8 +383,8 @@ type constOrVarSymbol struct { cast string // if non-empty, a type cast is required (used for enums) } -func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { - v := pkg + "." + cs.sym +func (cs constOrVarSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) { + v := string(pkg) + "." + cs.sym if cs.cast != "" { v = cs.cast + "(" + v + ")" } @@ -531,21 +393,9 @@ func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { // Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects. type Object interface { - PackageName() string // The name we use in our output (a_b_c), possibly renamed for uniqueness. + GoImportPath() GoImportPath TypeName() []string - File() *descriptor.FileDescriptorProto -} - -// Each package name we generate must be unique. The package we're generating -// gets its own name but every other package must have a unique name that does -// not conflict in the code we generate. These names are chosen globally (although -// they don't have to be, it simplifies things to do them globally). -func uniquePackageOf(fd *descriptor.FileDescriptorProto) string { - s, ok := uniquePackageName[fd] - if !ok { - log.Fatal("internal error: no package name defined for " + fd.GetName()) - } - return s + File() *FileDescriptor } // Generator is the type whose methods generate the output, stored in the associated response structure. @@ -562,18 +412,31 @@ type Generator struct { Pkg map[string]string // The names under which we import support packages - packageName string // What we're calling ourselves. - allFiles []*FileDescriptor // All files in the tree - allFilesByName map[string]*FileDescriptor // All files by filename. - genFiles []*FileDescriptor // Those files we will generate output for. - file *FileDescriptor // The file we are compiling now. - usedPackages map[string]bool // Names of packages used in current file. - typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. - init []string // Lines to emit in the init function. + outputImportPath GoImportPath // Package we're generating code for. 
+ allFiles []*FileDescriptor // All files in the tree + allFilesByName map[string]*FileDescriptor // All files by filename. + genFiles []*FileDescriptor // Those files we will generate output for. + file *FileDescriptor // The file we are compiling now. + packageNames map[GoImportPath]GoPackageName // Imported package names in the current file. + usedPackages map[GoImportPath]bool // Packages used in current file. + usedPackageNames map[GoPackageName]bool // Package names used in the current file. + addedImports map[GoImportPath]bool // Additional imports to emit. + typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. + init []string // Lines to emit in the init function. indent string + pathType pathType // How to generate output filenames. writeOutput bool + annotateCode bool // whether to store annotations + annotations []*descriptor.GeneratedCodeInfo_Annotation // annotations to store } +type pathType int + +const ( + pathTypeImport pathType = iota + pathTypeSourceRelative +) + // New creates a new generator and allocates the request and response protobufs. func New() *Generator { g := new(Generator) @@ -618,8 +481,21 @@ func (g *Generator) CommandLineParameters(parameter string) { g.ImportPrefix = v case "import_path": g.PackageImportPath = v + case "paths": + switch v { + case "import": + g.pathType = pathTypeImport + case "source_relative": + g.pathType = pathTypeSourceRelative + default: + g.Fail(fmt.Sprintf(`Unknown path type %q: want "import" or "source_relative".`, v)) + } case "plugins": pluginList = v + case "annotate_code": + if v == "true" { + g.annotateCode = true + } default: if len(k) > 0 && k[0] == 'M' { g.ImportMap[k[1:]] = v @@ -646,37 +522,49 @@ func (g *Generator) CommandLineParameters(parameter string) { // If its file is in a different package, it returns the package name we're using for this file, plus ".". // Otherwise it returns the empty string. func (g *Generator) DefaultPackageName(obj Object) string { - pkg := obj.PackageName() - if pkg == g.packageName { + importPath := obj.GoImportPath() + if importPath == g.outputImportPath { return "" } - return pkg + "." + return string(g.GoPackageName(importPath)) + "." } -// For each input file, the unique package name to use, underscored. -var uniquePackageName = make(map[*descriptor.FileDescriptorProto]string) +// GoPackageName returns the name used for a package. +func (g *Generator) GoPackageName(importPath GoImportPath) GoPackageName { + if name, ok := g.packageNames[importPath]; ok { + return name + } + name := cleanPackageName(baseName(string(importPath))) + for i, orig := 1, name; g.usedPackageNames[name] || isGoPredeclaredIdentifier[string(name)]; i++ { + name = orig + GoPackageName(strconv.Itoa(i)) + } + g.packageNames[importPath] = name + g.usedPackageNames[name] = true + return name +} -// Package names already registered. Key is the name from the .proto file; -// value is the name that appears in the generated code. -var pkgNamesInUse = make(map[string]bool) +// AddImport adds a package to the generated file's import section. +// It returns the name used for the package. +func (g *Generator) AddImport(importPath GoImportPath) GoPackageName { + g.addedImports[importPath] = true + return g.GoPackageName(importPath) +} -// Create and remember a guaranteed unique package name for this file descriptor. -// Pkg is the candidate name. If f is nil, it's a builtin package like "proto" and -// has no file descriptor. 
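Since AddImport and GoPackageName above are the API downstream plugins now use to pull in extra packages, here is a hedged sketch of a minimal plugin doing so; the plugin name and the imported path are placeholders, not taken from this diff.

package myplugin

import "github.com/golang/protobuf/protoc-gen-go/generator"

type addImportPlugin struct{ gen *generator.Generator }

func (p *addImportPlugin) Name() string                                   { return "add_import_example" }
func (p *addImportPlugin) Init(g *generator.Generator)                    { p.gen = g }
func (p *addImportPlugin) GenerateImports(file *generator.FileDescriptor) {}

func (p *addImportPlugin) Generate(file *generator.FileDescriptor) {
	// AddImport registers the dependency and returns the (possibly renamed)
	// package name the generated file will use for it.
	ctx := p.gen.AddImport("golang.org/x/net/context")
	p.gen.P("var _ ", ctx, ".Context")
}

func init() { generator.RegisterPlugin(&addImportPlugin{}) }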
-func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { - // Convert dots to underscores before finding a unique alias. - pkg = strings.Map(badToUnderscore, pkg) +var globalPackageNames = map[GoPackageName]bool{ + "fmt": true, + "math": true, + "proto": true, +} - for i, orig := 1, pkg; pkgNamesInUse[pkg]; i++ { - // It's a duplicate; must rename. - pkg = orig + strconv.Itoa(i) - } - // Install it. - pkgNamesInUse[pkg] = true - if f != nil { - uniquePackageName[f.FileDescriptorProto] = pkg +// Create and remember a guaranteed unique package name. Pkg is the candidate name. +// The FileDescriptor parameter is unused. +func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { + name := cleanPackageName(pkg) + for i, orig := 1, name; globalPackageNames[name]; i++ { + name = orig + GoPackageName(strconv.Itoa(i)) } - return pkg + globalPackageNames[name] = true + return string(name) } var isGoKeyword = map[string]bool{ @@ -707,97 +595,125 @@ var isGoKeyword = map[string]bool{ "var": true, } +var isGoPredeclaredIdentifier = map[string]bool{ + "append": true, + "bool": true, + "byte": true, + "cap": true, + "close": true, + "complex": true, + "complex128": true, + "complex64": true, + "copy": true, + "delete": true, + "error": true, + "false": true, + "float32": true, + "float64": true, + "imag": true, + "int": true, + "int16": true, + "int32": true, + "int64": true, + "int8": true, + "iota": true, + "len": true, + "make": true, + "new": true, + "nil": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, + "rune": true, + "string": true, + "true": true, + "uint": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uint8": true, + "uintptr": true, +} + +func cleanPackageName(name string) GoPackageName { + name = strings.Map(badToUnderscore, name) + // Identifier must not be keyword or predeclared identifier: insert _. + if isGoKeyword[name] { + name = "_" + name + } + // Identifier must not begin with digit: insert _. + if r, _ := utf8.DecodeRuneInString(name); unicode.IsDigit(r) { + name = "_" + name + } + return GoPackageName(name) +} + // defaultGoPackage returns the package name to use, // derived from the import path of the package we're building code for. -func (g *Generator) defaultGoPackage() string { +func (g *Generator) defaultGoPackage() GoPackageName { p := g.PackageImportPath if i := strings.LastIndex(p, "/"); i >= 0 { p = p[i+1:] } - if p == "" { - return "" - } - - p = strings.Map(badToUnderscore, p) - // Identifier must not be keyword: insert _. - if isGoKeyword[p] { - p = "_" + p - } - // Identifier must not begin with digit: insert _. - if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) { - p = "_" + p - } - return p + return cleanPackageName(p) } // SetPackageNames sets the package name for this run. // The package name must agree across all files being generated. // It also defines unique package names for all imported files. func (g *Generator) SetPackageNames() { - // Register the name for this package. It will be the first name - // registered so is guaranteed to be unmodified. - pkg, explicit := g.genFiles[0].goPackageName() + g.outputImportPath = g.genFiles[0].importPath - // Check all files for an explicit go_package option. + defaultPackageNames := make(map[GoImportPath]GoPackageName) for _, f := range g.genFiles { - thisPkg, thisExplicit := f.goPackageName() - if thisExplicit { - if !explicit { - // Let this file's go_package option serve for all input files. 
- pkg, explicit = thisPkg, true - } else if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } + if _, p, ok := f.goPackageOption(); ok { + defaultPackageNames[f.importPath] = p } } - - // If we don't have an explicit go_package option but we have an - // import path, use that. - if !explicit { - p := g.defaultGoPackage() - if p != "" { - pkg, explicit = p, true + for _, f := range g.genFiles { + if _, p, ok := f.goPackageOption(); ok { + // Source file: option go_package = "quux/bar"; + f.packageName = p + } else if p, ok := defaultPackageNames[f.importPath]; ok { + // A go_package option in another file in the same package. + // + // This is a poor choice in general, since every source file should + // contain a go_package option. Supported mainly for historical + // compatibility. + f.packageName = p + } else if p := g.defaultGoPackage(); p != "" { + // Command-line: import_path=quux/bar. + // + // The import_path flag sets a package name for files which don't + // contain a go_package option. + f.packageName = p + } else if p := f.GetPackage(); p != "" { + // Source file: package quux.bar; + f.packageName = cleanPackageName(p) + } else { + // Source filename. + f.packageName = cleanPackageName(baseName(f.GetName())) } } - // If there was no go_package and no import path to use, - // double-check that all the inputs have the same implicit - // Go package name. - if !explicit { - for _, f := range g.genFiles { - thisPkg, _ := f.goPackageName() - if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } + // Check that all files have a consistent package name and import path. + for _, f := range g.genFiles[1:] { + if a, b := g.genFiles[0].importPath, f.importPath; a != b { + g.Fail(fmt.Sprintf("inconsistent package import paths: %v, %v", a, b)) + } + if a, b := g.genFiles[0].packageName, f.packageName; a != b { + g.Fail(fmt.Sprintf("inconsistent package names: %v, %v", a, b)) } } - g.packageName = RegisterUniquePackageName(pkg, g.genFiles[0]) - - // Register the support package names. They might collide with the - // name of a package we import. + // Names of support packages. These never vary (if there are conflicts, + // we rename the conflicting package), so this could be removed someday. g.Pkg = map[string]string{ - "fmt": RegisterUniquePackageName("fmt", nil), - "math": RegisterUniquePackageName("math", nil), - "proto": RegisterUniquePackageName("proto", nil), - } - -AllFiles: - for _, f := range g.allFiles { - for _, genf := range g.genFiles { - if f == genf { - // In this package already. - uniquePackageName[f.FileDescriptorProto] = g.packageName - continue AllFiles - } - } - // The file is a dependency, so we want to ignore its go_package option - // because that is only relevant for its specific generated output. 
- pkg := f.GetPackage() - if pkg == "" { - pkg = baseName(*f.Name) - } - RegisterUniquePackageName(pkg, f) + "fmt": "fmt", + "math": "math", + "proto": "proto", } } @@ -807,27 +723,51 @@ AllFiles: func (g *Generator) WrapTypes() { g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile)) g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles)) + genFileNames := make(map[string]bool) + for _, n := range g.Request.FileToGenerate { + genFileNames[n] = true + } for _, f := range g.Request.ProtoFile { - // We must wrap the descriptors before we wrap the enums - descs := wrapDescriptors(f) - g.buildNestedDescriptors(descs) - enums := wrapEnumDescriptors(f, descs) - g.buildNestedEnums(descs, enums) - exts := wrapExtensions(f) fd := &FileDescriptor{ FileDescriptorProto: f, - desc: descs, - enum: enums, - ext: exts, exported: make(map[Object][]symbol), proto3: fileIsProto3(f), } + // The import path may be set in a number of ways. + if substitution, ok := g.ImportMap[f.GetName()]; ok { + // Command-line: M=foo.proto=quux/bar. + // + // Explicit mapping of source file to import path. + fd.importPath = GoImportPath(substitution) + } else if genFileNames[f.GetName()] && g.PackageImportPath != "" { + // Command-line: import_path=quux/bar. + // + // The import_path flag sets the import path for every file that + // we generate code for. + fd.importPath = GoImportPath(g.PackageImportPath) + } else if p, _, _ := fd.goPackageOption(); p != "" { + // Source file: option go_package = "quux/bar"; + // + // The go_package option sets the import path. Most users should use this. + fd.importPath = p + } else { + // Source filename. + // + // Last resort when nothing else is available. + fd.importPath = GoImportPath(path.Dir(f.GetName())) + } + // We must wrap the descriptors before we wrap the enums + fd.desc = wrapDescriptors(fd) + g.buildNestedDescriptors(fd.desc) + fd.enum = wrapEnumDescriptors(fd, fd.desc) + g.buildNestedEnums(fd.desc, fd.enum) + fd.ext = wrapExtensions(fd) extractComments(fd) g.allFiles = append(g.allFiles, fd) g.allFilesByName[f.GetName()] = fd } for _, fd := range g.allFiles { - fd.imp = wrapImported(fd.FileDescriptorProto, g) + fd.imp = wrapImported(fd, g) } g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate)) @@ -836,7 +776,6 @@ func (g *Generator) WrapTypes() { if fd == nil { g.Fail("could not find file named", fileName) } - fd.index = len(g.genFiles) g.genFiles = append(g.genFiles, fd) } } @@ -873,7 +812,7 @@ func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescripto } // Construct the Descriptor -func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *Descriptor { +func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *Descriptor { d := &Descriptor{ common: common{file}, DescriptorProto: desc, @@ -910,7 +849,7 @@ func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *d } // Return a slice of all the Descriptors defined within this file -func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { +func wrapDescriptors(file *FileDescriptor) []*Descriptor { sl := make([]*Descriptor, 0, len(file.MessageType)+10) for i, desc := range file.MessageType { sl = wrapThisDescriptor(sl, desc, nil, file, i) @@ -919,7 +858,7 @@ func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { } // Wrap this Descriptor, recursively -func wrapThisDescriptor(sl []*Descriptor, 
desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) []*Descriptor { +func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) []*Descriptor { sl = append(sl, newDescriptor(desc, parent, file, index)) me := sl[len(sl)-1] for i, nested := range desc.NestedType { @@ -929,7 +868,7 @@ func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, pare } // Construct the EnumDescriptor -func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *EnumDescriptor { +func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *EnumDescriptor { ed := &EnumDescriptor{ common: common{file}, EnumDescriptorProto: desc, @@ -945,7 +884,7 @@ func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, } // Return a slice of all the EnumDescriptors defined within this file -func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descriptor) []*EnumDescriptor { +func wrapEnumDescriptors(file *FileDescriptor, descs []*Descriptor) []*EnumDescriptor { sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10) // Top-level enums. for i, enum := range file.EnumType { @@ -961,7 +900,7 @@ func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descript } // Return a slice of all the top-level ExtensionDescriptors defined within this file. -func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor { +func wrapExtensions(file *FileDescriptor) []*ExtensionDescriptor { var sl []*ExtensionDescriptor for _, field := range file.Extension { sl = append(sl, &ExtensionDescriptor{common{file}, field, nil}) @@ -970,7 +909,7 @@ func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor } // Return a slice of all the types that are publicly imported into this file. -func wrapImported(file *descriptor.FileDescriptorProto, g *Generator) (sl []*ImportedDescriptor) { +func wrapImported(file *FileDescriptor, g *Generator) (sl []*ImportedDescriptor) { for _, index := range file.PublicDependency { df := g.fileByName(file.Dependency[index]) for _, d := range df.desc { @@ -1034,71 +973,87 @@ func (g *Generator) ObjectNamed(typeName string) Object { if !ok { g.Fail("can't find object with type", typeName) } + return o +} - // If the file of this object isn't a direct dependency of the current file, - // or in the current file, then this object has been publicly imported into - // a dependency of the current file. - // We should return the ImportedDescriptor object for it instead. - direct := *o.File().Name == *g.file.Name - if !direct { - for _, dep := range g.file.Dependency { - if *g.fileByName(dep).Name == *o.File().Name { - direct = true - break - } - } - } - if !direct { - found := false - Loop: - for _, dep := range g.file.Dependency { - df := g.fileByName(*g.fileByName(dep).Name) - for _, td := range df.imp { - if td.o == o { - // Found it! - o = td - found = true - break Loop - } - } - } - if !found { - log.Printf("protoc-gen-go: WARNING: failed finding publicly imported dependency for %v, used in %v", typeName, *g.file.Name) - } +// AnnotatedAtoms is a list of atoms (as consumed by P) that records the file name and proto AST path from which they originated. 
+type AnnotatedAtoms struct { + source string + path string + atoms []interface{} +} + +// Annotate records the file name and proto AST path of a list of atoms +// so that a later call to P can emit a link from each atom to its origin. +func Annotate(file *FileDescriptor, path string, atoms ...interface{}) *AnnotatedAtoms { + return &AnnotatedAtoms{source: *file.Name, path: path, atoms: atoms} +} + +// printAtom prints the (atomic, non-annotation) argument to the generated output. +func (g *Generator) printAtom(v interface{}) { + switch v := v.(type) { + case string: + g.WriteString(v) + case *string: + g.WriteString(*v) + case bool: + fmt.Fprint(g, v) + case *bool: + fmt.Fprint(g, *v) + case int: + fmt.Fprint(g, v) + case *int32: + fmt.Fprint(g, *v) + case *int64: + fmt.Fprint(g, *v) + case float64: + fmt.Fprint(g, v) + case *float64: + fmt.Fprint(g, *v) + case GoPackageName: + g.WriteString(string(v)) + case GoImportPath: + g.WriteString(strconv.Quote(string(v))) + default: + g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) } - - return o } // P prints the arguments to the generated output. It handles strings and int32s, plus -// handling indirections because they may be *string, etc. +// handling indirections because they may be *string, etc. Any inputs of type AnnotatedAtoms may emit +// annotations in a .meta file in addition to outputting the atoms themselves (if g.annotateCode +// is true). func (g *Generator) P(str ...interface{}) { if !g.writeOutput { return } g.WriteString(g.indent) for _, v := range str { - switch s := v.(type) { - case string: - g.WriteString(s) - case *string: - g.WriteString(*s) - case bool: - fmt.Fprintf(g, "%t", s) - case *bool: - fmt.Fprintf(g, "%t", *s) - case int: - fmt.Fprintf(g, "%d", s) - case *int32: - fmt.Fprintf(g, "%d", *s) - case *int64: - fmt.Fprintf(g, "%d", *s) - case float64: - fmt.Fprintf(g, "%g", s) - case *float64: - fmt.Fprintf(g, "%g", *s) + switch v := v.(type) { + case *AnnotatedAtoms: + begin := int32(g.Len()) + for _, v := range v.atoms { + g.printAtom(v) + } + if g.annotateCode { + end := int32(g.Len()) + var path []int32 + for _, token := range strings.Split(v.path, ",") { + val, err := strconv.ParseInt(token, 10, 32) + if err != nil { + g.Fail("could not parse proto AST path: ", err.Error()) + } + path = append(path, int32(val)) + } + g.annotations = append(g.annotations, &descriptor.GeneratedCodeInfo_Annotation{ + Path: path, + SourceFile: &v.source, + Begin: &begin, + End: &end, + }) + } default: - g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) + g.printAtom(v) } } g.WriteByte('\n') @@ -1135,15 +1090,25 @@ func (g *Generator) GenerateAllFiles() { } for _, file := range g.allFiles { g.Reset() + g.annotations = nil g.writeOutput = genFileMap[file] g.generate(file) if !g.writeOutput { continue } + fname := file.goFileName(g.pathType) g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ - Name: proto.String(file.goFileName()), + Name: proto.String(fname), Content: proto.String(g.String()), }) + if g.annotateCode { + // Store the generated code annotations in text, as the protoc plugin protocol requires that + // strings contain valid UTF-8. 
+ g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ + Name: proto.String(file.goFileName(g.pathType) + ".meta"), + Content: proto.String(proto.CompactTextString(&descriptor.GeneratedCodeInfo{Annotation: g.annotations})), + }) + } } } @@ -1154,32 +1119,25 @@ func (g *Generator) runPlugins(file *FileDescriptor) { } } -// FileOf return the FileDescriptor for this FileDescriptorProto. -func (g *Generator) FileOf(fd *descriptor.FileDescriptorProto) *FileDescriptor { - for _, file := range g.allFiles { - if file.FileDescriptorProto == fd { - return file - } - } - g.Fail("could not find file in table:", fd.GetName()) - return nil -} - // Fill the response protocol buffer with the generated output for all the files we're // supposed to generate. func (g *Generator) generate(file *FileDescriptor) { - g.file = g.FileOf(file.FileDescriptorProto) - g.usedPackages = make(map[string]bool) - - if g.file.index == 0 { - // For one file in the package, assert version compatibility. - g.P("// This is a compile-time assertion to ensure that this generated file") - g.P("// is compatible with the proto package it is being compiled against.") - g.P("// A compilation error at this line likely means your copy of the") - g.P("// proto package needs to be updated.") - g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") - g.P() - } + g.file = file + g.usedPackages = make(map[GoImportPath]bool) + g.packageNames = make(map[GoImportPath]GoPackageName) + g.usedPackageNames = make(map[GoPackageName]bool) + g.addedImports = make(map[GoImportPath]bool) + for name := range globalPackageNames { + g.usedPackageNames[name] = true + } + + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the proto package it is being compiled against.") + g.P("// A compilation error at this line likely means your copy of the") + g.P("// proto package needs to be updated.") + g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") + g.P() + for _, td := range g.file.imp { g.generateImported(td) } @@ -1197,91 +1155,89 @@ func (g *Generator) generate(file *FileDescriptor) { g.generateExtension(ext) } g.generateInitFunction() + g.generateFileDescriptor(file) // Run the plugins before the imports so we know which imports are necessary. g.runPlugins(file) - g.generateFileDescriptor(file) - // Generate header and imports last, though they appear first in the output. rem := g.Buffer + remAnno := g.annotations g.Buffer = new(bytes.Buffer) + g.annotations = nil g.generateHeader() g.generateImports() if !g.writeOutput { return } + // Adjust the offsets for annotations displaced by the header and imports. + for _, anno := range remAnno { + *anno.Begin += int32(g.Len()) + *anno.End += int32(g.Len()) + g.annotations = append(g.annotations, anno) + } g.Write(rem.Bytes()) - // Reformat generated code. + // Reformat generated code and patch annotation locations. fset := token.NewFileSet() - raw := g.Bytes() - ast, err := parser.ParseFile(fset, "", g, parser.ParseComments) + original := g.Bytes() + if g.annotateCode { + // make a copy independent of g; we'll need it after Reset. + original = append([]byte(nil), original...) + } + fileAST, err := parser.ParseFile(fset, "", original, parser.ParseComments) if err != nil { // Print out the bad code with line numbers. 
// This should never happen in practice, but it can while changing generated code, // so consider this a debugging aid. var src bytes.Buffer - s := bufio.NewScanner(bytes.NewReader(raw)) + s := bufio.NewScanner(bytes.NewReader(original)) for line := 1; s.Scan(); line++ { fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) } g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String()) } + ast.SortImports(fset, fileAST) g.Reset() - err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast) + err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, fileAST) if err != nil { g.Fail("generated Go source code could not be reformatted:", err.Error()) } + if g.annotateCode { + m, err := remap.Compute(original, g.Bytes()) + if err != nil { + g.Fail("formatted generated Go source code could not be mapped back to the original code:", err.Error()) + } + for _, anno := range g.annotations { + new, ok := m.Find(int(*anno.Begin), int(*anno.End)) + if !ok { + g.Fail("span in formatted generated Go source code could not be mapped back to the original code") + } + *anno.Begin = int32(new.Pos) + *anno.End = int32(new.End) + } + } } // Generate the header, including package definition func (g *Generator) generateHeader() { g.P("// Code generated by protoc-gen-go. DO NOT EDIT.") - g.P("// source: ", g.file.Name) - g.P() - - name := g.file.PackageName() - - if g.file.index == 0 { - // Generate package docs for the first file in the package. - g.P("/*") - g.P("Package ", name, " is a generated protocol buffer package.") - g.P() - if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { - // not using g.PrintComments because this is a /* */ comment block. - text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") - for _, line := range strings.Split(text, "\n") { - line = strings.TrimPrefix(line, " ") - // ensure we don't escape from the block comment - line = strings.Replace(line, "*/", "* /", -1) - g.P(line) - } - g.P() - } - var topMsgs []string - g.P("It is generated from these files:") - for _, f := range g.genFiles { - g.P("\t", f.Name) - for _, msg := range f.desc { - if msg.parent != nil { - continue - } - topMsgs = append(topMsgs, CamelCaseSlice(msg.TypeName())) - } - } - g.P() - g.P("It has these top-level messages:") - for _, msg := range topMsgs { - g.P("\t", msg) - } - g.P("*/") + if g.file.GetOptions().GetDeprecated() { + g.P("// ", g.file.Name, " is a deprecated file.") + } else { + g.P("// source: ", g.file.Name) } - - g.P("package ", name) + g.P() + g.PrintComments(strconv.Itoa(packagePath)) + g.P() + g.P("package ", g.file.packageName) g.P() } +// deprecationComment is the standard comment added to deprecated +// messages, fields, enums, and enum values. +var deprecationComment = "// Deprecated: Do not use." + // PrintComments prints any comments from the source .proto file. // The path is a comma-separated list of integers. // It returns an indication of whether any comments were printed. 
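For illustration, a minimal standalone sketch of the string transformation that the makeComments helper in the following hunk applies to a leading proto comment block. This is not part of the patch; the function name and sample input below are assumed purely for demonstration.

package main

import (
	"fmt"
	"strings"
)

// makeCommentsSketch mirrors the new makeComments logic: trim the trailing
// newline, prefix every line with "//", and join the lines without adding a
// trailing "\n".
func makeCommentsSketch(leading string) string {
	var b strings.Builder
	nl := ""
	for _, line := range strings.Split(strings.TrimSuffix(leading, "\n"), "\n") {
		fmt.Fprintf(&b, "%s//%s", nl, line)
		nl = "\n"
	}
	return b.String()
}

func main() {
	// " first line\n second line\n" becomes "// first line\n// second line".
	fmt.Println(makeCommentsSketch(" first line\n second line\n"))
}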
@@ -1290,16 +1246,28 @@ func (g *Generator) PrintComments(path string) bool { if !g.writeOutput { return false } - if loc, ok := g.file.comments[path]; ok { - text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") - for _, line := range strings.Split(text, "\n") { - g.P("// ", strings.TrimPrefix(line, " ")) - } + if c, ok := g.makeComments(path); ok { + g.P(c) return true } return false } +// makeComments generates the comment string for the field, no "\n" at the end +func (g *Generator) makeComments(path string) (string, bool) { + loc, ok := g.file.comments[path] + if !ok { + return "", false + } + w := new(bytes.Buffer) + nl := "" + for _, line := range strings.Split(strings.TrimSuffix(loc.GetLeadingComments(), "\n"), "\n") { + fmt.Fprintf(w, "%s//%s", nl, line) + nl = "\n" + } + return w.String(), true +} + func (g *Generator) fileByName(filename string) *FileDescriptor { return g.allFilesByName[filename] } @@ -1316,39 +1284,45 @@ func (g *Generator) weak(i int32) bool { // Generate the imports func (g *Generator) generateImports() { - // We almost always need a proto import. Rather than computing when we - // do, which is tricky when there's a plugin, just import it and - // reference it later. The same argument applies to the fmt and math packages. - g.P("import " + g.Pkg["proto"] + " " + strconv.Quote(g.ImportPrefix+"github.com/golang/protobuf/proto")) - g.P("import " + g.Pkg["fmt"] + ` "fmt"`) - g.P("import " + g.Pkg["math"] + ` "math"`) + imports := make(map[GoImportPath]GoPackageName) for i, s := range g.file.Dependency { fd := g.fileByName(s) + importPath := fd.importPath // Do not import our own package. - if fd.PackageName() == g.packageName { + if importPath == g.file.importPath { continue } - filename := fd.goFileName() - // By default, import path is the dirname of the Go filename. - importPath := path.Dir(filename) - if substitution, ok := g.ImportMap[s]; ok { - importPath = substitution - } - importPath = g.ImportPrefix + importPath - // Skip weak imports. + // Do not import weak imports. if g.weak(int32(i)) { - g.P("// skipping weak import ", fd.PackageName(), " ", strconv.Quote(importPath)) + continue + } + // Do not import a package twice. + if _, ok := imports[importPath]; ok { continue } // We need to import all the dependencies, even if we don't reference them, // because other code and tools depend on having the full transitive closure // of protocol buffer types in the binary. - pname := fd.PackageName() - if _, ok := g.usedPackages[pname]; !ok { - pname = "_" + packageName := g.GoPackageName(importPath) + if _, ok := g.usedPackages[importPath]; !ok { + packageName = "_" } - g.P("import ", pname, " ", strconv.Quote(importPath)) + imports[importPath] = packageName + } + for importPath := range g.addedImports { + imports[importPath] = g.GoPackageName(importPath) + } + // We almost always need a proto import. Rather than computing when we + // do, which is tricky when there's a plugin, just import it and + // reference it later. The same argument applies to the fmt and math packages. 
+ g.P("import (") + g.P(g.Pkg["fmt"] + ` "fmt"`) + g.P(g.Pkg["math"] + ` "math"`) + g.P(g.Pkg["proto"]+" ", GoImportPath(g.ImportPrefix)+"github.com/golang/protobuf/proto") + for importPath, packageName := range imports { + g.P(packageName, " ", GoImportPath(g.ImportPrefix)+importPath) } + g.P(")") g.P() // TODO: may need to worry about uniqueness across plugins for _, p := range plugins { @@ -1363,26 +1337,19 @@ func (g *Generator) generateImports() { } func (g *Generator) generateImported(id *ImportedDescriptor) { - // Don't generate public import symbols for files that we are generating - // code for, since those symbols will already be in this package. - // We can't simply avoid creating the ImportedDescriptor objects, - // because g.genFiles isn't populated at that stage. - tn := id.TypeName() - sn := tn[len(tn)-1] - df := g.FileOf(id.o.File()) + df := id.o.File() filename := *df.Name - for _, fd := range g.genFiles { - if *fd.Name == filename { - g.P("// Ignoring public import of ", sn, " from ", filename) - g.P() - return - } + if df.importPath == g.file.importPath { + // Don't generate type aliases for files in the same Go package as this one. + return + } + if !supportTypeAliases { + g.Fail(fmt.Sprintf("%s: public imports require at least go1.9", filename)) } - g.P("// ", sn, " from public import ", filename) - g.usedPackages[df.PackageName()] = true + g.usedPackages[df.importPath] = true for _, sym := range df.exported[id.o] { - sym.GenerateAlias(g, df.PackageName()) + sym.GenerateAlias(g, filename, g.GoPackageName(df.importPath)) } g.P() @@ -1396,22 +1363,30 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) { ccTypeName := CamelCaseSlice(typeName) ccPrefix := enum.prefix() + deprecatedEnum := "" + if enum.GetOptions().GetDeprecated() { + deprecatedEnum = deprecationComment + } g.PrintComments(enum.path) - g.P("type ", ccTypeName, " int32") + g.P("type ", Annotate(enum.file, enum.path, ccTypeName), " int32", deprecatedEnum) g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()}) g.P("const (") - g.In() for i, e := range enum.Value { - g.PrintComments(fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)) + etorPath := fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i) + g.PrintComments(etorPath) + + deprecatedValue := "" + if e.GetOptions().GetDeprecated() { + deprecatedValue = deprecationComment + } name := ccPrefix + *e.Name - g.P(name, " ", ccTypeName, " = ", e.Number) + g.P(Annotate(enum.file, etorPath, name), " ", ccTypeName, " = ", e.Number, " ", deprecatedValue) g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName}) } - g.Out() g.P(")") + g.P() g.P("var ", ccTypeName, "_name = map[int32]string{") - g.In() generated := make(map[int32]bool) // avoid duplicate values for _, e := range enum.Value { duplicate := "" @@ -1421,45 +1396,39 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) { g.P(duplicate, e.Number, ": ", strconv.Quote(*e.Name), ",") generated[*e.Number] = true } - g.Out() g.P("}") + g.P() g.P("var ", ccTypeName, "_value = map[string]int32{") - g.In() for _, e := range enum.Value { g.P(strconv.Quote(*e.Name), ": ", e.Number, ",") } - g.Out() g.P("}") + g.P() if !enum.proto3() { g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {") - g.In() g.P("p := new(", ccTypeName, ")") g.P("*p = x") g.P("return p") - g.Out() g.P("}") + g.P() } g.P("func (x ", ccTypeName, ") String() string {") - g.In() g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))") - g.Out() g.P("}") + g.P() if !enum.proto3() { g.P("func (x 
*", ccTypeName, ") UnmarshalJSON(data []byte) error {") - g.In() g.P("value, err := ", g.Pkg["proto"], ".UnmarshalJSONEnum(", ccTypeName, `_value, data, "`, ccTypeName, `")`) g.P("if err != nil {") - g.In() g.P("return err") - g.Out() g.P("}") g.P("*x = ", ccTypeName, "(value)") g.P("return nil") - g.Out() g.P("}") + g.P() } var indexes []string @@ -1468,12 +1437,16 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) { indexes = append([]string{strconv.Itoa(m.index)}, indexes...) } indexes = append(indexes, strconv.Itoa(enum.index)) - g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) {") + g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}") + g.P("}") + g.P() if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" { g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`) + g.P() } - g.P() + g.generateEnumRegistration(enum) } // The tag is a string like "varint,2,opt,name=fieldname,def=7" that @@ -1530,12 +1503,24 @@ func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptor g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName())) } defaultValue = enum.integerValueAsString(defaultValue) + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" { + if f, err := strconv.ParseFloat(defaultValue, 32); err == nil { + defaultValue = fmt.Sprint(float32(f)) + } + } + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" { + if f, err := strconv.ParseFloat(defaultValue, 64); err == nil { + defaultValue = fmt.Sprint(f) + } + } } defaultValue = ",def=" + defaultValue } enum := "" if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM { - // We avoid using obj.PackageName(), because we want to use the + // We avoid using obj.GoPackageName(), because we want to use the // original (proto-world) package name. obj := g.ObjectNamed(field.GetTypeName()) if id, ok := obj.(*ImportedDescriptor); ok { @@ -1567,19 +1552,14 @@ func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptor name = name[i+1:] } } - if json := field.GetJsonName(); json != "" && json != name { + if json := field.GetJsonName(); field.Extendee == nil && json != "" && json != name { // TODO: escaping might be needed, in which case // perhaps this should be in its own "json" tag. name += ",json=" + json } name = ",name=" + name if message.proto3() { - // We only need the extra tag for []byte fields; - // no need to add noise for the others. - if *field.Type == descriptor.FieldDescriptorProto_TYPE_BYTES { - name += ",proto3" - } - + name += ",proto3" } oneof := "" if field.OneofIndex != nil { @@ -1617,12 +1597,6 @@ func (g *Generator) TypeName(obj Object) string { return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName()) } -// TypeNameWithPackage is like TypeName, but always includes the package -// name even if the object is in our own package. -func (g *Generator) TypeNameWithPackage(obj Object) string { - return obj.PackageName() + CamelCaseSlice(obj.TypeName()) -} - // GoType returns a string representing the type name, and the wire type func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) { // TODO: Options. 
@@ -1682,11 +1656,16 @@ func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescripto } func (g *Generator) RecordTypeUse(t string) { - if obj, ok := g.typeNameToObject[t]; ok { - // Call ObjectNamed to get the true object to record the use. - obj = g.ObjectNamed(t) - g.usedPackages[obj.PackageName()] = true + if _, ok := g.typeNameToObject[t]; !ok { + return + } + importPath := g.ObjectNamed(t).GoImportPath() + if importPath == g.outputImportPath { + // Don't record use of objects in our package. + return } + g.AddImport(importPath) + g.usedPackages[importPath] = true } // Method names that may be generated. Fields with these names get an @@ -1725,256 +1704,294 @@ var wellKnownTypes = map[string]bool{ "BytesValue": true, } -// Generate the type and default constant definitions for this Descriptor. -func (g *Generator) generateMessage(message *Descriptor) { - // The full type name - typeName := message.TypeName() - // The full type name, CamelCased. - ccTypeName := CamelCaseSlice(typeName) - - usedNames := make(map[string]bool) - for _, n := range methodNames { - usedNames[n] = true +// getterDefault finds the default value for the field to return from a getter, +// regardless of if it's a built in default or explicit from the source. Returns e.g. "nil", `""`, "Default_MessageType_FieldName" +func (g *Generator) getterDefault(field *descriptor.FieldDescriptorProto, goMessageType string) string { + if isRepeated(field) { + return "nil" } - fieldNames := make(map[*descriptor.FieldDescriptorProto]string) - fieldGetterNames := make(map[*descriptor.FieldDescriptorProto]string) - fieldTypes := make(map[*descriptor.FieldDescriptorProto]string) - mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) - - oneofFieldName := make(map[int32]string) // indexed by oneof_index field of FieldDescriptorProto - oneofDisc := make(map[int32]string) // name of discriminator method - oneofTypeName := make(map[*descriptor.FieldDescriptorProto]string) // without star - oneofInsertPoints := make(map[int32]int) // oneof_index => offset of g.Buffer - - g.PrintComments(message.path) - g.P("type ", ccTypeName, " struct {") - g.In() - - // allocNames finds a conflict-free variation of the given strings, - // consistently mutating their suffixes. - // It returns the same number of strings. - allocNames := func(ns ...string) []string { - Loop: - for { - for _, n := range ns { - if usedNames[n] { - for i := range ns { - ns[i] += "_" - } - continue Loop - } - } - for _, n := range ns { - usedNames[n] = true - } - return ns + if def := field.GetDefaultValue(); def != "" { + defaultConstant := g.defaultConstantName(goMessageType, field.GetName()) + if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES { + return defaultConstant + } + return "append([]byte(nil), " + defaultConstant + "...)" + } + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + return "false" + case descriptor.FieldDescriptorProto_TYPE_STRING: + return `""` + case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_BYTES: + return "nil" + case descriptor.FieldDescriptorProto_TYPE_ENUM: + obj := g.ObjectNamed(field.GetTypeName()) + var enum *EnumDescriptor + if id, ok := obj.(*ImportedDescriptor); ok { + // The enum type has been publicly imported. 
+ enum, _ = id.o.(*EnumDescriptor) + } else { + enum, _ = obj.(*EnumDescriptor) } + if enum == nil { + log.Printf("don't know how to generate getter for %s", field.GetName()) + return "nil" + } + if len(enum.Value) == 0 { + return "0 // empty enum" + } + first := enum.Value[0].GetName() + return g.DefaultPackageName(obj) + enum.prefix() + first + default: + return "0" } +} - for i, field := range message.Field { - // Allocate the getter and the field at the same time so name - // collisions create field/method consistent names. - // TODO: This allocation occurs based on the order of the fields - // in the proto file, meaning that a change in the field - // ordering can change generated Method/Field names. - base := CamelCase(*field.Name) - ns := allocNames(base, "Get"+base) - fieldName, fieldGetterName := ns[0], ns[1] - typename, wiretype := g.GoType(message, field) - jsonName := *field.Name - tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty") +// defaultConstantName builds the name of the default constant from the message +// type name and the untouched field name, e.g. "Default_MessageType_FieldName" +func (g *Generator) defaultConstantName(goMessageType, protoFieldName string) string { + return "Default_" + goMessageType + "_" + CamelCase(protoFieldName) +} - fieldNames[field] = fieldName - fieldGetterNames[field] = fieldGetterName +// The different types of fields in a message and how to actually print them +// Most of the logic for generateMessage is in the methods of these types. +// +// Note that the content of the field is irrelevant, a simpleField can contain +// anything from a scalar to a group (which is just a message). +// +// Extension fields (and message sets) are however handled separately. +// +// simpleField - a field that is neiter weak nor oneof, possibly repeated +// oneofField - field containing list of subfields: +// - oneofSubField - a field within the oneof - oneof := field.OneofIndex != nil - if oneof && oneofFieldName[*field.OneofIndex] == "" { - odp := message.OneofDecl[int(*field.OneofIndex)] - fname := allocNames(CamelCase(odp.GetName()))[0] +// msgCtx contains the context for the generator functions. +type msgCtx struct { + goName string // Go struct name of the message, e.g. MessageName + message *Descriptor // The descriptor for the message +} - // This is the first field of a oneof we haven't seen before. - // Generate the union field. - com := g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)) - if com { - g.P("//") - } - g.P("// Types that are valid to be assigned to ", fname, ":") - // Generate the rest of this comment later, - // when we've computed any disambiguation. - oneofInsertPoints[*field.OneofIndex] = g.Buffer.Len() +// fieldCommon contains data common to all types of fields. +type fieldCommon struct { + goName string // Go name of field, e.g. "FieldName" or "Descriptor_" + protoName string // Name of field in proto language, e.g. "field_name" or "descriptor" + getterName string // Name of the getter, e.g. "GetFieldName" or "GetDescriptor_" + goType string // The Go type as a string, e.g. "*int32" or "*OtherMessage" + tags string // The tag string/annotation for the type, e.g. `protobuf:"varint,8,opt,name=region_id,json=regionId"` + fullPath string // The full path of the field as used by Annotate etc, e.g. 
"4,0,2,0" +} - dname := "is" + ccTypeName + "_" + fname - oneofFieldName[*field.OneofIndex] = fname - oneofDisc[*field.OneofIndex] = dname - tag := `protobuf_oneof:"` + odp.GetName() + `"` - g.P(fname, " ", dname, " `", tag, "`") - } +// getProtoName gets the proto name of a field, e.g. "field_name" or "descriptor". +func (f *fieldCommon) getProtoName() string { + return f.protoName +} - if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { - desc := g.ObjectNamed(field.GetTypeName()) - if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() { - // Figure out the Go types and tags for the key and value types. - keyField, valField := d.Field[0], d.Field[1] - keyType, keyWire := g.GoType(d, keyField) - valType, valWire := g.GoType(d, valField) - keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire) +// getGoType returns the go type of the field as a string, e.g. "*int32". +func (f *fieldCommon) getGoType() string { + return f.goType +} - // We don't use stars, except for message-typed values. - // Message and enum types are the only two possibly foreign types used in maps, - // so record their use. They are not permitted as map keys. - keyType = strings.TrimPrefix(keyType, "*") - switch *valField.Type { - case descriptor.FieldDescriptorProto_TYPE_ENUM: - valType = strings.TrimPrefix(valType, "*") - g.RecordTypeUse(valField.GetTypeName()) - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - g.RecordTypeUse(valField.GetTypeName()) - default: - valType = strings.TrimPrefix(valType, "*") - } +// simpleField is not weak, not a oneof, not an extension. Can be required, optional or repeated. +type simpleField struct { + fieldCommon + protoTypeName string // Proto type name, empty if primitive, e.g. ".google.protobuf.Duration" + protoType descriptor.FieldDescriptorProto_Type // Actual type enum value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64 + deprecated string // Deprecation comment, if any, e.g. "// Deprecated: Do not use." + getterDef string // Default for getters, e.g. "nil", `""` or "Default_MessageType_FieldName" + protoDef string // Default value as defined in the proto file, e.g "yoshi" or "5" + comment string // The full comment for the field, e.g. "// Useful information" +} - typename = fmt.Sprintf("map[%s]%s", keyType, valType) - mapFieldTypes[field] = typename // record for the getter generation +// decl prints the declaration of the field in the struct (if any). +func (f *simpleField) decl(g *Generator, mc *msgCtx) { + g.P(f.comment, Annotate(mc.message.file, f.fullPath, f.goName), "\t", f.goType, "\t`", f.tags, "`", f.deprecated) +} - tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag) - } - } +// getter prints the getter for the field. +func (f *simpleField) getter(g *Generator, mc *msgCtx) { + star := "" + tname := f.goType + if needsStar(f.protoType) && tname[0] == '*' { + tname = tname[1:] + star = "*" + } + if f.deprecated != "" { + g.P(f.deprecated) + } + g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, f.fullPath, f.getterName), "() "+tname+" {") + if f.getterDef == "nil" { // Simpler getter + g.P("if m != nil {") + g.P("return m." + f.goName) + g.P("}") + g.P("return nil") + g.P("}") + g.P() + return + } + if mc.message.proto3() { + g.P("if m != nil {") + } else { + g.P("if m != nil && m." + f.goName + " != nil {") + } + g.P("return " + star + "m." 
+ f.goName) + g.P("}") + g.P("return ", f.getterDef) + g.P("}") + g.P() +} - fieldTypes[field] = typename +// setter prints the setter method of the field. +func (f *simpleField) setter(g *Generator, mc *msgCtx) { + // No setter for regular fields yet +} - if oneof { - tname := ccTypeName + "_" + fieldName - // It is possible for this to collide with a message or enum - // nested in this message. Check for collisions. - for { - ok := true - for _, desc := range message.nested { - if CamelCaseSlice(desc.TypeName()) == tname { - ok = false - break - } - } - for _, enum := range message.enums { - if CamelCaseSlice(enum.TypeName()) == tname { - ok = false - break - } - } - if !ok { - tname += "_" - continue - } - break - } +// getProtoDef returns the default value explicitly stated in the proto file, e.g "yoshi" or "5". +func (f *simpleField) getProtoDef() string { + return f.protoDef +} - oneofTypeName[field] = tname - continue - } +// getProtoTypeName returns the protobuf type name for the field as returned by field.GetTypeName(), e.g. ".google.protobuf.Duration". +func (f *simpleField) getProtoTypeName() string { + return f.protoTypeName +} - g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)) - g.P(fieldName, "\t", typename, "\t`", tag, "`") - g.RecordTypeUse(field.GetTypeName()) +// getProtoType returns the *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64. +func (f *simpleField) getProtoType() descriptor.FieldDescriptorProto_Type { + return f.protoType +} + +// oneofSubFields are kept slize held by each oneofField. They do not appear in the top level slize of fields for the message. +type oneofSubField struct { + fieldCommon + protoTypeName string // Proto type name, empty if primitive, e.g. ".google.protobuf.Duration" + protoType descriptor.FieldDescriptorProto_Type // Actual type enum value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64 + oneofTypeName string // Type name of the enclosing struct, e.g. "MessageName_FieldName" + fieldNumber int // Actual field number, as defined in proto, e.g. 12 + getterDef string // Default for getters, e.g. "nil", `""` or "Default_MessageType_FieldName" + protoDef string // Default value as defined in the proto file, e.g "yoshi" or "5" + deprecated string // Deprecation comment, if any. +} + +// typedNil prints a nil casted to the pointer to this field. +// - for XXX_OneofWrappers +func (f *oneofSubField) typedNil(g *Generator) { + g.P("(*", f.oneofTypeName, ")(nil),") +} + +// getProtoDef returns the default value explicitly stated in the proto file, e.g "yoshi" or "5". +func (f *oneofSubField) getProtoDef() string { + return f.protoDef +} + +// getProtoTypeName returns the protobuf type name for the field as returned by field.GetTypeName(), e.g. ".google.protobuf.Duration". +func (f *oneofSubField) getProtoTypeName() string { + return f.protoTypeName +} + +// getProtoType returns the *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64. +func (f *oneofSubField) getProtoType() descriptor.FieldDescriptorProto_Type { + return f.protoType +} + +// oneofField represents the oneof on top level. +// The alternative fields within the oneof are represented by oneofSubField. +type oneofField struct { + fieldCommon + subFields []*oneofSubField // All the possible oneof fields + comment string // The full comment for the field, e.g. "// Types that are valid to be assigned to MyOneof:\n\\" +} + +// decl prints the declaration of the field in the struct (if any). 
+func (f *oneofField) decl(g *Generator, mc *msgCtx) { + comment := f.comment + for _, sf := range f.subFields { + comment += "//\t*" + sf.oneofTypeName + "\n" } - if len(message.ExtensionRange) > 0 { - g.P(g.Pkg["proto"], ".XXX_InternalExtensions `json:\"-\"`") + g.P(comment, Annotate(mc.message.file, f.fullPath, f.goName), " ", f.goType, " `", f.tags, "`") +} + +// getter for a oneof field will print additional discriminators and interfaces for the oneof, +// also it prints all the getters for the sub fields. +func (f *oneofField) getter(g *Generator, mc *msgCtx) { + // The discriminator type + g.P("type ", f.goType, " interface {") + g.P(f.goType, "()") + g.P("}") + g.P() + // The subField types, fulfilling the discriminator type contract + for _, sf := range f.subFields { + g.P("type ", Annotate(mc.message.file, sf.fullPath, sf.oneofTypeName), " struct {") + g.P(Annotate(mc.message.file, sf.fullPath, sf.goName), " ", sf.goType, " `", sf.tags, "`") + g.P("}") + g.P() } - if !message.proto3() { - g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + for _, sf := range f.subFields { + g.P("func (*", sf.oneofTypeName, ") ", f.goType, "() {}") + g.P() } - g.Out() + // Getter for the oneof field + g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, f.fullPath, f.getterName), "() ", f.goType, " {") + g.P("if m != nil { return m.", f.goName, " }") + g.P("return nil") g.P("}") - - // Update g.Buffer to list valid oneof types. - // We do this down here, after we've disambiguated the oneof type names. - // We go in reverse order of insertion point to avoid invalidating offsets. - for oi := int32(len(message.OneofDecl)); oi >= 0; oi-- { - ip := oneofInsertPoints[oi] - all := g.Buffer.Bytes() - rem := all[ip:] - g.Buffer = bytes.NewBuffer(all[:ip:ip]) // set cap so we don't scribble on rem - for _, field := range message.Field { - if field.OneofIndex == nil || *field.OneofIndex != oi { - continue - } - g.P("//\t*", oneofTypeName[field]) - } - g.Buffer.Write(rem) + g.P() + // Getters for each oneof + for _, sf := range f.subFields { + if sf.deprecated != "" { + g.P(sf.deprecated) + } + g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, sf.fullPath, sf.getterName), "() "+sf.goType+" {") + g.P("if x, ok := m.", f.getterName, "().(*", sf.oneofTypeName, "); ok {") + g.P("return x.", sf.goName) + g.P("}") + g.P("return ", sf.getterDef) + g.P("}") + g.P() } +} - // Reset, String and ProtoMessage methods. - g.P("func (m *", ccTypeName, ") Reset() { *m = ", ccTypeName, "{} }") - g.P("func (m *", ccTypeName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }") - g.P("func (*", ccTypeName, ") ProtoMessage() {}") - var indexes []string - for m := message; m != nil; m = m.parent { - indexes = append([]string{strconv.Itoa(m.index)}, indexes...) - } - g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") - // TODO: Revisit the decision to use a XXX_WellKnownType method - // if we change proto.MessageName to work with multiple equivalents. - if message.file.GetPackage() == "google.protobuf" && wellKnownTypes[message.GetName()] { - g.P("func (*", ccTypeName, `) XXX_WellKnownType() string { return "`, message.GetName(), `" }`) - } +// setter prints the setter method of the field. 
+func (f *oneofField) setter(g *Generator, mc *msgCtx) { + // No setters for oneof yet +} - // Extension support methods - var hasExtensions, isMessageSet bool - if len(message.ExtensionRange) > 0 { - hasExtensions = true - // message_set_wire_format only makes sense when extensions are defined. - if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { - isMessageSet = true - g.P() - g.P("func (m *", ccTypeName, ") Marshal() ([]byte, error) {") - g.In() - g.P("return ", g.Pkg["proto"], ".MarshalMessageSet(&m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") Unmarshal(buf []byte) error {") - g.In() - g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSet(buf, &m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") MarshalJSON() ([]byte, error) {") - g.In() - g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") UnmarshalJSON(buf []byte) error {") - g.In() - g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("// ensure ", ccTypeName, " satisfies proto.Marshaler and proto.Unmarshaler") - g.P("var _ ", g.Pkg["proto"], ".Marshaler = (*", ccTypeName, ")(nil)") - g.P("var _ ", g.Pkg["proto"], ".Unmarshaler = (*", ccTypeName, ")(nil)") - } +// topLevelField interface implemented by all types of fields on the top level (not oneofSubField). +type topLevelField interface { + decl(g *Generator, mc *msgCtx) // print declaration within the struct + getter(g *Generator, mc *msgCtx) // print getter + setter(g *Generator, mc *msgCtx) // print setter if applicable +} - g.P() - g.P("var extRange_", ccTypeName, " = []", g.Pkg["proto"], ".ExtensionRange{") - g.In() - for _, r := range message.ExtensionRange { - end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends - g.P("{", r.Start, ", ", end, "},") +// defField interface implemented by all types of fields that can have defaults (not oneofField, but instead oneofSubField). +type defField interface { + getProtoDef() string // default value explicitly stated in the proto file, e.g "yoshi" or "5" + getProtoName() string // proto name of a field, e.g. "field_name" or "descriptor" + getGoType() string // go type of the field as a string, e.g. "*int32" + getProtoTypeName() string // protobuf type name for the field, e.g. ".google.protobuf.Duration" + getProtoType() descriptor.FieldDescriptorProto_Type // *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64 +} + +// generateDefaultConstants adds constants for default values if needed, which is only if the default value is. +// explicit in the proto. 
+func (g *Generator) generateDefaultConstants(mc *msgCtx, topLevelFields []topLevelField) { + // Collect fields that can have defaults + dFields := []defField{} + for _, pf := range topLevelFields { + if f, ok := pf.(*oneofField); ok { + for _, osf := range f.subFields { + dFields = append(dFields, osf) + } + continue } - g.Out() - g.P("}") - g.P("func (*", ccTypeName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {") - g.In() - g.P("return extRange_", ccTypeName) - g.Out() - g.P("}") + dFields = append(dFields, pf.(defField)) } - - // Default constants - defNames := make(map[*descriptor.FieldDescriptorProto]string) - for _, field := range message.Field { - def := field.GetDefaultValue() + for _, df := range dFields { + def := df.getProtoDef() if def == "" { continue } - fieldname := "Default_" + ccTypeName + "_" + CamelCase(*field.Name) - defNames[field] = fieldname - typename, _ := g.GoType(message, field) + fieldname := g.defaultConstantName(mc.goName, df.getProtoName()) + typename := df.getGoType() if typename[0] == '*' { typename = typename[1:] } @@ -1984,7 +2001,7 @@ func (g *Generator) generateMessage(message *Descriptor) { case typename == "string": def = strconv.Quote(def) case typename == "[]byte": - def = "[]byte(" + strconv.Quote(def) + ")" + def = "[]byte(" + strconv.Quote(unescape(def)) + ")" kind = "var " case def == "inf", def == "-inf", def == "nan": // These names are known to, and defined by, the protocol language. @@ -1996,13 +2013,21 @@ func (g *Generator) generateMessage(message *Descriptor) { case "nan": def = "math.NaN()" } - if *field.Type == descriptor.FieldDescriptorProto_TYPE_FLOAT { + if df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_FLOAT { def = "float32(" + def + ")" } kind = "var " - case *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM: + case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_FLOAT: + if f, err := strconv.ParseFloat(def, 32); err == nil { + def = fmt.Sprint(float32(f)) + } + case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_DOUBLE: + if f, err := strconv.ParseFloat(def, 64); err == nil { + def = fmt.Sprint(f) + } + case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_ENUM: // Must be an enum. Need to construct the prefixed name. - obj := g.ObjectNamed(field.GetTypeName()) + obj := g.ObjectNamed(df.getProtoTypeName()) var enum *EnumDescriptor if id, ok := obj.(*ImportedDescriptor); ok { // The enum type has been publicly imported. @@ -2017,485 +2042,388 @@ func (g *Generator) generateMessage(message *Descriptor) { def = g.DefaultPackageName(obj) + enum.prefix() + def } g.P(kind, fieldname, " ", typename, " = ", def) - g.file.addExport(message, constOrVarSymbol{fieldname, kind, ""}) + g.file.addExport(mc.message, constOrVarSymbol{fieldname, kind, ""}) } g.P() +} - // Oneof per-field types, discriminants and getters. - // - // Generate unexported named types for the discriminant interfaces. - // We shouldn't have to do this, but there was (~19 Aug 2015) a compiler/linker bug - // that was triggered by using anonymous interfaces here. - // TODO: Revisit this and consider reverting back to anonymous interfaces. - for oi := range message.OneofDecl { - dname := oneofDisc[int32(oi)] - g.P("type ", dname, " interface { ", dname, "() }") - } - g.P() - for _, field := range message.Field { - if field.OneofIndex == nil { - continue +// generateInternalStructFields just adds the XXX_ fields to the message struct. 
+func (g *Generator) generateInternalStructFields(mc *msgCtx, topLevelFields []topLevelField) { + g.P("XXX_NoUnkeyedLiteral\tstruct{} `json:\"-\"`") // prevent unkeyed struct literals + if len(mc.message.ExtensionRange) > 0 { + messageset := "" + if opts := mc.message.Options; opts != nil && opts.GetMessageSetWireFormat() { + messageset = "protobuf_messageset:\"1\" " } - _, wiretype := g.GoType(message, field) - tag := "protobuf:" + g.goTag(message, field, wiretype) - g.P("type ", oneofTypeName[field], " struct{ ", fieldNames[field], " ", fieldTypes[field], " `", tag, "` }") - g.RecordTypeUse(field.GetTypeName()) + g.P(g.Pkg["proto"], ".XXX_InternalExtensions `", messageset, "json:\"-\"`") } - g.P() - for _, field := range message.Field { - if field.OneofIndex == nil { - continue + g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + g.P("XXX_sizecache\tint32 `json:\"-\"`") + +} + +// generateOneofFuncs adds all the utility functions for oneof, including marshalling, unmarshalling and sizer. +func (g *Generator) generateOneofFuncs(mc *msgCtx, topLevelFields []topLevelField) { + ofields := []*oneofField{} + for _, f := range topLevelFields { + if o, ok := f.(*oneofField); ok { + ofields = append(ofields, o) } - g.P("func (*", oneofTypeName[field], ") ", oneofDisc[*field.OneofIndex], "() {}") } - g.P() - for oi := range message.OneofDecl { - fname := oneofFieldName[int32(oi)] - g.P("func (m *", ccTypeName, ") Get", fname, "() ", oneofDisc[int32(oi)], " {") - g.P("if m != nil { return m.", fname, " }") - g.P("return nil") - g.P("}") + if len(ofields) == 0 { + return } + + // OneofFuncs + g.P("// XXX_OneofWrappers is for the internal use of the proto package.") + g.P("func (*", mc.goName, ") XXX_OneofWrappers() []interface{} {") + g.P("return []interface{}{") + for _, of := range ofields { + for _, sf := range of.subFields { + sf.typedNil(g) + } + } + g.P("}") + g.P("}") g.P() +} - // Field getters - var getters []getterSymbol - for _, field := range message.Field { - oneof := field.OneofIndex != nil +// generateMessageStruct adds the actual struct with it's members (but not methods) to the output. +func (g *Generator) generateMessageStruct(mc *msgCtx, topLevelFields []topLevelField) { + comments := g.PrintComments(mc.message.path) - fname := fieldNames[field] - typename, _ := g.GoType(message, field) - if t, ok := mapFieldTypes[field]; ok { - typename = t - } - mname := fieldGetterNames[field] - star := "" - if needsStar(*field.Type) && typename[0] == '*' { - typename = typename[1:] - star = "*" - } - - // Only export getter symbols for basic types, - // and for messages and enums in the same package. - // Groups are not exported. - // Foreign types can't be hoisted through a public import because - // the importer may not already be importing the defining .proto. - // As an example, imagine we have an import tree like this: - // A.proto -> B.proto -> C.proto - // If A publicly imports B, we need to generate the getters from B in A's output, - // but if one such getter returns something from C then we cannot do that - // because A is not importing C already. - var getter, genType bool - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_GROUP: - getter = false - case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_ENUM: - // Only export getter if its return type is in this package. 
- getter = g.ObjectNamed(field.GetTypeName()).PackageName() == message.PackageName() - genType = true - default: - getter = true - } - if getter { - getters = append(getters, getterSymbol{ - name: mname, - typ: typename, - typeName: field.GetTypeName(), - genType: genType, - }) + // Guarantee deprecation comments appear after user-provided comments. + if mc.message.GetOptions().GetDeprecated() { + if comments { + // Convention: Separate deprecation comments from original + // comments with an empty line. + g.P("//") } + g.P(deprecationComment) + } - g.P("func (m *", ccTypeName, ") "+mname+"() "+typename+" {") - g.In() - def, hasDef := defNames[field] - typeDefaultIsNil := false // whether this field type's default value is a literal nil unless specified - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BYTES: - typeDefaultIsNil = !hasDef - case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE: - typeDefaultIsNil = true - } - if isRepeated(field) { - typeDefaultIsNil = true - } - if typeDefaultIsNil && !oneof { - // A bytes field with no explicit default needs less generated code, - // as does a message or group field, or a repeated field. - g.P("if m != nil {") - g.In() - g.P("return m." + fname) - g.Out() - g.P("}") - g.P("return nil") - g.Out() - g.P("}") - g.P() - continue - } - if !oneof { - if message.proto3() { - g.P("if m != nil {") - } else { - g.P("if m != nil && m." + fname + " != nil {") - } - g.In() - g.P("return " + star + "m." + fname) - g.Out() - g.P("}") - } else { - uname := oneofFieldName[*field.OneofIndex] - tname := oneofTypeName[field] - g.P("if x, ok := m.Get", uname, "().(*", tname, "); ok {") - g.P("return x.", fname) - g.P("}") - } - if hasDef { - if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES { - g.P("return " + def) - } else { - // The default is a []byte var. - // Make a copy when returning it to be safe. - g.P("return append([]byte(nil), ", def, "...)") - } - } else { - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BOOL: - g.P("return false") - case descriptor.FieldDescriptorProto_TYPE_STRING: - g.P(`return ""`) - case descriptor.FieldDescriptorProto_TYPE_GROUP, - descriptor.FieldDescriptorProto_TYPE_MESSAGE, - descriptor.FieldDescriptorProto_TYPE_BYTES: - // This is only possible for oneof fields. - g.P("return nil") - case descriptor.FieldDescriptorProto_TYPE_ENUM: - // The default default for an enum is the first value in the enum, - // not zero. - obj := g.ObjectNamed(field.GetTypeName()) - var enum *EnumDescriptor - if id, ok := obj.(*ImportedDescriptor); ok { - // The enum type has been publicly imported. 
- enum, _ = id.o.(*EnumDescriptor) - } else { - enum, _ = obj.(*EnumDescriptor) - } - if enum == nil { - log.Printf("don't know how to generate getter for %s", field.GetName()) - continue - } - if len(enum.Value) == 0 { - g.P("return 0 // empty enum") - } else { - first := enum.Value[0].GetName() - g.P("return ", g.DefaultPackageName(obj)+enum.prefix()+first) - } - default: - g.P("return 0") - } - } - g.Out() - g.P("}") - g.P() + g.P("type ", Annotate(mc.message.file, mc.message.path, mc.goName), " struct {") + for _, pf := range topLevelFields { + pf.decl(g, mc) } + g.generateInternalStructFields(mc, topLevelFields) + g.P("}") +} - if !message.group { - ms := &messageSymbol{ - sym: ccTypeName, - hasExtensions: hasExtensions, - isMessageSet: isMessageSet, - hasOneof: len(message.OneofDecl) > 0, - getters: getters, - } - g.file.addExport(message, ms) +// generateGetters adds getters for all fields, including oneofs and weak fields when applicable. +func (g *Generator) generateGetters(mc *msgCtx, topLevelFields []topLevelField) { + for _, pf := range topLevelFields { + pf.getter(g, mc) } +} - // Oneof functions - if len(message.OneofDecl) > 0 { - fieldWire := make(map[*descriptor.FieldDescriptorProto]string) +// generateSetters add setters for all fields, including oneofs and weak fields when applicable. +func (g *Generator) generateSetters(mc *msgCtx, topLevelFields []topLevelField) { + for _, pf := range topLevelFields { + pf.setter(g, mc) + } +} - // method - enc := "_" + ccTypeName + "_OneofMarshaler" - dec := "_" + ccTypeName + "_OneofUnmarshaler" - size := "_" + ccTypeName + "_OneofSizer" - encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" - decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" - sizeSig := "(msg " + g.Pkg["proto"] + ".Message) (n int)" +// generateCommonMethods adds methods to the message that are not on a per field basis. +func (g *Generator) generateCommonMethods(mc *msgCtx) { + // Reset, String and ProtoMessage methods. + g.P("func (m *", mc.goName, ") Reset() { *m = ", mc.goName, "{} }") + g.P("func (m *", mc.goName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }") + g.P("func (*", mc.goName, ") ProtoMessage() {}") + var indexes []string + for m := mc.message; m != nil; m = m.parent { + indexes = append([]string{strconv.Itoa(m.index)}, indexes...) + } + g.P("func (*", mc.goName, ") Descriptor() ([]byte, []int) {") + g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}") + g.P("}") + g.P() + // TODO: Revisit the decision to use a XXX_WellKnownType method + // if we change proto.MessageName to work with multiple equivalents. 
+ if mc.message.file.GetPackage() == "google.protobuf" && wellKnownTypes[mc.message.GetName()] { + g.P("func (*", mc.goName, `) XXX_WellKnownType() string { return "`, mc.message.GetName(), `" }`) + g.P() + } - g.P("// XXX_OneofFuncs is for the internal use of the proto package.") - g.P("func (*", ccTypeName, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") - g.P("return ", enc, ", ", dec, ", ", size, ", []interface{}{") - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } - g.P("(*", oneofTypeName[field], ")(nil),") + // Extension support methods + if len(mc.message.ExtensionRange) > 0 { + g.P() + g.P("var extRange_", mc.goName, " = []", g.Pkg["proto"], ".ExtensionRange{") + for _, r := range mc.message.ExtensionRange { + end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends + g.P("{Start: ", r.Start, ", End: ", end, "},") } g.P("}") + g.P("func (*", mc.goName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {") + g.P("return extRange_", mc.goName) g.P("}") g.P() + } - // marshaler - g.P("func ", enc, encSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - for oi, odp := range message.OneofDecl { - g.P("// ", odp.GetName()) - fname := oneofFieldName[int32(oi)] - g.P("switch x := m.", fname, ".(type) {") - for _, field := range message.Field { - if field.OneofIndex == nil || int(*field.OneofIndex) != oi { - continue - } - g.P("case *", oneofTypeName[field], ":") - var wire, pre, post string - val := "x." + fieldNames[field] // overridden for TYPE_BOOL - canFail := false // only TYPE_MESSAGE and TYPE_GROUP can fail - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - wire = "WireFixed64" - pre = "b.EncodeFixed64(" + g.Pkg["math"] + ".Float64bits(" - post = "))" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - wire = "WireFixed32" - pre = "b.EncodeFixed32(uint64(" + g.Pkg["math"] + ".Float32bits(" - post = ")))" - case descriptor.FieldDescriptorProto_TYPE_INT64, - descriptor.FieldDescriptorProto_TYPE_UINT64: - wire = "WireVarint" - pre, post = "b.EncodeVarint(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_INT32, - descriptor.FieldDescriptorProto_TYPE_UINT32, - descriptor.FieldDescriptorProto_TYPE_ENUM: - wire = "WireVarint" - pre, post = "b.EncodeVarint(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_FIXED64, - descriptor.FieldDescriptorProto_TYPE_SFIXED64: - wire = "WireFixed64" - pre, post = "b.EncodeFixed64(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_FIXED32, - descriptor.FieldDescriptorProto_TYPE_SFIXED32: - wire = "WireFixed32" - pre, post = "b.EncodeFixed32(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - // bool needs special handling. 
- g.P("t := uint64(0)") - g.P("if ", val, " { t = 1 }") - val = "t" - wire = "WireVarint" - pre, post = "b.EncodeVarint(", ")" - case descriptor.FieldDescriptorProto_TYPE_STRING: - wire = "WireBytes" - pre, post = "b.EncodeStringBytes(", ")" - case descriptor.FieldDescriptorProto_TYPE_GROUP: - wire = "WireStartGroup" - pre, post = "b.Marshal(", ")" - canFail = true - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - wire = "WireBytes" - pre, post = "b.EncodeMessage(", ")" - canFail = true - case descriptor.FieldDescriptorProto_TYPE_BYTES: - wire = "WireBytes" - pre, post = "b.EncodeRawBytes(", ")" - case descriptor.FieldDescriptorProto_TYPE_SINT32: - wire = "WireVarint" - pre, post = "b.EncodeZigzag32(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - wire = "WireVarint" - pre, post = "b.EncodeZigzag64(uint64(", "))" - default: - g.Fail("unhandled oneof field type ", field.Type.String()) - } - fieldWire[field] = wire - g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") - if !canFail { - g.P(pre, val, post) - } else { - g.P("if err := ", pre, val, post, "; err != nil {") - g.P("return err") - g.P("}") - } - if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { - g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + // TODO: It does not scale to keep adding another method for every + // operation on protos that we want to switch over to using the + // table-driven approach. Instead, we should only add a single method + // that allows getting access to the *InternalMessageInfo struct and then + // calling Unmarshal, Marshal, Merge, Size, and Discard directly on that. + + // Wrapper for table-driven marshaling and unmarshaling. + g.P("func (m *", mc.goName, ") XXX_Unmarshal(b []byte) error {") + g.P("return xxx_messageInfo_", mc.goName, ".Unmarshal(m, b)") + g.P("}") + + g.P("func (m *", mc.goName, ") XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {") + g.P("return xxx_messageInfo_", mc.goName, ".Marshal(b, m, deterministic)") + g.P("}") + + g.P("func (m *", mc.goName, ") XXX_Merge(src ", g.Pkg["proto"], ".Message) {") + g.P("xxx_messageInfo_", mc.goName, ".Merge(m, src)") + g.P("}") + + g.P("func (m *", mc.goName, ") XXX_Size() int {") // avoid name clash with "Size" field in some message + g.P("return xxx_messageInfo_", mc.goName, ".Size(m)") + g.P("}") + + g.P("func (m *", mc.goName, ") XXX_DiscardUnknown() {") + g.P("xxx_messageInfo_", mc.goName, ".DiscardUnknown(m)") + g.P("}") + + g.P("var xxx_messageInfo_", mc.goName, " ", g.Pkg["proto"], ".InternalMessageInfo") + g.P() +} + +// Generate the type, methods and default constant definitions for this Descriptor. +func (g *Generator) generateMessage(message *Descriptor) { + topLevelFields := []topLevelField{} + oFields := make(map[int32]*oneofField) + // The full type name + typeName := message.TypeName() + // The full type name, CamelCased. + goTypeName := CamelCaseSlice(typeName) + + usedNames := make(map[string]bool) + for _, n := range methodNames { + usedNames[n] = true + } + + // allocNames finds a conflict-free variation of the given strings, + // consistently mutating their suffixes. + // It returns the same number of strings. 
+ allocNames := func(ns ...string) []string { + Loop: + for { + for _, n := range ns { + if usedNames[n] { + for i := range ns { + ns[i] += "_" + } + continue Loop } } - g.P("case nil:") - g.P("default: return ", g.Pkg["fmt"], `.Errorf("`, ccTypeName, ".", fname, ` has unexpected type %T", x)`) - g.P("}") + for _, n := range ns { + usedNames[n] = true + } + return ns } - g.P("return nil") - g.P("}") - g.P() + } - // unmarshaler - g.P("func ", dec, decSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - g.P("switch tag {") - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } + mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) // keep track of the map fields to be added later + + // Build a structure more suitable for generating the text in one pass + for i, field := range message.Field { + // Allocate the getter and the field at the same time so name + // collisions create field/method consistent names. + // TODO: This allocation occurs based on the order of the fields + // in the proto file, meaning that a change in the field + // ordering can change generated Method/Field names. + base := CamelCase(*field.Name) + ns := allocNames(base, "Get"+base) + fieldName, fieldGetterName := ns[0], ns[1] + typename, wiretype := g.GoType(message, field) + jsonName := *field.Name + tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty") + + oneof := field.OneofIndex != nil + if oneof && oFields[*field.OneofIndex] == nil { odp := message.OneofDecl[int(*field.OneofIndex)] - g.P("case ", field.Number, ": // ", odp.GetName(), ".", *field.Name) - g.P("if wire != ", g.Pkg["proto"], ".", fieldWire[field], " {") - g.P("return true, ", g.Pkg["proto"], ".ErrInternalBadWireType") - g.P("}") - lhs := "x, err" // overridden for TYPE_MESSAGE and TYPE_GROUP - var dec, cast, cast2 string - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - dec, cast = "b.DecodeFixed64()", g.Pkg["math"]+".Float64frombits" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - dec, cast, cast2 = "b.DecodeFixed32()", "uint32", g.Pkg["math"]+".Float32frombits" - case descriptor.FieldDescriptorProto_TYPE_INT64: - dec, cast = "b.DecodeVarint()", "int64" - case descriptor.FieldDescriptorProto_TYPE_UINT64: - dec = "b.DecodeVarint()" - case descriptor.FieldDescriptorProto_TYPE_INT32: - dec, cast = "b.DecodeVarint()", "int32" - case descriptor.FieldDescriptorProto_TYPE_FIXED64: - dec = "b.DecodeFixed64()" - case descriptor.FieldDescriptorProto_TYPE_FIXED32: - dec, cast = "b.DecodeFixed32()", "uint32" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - dec = "b.DecodeVarint()" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_STRING: - dec = "b.DecodeStringBytes()" - case descriptor.FieldDescriptorProto_TYPE_GROUP: - g.P("msg := new(", fieldTypes[field][1:], ")") // drop star - lhs = "err" - dec = "b.DecodeGroup(msg)" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - g.P("msg := new(", fieldTypes[field][1:], ")") // drop star - lhs = "err" - dec = "b.DecodeMessage(msg)" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_BYTES: - dec = "b.DecodeRawBytes(true)" - case descriptor.FieldDescriptorProto_TYPE_UINT32: - dec, cast = "b.DecodeVarint()", "uint32" - case descriptor.FieldDescriptorProto_TYPE_ENUM: - dec, cast = "b.DecodeVarint()", fieldTypes[field] - case descriptor.FieldDescriptorProto_TYPE_SFIXED32: - dec, cast = "b.DecodeFixed32()", "int32" - case 
descriptor.FieldDescriptorProto_TYPE_SFIXED64: - dec, cast = "b.DecodeFixed64()", "int64" - case descriptor.FieldDescriptorProto_TYPE_SINT32: - dec, cast = "b.DecodeZigzag32()", "int32" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - dec, cast = "b.DecodeZigzag64()", "int64" - default: - g.Fail("unhandled oneof field type ", field.Type.String()) - } - g.P(lhs, " := ", dec) - val := "x" - if cast != "" { - val = cast + "(" + val + ")" - } - if cast2 != "" { - val = cast2 + "(" + val + ")" + base := CamelCase(odp.GetName()) + fname := allocNames(base)[0] + + // This is the first field of a oneof we haven't seen before. + // Generate the union field. + oneofFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex) + c, ok := g.makeComments(oneofFullPath) + if ok { + c += "\n//\n" } - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BOOL: - val += " != 0" - case descriptor.FieldDescriptorProto_TYPE_GROUP, - descriptor.FieldDescriptorProto_TYPE_MESSAGE: - val = "msg" + c += "// Types that are valid to be assigned to " + fname + ":\n" + // Generate the rest of this comment later, + // when we've computed any disambiguation. + + dname := "is" + goTypeName + "_" + fname + tag := `protobuf_oneof:"` + odp.GetName() + `"` + of := oneofField{ + fieldCommon: fieldCommon{ + goName: fname, + getterName: "Get"+fname, + goType: dname, + tags: tag, + protoName: odp.GetName(), + fullPath: oneofFullPath, + }, + comment: c, } - g.P("m.", oneofFieldName[*field.OneofIndex], " = &", oneofTypeName[field], "{", val, "}") - g.P("return true, err") + topLevelFields = append(topLevelFields, &of) + oFields[*field.OneofIndex] = &of } - g.P("default: return false, nil") - g.P("}") - g.P("}") - g.P() - // sizer - g.P("func ", size, sizeSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - for oi, odp := range message.OneofDecl { - g.P("// ", odp.GetName()) - fname := oneofFieldName[int32(oi)] - g.P("switch x := m.", fname, ".(type) {") - for _, field := range message.Field { - if field.OneofIndex == nil || int(*field.OneofIndex) != oi { - continue - } - g.P("case *", oneofTypeName[field], ":") - val := "x." + fieldNames[field] - var wire, varint, fixed string - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - wire = "WireFixed64" - fixed = "8" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - wire = "WireFixed32" - fixed = "4" - case descriptor.FieldDescriptorProto_TYPE_INT64, - descriptor.FieldDescriptorProto_TYPE_UINT64, - descriptor.FieldDescriptorProto_TYPE_INT32, - descriptor.FieldDescriptorProto_TYPE_UINT32, - descriptor.FieldDescriptorProto_TYPE_ENUM: - wire = "WireVarint" - varint = val - case descriptor.FieldDescriptorProto_TYPE_FIXED64, - descriptor.FieldDescriptorProto_TYPE_SFIXED64: - wire = "WireFixed64" - fixed = "8" - case descriptor.FieldDescriptorProto_TYPE_FIXED32, - descriptor.FieldDescriptorProto_TYPE_SFIXED32: - wire = "WireFixed32" - fixed = "4" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - wire = "WireVarint" - fixed = "1" - case descriptor.FieldDescriptorProto_TYPE_STRING: - wire = "WireBytes" - fixed = "len(" + val + ")" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_GROUP: - wire = "WireStartGroup" - fixed = g.Pkg["proto"] + ".Size(" + val + ")" + if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { + desc := g.ObjectNamed(field.GetTypeName()) + if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() { + // Figure out the Go types and tags for the key and value types. 
+ keyField, valField := d.Field[0], d.Field[1] + keyType, keyWire := g.GoType(d, keyField) + valType, valWire := g.GoType(d, valField) + keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire) + + // We don't use stars, except for message-typed values. + // Message and enum types are the only two possibly foreign types used in maps, + // so record their use. They are not permitted as map keys. + keyType = strings.TrimPrefix(keyType, "*") + switch *valField.Type { + case descriptor.FieldDescriptorProto_TYPE_ENUM: + valType = strings.TrimPrefix(valType, "*") + g.RecordTypeUse(valField.GetTypeName()) case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - wire = "WireBytes" - g.P("s := ", g.Pkg["proto"], ".Size(", val, ")") - fixed = "s" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_BYTES: - wire = "WireBytes" - fixed = "len(" + val + ")" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_SINT32: - wire = "WireVarint" - varint = "(uint32(" + val + ") << 1) ^ uint32((int32(" + val + ") >> 31))" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - wire = "WireVarint" - varint = "uint64(" + val + " << 1) ^ uint64((int64(" + val + ") >> 63))" + g.RecordTypeUse(valField.GetTypeName()) default: - g.Fail("unhandled oneof field type ", field.Type.String()) + valType = strings.TrimPrefix(valType, "*") } - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") - if varint != "" { - g.P("n += ", g.Pkg["proto"], ".SizeVarint(uint64(", varint, "))") + + typename = fmt.Sprintf("map[%s]%s", keyType, valType) + mapFieldTypes[field] = typename // record for the getter generation + + tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag) + } + } + + fieldDeprecated := "" + if field.GetOptions().GetDeprecated() { + fieldDeprecated = deprecationComment + } + + dvalue := g.getterDefault(field, goTypeName) + if oneof { + tname := goTypeName + "_" + fieldName + // It is possible for this to collide with a message or enum + // nested in this message. Check for collisions. 
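// [Illustrative aside, not part of the patch] The tname computed here becomes the name
// of the per-field oneof wrapper struct, and dname (built earlier) the interface those
// wrappers satisfy. For a hypothetical message Profile with
// `oneof avatar { string image_url = 2; bytes image_data = 3; }`, the generated
// declarations look roughly like this (struct tags abbreviated):

type isProfile_Avatar interface {
	isProfile_Avatar()
}

type Profile_ImageUrl struct {
	ImageUrl string `protobuf:"bytes,2,opt,name=image_url,json=imageUrl,oneof"`
}

type Profile_ImageData struct {
	ImageData []byte `protobuf:"bytes,3,opt,name=image_data,json=imageData,oneof"`
}

func (*Profile_ImageUrl) isProfile_Avatar()  {}
func (*Profile_ImageData) isProfile_Avatar() {}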
+ for { + ok := true + for _, desc := range message.nested { + if CamelCaseSlice(desc.TypeName()) == tname { + ok = false + break + } } - if fixed != "" { - g.P("n += ", fixed) + for _, enum := range message.enums { + if CamelCaseSlice(enum.TypeName()) == tname { + ok = false + break + } } - if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + if !ok { + tname += "_" + continue } + break + } + + oneofField := oFields[*field.OneofIndex] + tag := "protobuf:" + g.goTag(message, field, wiretype) + sf := oneofSubField{ + fieldCommon: fieldCommon{ + goName: fieldName, + getterName: fieldGetterName, + goType: typename, + tags: tag, + protoName: field.GetName(), + fullPath: fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i), + }, + protoTypeName: field.GetTypeName(), + fieldNumber: int(*field.Number), + protoType: *field.Type, + getterDef: dvalue, + protoDef: field.GetDefaultValue(), + oneofTypeName: tname, + deprecated: fieldDeprecated, } - g.P("case nil:") - g.P("default:") - g.P("panic(", g.Pkg["fmt"], ".Sprintf(\"proto: unexpected type %T in oneof\", x))") - g.P("}") + oneofField.subFields = append(oneofField.subFields, &sf) + g.RecordTypeUse(field.GetTypeName()) + continue } - g.P("return n") - g.P("}") - g.P() + + fieldFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i) + c, ok := g.makeComments(fieldFullPath) + if ok { + c += "\n" + } + rf := simpleField{ + fieldCommon: fieldCommon{ + goName: fieldName, + getterName: fieldGetterName, + goType: typename, + tags: tag, + protoName: field.GetName(), + fullPath: fieldFullPath, + }, + protoTypeName: field.GetTypeName(), + protoType: *field.Type, + deprecated: fieldDeprecated, + getterDef: dvalue, + protoDef: field.GetDefaultValue(), + comment: c, + } + var pf topLevelField = &rf + + topLevelFields = append(topLevelFields, pf) + g.RecordTypeUse(field.GetTypeName()) + } + + mc := &msgCtx{ + goName: goTypeName, + message: message, } + g.generateMessageStruct(mc, topLevelFields) + g.P() + g.generateCommonMethods(mc) + g.P() + g.generateDefaultConstants(mc, topLevelFields) + g.P() + g.generateGetters(mc, topLevelFields) + g.P() + g.generateSetters(mc, topLevelFields) + g.P() + g.generateOneofFuncs(mc, topLevelFields) + g.P() + + var oneofTypes []string + for _, f := range topLevelFields { + if of, ok := f.(*oneofField); ok { + for _, osf := range of.subFields { + oneofTypes = append(oneofTypes, osf.oneofTypeName) + } + } + } + + opts := message.Options + ms := &messageSymbol{ + sym: goTypeName, + hasExtensions: len(message.ExtensionRange) > 0, + isMessageSet: opts != nil && opts.GetMessageSetWireFormat(), + oneofTypes: oneofTypes, + } + g.file.addExport(message, ms) + for _, ext := range message.ext { g.generateExtension(ext) } @@ -2505,7 +2433,90 @@ func (g *Generator) generateMessage(message *Descriptor) { fullName = *g.file.Package + "." + fullName } - g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], ccTypeName, fullName) + g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], goTypeName, fullName) + // Register types for native map types. 
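// [Illustrative aside, not part of the patch] Putting the map handling together: the
// typename and protobuf_key/protobuf_val tags built above, plus the RegisterMapType
// init call added just below, yield code of roughly this shape for a hypothetical
// field `map<string, int64> counts = 4;` in message M of package test (tag details
// are approximate):

type M struct {
	Counts map[string]int64 `protobuf:"bytes,4,rep,name=counts" json:"counts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"`
}

func init() {
	proto.RegisterMapType((map[string]int64)(nil), "test.M.CountsEntry")
}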
+ for _, k := range mapFieldKeys(mapFieldTypes) { + fullName := strings.TrimPrefix(*k.TypeName, ".") + g.addInitf("%s.RegisterMapType((%s)(nil), %q)", g.Pkg["proto"], mapFieldTypes[k], fullName) + } + +} + +type byTypeName []*descriptor.FieldDescriptorProto + +func (a byTypeName) Len() int { return len(a) } +func (a byTypeName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byTypeName) Less(i, j int) bool { return *a[i].TypeName < *a[j].TypeName } + +// mapFieldKeys returns the keys of m in a consistent order. +func mapFieldKeys(m map[*descriptor.FieldDescriptorProto]string) []*descriptor.FieldDescriptorProto { + keys := make([]*descriptor.FieldDescriptorProto, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Sort(byTypeName(keys)) + return keys +} + +var escapeChars = [256]byte{ + 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v', '\\': '\\', '"': '"', '\'': '\'', '?': '?', +} + +// unescape reverses the "C" escaping that protoc does for default values of bytes fields. +// It is best effort in that it effectively ignores malformed input. Seemingly invalid escape +// sequences are conveyed, unmodified, into the decoded result. +func unescape(s string) string { + // NB: Sadly, we can't use strconv.Unquote because protoc will escape both + // single and double quotes, but strconv.Unquote only allows one or the + // other (based on actual surrounding quotes of its input argument). + + var out []byte + for len(s) > 0 { + // regular character, or too short to be valid escape + if s[0] != '\\' || len(s) < 2 { + out = append(out, s[0]) + s = s[1:] + } else if c := escapeChars[s[1]]; c != 0 { + // escape sequence + out = append(out, c) + s = s[2:] + } else if s[1] == 'x' || s[1] == 'X' { + // hex escape, e.g. "\x80 + if len(s) < 4 { + // too short to be valid + out = append(out, s[:2]...) + s = s[2:] + continue + } + v, err := strconv.ParseUint(s[2:4], 16, 8) + if err != nil { + out = append(out, s[:4]...) + } else { + out = append(out, byte(v)) + } + s = s[4:] + } else if '0' <= s[1] && s[1] <= '7' { + // octal escape, can vary from 1 to 3 octal digits; e.g., "\0" "\40" or "\164" + // so consume up to 2 more bytes or up to end-of-string + n := len(s[1:]) - len(strings.TrimLeft(s[1:], "01234567")) + if n > 3 { + n = 3 + } + v, err := strconv.ParseUint(s[1:1+n], 8, 8) + if err != nil { + out = append(out, s[:1+n]...) + } else { + out = append(out, byte(v)) + } + s = s[1+n:] + } else { + // bad escape, just propagate the slash as-is + out = append(out, s[0]) + s = s[1:] + } + } + + return string(out) } func (g *Generator) generateExtension(ext *ExtensionDescriptor) { @@ -2533,12 +2544,15 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) { typeName := ext.TypeName() // Special case for proto2 message sets: If this extension is extending - // proto2_bridge.MessageSet, and its final name component is "message_set_extension", + // proto2.bridge.MessageSet, and its final name component is "message_set_extension", // then drop that last component. - mset := false - if extendedType == "*proto2_bridge.MessageSet" && typeName[len(typeName)-1] == "message_set_extension" { + // + // TODO: This should be implemented in the text formatter rather than the generator. 
+ // In addition, the situation for when to apply this special case is implemented + // differently in other languages: + // https://github.com/google/protobuf/blob/aff10976/src/google/protobuf/text_format.cc#L1560 + if extDesc.GetOptions().GetMessageSetWireFormat() && typeName[len(typeName)-1] == "message_set_extension" { typeName = typeName[:len(typeName)-1] - mset = true } // For text formatting, the package must be exactly what the .proto file declares, @@ -2549,7 +2563,6 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) { } g.P("var ", ccTypeName, " = &", g.Pkg["proto"], ".ExtensionDesc{") - g.In() g.P("ExtendedType: (", extendedType, ")(nil),") g.P("ExtensionType: (", fieldType, ")(nil),") g.P("Field: ", field.Number, ",") @@ -2557,39 +2570,22 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) { g.P("Tag: ", tag, ",") g.P(`Filename: "`, g.file.GetName(), `",`) - g.Out() g.P("}") g.P() - if mset { - // Generate a bit more code to register with message_set.go. - g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["proto"], fieldType, *field.Number, extName) - } + g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName()) g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""}) } func (g *Generator) generateInitFunction() { - for _, enum := range g.file.enum { - g.generateEnumRegistration(enum) - } - for _, d := range g.file.desc { - for _, ext := range d.ext { - g.generateExtensionRegistration(ext) - } - } - for _, ext := range g.file.ext { - g.generateExtensionRegistration(ext) - } if len(g.init) == 0 { return } g.P("func init() {") - g.In() for _, l := range g.init { g.P(l) } - g.Out() g.P("}") g.init = nil } @@ -2615,7 +2611,6 @@ func (g *Generator) generateFileDescriptor(file *FileDescriptor) { g.P() g.P("func init() { ", g.Pkg["proto"], ".RegisterFile(", strconv.Quote(*file.Name), ", ", v, ") }") g.P("var ", v, " = []byte{") - g.In() g.P("// ", len(b), " bytes of a gzipped FileDescriptorProto") for len(b) > 0 { n := 16 @@ -2631,7 +2626,6 @@ func (g *Generator) generateFileDescriptor(file *FileDescriptor) { b = b[n:] } - g.Out() g.P("}") } @@ -2648,10 +2642,6 @@ func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) { g.addInitf("%s.RegisterEnum(%q, %[3]s_name, %[3]s_value)", g.Pkg["proto"], pkg+ccTypeName, ccTypeName) } -func (g *Generator) generateExtensionRegistration(ext *ExtensionDescriptor) { - g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName()) -} - // And now lots of helper functions. // Is c an ASCII lower-case letter? @@ -2803,3 +2793,14 @@ const ( // tag numbers in EnumDescriptorProto enumValuePath = 2 // value ) + +var supportTypeAliases bool + +func init() { + for _, tag := range build.Default.ReleaseTags { + if tag == "go1.9" { + supportTypeAliases = true + return + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go new file mode 100644 index 0000000..a9b6103 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go @@ -0,0 +1,117 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2017 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package remap handles tracking the locations of Go tokens in a source text +across a rewrite by the Go formatter. +*/ +package remap + +import ( + "fmt" + "go/scanner" + "go/token" +) + +// A Location represents a span of byte offsets in the source text. +type Location struct { + Pos, End int // End is exclusive +} + +// A Map represents a mapping between token locations in an input source text +// and locations in the correspnding output text. +type Map map[Location]Location + +// Find reports whether the specified span is recorded by m, and if so returns +// the new location it was mapped to. If the input span was not found, the +// returned location is the same as the input. +func (m Map) Find(pos, end int) (Location, bool) { + key := Location{ + Pos: pos, + End: end, + } + if loc, ok := m[key]; ok { + return loc, true + } + return key, false +} + +func (m Map) add(opos, oend, npos, nend int) { + m[Location{Pos: opos, End: oend}] = Location{Pos: npos, End: nend} +} + +// Compute constructs a location mapping from input to output. An error is +// reported if any of the tokens of output cannot be mapped. +func Compute(input, output []byte) (Map, error) { + itok := tokenize(input) + otok := tokenize(output) + if len(itok) != len(otok) { + return nil, fmt.Errorf("wrong number of tokens, %d ≠ %d", len(itok), len(otok)) + } + m := make(Map) + for i, ti := range itok { + to := otok[i] + if ti.Token != to.Token { + return nil, fmt.Errorf("token %d type mismatch: %s ≠ %s", i+1, ti, to) + } + m.add(ti.pos, ti.end, to.pos, to.end) + } + return m, nil +} + +// tokinfo records the span and type of a source token. 
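// [Illustrative aside, not part of the patch] A minimal sketch of how remap is meant to
// be used: run a source buffer through the Go formatter, build the location map, then
// translate a token span from the original text into the formatted text. Note the
// package lives under internal/, so only protoc-gen-go itself can actually import it;
// the standalone main package below is purely illustrative.

package main

import (
	"bytes"
	"fmt"
	"go/format"

	"github.com/golang/protobuf/protoc-gen-go/generator/internal/remap"
)

func main() {
	src := []byte("package foo\nvar   x   int = 5\n")

	out, err := format.Source(src) // gofmt-style rewrite shifts byte offsets
	if err != nil {
		panic(err)
	}

	m, err := remap.Compute(src, out)
	if err != nil {
		panic(err)
	}

	// Locate the identifier "x" in the original text and ask where it moved to.
	pos := bytes.IndexByte(src, 'x')
	if loc, ok := m.Find(pos, pos+1); ok {
		fmt.Printf("%q is now at bytes %d-%d\n", out[loc.Pos:loc.End], loc.Pos, loc.End)
	}
}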
+type tokinfo struct { + pos, end int + token.Token +} + +func tokenize(src []byte) []tokinfo { + fs := token.NewFileSet() + var s scanner.Scanner + s.Init(fs.AddFile("src", fs.Base(), len(src)), src, nil, scanner.ScanComments) + var info []tokinfo + for { + pos, next, lit := s.Scan() + switch next { + case token.SEMICOLON: + continue + } + info = append(info, tokinfo{ + pos: int(pos - 1), + end: int(pos + token.Pos(len(lit)) - 1), + Token: next, + }) + if next == token.EOF { + break + } + } + return info +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go similarity index 57% rename from vendor/github.com/golang/protobuf/proto/testdata/golden_test.go rename to vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go index 7172d0e..ccc7fca 100644 --- a/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap_test.go @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2012 The Go Authors. All rights reserved. +// Copyright 2017 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,58 +29,54 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// Verify that the compiler output for test.proto is unchanged. - -package testdata +package remap import ( - "crypto/sha1" - "fmt" - "io/ioutil" - "os" - "os/exec" - "path/filepath" + "go/format" "testing" ) -// sum returns in string form (for easy comparison) the SHA-1 hash of the named file. -func sum(t *testing.T, name string) string { - data, err := ioutil.ReadFile(name) - if err != nil { - t.Fatal(err) +func TestErrors(t *testing.T) { + tests := []struct { + in, out string + }{ + {"", "x"}, + {"x", ""}, + {"var x int = 5\n", "var x = 5\n"}, + {"these are \"one\" thing", "those are 'another' thing"}, } - t.Logf("sum(%q): length is %d", name, len(data)) - hash := sha1.New() - _, err = hash.Write(data) - if err != nil { - t.Fatal(err) + for _, test := range tests { + m, err := Compute([]byte(test.in), []byte(test.out)) + if err != nil { + t.Logf("Got expected error: %v", err) + continue + } + t.Errorf("Compute(%q, %q): got %+v, wanted error", test.in, test.out, m) } - return fmt.Sprintf("% x", hash.Sum(nil)) } -func run(t *testing.T, name string, args ...string) { - cmd := exec.Command(name, args...) - cmd.Stdin = os.Stdin - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - err := cmd.Run() +func TestMatching(t *testing.T) { + // The input is a source text that will be rearranged by the formatter. + const input = `package foo +var s int +func main(){} +` + + output, err := format.Source([]byte(input)) if err != nil { - t.Fatal(err) + t.Fatalf("Formatting failed: %v", err) + } + m, err := Compute([]byte(input), output) + if err != nil { + t.Fatalf("Unexpected error: %v", err) } -} -func TestGolden(t *testing.T) { - // Compute the original checksum. - goldenSum := sum(t, "test.pb.go") - // Run the proto compiler. - run(t, "protoc", "--go_out="+os.TempDir(), "test.proto") - newFile := filepath.Join(os.TempDir(), "test.pb.go") - defer os.Remove(newFile) - // Compute the new checksum. 
- newSum := sum(t, newFile) - // Verify - if newSum != goldenSum { - run(t, "diff", "-u", "test.pb.go", newFile) - t.Fatal("Code generated by protoc-gen-go has changed; update test.pb.go") + // Verify that the mapped locations have the same text. + for key, val := range m { + want := input[key.Pos:key.End] + got := string(output[val.Pos:val.End]) + if got != want { + t.Errorf("Token at %d:%d: got %q, want %q", key.Pos, key.End, got, want) + } } } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go index a5ebc85..e4a119d 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go @@ -59,14 +59,16 @@ func TestCamelCase(t *testing.T) { func TestGoPackageOption(t *testing.T) { tests := []struct { - in string - impPath, pkg string - ok bool + in string + impPath GoImportPath + pkg GoPackageName + ok bool }{ {"", "", "", false}, {"foo", "", "foo", true}, {"github.com/golang/bar", "github.com/golang/bar", "bar", true}, {"github.com/golang/bar;baz", "github.com/golang/bar", "baz", true}, + {"github.com/golang/string", "github.com/golang/string", "string", true}, } for _, tc := range tests { d := &FileDescriptor{ @@ -83,3 +85,51 @@ func TestGoPackageOption(t *testing.T) { } } } + +func TestPackageNames(t *testing.T) { + g := New() + g.packageNames = make(map[GoImportPath]GoPackageName) + g.usedPackageNames = make(map[GoPackageName]bool) + for _, test := range []struct { + importPath GoImportPath + want GoPackageName + }{ + {"github.com/golang/foo", "foo"}, + {"github.com/golang/second/package/named/foo", "foo1"}, + {"github.com/golang/third/package/named/foo", "foo2"}, + {"github.com/golang/conflicts/with/predeclared/ident/string", "string1"}, + } { + if got := g.GoPackageName(test.importPath); got != test.want { + t.Errorf("GoPackageName(%v) = %v, want %v", test.importPath, got, test.want) + } + } +} + +func TestUnescape(t *testing.T) { + tests := []struct { + in string + out string + }{ + // successful cases, including all kinds of escapes + {"", ""}, + {"foo bar baz frob nitz", "foo bar baz frob nitz"}, + {`\000\001\002\003\004\005\006\007`, string([]byte{0, 1, 2, 3, 4, 5, 6, 7})}, + {`\a\b\f\n\r\t\v\\\?\'\"`, string([]byte{'\a', '\b', '\f', '\n', '\r', '\t', '\v', '\\', '?', '\'', '"'})}, + {`\x10\x20\x30\x40\x50\x60\x70\x80`, string([]byte{16, 32, 48, 64, 80, 96, 112, 128})}, + // variable length octal escapes + {`\0\018\222\377\3\04\005\6\07`, string([]byte{0, 1, '8', 0222, 255, 3, 4, 5, 6, 7})}, + // malformed escape sequences left as is + {"foo \\g bar", "foo \\g bar"}, + {"foo \\xg0 bar", "foo \\xg0 bar"}, + {"\\", "\\"}, + {"\\x", "\\x"}, + {"\\xf", "\\xf"}, + {"\\777", "\\777"}, // overflows byte + } + for _, tc := range tests { + s := unescape(tc.in) + if s != tc.out { + t.Errorf("doUnescape(%q) = %q; should have been %q", tc.in, s, tc.out) + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go new file mode 100644 index 0000000..2950eac --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/golden_test.go @@ -0,0 +1,363 @@ +package main + +import ( + "bytes" + "flag" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "regexp" + "runtime" + "strings" + "testing" +) + +// Set --regenerate to regenerate the golden files. 
+var regenerate = flag.Bool("regenerate", false, "regenerate golden files") + +// When the environment variable RUN_AS_PROTOC_GEN_GO is set, we skip running +// tests and instead act as protoc-gen-go. This allows the test binary to +// pass itself to protoc. +func init() { + if os.Getenv("RUN_AS_PROTOC_GEN_GO") != "" { + main() + os.Exit(0) + } +} + +func TestGolden(t *testing.T) { + workdir, err := ioutil.TempDir("", "proto-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(workdir) + + // Find all the proto files we need to compile. We assume that each directory + // contains the files for a single package. + supportTypeAliases := hasReleaseTag("go1.9") + packages := map[string][]string{} + err = filepath.Walk("testdata", func(path string, info os.FileInfo, err error) error { + if filepath.Base(path) == "import_public" && !supportTypeAliases { + // Public imports require type alias support. + return filepath.SkipDir + } + if !strings.HasSuffix(path, ".proto") { + return nil + } + dir := filepath.Dir(path) + packages[dir] = append(packages[dir], path) + return nil + }) + if err != nil { + t.Fatal(err) + } + + // Compile each package, using this binary as protoc-gen-go. + for _, sources := range packages { + args := []string{"-Itestdata", "--go_out=plugins=grpc,paths=source_relative:" + workdir} + args = append(args, sources...) + protoc(t, args) + } + + // Compare each generated file to the golden version. + filepath.Walk(workdir, func(genPath string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + + // For each generated file, figure out the path to the corresponding + // golden file in the testdata directory. + relPath, err := filepath.Rel(workdir, genPath) + if err != nil { + t.Errorf("filepath.Rel(%q, %q): %v", workdir, genPath, err) + return nil + } + if filepath.SplitList(relPath)[0] == ".." { + t.Errorf("generated file %q is not relative to %q", genPath, workdir) + } + goldenPath := filepath.Join("testdata", relPath) + + got, err := ioutil.ReadFile(genPath) + if err != nil { + t.Error(err) + return nil + } + if *regenerate { + // If --regenerate set, just rewrite the golden files. + err := ioutil.WriteFile(goldenPath, got, 0666) + if err != nil { + t.Error(err) + } + return nil + } + + want, err := ioutil.ReadFile(goldenPath) + if err != nil { + t.Error(err) + return nil + } + + want = fdescRE.ReplaceAll(want, nil) + got = fdescRE.ReplaceAll(got, nil) + if bytes.Equal(got, want) { + return nil + } + + cmd := exec.Command("diff", "-u", goldenPath, genPath) + out, _ := cmd.CombinedOutput() + t.Errorf("golden file differs: %v\n%v", relPath, string(out)) + return nil + }) +} + +var fdescRE = regexp.MustCompile(`(?ms)^var fileDescriptor.*}`) + +// Source files used by TestParameters. 
+const ( + aProto = ` +syntax = "proto3"; +package test.alpha; +option go_package = "package/alpha"; +import "beta/b.proto"; +message M { test.beta.M field = 1; }` + + bProto = ` +syntax = "proto3"; +package test.beta; +// no go_package option +message M {}` +) + +func TestParameters(t *testing.T) { + for _, test := range []struct { + parameters string + wantFiles map[string]bool + wantImportsA map[string]bool + wantPackageA string + wantPackageB string + }{{ + parameters: "", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + "github.com/golang/protobuf/proto": true, + "beta": true, + }, + }, { + parameters: "import_prefix=prefix", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + // This really doesn't seem like useful behavior. + "prefixgithub.com/golang/protobuf/proto": true, + "prefixbeta": true, + }, + }, { + // import_path only affects the 'package' line. + parameters: "import_path=import/path/of/pkg", + wantPackageA: "alpha", + wantPackageB: "pkg", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + }, { + parameters: "Mbeta/b.proto=package/gamma", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + "github.com/golang/protobuf/proto": true, + // Rewritten by the M parameter. + "package/gamma": true, + }, + }, { + parameters: "import_prefix=prefix,Mbeta/b.proto=package/gamma", + wantFiles: map[string]bool{ + "package/alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + wantImportsA: map[string]bool{ + // import_prefix applies after M. + "prefixpackage/gamma": true, + }, + }, { + parameters: "paths=source_relative", + wantFiles: map[string]bool{ + "alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + }, { + parameters: "paths=source_relative,import_prefix=prefix", + wantFiles: map[string]bool{ + // import_prefix doesn't affect filenames. + "alpha/a.pb.go": true, + "beta/b.pb.go": true, + }, + wantPackageA: "alpha", + wantPackageB: "test_beta", + }} { + name := test.parameters + if name == "" { + name = "defaults" + } + // TODO: Switch to t.Run when we no longer support Go 1.6. 
+ t.Logf("TEST: %v", name) + workdir, err := ioutil.TempDir("", "proto-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(workdir) + + for _, dir := range []string{"alpha", "beta", "out"} { + if err := os.MkdirAll(filepath.Join(workdir, dir), 0777); err != nil { + t.Fatal(err) + } + } + + if err := ioutil.WriteFile(filepath.Join(workdir, "alpha", "a.proto"), []byte(aProto), 0666); err != nil { + t.Fatal(err) + } + + if err := ioutil.WriteFile(filepath.Join(workdir, "beta", "b.proto"), []byte(bProto), 0666); err != nil { + t.Fatal(err) + } + + protoc(t, []string{ + "-I" + workdir, + "--go_out=" + test.parameters + ":" + filepath.Join(workdir, "out"), + filepath.Join(workdir, "alpha", "a.proto"), + }) + protoc(t, []string{ + "-I" + workdir, + "--go_out=" + test.parameters + ":" + filepath.Join(workdir, "out"), + filepath.Join(workdir, "beta", "b.proto"), + }) + + contents := make(map[string]string) + gotFiles := make(map[string]bool) + outdir := filepath.Join(workdir, "out") + filepath.Walk(outdir, func(p string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + base := filepath.Base(p) + if base == "a.pb.go" || base == "b.pb.go" { + b, err := ioutil.ReadFile(p) + if err != nil { + t.Fatal(err) + } + contents[base] = string(b) + } + relPath, _ := filepath.Rel(outdir, p) + gotFiles[relPath] = true + return nil + }) + for got := range gotFiles { + if runtime.GOOS == "windows" { + got = filepath.ToSlash(got) + } + if !test.wantFiles[got] { + t.Errorf("unexpected output file: %v", got) + } + } + for want := range test.wantFiles { + if runtime.GOOS == "windows" { + want = filepath.FromSlash(want) + } + if !gotFiles[want] { + t.Errorf("missing output file: %v", want) + } + } + gotPackageA, gotImports, err := parseFile(contents["a.pb.go"]) + if err != nil { + t.Fatal(err) + } + gotPackageB, _, err := parseFile(contents["b.pb.go"]) + if err != nil { + t.Fatal(err) + } + if got, want := gotPackageA, test.wantPackageA; want != got { + t.Errorf("output file a.pb.go is package %q, want %q", got, want) + } + if got, want := gotPackageB, test.wantPackageB; want != got { + t.Errorf("output file b.pb.go is package %q, want %q", got, want) + } + missingImport := false + WantImport: + for want := range test.wantImportsA { + for _, imp := range gotImports { + if `"`+want+`"` == imp { + continue WantImport + } + } + t.Errorf("output file a.pb.go does not contain expected import %q", want) + missingImport = true + } + if missingImport { + t.Error("got imports:") + for _, imp := range gotImports { + t.Errorf(" %v", imp) + } + } + } +} + +// parseFile returns a file's package name and a list of all packages it imports. +func parseFile(source string) (packageName string, imports []string, err error) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "", source, parser.ImportsOnly) + if err != nil { + return "", nil, err + } + for _, imp := range f.Imports { + imports = append(imports, imp.Path.Value) + } + return f.Name.Name, imports, nil +} + +func protoc(t *testing.T, args []string) { + cmd := exec.Command("protoc", "--plugin=protoc-gen-go="+os.Args[0]) + cmd.Args = append(cmd.Args, args...) + // We set the RUN_AS_PROTOC_GEN_GO environment variable to indicate that + // the subprocess should act as a proto compiler rather than a test. 
+ cmd.Env = append(os.Environ(), "RUN_AS_PROTOC_GEN_GO=1") + out, err := cmd.CombinedOutput() + if len(out) > 0 || err != nil { + t.Log("RUNNING: ", strings.Join(cmd.Args, " ")) + } + if len(out) > 0 { + t.Log(string(out)) + } + if err != nil { + t.Fatalf("protoc: %v", err) + } +} + +func hasReleaseTag(want string) bool { + for _, tag := range build.Default.ReleaseTags { + if tag == want { + return true + } + } + return false +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go index 2660e47..5d1e3f0 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go @@ -36,7 +36,6 @@ package grpc import ( "fmt" - "path" "strconv" "strings" @@ -53,8 +52,10 @@ const generatedCodeVersion = 4 // Paths for packages used by code generated in this file, // relative to the import_prefix of the generator.Generator. const ( - contextPkgPath = "golang.org/x/net/context" + contextPkgPath = "context" grpcPkgPath = "google.golang.org/grpc" + codePkgPath = "google.golang.org/grpc/codes" + statusPkgPath = "google.golang.org/grpc/status" ) func init() { @@ -83,8 +84,6 @@ var ( // Init initializes the plugin. func (g *grpc) Init(gen *generator.Generator) { g.gen = gen - contextPkg = generator.RegisterUniquePackageName("context", nil) - grpcPkg = generator.RegisterUniquePackageName("grpc", nil) } // Given a type name defined in a .proto, return its object. @@ -108,6 +107,9 @@ func (g *grpc) Generate(file *generator.FileDescriptor) { return } + contextPkg = string(g.gen.AddImport(contextPkgPath)) + grpcPkg = string(g.gen.AddImport(grpcPkgPath)) + g.P("// Reference imports to suppress errors if they are not otherwise used.") g.P("var _ ", contextPkg, ".Context") g.P("var _ ", grpcPkg, ".ClientConn") @@ -126,23 +128,19 @@ func (g *grpc) Generate(file *generator.FileDescriptor) { // GenerateImports generates the import declaration for this file. func (g *grpc) GenerateImports(file *generator.FileDescriptor) { - if len(file.FileDescriptorProto.Service) == 0 { - return - } - g.P("import (") - g.P(contextPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, contextPkgPath))) - g.P(grpcPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, grpcPkgPath))) - g.P(")") - g.P() } // reservedClientName records whether a client name is reserved on the client side. var reservedClientName = map[string]bool{ -// TODO: do we need any in gRPC? + // TODO: do we need any in gRPC? } func unexport(s string) string { return strings.ToLower(s[:1]) + s[1:] } +// deprecationComment is the standard comment added to deprecated +// messages, fields, enums, and enum values. +var deprecationComment = "// Deprecated: Do not use." + // generateService generates all the code for the named service. func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.ServiceDescriptorProto, index int) { path := fmt.Sprintf("6,%d", index) // 6 means service. @@ -153,12 +151,18 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi fullServName = pkg + "." + fullServName } servName := generator.CamelCase(origServName) + deprecated := service.GetOptions().GetDeprecated() g.P() - g.P("// Client API for ", servName, " service") - g.P() + g.P(fmt.Sprintf(`// %sClient is the client API for %s service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.`, servName, servName)) // Client interface. + if deprecated { + g.P("//") + g.P(deprecationComment) + } g.P("type ", servName, "Client interface {") for i, method := range service.Method { g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. @@ -174,6 +178,9 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P() // NewClient factory. + if deprecated { + g.P(deprecationComment) + } g.P("func New", servName, "Client (cc *", grpcPkg, ".ClientConn) ", servName, "Client {") g.P("return &", unexport(servName), "Client{cc}") g.P("}") @@ -196,11 +203,13 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.generateClientMethod(servName, fullServName, serviceDescVar, method, descExpr) } - g.P("// Server API for ", servName, " service") - g.P() - // Server interface. serverType := servName + "Server" + g.P("// ", serverType, " is the server API for ", servName, " service.") + if deprecated { + g.P("//") + g.P(deprecationComment) + } g.P("type ", serverType, " interface {") for i, method := range service.Method { g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. @@ -209,7 +218,16 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P("}") g.P() + // Server Unimplemented struct for forward compatability. + if deprecated { + g.P(deprecationComment) + } + g.generateUnimplementedServer(servName, service) + // Server registration. + if deprecated { + g.P(deprecationComment) + } g.P("func Register", servName, "Server(s *", grpcPkg, ".Server, srv ", serverType, ") {") g.P("s.RegisterService(&", serviceDescVar, `, srv)`) g.P("}") @@ -259,6 +277,35 @@ func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.Servi g.P() } +// generateUnimplementedServer creates the unimplemented server struct +func (g *grpc) generateUnimplementedServer(servName string, service *pb.ServiceDescriptorProto) { + serverType := servName + "Server" + g.P("// Unimplemented", serverType, " can be embedded to have forward compatible implementations.") + g.P("type Unimplemented", serverType, " struct {") + g.P("}") + g.P() + // UnimplementedServer's concrete methods + for _, method := range service.Method { + g.generateServerMethodConcrete(servName, method) + } + g.P() +} + +// generateServerMethodConcrete returns unimplemented methods which ensure forward compatibility +func (g *grpc) generateServerMethodConcrete(servName string, method *pb.MethodDescriptorProto) { + header := g.generateServerSignatureWithParamNames(servName, method) + g.P("func (*Unimplemented", servName, "Server) ", header, " {") + var nilArg string + if !method.GetServerStreaming() && !method.GetClientStreaming() { + nilArg = "nil, " + } + methName := generator.CamelCase(method.GetName()) + statusPkg := string(g.gen.AddImport(statusPkgPath)) + codePkg := string(g.gen.AddImport(codePkgPath)) + g.P("return ", nilArg, statusPkg, `.Errorf(`, codePkg, `.Unimplemented, "method `, methName, ` not implemented")`) + g.P("}") +} + // generateClientSignature returns the client-side signature for a method. 
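// [Illustrative aside, not part of the patch] The Unimplemented<Service>Server struct
// generated above is meant to be embedded by handwritten server implementations so that
// adding a new RPC to the .proto later does not break their compilation. A sketch
// against a hypothetical generated package pb with a Greeter service: SayHello is
// implemented, while any RPC added later falls through to the embedded stub, which
// returns a codes.Unimplemented error at runtime.

type greeterServer struct {
	pb.UnimplementedGreeterServer // supplies stub methods for any RPC not overridden below
}

func (s *greeterServer) SayHello(ctx context.Context, req *pb.HelloRequest) (*pb.HelloReply, error) {
	return &pb.HelloReply{Message: "hello " + req.GetName()}, nil
}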
func (g *grpc) generateClientSignature(servName string, method *pb.MethodDescriptorProto) string { origMethName := method.GetName() @@ -283,11 +330,14 @@ func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar strin inType := g.typeName(method.GetInputType()) outType := g.typeName(method.GetOutputType()) + if method.GetOptions().GetDeprecated() { + g.P(deprecationComment) + } g.P("func (c *", unexport(servName), "Client) ", g.generateClientSignature(servName, method), "{") if !method.GetServerStreaming() && !method.GetClientStreaming() { g.P("out := new(", outType, ")") // TODO: Pass descExpr to Invoke. - g.P("err := ", grpcPkg, `.Invoke(ctx, "`, sname, `", in, out, c.cc, opts...)`) + g.P(`err := c.cc.Invoke(ctx, "`, sname, `", in, out, opts...)`) g.P("if err != nil { return nil, err }") g.P("return out, nil") g.P("}") @@ -295,7 +345,7 @@ func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar strin return } streamType := unexport(servName) + methName + "Client" - g.P("stream, err := ", grpcPkg, ".NewClientStream(ctx, ", descExpr, `, c.cc, "`, sname, `", opts...)`) + g.P("stream, err := c.cc.NewStream(ctx, ", descExpr, `, "`, sname, `", opts...)`) g.P("if err != nil { return nil, err }") g.P("x := &", streamType, "{stream}") if !method.GetClientStreaming() { @@ -355,6 +405,30 @@ func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar strin } } +// generateServerSignatureWithParamNames returns the server-side signature for a method with parameter names. +func (g *grpc) generateServerSignatureWithParamNames(servName string, method *pb.MethodDescriptorProto) string { + origMethName := method.GetName() + methName := generator.CamelCase(origMethName) + if reservedClientName[methName] { + methName += "_" + } + + var reqArgs []string + ret := "error" + if !method.GetServerStreaming() && !method.GetClientStreaming() { + reqArgs = append(reqArgs, "ctx "+contextPkg+".Context") + ret = "(*" + g.typeName(method.GetOutputType()) + ", error)" + } + if !method.GetClientStreaming() { + reqArgs = append(reqArgs, "req *"+g.typeName(method.GetInputType())) + } + if method.GetServerStreaming() || method.GetClientStreaming() { + reqArgs = append(reqArgs, "srv "+servName+"_"+generator.CamelCase(origMethName)+"Server") + } + + return methName + "(" + strings.Join(reqArgs, ", ") + ") " + ret +} + // generateServerSignature returns the server-side signature for a method. func (g *grpc) generateServerSignature(servName string, method *pb.MethodDescriptorProto) string { origMethName := method.GetName() diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile deleted file mode 100644 index 4095623..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. 
-# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Not stored here, but plugin.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/compiler/plugin.proto -# Also we need to fix an import. -regenerate: - @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=Mgoogle/protobuf/descriptor.proto=github.com/golang/protobuf/protoc-gen-go/descriptor:../../../../.. \ - -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/compiler/plugin.proto - -restore: - cp plugin.pb.golden plugin.pb.go - -preserve: - cp plugin.pb.go plugin.pb.golden diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go index c608a24..61bfc10 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go @@ -37,14 +37,33 @@ type Version struct { Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should // be empty for mainline stable releases. - Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` - XXX_unrecognized []byte `json:"-"` + Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Version) Reset() { *m = Version{} } func (m *Version) String() string { return proto.CompactTextString(m) } func (*Version) ProtoMessage() {} func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (m *Version) Unmarshal(b []byte) error { + return xxx_messageInfo_Version.Unmarshal(m, b) +} +func (m *Version) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Version.Marshal(b, m, deterministic) +} +func (dst *Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_Version.Merge(dst, src) +} +func (m *Version) XXX_Size() int { + return xxx_messageInfo_Version.Size(m) +} +func (m *Version) XXX_DiscardUnknown() { + xxx_messageInfo_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_Version proto.InternalMessageInfo func (m *Version) GetMajor() int32 { if m != nil && m.Major != nil { @@ -98,14 +117,33 @@ type CodeGeneratorRequest struct { // fully qualified. ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"` // The version number of protocol compiler. 
- CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` - XXX_unrecognized []byte `json:"-"` + CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} } func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorRequest) ProtoMessage() {} func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *CodeGeneratorRequest) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorRequest.Unmarshal(m, b) +} +func (m *CodeGeneratorRequest) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorRequest.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorRequest.Merge(dst, src) +} +func (m *CodeGeneratorRequest) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorRequest.Size(m) +} +func (m *CodeGeneratorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorRequest proto.InternalMessageInfo func (m *CodeGeneratorRequest) GetFileToGenerate() []string { if m != nil { @@ -145,15 +183,34 @@ type CodeGeneratorResponse struct { // problem in protoc itself -- such as the input CodeGeneratorRequest being // unparseable -- should be reported by writing a message to stderr and // exiting with a non-zero status code. - Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` - File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` + Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} } func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorResponse) ProtoMessage() {} func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (m *CodeGeneratorResponse) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorResponse.Unmarshal(m, b) +} +func (m *CodeGeneratorResponse) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorResponse.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorResponse.Merge(dst, src) +} +func (m *CodeGeneratorResponse) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorResponse.Size(m) +} +func (m *CodeGeneratorResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorResponse proto.InternalMessageInfo func (m *CodeGeneratorResponse) GetError() string { if m != nil && m.Error != nil { @@ -222,14 +279,33 @@ type CodeGeneratorResponse_File struct { // If |insertion_point| is present, |name| must also be present. 
InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"` // The file contents. - Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` - XXX_unrecognized []byte `json:"-"` + Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} } func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) } func (*CodeGeneratorResponse_File) ProtoMessage() {} func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } +func (m *CodeGeneratorResponse_File) Unmarshal(b []byte) error { + return xxx_messageInfo_CodeGeneratorResponse_File.Unmarshal(m, b) +} +func (m *CodeGeneratorResponse_File) Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CodeGeneratorResponse_File.Marshal(b, m, deterministic) +} +func (dst *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) { + xxx_messageInfo_CodeGeneratorResponse_File.Merge(dst, src) +} +func (m *CodeGeneratorResponse_File) XXX_Size() int { + return xxx_messageInfo_CodeGeneratorResponse_File.Size(m) +} +func (m *CodeGeneratorResponse_File) XXX_DiscardUnknown() { + xxx_messageInfo_CodeGeneratorResponse_File.DiscardUnknown(m) +} + +var xxx_messageInfo_CodeGeneratorResponse_File proto.InternalMessageInfo func (m *CodeGeneratorResponse_File) GetName() string { if m != nil && m.Name != nil { diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto new file mode 100644 index 0000000..5b55745 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto @@ -0,0 +1,167 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to +// change. +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +syntax = "proto2"; +package google.protobuf.compiler; +option java_package = "com.google.protobuf.compiler"; +option java_outer_classname = "PluginProtos"; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go"; + +import "google/protobuf/descriptor.proto"; + +// The version number of protocol compiler. +message Version { + optional int32 major = 1; + optional int32 minor = 2; + optional int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + optional string suffix = 4; +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + repeated FileDescriptorProto proto_file = 15; + + // The version number of protocol compiler. + optional Version compiler_version = 3; + +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. 
+ // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. 
+ optional string content = 15; + } + repeated File file = 15; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile deleted file mode 100644 index a0bf9fe..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile +++ /dev/null @@ -1,73 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -all: - @echo run make test - -include ../../Make.protobuf - -test: golden testbuild - -#test: golden testbuild extension_test -# ./extension_test -# @echo PASS - -my_test/test.pb.go: my_test/test.proto - protoc --go_out=Mmulti/multi1.proto=github.com/golang/protobuf/protoc-gen-go/testdata/multi:. $< - -golden: - make -B my_test/test.pb.go - sed -i -e '/return.*fileDescriptor/d' my_test/test.pb.go - sed -i -e '/^var fileDescriptor/,/^}/d' my_test/test.pb.go - sed -i -e '/proto.RegisterFile.*fileDescriptor/d' my_test/test.pb.go - gofmt -w my_test/test.pb.go - diff -w my_test/test.pb.go my_test/test.pb.go.golden - -nuke: clean - -testbuild: regenerate - go test - -regenerate: - # Invoke protoc once to generate three independent .pb.go files in the same package. - protoc --go_out=. multi/multi1.proto multi/multi2.proto multi/multi3.proto - -#extension_test: extension_test.$O -# $(LD) -L. 
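The vendored plugin.proto above spells out the protoc plugin contract: protoc writes an encoded CodeGeneratorRequest to the plugin's stdin, the plugin answers with a CodeGeneratorResponse on stdout, and each returned File may optionally target an insertion point in a file emitted by an earlier generator in the same protoc run. A minimal sketch of such a plugin built against the vendored plugin package follows; the plugin name protoc-gen-example and the generated file names/contents are illustrative assumptions, not part of this change.

```go
// Minimal protoc plugin sketch. Assuming it is built and installed on PATH as
// "protoc-gen-example", running `protoc --example_out=. foo.proto` invokes it.
package main

import (
	"io/ioutil"
	"os"

	"github.com/golang/protobuf/proto"
	plugin_go "github.com/golang/protobuf/protoc-gen-go/plugin"
)

func main() {
	// protoc writes an encoded CodeGeneratorRequest to the plugin's stdin.
	in, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		os.Exit(1)
	}
	req := &plugin_go.CodeGeneratorRequest{}
	if err := proto.Unmarshal(in, req); err != nil {
		os.Exit(1)
	}

	// Emit one generated file per .proto file named on the command line.
	// Setting InsertionPoint instead of (or alongside) Name+Content would splice
	// output into a file produced by an earlier generator in the same run.
	resp := &plugin_go.CodeGeneratorResponse{}
	for _, name := range req.GetFileToGenerate() {
		resp.File = append(resp.File, &plugin_go.CodeGeneratorResponse_File{
			Name:    proto.String(name + ".example.txt"),
			Content: proto.String("// generated from " + name + "\n"),
		})
	}

	// The encoded CodeGeneratorResponse goes back to protoc on stdout.
	out, err := proto.Marshal(resp)
	if err != nil {
		os.Exit(1)
	}
	os.Stdout.Write(out)
}
```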
-o $@ $< - -#multi.a: multi3.pb.$O multi2.pb.$O multi1.pb.$O -# rm -f multi.a -# $(QUOTED_GOBIN)/gopack grc $@ $< - -#test.pb.go: imp.pb.go -#multi1.pb.go: multi2.pb.go multi3.pb.go -#main.$O: imp.pb.$O test.pb.$O multi.a -#extension_test.$O: extension_base.pb.$O extension_extra.pb.$O extension_user.pb.$O diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go new file mode 100644 index 0000000..2515c99 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.pb.go @@ -0,0 +1,283 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// deprecated/deprecated.proto is a deprecated file. + +// package deprecated contains only deprecated messages and services. + +package deprecated + +import ( + context "context" + fmt "fmt" + proto "github.com/golang/protobuf/proto" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +// DeprecatedEnum contains deprecated values. +type DeprecatedEnum int32 // Deprecated: Do not use. +const ( + // DEPRECATED is the iota value of this enum. + DeprecatedEnum_DEPRECATED DeprecatedEnum = 0 // Deprecated: Do not use. +) + +var DeprecatedEnum_name = map[int32]string{ + 0: "DEPRECATED", +} + +var DeprecatedEnum_value = map[string]int32{ + "DEPRECATED": 0, +} + +func (x DeprecatedEnum) String() string { + return proto.EnumName(DeprecatedEnum_name, int32(x)) +} + +func (DeprecatedEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_f64ba265cd7eae3f, []int{0} +} + +// DeprecatedRequest is a request to DeprecatedCall. +// +// Deprecated: Do not use. +type DeprecatedRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeprecatedRequest) Reset() { *m = DeprecatedRequest{} } +func (m *DeprecatedRequest) String() string { return proto.CompactTextString(m) } +func (*DeprecatedRequest) ProtoMessage() {} +func (*DeprecatedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_f64ba265cd7eae3f, []int{0} +} + +func (m *DeprecatedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeprecatedRequest.Unmarshal(m, b) +} +func (m *DeprecatedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeprecatedRequest.Marshal(b, m, deterministic) +} +func (m *DeprecatedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeprecatedRequest.Merge(m, src) +} +func (m *DeprecatedRequest) XXX_Size() int { + return xxx_messageInfo_DeprecatedRequest.Size(m) +} +func (m *DeprecatedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeprecatedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeprecatedRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +type DeprecatedResponse struct { + // DeprecatedField contains a DeprecatedEnum. 
+ DeprecatedField DeprecatedEnum `protobuf:"varint,1,opt,name=deprecated_field,json=deprecatedField,proto3,enum=deprecated.DeprecatedEnum" json:"deprecated_field,omitempty"` // Deprecated: Do not use. + // DeprecatedOneof contains a deprecated field. + // + // Types that are valid to be assigned to DeprecatedOneof: + // *DeprecatedResponse_DeprecatedOneofField + DeprecatedOneof isDeprecatedResponse_DeprecatedOneof `protobuf_oneof:"deprecated_oneof"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeprecatedResponse) Reset() { *m = DeprecatedResponse{} } +func (m *DeprecatedResponse) String() string { return proto.CompactTextString(m) } +func (*DeprecatedResponse) ProtoMessage() {} +func (*DeprecatedResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_f64ba265cd7eae3f, []int{1} +} + +func (m *DeprecatedResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeprecatedResponse.Unmarshal(m, b) +} +func (m *DeprecatedResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeprecatedResponse.Marshal(b, m, deterministic) +} +func (m *DeprecatedResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeprecatedResponse.Merge(m, src) +} +func (m *DeprecatedResponse) XXX_Size() int { + return xxx_messageInfo_DeprecatedResponse.Size(m) +} +func (m *DeprecatedResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeprecatedResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeprecatedResponse proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeprecatedResponse) GetDeprecatedField() DeprecatedEnum { + if m != nil { + return m.DeprecatedField + } + return DeprecatedEnum_DEPRECATED +} + +type isDeprecatedResponse_DeprecatedOneof interface { + isDeprecatedResponse_DeprecatedOneof() +} + +type DeprecatedResponse_DeprecatedOneofField struct { + DeprecatedOneofField string `protobuf:"bytes,2,opt,name=deprecated_oneof_field,json=deprecatedOneofField,proto3,oneof"` +} + +func (*DeprecatedResponse_DeprecatedOneofField) isDeprecatedResponse_DeprecatedOneof() {} + +func (m *DeprecatedResponse) GetDeprecatedOneof() isDeprecatedResponse_DeprecatedOneof { + if m != nil { + return m.DeprecatedOneof + } + return nil +} + +// Deprecated: Do not use. +func (m *DeprecatedResponse) GetDeprecatedOneofField() string { + if x, ok := m.GetDeprecatedOneof().(*DeprecatedResponse_DeprecatedOneofField); ok { + return x.DeprecatedOneofField + } + return "" +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*DeprecatedResponse) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*DeprecatedResponse_DeprecatedOneofField)(nil), + } +} + +func init() { + proto.RegisterEnum("deprecated.DeprecatedEnum", DeprecatedEnum_name, DeprecatedEnum_value) + proto.RegisterType((*DeprecatedRequest)(nil), "deprecated.DeprecatedRequest") + proto.RegisterType((*DeprecatedResponse)(nil), "deprecated.DeprecatedResponse") +} + +func init() { proto.RegisterFile("deprecated/deprecated.proto", fileDescriptor_f64ba265cd7eae3f) } + +var fileDescriptor_f64ba265cd7eae3f = []byte{ + // 287 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xcd, 0x4a, 0xf3, 0x40, + 0x14, 0x86, 0x7b, 0xe6, 0x83, 0x0f, 0x9d, 0x45, 0xad, 0x83, 0x68, 0x88, 0x28, 0x25, 0xab, 0x20, + 0x34, 0x81, 0xba, 0x2b, 0x6e, 0x9a, 0x26, 0xa2, 0x2b, 0x25, 0x76, 0xe5, 0x46, 0xf2, 0x73, 0x12, + 0x03, 0xe9, 0x4c, 0x4c, 0x26, 0x5e, 0x83, 0xf7, 0xe3, 0xc6, 0xcb, 0x93, 0x49, 0x8b, 0x33, 0x05, + 0xdd, 0x84, 0x93, 0x79, 0xdf, 0xe7, 0xfc, 0xd2, 0xf3, 0x1c, 0x9b, 0x16, 0xb3, 0x44, 0x62, 0xee, + 0xeb, 0xd0, 0x6b, 0x5a, 0x21, 0x05, 0xa3, 0xfa, 0xc5, 0x39, 0xa3, 0xc7, 0xe1, 0xcf, 0x5f, 0x8c, + 0x6f, 0x3d, 0x76, 0x72, 0x41, 0x2c, 0x70, 0x3e, 0x81, 0x32, 0x53, 0xe9, 0x1a, 0xc1, 0x3b, 0x64, + 0xf7, 0x74, 0xa2, 0xe9, 0x97, 0xa2, 0xc2, 0x3a, 0xb7, 0x60, 0x0a, 0xee, 0x78, 0x6e, 0x7b, 0x46, + 0x21, 0x4d, 0x46, 0xbc, 0xdf, 0x04, 0xc4, 0x82, 0xf8, 0x48, 0xcb, 0xb7, 0x0a, 0x63, 0x0b, 0x7a, + 0x6a, 0xa4, 0x12, 0x1c, 0x45, 0xb1, 0x4b, 0x48, 0xa6, 0xe0, 0x1e, 0x2a, 0xe8, 0x6e, 0x14, 0x9f, + 0x68, 0xcf, 0x83, 0xb2, 0x0c, 0xac, 0xea, 0x30, 0x60, 0x7b, 0xad, 0x0c, 0xfc, 0x95, 0x4b, 0xc7, + 0xfb, 0xa5, 0x19, 0xa3, 0x34, 0x8c, 0x1e, 0xe3, 0x68, 0xb5, 0x5c, 0x47, 0xe1, 0x64, 0x64, 0x93, + 0x03, 0xb0, 0x89, 0x05, 0x73, 0x6e, 0x0e, 0xfe, 0x84, 0xed, 0x7b, 0x95, 0x21, 0x5b, 0x9b, 0xf8, + 0x2a, 0xa9, 0x6b, 0x76, 0xf1, 0xfb, 0x54, 0xbb, 0x4d, 0xd9, 0x97, 0x7f, 0xc9, 0xdb, 0x75, 0x39, + 0xff, 0x3e, 0x08, 0xd8, 0xea, 0x13, 0x2c, 0x9f, 0x6f, 0xca, 0x4a, 0xbe, 0xf6, 0xa9, 0x97, 0x89, + 0x8d, 0x5f, 0x8a, 0x3a, 0xe1, 0xa5, 0x3f, 0xdc, 0x23, 0xed, 0x8b, 0x6d, 0x90, 0xcd, 0x4a, 0xe4, + 0xb3, 0x52, 0xf8, 0x12, 0x3b, 0x99, 0x27, 0x32, 0x31, 0x4e, 0xf7, 0x05, 0x90, 0xfe, 0x1f, 0x5c, + 0xd7, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x08, 0xd5, 0xa0, 0x89, 0xdd, 0x01, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DeprecatedServiceClient is the client API for DeprecatedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +// +// Deprecated: Do not use. +type DeprecatedServiceClient interface { + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + DeprecatedCall(ctx context.Context, in *DeprecatedRequest, opts ...grpc.CallOption) (*DeprecatedResponse, error) +} + +type deprecatedServiceClient struct { + cc *grpc.ClientConn +} + +// Deprecated: Do not use. +func NewDeprecatedServiceClient(cc *grpc.ClientConn) DeprecatedServiceClient { + return &deprecatedServiceClient{cc} +} + +// Deprecated: Do not use. 
+func (c *deprecatedServiceClient) DeprecatedCall(ctx context.Context, in *DeprecatedRequest, opts ...grpc.CallOption) (*DeprecatedResponse, error) { + out := new(DeprecatedResponse) + err := c.cc.Invoke(ctx, "/deprecated.DeprecatedService/DeprecatedCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DeprecatedServiceServer is the server API for DeprecatedService service. +// +// Deprecated: Do not use. +type DeprecatedServiceServer interface { + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + DeprecatedCall(context.Context, *DeprecatedRequest) (*DeprecatedResponse, error) +} + +// Deprecated: Do not use. +// UnimplementedDeprecatedServiceServer can be embedded to have forward compatible implementations. +type UnimplementedDeprecatedServiceServer struct { +} + +func (*UnimplementedDeprecatedServiceServer) DeprecatedCall(ctx context.Context, req *DeprecatedRequest) (*DeprecatedResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeprecatedCall not implemented") +} + +// Deprecated: Do not use. +func RegisterDeprecatedServiceServer(s *grpc.Server, srv DeprecatedServiceServer) { + s.RegisterService(&_DeprecatedService_serviceDesc, srv) +} + +func _DeprecatedService_DeprecatedCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeprecatedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeprecatedServiceServer).DeprecatedCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/deprecated.DeprecatedService/DeprecatedCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeprecatedServiceServer).DeprecatedCall(ctx, req.(*DeprecatedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DeprecatedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "deprecated.DeprecatedService", + HandlerType: (*DeprecatedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeprecatedCall", + Handler: _DeprecatedService_DeprecatedCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "deprecated/deprecated.proto", +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto new file mode 100644 index 0000000..192b158 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/deprecated/deprecated.proto @@ -0,0 +1,74 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +// package deprecated contains only deprecated messages and services. +package deprecated; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/deprecated"; + +option deprecated = true; // file-level deprecation + +// DeprecatedRequest is a request to DeprecatedCall. +message DeprecatedRequest { + option deprecated = true; +} + +message DeprecatedResponse { + // comment for DeprecatedResponse is omitted to guarantee deprecation + // message doesn't append unnecessary comments. + option deprecated = true; + // DeprecatedField contains a DeprecatedEnum. + DeprecatedEnum deprecated_field = 1 [deprecated=true]; + // DeprecatedOneof contains a deprecated field. + oneof deprecated_oneof { + // DeprecatedOneofField is a deprecated field. + string deprecated_oneof_field = 2 [deprecated=true]; + } +} + +// DeprecatedEnum contains deprecated values. +enum DeprecatedEnum { + option deprecated = true; + // DEPRECATED is the iota value of this enum. + DEPRECATED = 0 [deprecated=true]; +} + +// DeprecatedService is for making DeprecatedCalls +service DeprecatedService { + option deprecated = true; + + // DeprecatedCall takes a DeprecatedRequest and returns a DeprecatedResponse. + rpc DeprecatedCall(DeprecatedRequest) returns (DeprecatedResponse) { + option deprecated = true; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go new file mode 100644 index 0000000..b5d4aa6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_base/extension_base.proto + +package extension_base + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
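The deprecated.proto and its generated counterpart above show how `option deprecated = true` at file, message, field, enum, and service level surfaces in the generated Go as `// Deprecated: Do not use.` comments on the corresponding identifiers. A short sketch of calling the generated client follows, assuming a server is reachable at localhost:50051; the address and the logging are illustrative only, and linters such as staticcheck will flag these call sites as uses of deprecated APIs.

```go
// Sketch of using the generated (deprecated) gRPC client from the testdata
// package above; localhost:50051 is an assumed server address.
package main

import (
	"context"
	"log"

	deprecated "github.com/golang/protobuf/protoc-gen-go/testdata/deprecated"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Both the constructor and the RPC carry "Deprecated: Do not use." comments
	// in the generated code.
	client := deprecated.NewDeprecatedServiceClient(conn)
	resp, err := client.DeprecatedCall(context.Background(), &deprecated.DeprecatedRequest{})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("deprecated_field: %v", resp.GetDeprecatedField())
}
```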
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type BaseMessage struct { + Height *int32 `protobuf:"varint,1,opt,name=height" json:"height,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BaseMessage) Reset() { *m = BaseMessage{} } +func (m *BaseMessage) String() string { return proto.CompactTextString(m) } +func (*BaseMessage) ProtoMessage() {} +func (*BaseMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_2fbd53bac0b7ca8a, []int{0} +} + +var extRange_BaseMessage = []proto.ExtensionRange{ + {Start: 4, End: 9}, + {Start: 16, End: 536870911}, +} + +func (*BaseMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_BaseMessage +} + +func (m *BaseMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BaseMessage.Unmarshal(m, b) +} +func (m *BaseMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BaseMessage.Marshal(b, m, deterministic) +} +func (m *BaseMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_BaseMessage.Merge(m, src) +} +func (m *BaseMessage) XXX_Size() int { + return xxx_messageInfo_BaseMessage.Size(m) +} +func (m *BaseMessage) XXX_DiscardUnknown() { + xxx_messageInfo_BaseMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_BaseMessage proto.InternalMessageInfo + +func (m *BaseMessage) GetHeight() int32 { + if m != nil && m.Height != nil { + return *m.Height + } + return 0 +} + +// Another message that may be extended, using message_set_wire_format. +type OldStyleMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldStyleMessage) Reset() { *m = OldStyleMessage{} } +func (m *OldStyleMessage) String() string { return proto.CompactTextString(m) } +func (*OldStyleMessage) ProtoMessage() {} +func (*OldStyleMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_2fbd53bac0b7ca8a, []int{1} +} + +var extRange_OldStyleMessage = []proto.ExtensionRange{ + {Start: 100, End: 2147483646}, +} + +func (*OldStyleMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OldStyleMessage +} + +func (m *OldStyleMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldStyleMessage.Unmarshal(m, b) +} +func (m *OldStyleMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldStyleMessage.Marshal(b, m, deterministic) +} +func (m *OldStyleMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldStyleMessage.Merge(m, src) +} +func (m *OldStyleMessage) XXX_Size() int { + return xxx_messageInfo_OldStyleMessage.Size(m) +} +func (m *OldStyleMessage) XXX_DiscardUnknown() { + xxx_messageInfo_OldStyleMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_OldStyleMessage proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BaseMessage)(nil), "extension_base.BaseMessage") + proto.RegisterType((*OldStyleMessage)(nil), "extension_base.OldStyleMessage") +} + +func init() { + proto.RegisterFile("extension_base/extension_base.proto", fileDescriptor_2fbd53bac0b7ca8a) +} + +var fileDescriptor_2fbd53bac0b7ca8a = []byte{ + // 179 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4e, 0xad, 0x28, 0x49, + 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x8b, 0x4f, 0x4a, 0x2c, 0x4e, 
0xd5, 0x47, 0xe5, 0xea, 0x15, 0x14, + 0xe5, 0x97, 0xe4, 0x0b, 0xf1, 0xa1, 0x8a, 0x2a, 0x99, 0x72, 0x71, 0x3b, 0x25, 0x16, 0xa7, 0xfa, + 0xa6, 0x16, 0x17, 0x27, 0xa6, 0xa7, 0x0a, 0x89, 0x71, 0xb1, 0x65, 0xa4, 0x66, 0xa6, 0x67, 0x94, + 0x48, 0x30, 0x2a, 0x30, 0x6a, 0xb0, 0x06, 0x41, 0x79, 0x5a, 0x2c, 0x1c, 0x2c, 0x02, 0x5c, 0x5a, + 0x1c, 0x1c, 0x02, 0x02, 0x0d, 0x0d, 0x0d, 0x0d, 0x4c, 0x4a, 0xf2, 0x5c, 0xfc, 0xfe, 0x39, 0x29, + 0xc1, 0x25, 0x95, 0x39, 0x30, 0xad, 0x5a, 0x1c, 0x1c, 0x29, 0x02, 0xff, 0xff, 0xff, 0xff, 0xcf, + 0x6e, 0xc5, 0xc4, 0xc1, 0xe8, 0xe4, 0x14, 0xe5, 0x90, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, + 0x9c, 0x9f, 0xab, 0x9f, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x0f, 0x76, 0x42, 0x52, 0x69, 0x1a, + 0x84, 0x91, 0xac, 0x9b, 0x9e, 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x5f, 0x92, 0x5a, 0x5c, 0x92, 0x92, + 0x58, 0x92, 0x88, 0xe6, 0x62, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x7f, 0xb7, 0x2a, 0xd1, + 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto similarity index 95% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto index 94acfc1..0ba74de 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base/extension_base.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package extension_base; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base"; + message BaseMessage { optional int32 height = 1; extensions 4 to 9; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go new file mode 100644 index 0000000..fd82a25 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.pb.go @@ -0,0 +1,81 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_extra/extension_extra.proto + +package extension_extra + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type ExtraMessage struct { + Width *int32 `protobuf:"varint,1,opt,name=width" json:"width,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtraMessage) Reset() { *m = ExtraMessage{} } +func (m *ExtraMessage) String() string { return proto.CompactTextString(m) } +func (*ExtraMessage) ProtoMessage() {} +func (*ExtraMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_fce75f5a63502cd5, []int{0} +} + +func (m *ExtraMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtraMessage.Unmarshal(m, b) +} +func (m *ExtraMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtraMessage.Marshal(b, m, deterministic) +} +func (m *ExtraMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtraMessage.Merge(m, src) +} +func (m *ExtraMessage) XXX_Size() int { + return xxx_messageInfo_ExtraMessage.Size(m) +} +func (m *ExtraMessage) XXX_DiscardUnknown() { + xxx_messageInfo_ExtraMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtraMessage proto.InternalMessageInfo + +func (m *ExtraMessage) GetWidth() int32 { + if m != nil && m.Width != nil { + return *m.Width + } + return 0 +} + +func init() { + proto.RegisterType((*ExtraMessage)(nil), "extension_extra.ExtraMessage") +} + +func init() { + proto.RegisterFile("extension_extra/extension_extra.proto", fileDescriptor_fce75f5a63502cd5) +} + +var fileDescriptor_fce75f5a63502cd5 = []byte{ + // 133 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4d, 0xad, 0x28, 0x49, + 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x8b, 0x4f, 0xad, 0x28, 0x29, 0x4a, 0xd4, 0x47, 0xe3, 0xeb, 0x15, + 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xf1, 0xa3, 0x09, 0x2b, 0xa9, 0x70, 0xf1, 0xb8, 0x82, 0x18, 0xbe, + 0xa9, 0xc5, 0xc5, 0x89, 0xe9, 0xa9, 0x42, 0x22, 0x5c, 0xac, 0xe5, 0x99, 0x29, 0x25, 0x19, 0x12, + 0x8c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x93, 0x73, 0x94, 0x63, 0x7a, 0x66, 0x49, 0x46, + 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x3e, 0xd8, 0xc4, + 0xa4, 0xd2, 0x34, 0x08, 0x23, 0x59, 0x37, 0x3d, 0x35, 0x4f, 0x37, 0x3d, 0x5f, 0xbf, 0x24, 0xb5, + 0xb8, 0x24, 0x25, 0xb1, 0x04, 0xc3, 0x05, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf1, 0xec, 0xe3, + 0xb7, 0xa3, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto similarity index 95% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto index fca7f60..1dd03e7 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra/extension_extra.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package extension_extra; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra"; + message ExtraMessage { optional int32 width = 1; } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go index 86e9c11..0524729 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go +++ 
b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go @@ -33,16 +33,14 @@ package testdata -/* - import ( "bytes" "regexp" "testing" "github.com/golang/protobuf/proto" - base "extension_base.pb" - user "extension_user.pb" + base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base" + user "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user" ) func TestSingleFieldExtension(t *testing.T) { @@ -206,5 +204,3 @@ func main() { []testing.InternalBenchmark{}, []testing.InternalExample{}) } - -*/ diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go new file mode 100644 index 0000000..d7849be --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.pb.go @@ -0,0 +1,408 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: extension_user/extension_user.proto + +package extension_user + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + extension_base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base" + extension_extra "github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type UserMessage struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Rank *string `protobuf:"bytes,2,opt,name=rank" json:"rank,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserMessage) Reset() { *m = UserMessage{} } +func (m *UserMessage) String() string { return proto.CompactTextString(m) } +func (*UserMessage) ProtoMessage() {} +func (*UserMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{0} +} + +func (m *UserMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserMessage.Unmarshal(m, b) +} +func (m *UserMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserMessage.Marshal(b, m, deterministic) +} +func (m *UserMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserMessage.Merge(m, src) +} +func (m *UserMessage) XXX_Size() int { + return xxx_messageInfo_UserMessage.Size(m) +} +func (m *UserMessage) XXX_DiscardUnknown() { + xxx_messageInfo_UserMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_UserMessage proto.InternalMessageInfo + +func (m *UserMessage) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *UserMessage) GetRank() string { + if m != nil && m.Rank != nil { + return *m.Rank + } + return "" +} + +// Extend inside the scope of another type +type LoudMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoudMessage) Reset() { *m = LoudMessage{} } +func (m *LoudMessage) String() string { return proto.CompactTextString(m) } +func (*LoudMessage) 
ProtoMessage() {} +func (*LoudMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{1} +} + +var extRange_LoudMessage = []proto.ExtensionRange{ + {Start: 100, End: 536870911}, +} + +func (*LoudMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_LoudMessage +} + +func (m *LoudMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoudMessage.Unmarshal(m, b) +} +func (m *LoudMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoudMessage.Marshal(b, m, deterministic) +} +func (m *LoudMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoudMessage.Merge(m, src) +} +func (m *LoudMessage) XXX_Size() int { + return xxx_messageInfo_LoudMessage.Size(m) +} +func (m *LoudMessage) XXX_DiscardUnknown() { + xxx_messageInfo_LoudMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_LoudMessage proto.InternalMessageInfo + +var E_LoudMessage_Volume = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 8, + Name: "extension_user.LoudMessage.volume", + Tag: "varint,8,opt,name=volume", + Filename: "extension_user/extension_user.proto", +} + +// Extend inside the scope of another type, using a message. +type LoginMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoginMessage) Reset() { *m = LoginMessage{} } +func (m *LoginMessage) String() string { return proto.CompactTextString(m) } +func (*LoginMessage) ProtoMessage() {} +func (*LoginMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{2} +} + +func (m *LoginMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoginMessage.Unmarshal(m, b) +} +func (m *LoginMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoginMessage.Marshal(b, m, deterministic) +} +func (m *LoginMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoginMessage.Merge(m, src) +} +func (m *LoginMessage) XXX_Size() int { + return xxx_messageInfo_LoginMessage.Size(m) +} +func (m *LoginMessage) XXX_DiscardUnknown() { + xxx_messageInfo_LoginMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_LoginMessage proto.InternalMessageInfo + +var E_LoginMessage_UserMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*UserMessage)(nil), + Field: 16, + Name: "extension_user.LoginMessage.user_message", + Tag: "bytes,16,opt,name=user_message", + Filename: "extension_user/extension_user.proto", +} + +type Detail struct { + Color *string `protobuf:"bytes,1,opt,name=color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Detail) Reset() { *m = Detail{} } +func (m *Detail) String() string { return proto.CompactTextString(m) } +func (*Detail) ProtoMessage() {} +func (*Detail) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{3} +} + +func (m *Detail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Detail.Unmarshal(m, b) +} +func (m *Detail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Detail.Marshal(b, m, deterministic) +} +func (m *Detail) XXX_Merge(src proto.Message) { + xxx_messageInfo_Detail.Merge(m, src) +} +func (m *Detail) XXX_Size() int { + return xxx_messageInfo_Detail.Size(m) +} +func (m *Detail) XXX_DiscardUnknown() 
{ + xxx_messageInfo_Detail.DiscardUnknown(m) +} + +var xxx_messageInfo_Detail proto.InternalMessageInfo + +func (m *Detail) GetColor() string { + if m != nil && m.Color != nil { + return *m.Color + } + return "" +} + +// An extension of an extension +type Announcement struct { + Words *string `protobuf:"bytes,1,opt,name=words" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Announcement) Reset() { *m = Announcement{} } +func (m *Announcement) String() string { return proto.CompactTextString(m) } +func (*Announcement) ProtoMessage() {} +func (*Announcement) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{4} +} + +func (m *Announcement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Announcement.Unmarshal(m, b) +} +func (m *Announcement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Announcement.Marshal(b, m, deterministic) +} +func (m *Announcement) XXX_Merge(src proto.Message) { + xxx_messageInfo_Announcement.Merge(m, src) +} +func (m *Announcement) XXX_Size() int { + return xxx_messageInfo_Announcement.Size(m) +} +func (m *Announcement) XXX_DiscardUnknown() { + xxx_messageInfo_Announcement.DiscardUnknown(m) +} + +var xxx_messageInfo_Announcement proto.InternalMessageInfo + +func (m *Announcement) GetWords() string { + if m != nil && m.Words != nil { + return *m.Words + } + return "" +} + +var E_Announcement_LoudExt = &proto.ExtensionDesc{ + ExtendedType: (*LoudMessage)(nil), + ExtensionType: (*Announcement)(nil), + Field: 100, + Name: "extension_user.Announcement.loud_ext", + Tag: "bytes,100,opt,name=loud_ext", + Filename: "extension_user/extension_user.proto", +} + +// Something that can be put in a message set. 
+type OldStyleParcel struct { + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + Height *int32 `protobuf:"varint,2,opt,name=height" json:"height,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OldStyleParcel) Reset() { *m = OldStyleParcel{} } +func (m *OldStyleParcel) String() string { return proto.CompactTextString(m) } +func (*OldStyleParcel) ProtoMessage() {} +func (*OldStyleParcel) Descriptor() ([]byte, []int) { + return fileDescriptor_359ba8abf543ca10, []int{5} +} + +func (m *OldStyleParcel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldStyleParcel.Unmarshal(m, b) +} +func (m *OldStyleParcel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OldStyleParcel.Marshal(b, m, deterministic) +} +func (m *OldStyleParcel) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldStyleParcel.Merge(m, src) +} +func (m *OldStyleParcel) XXX_Size() int { + return xxx_messageInfo_OldStyleParcel.Size(m) +} +func (m *OldStyleParcel) XXX_DiscardUnknown() { + xxx_messageInfo_OldStyleParcel.DiscardUnknown(m) +} + +var xxx_messageInfo_OldStyleParcel proto.InternalMessageInfo + +func (m *OldStyleParcel) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *OldStyleParcel) GetHeight() int32 { + if m != nil && m.Height != nil { + return *m.Height + } + return 0 +} + +var E_OldStyleParcel_MessageSetExtension = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.OldStyleMessage)(nil), + ExtensionType: (*OldStyleParcel)(nil), + Field: 2001, + Name: "extension_user.OldStyleParcel", + Tag: "bytes,2001,opt,name=message_set_extension", + Filename: "extension_user/extension_user.proto", +} + +var E_UserMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*UserMessage)(nil), + Field: 5, + Name: "extension_user.user_message", + Tag: "bytes,5,opt,name=user_message", + Filename: "extension_user/extension_user.proto", +} + +var E_ExtraMessage = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*extension_extra.ExtraMessage)(nil), + Field: 9, + Name: "extension_user.extra_message", + Tag: "bytes,9,opt,name=extra_message", + Filename: "extension_user/extension_user.proto", +} + +var E_Width = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 6, + Name: "extension_user.width", + Tag: "varint,6,opt,name=width", + Filename: "extension_user/extension_user.proto", +} + +var E_Area = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 7, + Name: "extension_user.area", + Tag: "varint,7,opt,name=area", + Filename: "extension_user/extension_user.proto", +} + +var E_Detail = &proto.ExtensionDesc{ + ExtendedType: (*extension_base.BaseMessage)(nil), + ExtensionType: ([]*Detail)(nil), + Field: 17, + Name: "extension_user.detail", + Tag: "bytes,17,rep,name=detail", + Filename: "extension_user/extension_user.proto", +} + +func init() { + proto.RegisterType((*UserMessage)(nil), "extension_user.UserMessage") + proto.RegisterExtension(E_LoudMessage_Volume) + proto.RegisterType((*LoudMessage)(nil), "extension_user.LoudMessage") + proto.RegisterExtension(E_LoginMessage_UserMessage) + proto.RegisterType((*LoginMessage)(nil), "extension_user.LoginMessage") + proto.RegisterType((*Detail)(nil), "extension_user.Detail") + 
proto.RegisterExtension(E_Announcement_LoudExt) + proto.RegisterType((*Announcement)(nil), "extension_user.Announcement") + proto.RegisterExtension(E_OldStyleParcel_MessageSetExtension) + proto.RegisterType((*OldStyleParcel)(nil), "extension_user.OldStyleParcel") + proto.RegisterExtension(E_UserMessage) + proto.RegisterExtension(E_ExtraMessage) + proto.RegisterExtension(E_Width) + proto.RegisterExtension(E_Area) + proto.RegisterExtension(E_Detail) +} + +func init() { + proto.RegisterFile("extension_user/extension_user.proto", fileDescriptor_359ba8abf543ca10) +} + +var fileDescriptor_359ba8abf543ca10 = []byte{ + // 492 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0x94, 0x40, + 0x10, 0x0e, 0x6d, 0x8f, 0x5e, 0x87, 0x6b, 0xad, 0xa8, 0xcd, 0xa5, 0x6a, 0x25, 0x18, 0x13, 0x62, + 0xd2, 0x23, 0x62, 0x7c, 0xe1, 0x49, 0x2f, 0xde, 0x93, 0x67, 0x34, 0x54, 0x5f, 0xf4, 0x81, 0xec, + 0xc1, 0xc8, 0x91, 0xc2, 0xae, 0xd9, 0x5d, 0xec, 0xe9, 0xd3, 0xfd, 0x26, 0xff, 0x89, 0xff, 0xc8, + 0xb0, 0x2c, 0x2d, 0x87, 0xc9, 0xc5, 0xbe, 0x90, 0xfd, 0x86, 0x6f, 0xbe, 0x99, 0xfd, 0x66, 0x00, + 0x9e, 0xe2, 0x4a, 0x22, 0x15, 0x39, 0xa3, 0x71, 0x25, 0x90, 0xfb, 0x9b, 0x70, 0xf2, 0x9d, 0x33, + 0xc9, 0xec, 0xa3, 0xcd, 0xe8, 0x69, 0x27, 0x69, 0x41, 0x04, 0xfa, 0x9b, 0xb0, 0x49, 0x3a, 0x7d, + 0x76, 0x13, 0xc5, 0x95, 0xe4, 0xc4, 0xef, 0xe1, 0x86, 0xe6, 0xbe, 0x02, 0xeb, 0xb3, 0x40, 0xfe, + 0x1e, 0x85, 0x20, 0x19, 0xda, 0x36, 0xec, 0x51, 0x52, 0xe2, 0xd8, 0x70, 0x0c, 0xef, 0x20, 0x52, + 0xe7, 0x3a, 0xc6, 0x09, 0xbd, 0x1c, 0xef, 0x34, 0xb1, 0xfa, 0xec, 0xce, 0xc1, 0x9a, 0xb3, 0x2a, + 0xd5, 0x69, 0xcf, 0x87, 0xc3, 0xf4, 0x78, 0xbd, 0x5e, 0xaf, 0x77, 0x82, 0x97, 0x60, 0xfe, 0x60, + 0x45, 0x55, 0xa2, 0xfd, 0x70, 0xd2, 0xeb, 0x6b, 0x4a, 0x04, 0xea, 0x84, 0xf1, 0xd0, 0x31, 0xbc, + 0xc3, 0x48, 0x53, 0xdd, 0x4b, 0x18, 0xcd, 0x59, 0x96, 0x53, 0xfd, 0x36, 0xf8, 0x0a, 0xa3, 0xfa, + 0xa2, 0x71, 0xa9, 0xbb, 0xda, 0x2a, 0x75, 0xec, 0x18, 0x9e, 0x15, 0x74, 0x29, 0xca, 0xba, 0xce, + 0xad, 0x22, 0xab, 0xba, 0x01, 0xee, 0x19, 0x98, 0x6f, 0x51, 0x92, 0xbc, 0xb0, 0xef, 0xc3, 0x20, + 0x61, 0x05, 0xe3, 0xfa, 0xb6, 0x0d, 0x70, 0x7f, 0xc1, 0xe8, 0x0d, 0xa5, 0xac, 0xa2, 0x09, 0x96, + 0x48, 0x65, 0xcd, 0xba, 0x62, 0x3c, 0x15, 0x2d, 0x4b, 0x81, 0xe0, 0x13, 0x0c, 0x0b, 0x56, 0xa5, + 0xb5, 0x97, 0xf6, 0x3f, 0xb5, 0x3b, 0xd6, 0x8c, 0x53, 0xd5, 0xde, 0xa3, 0x3e, 0xa5, 0x5b, 0x22, + 0xda, 0xaf, 0xa5, 0x66, 0x2b, 0xe9, 0xfe, 0x36, 0xe0, 0xe8, 0x43, 0x91, 0x5e, 0xc8, 0x9f, 0x05, + 0x7e, 0x24, 0x3c, 0xc1, 0xa2, 0x33, 0x91, 0x9d, 0xeb, 0x89, 0x9c, 0x80, 0xb9, 0xc4, 0x3c, 0x5b, + 0x4a, 0x35, 0x93, 0x41, 0xa4, 0x51, 0x20, 0xe1, 0x81, 0xb6, 0x2c, 0x16, 0x28, 0xe3, 0xeb, 0x92, + 0xf6, 0x93, 0xbe, 0x81, 0x6d, 0x91, 0xb6, 0xcb, 0x3f, 0x77, 0x54, 0x9b, 0x67, 0xfd, 0x36, 0x37, + 0x9b, 0x89, 0xee, 0x69, 0xf9, 0x0b, 0x94, 0xb3, 0x96, 0x18, 0xde, 0x6a, 0x5a, 0x83, 0xdb, 0x4d, + 0x2b, 0x8c, 0xe1, 0x50, 0xad, 0xeb, 0xff, 0xa9, 0x1f, 0x28, 0xf5, 0xc7, 0x93, 0xfe, 0xae, 0xcf, + 0xea, 0x67, 0xab, 0x3f, 0xc2, 0x0e, 0x0a, 0x5f, 0xc0, 0xe0, 0x2a, 0x4f, 0xe5, 0x72, 0xbb, 0xb0, + 0xa9, 0x7c, 0x6e, 0x98, 0xa1, 0x0f, 0x7b, 0x84, 0x23, 0xd9, 0x9e, 0xb1, 0xef, 0x18, 0xde, 0x6e, + 0xa4, 0x88, 0xe1, 0x3b, 0x30, 0xd3, 0x66, 0xe5, 0xb6, 0xa6, 0xdc, 0x75, 0x76, 0x3d, 0x2b, 0x38, + 0xe9, 0x7b, 0xd3, 0x6c, 0x6b, 0xa4, 0x25, 0xa6, 0xd3, 0x2f, 0xaf, 0xb3, 0x5c, 0x2e, 0xab, 0xc5, + 0x24, 0x61, 0xa5, 0x9f, 0xb1, 0x82, 0xd0, 0xcc, 0x57, 0x1f, 0xf3, 0xa2, 0xfa, 0xd6, 0x1c, 0x92, + 0xf3, 0x0c, 0xe9, 0x79, 0xc6, 0x7c, 0x89, 
0x42, 0xa6, 0x44, 0x92, 0xde, 0x7f, 0xe5, 0x6f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xdf, 0x18, 0x64, 0x15, 0x77, 0x04, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto similarity index 94% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto index ff65873..033c186 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user/extension_user.proto @@ -31,11 +31,13 @@ syntax = "proto2"; -import "extension_base.proto"; -import "extension_extra.proto"; +import "extension_base/extension_base.proto"; +import "extension_extra/extension_extra.proto"; package extension_user; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user"; + message UserMessage { optional string name = 1; optional string rank = 2; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go new file mode 100644 index 0000000..76c9a20 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.pb.go @@ -0,0 +1,466 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/grpc.proto + +package testing + +import ( + context "context" + fmt "fmt" + proto "github.com/golang/protobuf/proto" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
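The extension_user package above registers proto2 extensions (E_Width, E_Area, E_Detail, the nested E_LoudMessage_Volume, and the message-set style E_OldStyleParcel_MessageSetExtension) against BaseMessage and OldStyleMessage from extension_base, and the re-enabled extension_test.go exercises them through the new import paths. A rough sketch of setting and reading one of these extensions with the proto package follows; the concrete field values are illustrative.

```go
// Sketch of round-tripping a proto2 extension declared in extension_user
// against a BaseMessage from extension_base.
package main

import (
	"log"

	"github.com/golang/protobuf/proto"
	base "github.com/golang/protobuf/protoc-gen-go/testdata/extension_base"
	user "github.com/golang/protobuf/protoc-gen-go/testdata/extension_user"
)

func main() {
	msg := &base.BaseMessage{Height: proto.Int32(40)}

	// E_Width extends BaseMessage with an optional int32 field (tag 6).
	if err := proto.SetExtension(msg, user.E_Width, proto.Int32(13)); err != nil {
		log.Fatal(err)
	}

	// Round-trip through the wire format and read the extension back.
	buf, err := proto.Marshal(msg)
	if err != nil {
		log.Fatal(err)
	}
	out := &base.BaseMessage{}
	if err := proto.Unmarshal(buf, out); err != nil {
		log.Fatal(err)
	}
	w, err := proto.GetExtension(out, user.E_Width)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("width = %d", *w.(*int32))
}
```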
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type SimpleRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_81ea47a3f88c2082, []int{0} +} + +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (m *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(m, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +type SimpleResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_81ea47a3f88c2082, []int{1} +} + +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) +} +func (m *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(m, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +type StreamMsg struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamMsg) Reset() { *m = StreamMsg{} } +func (m *StreamMsg) String() string { return proto.CompactTextString(m) } +func (*StreamMsg) ProtoMessage() {} +func (*StreamMsg) Descriptor() ([]byte, []int) { + return fileDescriptor_81ea47a3f88c2082, []int{2} +} + +func (m *StreamMsg) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamMsg.Unmarshal(m, b) +} +func (m *StreamMsg) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamMsg.Marshal(b, m, deterministic) +} +func (m *StreamMsg) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamMsg.Merge(m, src) +} +func (m *StreamMsg) XXX_Size() int { + return xxx_messageInfo_StreamMsg.Size(m) +} +func (m *StreamMsg) XXX_DiscardUnknown() { + xxx_messageInfo_StreamMsg.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamMsg proto.InternalMessageInfo + +type StreamMsg2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamMsg2) Reset() { *m = StreamMsg2{} } +func (m *StreamMsg2) String() string { return proto.CompactTextString(m) } +func (*StreamMsg2) ProtoMessage() {} +func 
(*StreamMsg2) Descriptor() ([]byte, []int) { + return fileDescriptor_81ea47a3f88c2082, []int{3} +} + +func (m *StreamMsg2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamMsg2.Unmarshal(m, b) +} +func (m *StreamMsg2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamMsg2.Marshal(b, m, deterministic) +} +func (m *StreamMsg2) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamMsg2.Merge(m, src) +} +func (m *StreamMsg2) XXX_Size() int { + return xxx_messageInfo_StreamMsg2.Size(m) +} +func (m *StreamMsg2) XXX_DiscardUnknown() { + xxx_messageInfo_StreamMsg2.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamMsg2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") + proto.RegisterType((*StreamMsg)(nil), "grpc.testing.StreamMsg") + proto.RegisterType((*StreamMsg2)(nil), "grpc.testing.StreamMsg2") +} + +func init() { proto.RegisterFile("grpc/grpc.proto", fileDescriptor_81ea47a3f88c2082) } + +var fileDescriptor_81ea47a3f88c2082 = []byte{ + // 244 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4f, 0x2f, 0x2a, 0x48, + 0xd6, 0x07, 0x11, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x3c, 0x60, 0x76, 0x49, 0x6a, 0x71, + 0x49, 0x66, 0x5e, 0xba, 0x12, 0x3f, 0x17, 0x6f, 0x70, 0x66, 0x6e, 0x41, 0x4e, 0x6a, 0x50, 0x6a, + 0x61, 0x69, 0x6a, 0x71, 0x89, 0x92, 0x00, 0x17, 0x1f, 0x4c, 0xa0, 0xb8, 0x20, 0x3f, 0xaf, 0x38, + 0x55, 0x89, 0x9b, 0x8b, 0x33, 0xb8, 0xa4, 0x28, 0x35, 0x31, 0xd7, 0xb7, 0x38, 0x5d, 0x89, 0x87, + 0x8b, 0x0b, 0xce, 0x31, 0x32, 0x9a, 0xc1, 0xc4, 0xc5, 0x12, 0x92, 0x5a, 0x5c, 0x22, 0xe4, 0xc6, + 0xc5, 0x19, 0x9a, 0x97, 0x58, 0x54, 0xe9, 0x9c, 0x98, 0x93, 0x23, 0x24, 0xad, 0x87, 0x6c, 0x85, + 0x1e, 0x8a, 0xf9, 0x52, 0x32, 0xd8, 0x25, 0x21, 0x76, 0x09, 0xb9, 0x70, 0x71, 0xb9, 0xe4, 0x97, + 0xe7, 0x15, 0x83, 0xad, 0xc0, 0x6f, 0x90, 0x38, 0x9a, 0x24, 0xcc, 0x55, 0x06, 0x8c, 0x42, 0xce, + 0x5c, 0x1c, 0xa1, 0x05, 0x50, 0x33, 0x70, 0x29, 0xc3, 0xef, 0x10, 0x0d, 0x46, 0x21, 0x5b, 0x2e, + 0x16, 0xa7, 0xcc, 0x94, 0x4c, 0xdc, 0x06, 0x48, 0xe0, 0x90, 0x30, 0xd2, 0x60, 0x34, 0x60, 0x74, + 0x72, 0x88, 0xb2, 0x4b, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, + 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x07, 0xc7, 0x40, 0x52, 0x69, 0x1a, 0x84, 0x91, 0xac, 0x9b, 0x9e, + 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x0f, 0x32, 0x22, 0x25, 0xb1, 0x24, 0x11, 0x1c, 0x4d, 0xd6, 0x50, + 0x03, 0x93, 0xd8, 0xc0, 0x8a, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0xb9, 0x95, 0x42, + 0xc2, 0x01, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TestClient is the client API for Test service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TestClient interface { + UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) + // This RPC streams from the server only. + Downstream(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (Test_DownstreamClient, error) + // This RPC streams from the client. 
+ Upstream(ctx context.Context, opts ...grpc.CallOption) (Test_UpstreamClient, error) + // This one streams in both directions. + Bidi(ctx context.Context, opts ...grpc.CallOption) (Test_BidiClient, error) +} + +type testClient struct { + cc *grpc.ClientConn +} + +func NewTestClient(cc *grpc.ClientConn) TestClient { + return &testClient{cc} +} + +func (c *testClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) { + out := new(SimpleResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.Test/UnaryCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testClient) Downstream(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (Test_DownstreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_Test_serviceDesc.Streams[0], "/grpc.testing.Test/Downstream", opts...) + if err != nil { + return nil, err + } + x := &testDownstreamClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Test_DownstreamClient interface { + Recv() (*StreamMsg, error) + grpc.ClientStream +} + +type testDownstreamClient struct { + grpc.ClientStream +} + +func (x *testDownstreamClient) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testClient) Upstream(ctx context.Context, opts ...grpc.CallOption) (Test_UpstreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_Test_serviceDesc.Streams[1], "/grpc.testing.Test/Upstream", opts...) + if err != nil { + return nil, err + } + x := &testUpstreamClient{stream} + return x, nil +} + +type Test_UpstreamClient interface { + Send(*StreamMsg) error + CloseAndRecv() (*SimpleResponse, error) + grpc.ClientStream +} + +type testUpstreamClient struct { + grpc.ClientStream +} + +func (x *testUpstreamClient) Send(m *StreamMsg) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testUpstreamClient) CloseAndRecv() (*SimpleResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(SimpleResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testClient) Bidi(ctx context.Context, opts ...grpc.CallOption) (Test_BidiClient, error) { + stream, err := c.cc.NewStream(ctx, &_Test_serviceDesc.Streams[2], "/grpc.testing.Test/Bidi", opts...) + if err != nil { + return nil, err + } + x := &testBidiClient{stream} + return x, nil +} + +type Test_BidiClient interface { + Send(*StreamMsg) error + Recv() (*StreamMsg2, error) + grpc.ClientStream +} + +type testBidiClient struct { + grpc.ClientStream +} + +func (x *testBidiClient) Send(m *StreamMsg) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testBidiClient) Recv() (*StreamMsg2, error) { + m := new(StreamMsg2) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TestServer is the server API for Test service. +type TestServer interface { + UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) + // This RPC streams from the server only. + Downstream(*SimpleRequest, Test_DownstreamServer) error + // This RPC streams from the client. + Upstream(Test_UpstreamServer) error + // This one streams in both directions. + Bidi(Test_BidiServer) error +} + +// UnimplementedTestServer can be embedded to have forward compatible implementations. 
+type UnimplementedTestServer struct { +} + +func (*UnimplementedTestServer) UnaryCall(ctx context.Context, req *SimpleRequest) (*SimpleResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UnaryCall not implemented") +} +func (*UnimplementedTestServer) Downstream(req *SimpleRequest, srv Test_DownstreamServer) error { + return status.Errorf(codes.Unimplemented, "method Downstream not implemented") +} +func (*UnimplementedTestServer) Upstream(srv Test_UpstreamServer) error { + return status.Errorf(codes.Unimplemented, "method Upstream not implemented") +} +func (*UnimplementedTestServer) Bidi(srv Test_BidiServer) error { + return status.Errorf(codes.Unimplemented, "method Bidi not implemented") +} + +func RegisterTestServer(s *grpc.Server, srv TestServer) { + s.RegisterService(&_Test_serviceDesc, srv) +} + +func _Test_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServer).UnaryCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.Test/UnaryCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServer).UnaryCall(ctx, req.(*SimpleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Test_Downstream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SimpleRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TestServer).Downstream(m, &testDownstreamServer{stream}) +} + +type Test_DownstreamServer interface { + Send(*StreamMsg) error + grpc.ServerStream +} + +type testDownstreamServer struct { + grpc.ServerStream +} + +func (x *testDownstreamServer) Send(m *StreamMsg) error { + return x.ServerStream.SendMsg(m) +} + +func _Test_Upstream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServer).Upstream(&testUpstreamServer{stream}) +} + +type Test_UpstreamServer interface { + SendAndClose(*SimpleResponse) error + Recv() (*StreamMsg, error) + grpc.ServerStream +} + +type testUpstreamServer struct { + grpc.ServerStream +} + +func (x *testUpstreamServer) SendAndClose(m *SimpleResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testUpstreamServer) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Test_Bidi_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServer).Bidi(&testBidiServer{stream}) +} + +type Test_BidiServer interface { + Send(*StreamMsg2) error + Recv() (*StreamMsg, error) + grpc.ServerStream +} + +type testBidiServer struct { + grpc.ServerStream +} + +func (x *testBidiServer) Send(m *StreamMsg2) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testBidiServer) Recv() (*StreamMsg, error) { + m := new(StreamMsg) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Test_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.Test", + HandlerType: (*TestServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UnaryCall", + Handler: _Test_UnaryCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Downstream", + Handler: _Test_Downstream_Handler, + ServerStreams: true, + }, + { + StreamName: "Upstream", + Handler: 
_Test_Upstream_Handler, + ClientStreams: true, + }, + { + StreamName: "Bidi", + Handler: _Test_Bidi_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc/grpc.proto", +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto similarity index 96% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto index b8bc41a..0e5c64a 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc.proto @@ -33,6 +33,8 @@ syntax = "proto3"; package grpc.testing; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/grpc;testing"; + message SimpleRequest { } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.pb.go new file mode 100644 index 0000000..9c6244b --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.pb.go @@ -0,0 +1,79 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/grpc_empty.proto + +package testing + +import ( + context "context" + fmt "fmt" + proto "github.com/golang/protobuf/proto" + grpc "google.golang.org/grpc" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +func init() { proto.RegisterFile("grpc/grpc_empty.proto", fileDescriptor_c580a37f1c90e9b1) } + +var fileDescriptor_c580a37f1c90e9b1 = []byte{ + // 125 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4d, 0x2f, 0x2a, 0x48, + 0xd6, 0x07, 0x11, 0xf1, 0xa9, 0xb9, 0x05, 0x25, 0x95, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, + 0x3c, 0x20, 0x11, 0xbd, 0x92, 0xd4, 0xe2, 0x92, 0xcc, 0xbc, 0x74, 0x23, 0x3e, 0x2e, 0x1e, 0x57, + 0x90, 0x64, 0x70, 0x6a, 0x51, 0x59, 0x66, 0x72, 0xaa, 0x93, 0x43, 0x94, 0x5d, 0x7a, 0x66, 0x49, + 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x3e, 0x58, + 0x63, 0x52, 0x69, 0x1a, 0x84, 0x91, 0xac, 0x9b, 0x9e, 0x9a, 0xa7, 0x9b, 0x9e, 0xaf, 0x0f, 0x32, + 0x23, 0x25, 0xb1, 0x24, 0x11, 0x6c, 0x87, 0x35, 0xd4, 0xc4, 0x24, 0x36, 0xb0, 0x22, 0x63, 0x40, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x93, 0x1d, 0xf2, 0x47, 0x7f, 0x00, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EmptyServiceClient is the client API for EmptyService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type EmptyServiceClient interface { +} + +type emptyServiceClient struct { + cc *grpc.ClientConn +} + +func NewEmptyServiceClient(cc *grpc.ClientConn) EmptyServiceClient { + return &emptyServiceClient{cc} +} + +// EmptyServiceServer is the server API for EmptyService service. +type EmptyServiceServer interface { +} + +// UnimplementedEmptyServiceServer can be embedded to have forward compatible implementations. +type UnimplementedEmptyServiceServer struct { +} + +func RegisterEmptyServiceServer(s *grpc.Server, srv EmptyServiceServer) { + s.RegisterService(&_EmptyService_serviceDesc, srv) +} + +var _EmptyService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.EmptyService", + HandlerType: (*EmptyServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/grpc_empty.proto", +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.proto similarity index 88% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.proto index 3bb0632..ae07b81 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc/grpc_empty.proto @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2011 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,15 +29,10 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -syntax = "proto2"; +syntax = "proto3"; -package imp; +package grpc.testing; -message PubliclyImportedMessage { - optional int64 field = 1; -} +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/grpc;testing"; -enum PubliclyImportedEnum { - GLASSES = 1; - HAIR = 2; -} +service EmptyService {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden deleted file mode 100644 index 784a4f8..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden +++ /dev/null @@ -1,113 +0,0 @@ -// Code generated by protoc-gen-go. -// source: imp.proto -// DO NOT EDIT! - -package imp - -import proto "github.com/golang/protobuf/proto" -import "math" -import "os" -import imp1 "imp2.pb" - -// Reference proto & math imports to suppress error if they are not otherwise used. -var _ = proto.GetString -var _ = math.Inf - -// Types from public import imp2.proto -type PubliclyImportedMessage imp1.PubliclyImportedMessage - -func (this *PubliclyImportedMessage) Reset() { (*imp1.PubliclyImportedMessage)(this).Reset() } -func (this *PubliclyImportedMessage) String() string { - return (*imp1.PubliclyImportedMessage)(this).String() -} - -// PubliclyImportedMessage from public import imp.proto - -type ImportedMessage_Owner int32 - -const ( - ImportedMessage_DAVE ImportedMessage_Owner = 1 - ImportedMessage_MIKE ImportedMessage_Owner = 2 -) - -var ImportedMessage_Owner_name = map[int32]string{ - 1: "DAVE", - 2: "MIKE", -} -var ImportedMessage_Owner_value = map[string]int32{ - "DAVE": 1, - "MIKE": 2, -} - -// NewImportedMessage_Owner is deprecated. 
Use x.Enum() instead. -func NewImportedMessage_Owner(x ImportedMessage_Owner) *ImportedMessage_Owner { - e := ImportedMessage_Owner(x) - return &e -} -func (x ImportedMessage_Owner) Enum() *ImportedMessage_Owner { - p := new(ImportedMessage_Owner) - *p = x - return p -} -func (x ImportedMessage_Owner) String() string { - return proto.EnumName(ImportedMessage_Owner_name, int32(x)) -} - -type ImportedMessage struct { - Field *int64 `protobuf:"varint,1,req,name=field" json:"field,omitempty"` - XXX_extensions map[int32][]byte `json:",omitempty"` - XXX_unrecognized []byte `json:",omitempty"` -} - -func (this *ImportedMessage) Reset() { *this = ImportedMessage{} } -func (this *ImportedMessage) String() string { return proto.CompactTextString(this) } - -var extRange_ImportedMessage = []proto.ExtensionRange{ - proto.ExtensionRange{90, 100}, -} - -func (*ImportedMessage) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_ImportedMessage -} -func (this *ImportedMessage) ExtensionMap() map[int32][]byte { - if this.XXX_extensions == nil { - this.XXX_extensions = make(map[int32][]byte) - } - return this.XXX_extensions -} - -type ImportedExtendable struct { - XXX_extensions map[int32][]byte `json:",omitempty"` - XXX_unrecognized []byte `json:",omitempty"` -} - -func (this *ImportedExtendable) Reset() { *this = ImportedExtendable{} } -func (this *ImportedExtendable) String() string { return proto.CompactTextString(this) } - -func (this *ImportedExtendable) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(this.ExtensionMap()) -} -func (this *ImportedExtendable) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, this.ExtensionMap()) -} -// ensure ImportedExtendable satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*ImportedExtendable)(nil) -var _ proto.Unmarshaler = (*ImportedExtendable)(nil) - -var extRange_ImportedExtendable = []proto.ExtensionRange{ - proto.ExtensionRange{100, 536870911}, -} - -func (*ImportedExtendable) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_ImportedExtendable -} -func (this *ImportedExtendable) ExtensionMap() map[int32][]byte { - if this.XXX_extensions == nil { - this.XXX_extensions = make(map[int32][]byte) - } - return this.XXX_extensions -} - -func init() { - proto.RegisterEnum("imp.ImportedMessage_Owner", ImportedMessage_Owner_name, ImportedMessage_Owner_value) -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go new file mode 100644 index 0000000..fa511fc --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.pb.go @@ -0,0 +1,141 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/a.proto + +package import_public + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + sub "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +const Default_M_DefaultField = sub.Default_M_DefaultField + +// M from public import import_public/sub/a.proto +type M = sub.M +type M_OneofInt32 = sub.M_OneofInt32 +type M_OneofInt64 = sub.M_OneofInt64 + +// M_Grouping from public import import_public/sub/a.proto +type M_Grouping = sub.M_Grouping + +// M_Submessage from public import import_public/sub/a.proto +type M_Submessage = sub.M_Submessage +type M_Submessage_SubmessageOneofInt32 = sub.M_Submessage_SubmessageOneofInt32 +type M_Submessage_SubmessageOneofInt64 = sub.M_Submessage_SubmessageOneofInt64 + +// E from public import import_public/sub/a.proto +type E = sub.E + +var E_name = sub.E_name +var E_value = sub.E_value + +const E_ZERO = E(sub.E_ZERO) + +// M_Subenum from public import import_public/sub/a.proto +type M_Subenum = sub.M_Subenum + +var M_Subenum_name = sub.M_Subenum_name +var M_Subenum_value = sub.M_Subenum_value + +const M_M_ZERO = M_Subenum(sub.M_M_ZERO) + +// M_Submessage_Submessage_Subenum from public import import_public/sub/a.proto +type M_Submessage_Submessage_Subenum = sub.M_Submessage_Submessage_Subenum + +var M_Submessage_Submessage_Subenum_name = sub.M_Submessage_Submessage_Subenum_name +var M_Submessage_Submessage_Subenum_value = sub.M_Submessage_Submessage_Subenum_value + +const M_Submessage_M_SUBMESSAGE_ZERO = M_Submessage_Submessage_Subenum(sub.M_Submessage_M_SUBMESSAGE_ZERO) + +var E_ExtensionField = sub.E_ExtensionField + +type Public struct { + M *sub.M `protobuf:"bytes,1,opt,name=m" json:"m,omitempty"` + E *sub.E `protobuf:"varint,2,opt,name=e,enum=goproto.test.import_public.sub.E" json:"e,omitempty"` + Local *Local `protobuf:"bytes,3,opt,name=local" json:"local,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Public) Reset() { *m = Public{} } +func (m *Public) String() string { return proto.CompactTextString(m) } +func (*Public) ProtoMessage() {} +func (*Public) Descriptor() ([]byte, []int) { + return fileDescriptor_73b7577c95fa6b70, []int{0} +} + +func (m *Public) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Public.Unmarshal(m, b) +} +func (m *Public) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Public.Marshal(b, m, deterministic) +} +func (m *Public) XXX_Merge(src proto.Message) { + xxx_messageInfo_Public.Merge(m, src) +} +func (m *Public) XXX_Size() int { + return xxx_messageInfo_Public.Size(m) +} +func (m *Public) XXX_DiscardUnknown() { + xxx_messageInfo_Public.DiscardUnknown(m) +} + +var xxx_messageInfo_Public proto.InternalMessageInfo + +func (m *Public) GetM() *sub.M { + if m != nil { + return m.M + } + return nil +} + +func (m *Public) GetE() sub.E { + if m != nil && m.E != nil { + return *m.E + } + return sub.E_ZERO +} + +func (m *Public) GetLocal() *Local { + if m != nil { + return m.Local + } + return nil +} + +func init() { + proto.RegisterType((*Public)(nil), "goproto.test.import_public.Public") +} + +func init() { proto.RegisterFile("import_public/a.proto", fileDescriptor_73b7577c95fa6b70) } + +var fileDescriptor_73b7577c95fa6b70 = []byte{ + // 195 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xcd, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x4f, 0xd4, 0x2b, 0x28, 0xca, 0x2f, + 0xc9, 0x17, 0x92, 0x4a, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, 0xf4, 0x50, 0xd4, 
0x48, + 0x49, 0xa2, 0x6a, 0x29, 0x2e, 0x4d, 0x82, 0x69, 0x93, 0x42, 0x33, 0x2d, 0x09, 0x22, 0xac, 0xb4, + 0x98, 0x91, 0x8b, 0x2d, 0x00, 0x2c, 0x24, 0xa4, 0xcf, 0xc5, 0x98, 0x2b, 0xc1, 0xa8, 0xc0, 0xa8, + 0xc1, 0x6d, 0xa4, 0xa8, 0x87, 0xdb, 0x12, 0xbd, 0xe2, 0xd2, 0x24, 0x3d, 0xdf, 0x20, 0xc6, 0x5c, + 0x90, 0x86, 0x54, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x3e, 0xc2, 0x1a, 0x5c, 0x83, 0x18, 0x53, 0x85, + 0xcc, 0xb9, 0x58, 0x73, 0xf2, 0x93, 0x13, 0x73, 0x24, 0x98, 0x09, 0xdb, 0xe2, 0x03, 0x52, 0x18, + 0x04, 0x51, 0xef, 0xe4, 0x18, 0x65, 0x9f, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, + 0xab, 0x9f, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x0f, 0xd6, 0x9a, 0x54, 0x9a, 0x06, 0x61, 0x24, + 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, 0xeb, 0x83, 0xcc, 0x4a, 0x49, 0x2c, 0x49, 0xd4, 0x47, + 0x31, 0x2f, 0x80, 0x21, 0x80, 0x11, 0x10, 0x00, 0x00, 0xff, 0xff, 0x17, 0x83, 0x2d, 0xd4, 0x52, + 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto new file mode 100644 index 0000000..9a4e7c0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/a.proto @@ -0,0 +1,45 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package goproto.test.import_public; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public"; + +import public "import_public/sub/a.proto"; // Different Go package. +import public "import_public/b.proto"; // Same Go package. 
+ +message Public { + optional goproto.test.import_public.sub.M m = 1; + optional goproto.test.import_public.sub.E e = 2; + optional Local local = 3; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go new file mode 100644 index 0000000..522f215 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.pb.go @@ -0,0 +1,90 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/b.proto + +package import_public + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + sub "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Local struct { + M *sub.M `protobuf:"bytes,1,opt,name=m" json:"m,omitempty"` + E *sub.E `protobuf:"varint,2,opt,name=e,enum=goproto.test.import_public.sub.E" json:"e,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Local) Reset() { *m = Local{} } +func (m *Local) String() string { return proto.CompactTextString(m) } +func (*Local) ProtoMessage() {} +func (*Local) Descriptor() ([]byte, []int) { + return fileDescriptor_84995586b3d09710, []int{0} +} + +func (m *Local) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Local.Unmarshal(m, b) +} +func (m *Local) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Local.Marshal(b, m, deterministic) +} +func (m *Local) XXX_Merge(src proto.Message) { + xxx_messageInfo_Local.Merge(m, src) +} +func (m *Local) XXX_Size() int { + return xxx_messageInfo_Local.Size(m) +} +func (m *Local) XXX_DiscardUnknown() { + xxx_messageInfo_Local.DiscardUnknown(m) +} + +var xxx_messageInfo_Local proto.InternalMessageInfo + +func (m *Local) GetM() *sub.M { + if m != nil { + return m.M + } + return nil +} + +func (m *Local) GetE() sub.E { + if m != nil && m.E != nil { + return *m.E + } + return sub.E_ZERO +} + +func init() { + proto.RegisterType((*Local)(nil), "goproto.test.import_public.Local") +} + +func init() { proto.RegisterFile("import_public/b.proto", fileDescriptor_84995586b3d09710) } + +var fileDescriptor_84995586b3d09710 = []byte{ + // 169 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xcd, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x4f, 0xd2, 0x2b, 0x28, 0xca, 0x2f, + 0xc9, 0x17, 0x92, 0x4a, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, 0xf4, 0x50, 0xd4, 0x48, + 0x49, 0xa2, 0x6a, 0x29, 0x2e, 0x4d, 0xd2, 0x4f, 0x84, 0x68, 0x53, 0xca, 0xe4, 0x62, 0xf5, 0xc9, + 0x4f, 0x4e, 0xcc, 0x11, 0xd2, 0xe7, 0x62, 0xcc, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x36, 0x52, + 0xd4, 0xc3, 0x6d, 0x96, 0x5e, 0x71, 0x69, 0x92, 0x9e, 0x6f, 0x10, 0x63, 0x2e, 0x48, 0x43, 0xaa, + 0x04, 0x93, 0x02, 0xa3, 0x06, 0x1f, 0x61, 0x0d, 0xae, 0x41, 0x8c, 0xa9, 0x4e, 0x8e, 0x51, 0xf6, + 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 
0x39, 0x89, 0x79, + 0xe9, 0xfa, 0x60, 0x6d, 0x49, 0xa5, 0x69, 0x10, 0x46, 0xb2, 0x6e, 0x7a, 0x6a, 0x9e, 0x6e, 0x7a, + 0xbe, 0x3e, 0xc8, 0x9c, 0x94, 0xc4, 0x92, 0x44, 0x7d, 0x14, 0xb3, 0x00, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x35, 0x0e, 0x6a, 0x82, 0xfc, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto new file mode 100644 index 0000000..424306e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/b.proto @@ -0,0 +1,43 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package goproto.test.import_public; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public"; + +import "import_public/sub/a.proto"; + +message Local { + optional goproto.test.import_public.sub.M m = 1; + optional goproto.test.import_public.sub.E e = 2; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.pb.go new file mode 100644 index 0000000..3f0f37e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.pb.go @@ -0,0 +1,85 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/importing/importing.proto + +package importing + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + _ "github.com/golang/protobuf/protoc-gen-go/testdata/import_public" + sub "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M struct { + // Message type defined in a file publicly imported by a file we import. + M *sub.M `protobuf:"bytes,1,opt,name=m" json:"m,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M) Reset() { *m = M{} } +func (m *M) String() string { return proto.CompactTextString(m) } +func (*M) ProtoMessage() {} +func (*M) Descriptor() ([]byte, []int) { + return fileDescriptor_36b835b3b8f6171a, []int{0} +} + +func (m *M) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M.Unmarshal(m, b) +} +func (m *M) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M.Marshal(b, m, deterministic) +} +func (m *M) XXX_Merge(src proto.Message) { + xxx_messageInfo_M.Merge(m, src) +} +func (m *M) XXX_Size() int { + return xxx_messageInfo_M.Size(m) +} +func (m *M) XXX_DiscardUnknown() { + xxx_messageInfo_M.DiscardUnknown(m) +} + +var xxx_messageInfo_M proto.InternalMessageInfo + +func (m *M) GetM() *sub.M { + if m != nil { + return m.M + } + return nil +} + +func init() { + proto.RegisterType((*M)(nil), "goproto.test.import_public.importing.M") +} + +func init() { + proto.RegisterFile("import_public/importing/importing.proto", fileDescriptor_36b835b3b8f6171a) +} + +var fileDescriptor_36b835b3b8f6171a = []byte{ + // 153 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xcf, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x87, 0xf0, 0x32, 0xf3, 0xd2, 0x11, + 0x2c, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x95, 0xf4, 0x7c, 0x30, 0x43, 0xaf, 0x24, 0xb5, + 0xb8, 0x44, 0x0f, 0x45, 0x97, 0x1e, 0x5c, 0xad, 0x94, 0x28, 0xaa, 0x71, 0x89, 0x10, 0xcd, 0x4a, + 0x26, 0x5c, 0x8c, 0xbe, 0x42, 0xfa, 0x5c, 0x8c, 0xb9, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, + 0x8a, 0x7a, 0x78, 0x4c, 0x2b, 0x2e, 0x4d, 0xd2, 0xf3, 0x0d, 0x62, 0xcc, 0x75, 0xf2, 0x8e, 0xf2, + 0x4c, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, + 0x4b, 0xd7, 0x07, 0x6b, 0x4b, 0x2a, 0x4d, 0x83, 0x30, 0x92, 0x75, 0xd3, 0x53, 0xf3, 0x74, 0xd3, + 0xf3, 0xf5, 0x41, 0xe6, 0xa4, 0x24, 0x96, 0x24, 0xea, 0xe3, 0xf0, 0x0f, 0x20, 0x00, 0x00, 0xff, + 0xff, 0xd8, 0x7e, 0x58, 0x1c, 0xe9, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.proto new file mode 100644 index 0000000..fc78f5f --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing/importing.proto @@ -0,0 +1,43 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package goproto.test.import_public.importing; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/importing"; + +import "import_public/a.proto"; + +message M { + // Message type defined in a file publicly imported by a file we import. + optional goproto.test.import_public.sub.M m = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go new file mode 100644 index 0000000..6f81567 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.pb.go @@ -0,0 +1,409 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/sub/a.proto + +package sub + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type E int32 + +const ( + E_ZERO E = 0 +) + +var E_name = map[int32]string{ + 0: "ZERO", +} + +var E_value = map[string]int32{ + "ZERO": 0, +} + +func (x E) Enum() *E { + p := new(E) + *p = x + return p +} + +func (x E) String() string { + return proto.EnumName(E_name, int32(x)) +} + +func (x *E) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(E_value, data, "E") + if err != nil { + return err + } + *x = E(value) + return nil +} + +func (E) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0} +} + +type M_Subenum int32 + +const ( + M_M_ZERO M_Subenum = 0 +) + +var M_Subenum_name = map[int32]string{ + 0: "M_ZERO", +} + +var M_Subenum_value = map[string]int32{ + "M_ZERO": 0, +} + +func (x M_Subenum) Enum() *M_Subenum { + p := new(M_Subenum) + *p = x + return p +} + +func (x M_Subenum) String() string { + return proto.EnumName(M_Subenum_name, int32(x)) +} + +func (x *M_Subenum) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(M_Subenum_value, data, "M_Subenum") + if err != nil { + return err + } + *x = M_Subenum(value) + return nil +} + +func (M_Subenum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0, 0} +} + +type M_Submessage_Submessage_Subenum int32 + +const ( + M_Submessage_M_SUBMESSAGE_ZERO M_Submessage_Submessage_Subenum = 0 +) + +var M_Submessage_Submessage_Subenum_name = map[int32]string{ + 0: "M_SUBMESSAGE_ZERO", +} + +var M_Submessage_Submessage_Subenum_value = map[string]int32{ + "M_SUBMESSAGE_ZERO": 0, +} + +func (x M_Submessage_Submessage_Subenum) Enum() *M_Submessage_Submessage_Subenum { + p := new(M_Submessage_Submessage_Subenum) + *p = x + return p +} + +func (x M_Submessage_Submessage_Subenum) String() string { + return proto.EnumName(M_Submessage_Submessage_Subenum_name, int32(x)) +} + +func (x *M_Submessage_Submessage_Subenum) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(M_Submessage_Submessage_Subenum_value, data, "M_Submessage_Submessage_Subenum") + if err != nil { + return err + } + *x = M_Submessage_Submessage_Subenum(value) + return nil +} + +func (M_Submessage_Submessage_Subenum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0, 1, 0} +} + +type M struct { + // Field using a type in the same Go package, but a different source file. 
+ M2 *M2 `protobuf:"bytes,1,opt,name=m2" json:"m2,omitempty"` + // Types that are valid to be assigned to OneofField: + // *M_OneofInt32 + // *M_OneofInt64 + OneofField isM_OneofField `protobuf_oneof:"oneof_field"` + Grouping *M_Grouping `protobuf:"group,4,opt,name=Grouping,json=grouping" json:"grouping,omitempty"` + DefaultField *string `protobuf:"bytes,6,opt,name=default_field,json=defaultField,def=def" json:"default_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M) Reset() { *m = M{} } +func (m *M) String() string { return proto.CompactTextString(m) } +func (*M) ProtoMessage() {} +func (*M) Descriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0} +} + +func (m *M) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M.Unmarshal(m, b) +} +func (m *M) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M.Marshal(b, m, deterministic) +} +func (m *M) XXX_Merge(src proto.Message) { + xxx_messageInfo_M.Merge(m, src) +} +func (m *M) XXX_Size() int { + return xxx_messageInfo_M.Size(m) +} +func (m *M) XXX_DiscardUnknown() { + xxx_messageInfo_M.DiscardUnknown(m) +} + +var xxx_messageInfo_M proto.InternalMessageInfo + +const Default_M_DefaultField string = "def" + +func (m *M) GetM2() *M2 { + if m != nil { + return m.M2 + } + return nil +} + +type isM_OneofField interface { + isM_OneofField() +} + +type M_OneofInt32 struct { + OneofInt32 int32 `protobuf:"varint,2,opt,name=oneof_int32,json=oneofInt32,oneof"` +} + +type M_OneofInt64 struct { + OneofInt64 int64 `protobuf:"varint,3,opt,name=oneof_int64,json=oneofInt64,oneof"` +} + +func (*M_OneofInt32) isM_OneofField() {} + +func (*M_OneofInt64) isM_OneofField() {} + +func (m *M) GetOneofField() isM_OneofField { + if m != nil { + return m.OneofField + } + return nil +} + +func (m *M) GetOneofInt32() int32 { + if x, ok := m.GetOneofField().(*M_OneofInt32); ok { + return x.OneofInt32 + } + return 0 +} + +func (m *M) GetOneofInt64() int64 { + if x, ok := m.GetOneofField().(*M_OneofInt64); ok { + return x.OneofInt64 + } + return 0 +} + +func (m *M) GetGrouping() *M_Grouping { + if m != nil { + return m.Grouping + } + return nil +} + +func (m *M) GetDefaultField() string { + if m != nil && m.DefaultField != nil { + return *m.DefaultField + } + return Default_M_DefaultField +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*M) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*M_OneofInt32)(nil), + (*M_OneofInt64)(nil), + } +} + +type M_Grouping struct { + GroupField *string `protobuf:"bytes,5,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M_Grouping) Reset() { *m = M_Grouping{} } +func (m *M_Grouping) String() string { return proto.CompactTextString(m) } +func (*M_Grouping) ProtoMessage() {} +func (*M_Grouping) Descriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0, 0} +} + +func (m *M_Grouping) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M_Grouping.Unmarshal(m, b) +} +func (m *M_Grouping) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M_Grouping.Marshal(b, m, deterministic) +} +func (m *M_Grouping) XXX_Merge(src proto.Message) { + xxx_messageInfo_M_Grouping.Merge(m, src) +} +func (m *M_Grouping) XXX_Size() int { + return xxx_messageInfo_M_Grouping.Size(m) +} +func (m *M_Grouping) XXX_DiscardUnknown() { + xxx_messageInfo_M_Grouping.DiscardUnknown(m) +} + +var xxx_messageInfo_M_Grouping proto.InternalMessageInfo + +func (m *M_Grouping) GetGroupField() string { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return "" +} + +type M_Submessage struct { + // Types that are valid to be assigned to SubmessageOneofField: + // *M_Submessage_SubmessageOneofInt32 + // *M_Submessage_SubmessageOneofInt64 + SubmessageOneofField isM_Submessage_SubmessageOneofField `protobuf_oneof:"submessage_oneof_field"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M_Submessage) Reset() { *m = M_Submessage{} } +func (m *M_Submessage) String() string { return proto.CompactTextString(m) } +func (*M_Submessage) ProtoMessage() {} +func (*M_Submessage) Descriptor() ([]byte, []int) { + return fileDescriptor_382f7805394b5c4e, []int{0, 1} +} + +func (m *M_Submessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M_Submessage.Unmarshal(m, b) +} +func (m *M_Submessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M_Submessage.Marshal(b, m, deterministic) +} +func (m *M_Submessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_M_Submessage.Merge(m, src) +} +func (m *M_Submessage) XXX_Size() int { + return xxx_messageInfo_M_Submessage.Size(m) +} +func (m *M_Submessage) XXX_DiscardUnknown() { + xxx_messageInfo_M_Submessage.DiscardUnknown(m) +} + +var xxx_messageInfo_M_Submessage proto.InternalMessageInfo + +type isM_Submessage_SubmessageOneofField interface { + isM_Submessage_SubmessageOneofField() +} + +type M_Submessage_SubmessageOneofInt32 struct { + SubmessageOneofInt32 int32 `protobuf:"varint,1,opt,name=submessage_oneof_int32,json=submessageOneofInt32,oneof"` +} + +type M_Submessage_SubmessageOneofInt64 struct { + SubmessageOneofInt64 int64 `protobuf:"varint,2,opt,name=submessage_oneof_int64,json=submessageOneofInt64,oneof"` +} + +func (*M_Submessage_SubmessageOneofInt32) isM_Submessage_SubmessageOneofField() {} + +func (*M_Submessage_SubmessageOneofInt64) isM_Submessage_SubmessageOneofField() {} + +func (m *M_Submessage) GetSubmessageOneofField() isM_Submessage_SubmessageOneofField { + if m != nil { + return m.SubmessageOneofField + } + return nil +} + +func (m *M_Submessage) GetSubmessageOneofInt32() int32 { + if x, ok := 
m.GetSubmessageOneofField().(*M_Submessage_SubmessageOneofInt32); ok { + return x.SubmessageOneofInt32 + } + return 0 +} + +func (m *M_Submessage) GetSubmessageOneofInt64() int64 { + if x, ok := m.GetSubmessageOneofField().(*M_Submessage_SubmessageOneofInt64); ok { + return x.SubmessageOneofInt64 + } + return 0 +} + +// XXX_OneofWrappers is for the internal use of the proto package. +func (*M_Submessage) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*M_Submessage_SubmessageOneofInt32)(nil), + (*M_Submessage_SubmessageOneofInt64)(nil), + } +} + +var E_ExtensionField = &proto.ExtensionDesc{ + ExtendedType: (*M2)(nil), + ExtensionType: (*string)(nil), + Field: 1, + Name: "goproto.test.import_public.sub.extension_field", + Tag: "bytes,1,opt,name=extension_field", + Filename: "import_public/sub/a.proto", +} + +func init() { + proto.RegisterEnum("goproto.test.import_public.sub.E", E_name, E_value) + proto.RegisterEnum("goproto.test.import_public.sub.M_Subenum", M_Subenum_name, M_Subenum_value) + proto.RegisterEnum("goproto.test.import_public.sub.M_Submessage_Submessage_Subenum", M_Submessage_Submessage_Subenum_name, M_Submessage_Submessage_Subenum_value) + proto.RegisterType((*M)(nil), "goproto.test.import_public.sub.M") + proto.RegisterType((*M_Grouping)(nil), "goproto.test.import_public.sub.M.Grouping") + proto.RegisterType((*M_Submessage)(nil), "goproto.test.import_public.sub.M.Submessage") + proto.RegisterExtension(E_ExtensionField) +} + +func init() { proto.RegisterFile("import_public/sub/a.proto", fileDescriptor_382f7805394b5c4e) } + +var fileDescriptor_382f7805394b5c4e = []byte{ + // 410 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x6e, 0xd3, 0x40, + 0x10, 0xc6, 0xbb, 0x49, 0x5a, 0xd2, 0x09, 0xe1, 0xcf, 0x8a, 0x22, 0xd3, 0x03, 0x98, 0x9c, 0xac, + 0x56, 0x5d, 0x4b, 0x26, 0xf2, 0xa1, 0x37, 0x82, 0xdc, 0x82, 0x90, 0x55, 0xc9, 0x16, 0x97, 0x5e, + 0x2c, 0x6f, 0xbc, 0x5e, 0x2c, 0xd9, 0xbb, 0x56, 0xbc, 0x2b, 0xf1, 0x08, 0xbc, 0x17, 0x2f, 0x86, + 0xbc, 0xb6, 0x93, 0x46, 0x04, 0xe8, 0x6d, 0x3d, 0xf3, 0xfd, 0xbe, 0xd1, 0x7c, 0x1e, 0x78, 0x53, + 0x54, 0xb5, 0xdc, 0xa8, 0xa4, 0xd6, 0xb4, 0x2c, 0xd6, 0x6e, 0xa3, 0xa9, 0x9b, 0x92, 0x7a, 0x23, + 0x95, 0xc4, 0x6f, 0xb9, 0x34, 0x0f, 0xa2, 0x58, 0xa3, 0xc8, 0x9e, 0x8e, 0x34, 0x9a, 0x9e, 0x1f, + 0x40, 0x69, 0x87, 0x2e, 0x7e, 0x4e, 0x00, 0x85, 0xd8, 0x83, 0x51, 0xe5, 0x59, 0xc8, 0x46, 0xce, + 0xcc, 0x5b, 0x90, 0x7f, 0xbb, 0x91, 0xd0, 0x8b, 0x46, 0x95, 0x87, 0xdf, 0xc3, 0x4c, 0x0a, 0x26, + 0xf3, 0xa4, 0x10, 0xea, 0x83, 0x67, 0x8d, 0x6c, 0xe4, 0x1c, 0x7f, 0x3e, 0x8a, 0xc0, 0x14, 0xbf, + 0xb4, 0xb5, 0x3d, 0x89, 0xbf, 0xb4, 0xc6, 0x36, 0x72, 0xc6, 0x0f, 0x25, 0xfe, 0x12, 0xdf, 0xc0, + 0x94, 0x6f, 0xa4, 0xae, 0x0b, 0xc1, 0xad, 0x89, 0x8d, 0x1c, 0xf0, 0x2e, 0xfe, 0x3b, 0x9f, 0xdc, + 0xf6, 0x44, 0xb4, 0x65, 0xb1, 0x03, 0xf3, 0x8c, 0xe5, 0xa9, 0x2e, 0x55, 0x92, 0x17, 0xac, 0xcc, + 0xac, 0x13, 0x1b, 0x39, 0xa7, 0xd7, 0xe3, 0x8c, 0xe5, 0xd1, 0xd3, 0xbe, 0x73, 0xd3, 0x36, 0xce, + 0x2f, 0x61, 0x3a, 0xf0, 0xf8, 0x1d, 0xcc, 0x8c, 0x43, 0xcf, 0x1c, 0xb7, 0x4c, 0x04, 0xa6, 0xd4, + 0x89, 0x7f, 0x21, 0x80, 0x58, 0xd3, 0x8a, 0x35, 0x4d, 0xca, 0x19, 0xf6, 0xe1, 0x75, 0xb3, 0xfd, + 0x4a, 0x1e, 0xae, 0x8f, 0xfa, 0xf5, 0x5f, 0xed, 0xfa, 0x77, 0xbb, 0x20, 0xfe, 0xc2, 0xf9, 0x4b, + 0x13, 0xdb, 0xf8, 0x30, 0xe7, 0x2f, 0x17, 0x97, 0x80, 0x77, 0xd3, 0x93, 0x58, 0x53, 0x26, 0x74, + 0x85, 0xcf, 0xe0, 0x65, 0x98, 0xc4, 0xdf, 0x56, 0x61, 0x10, 0xc7, 0x1f, 0x6f, 0x83, 0xe4, 0x3e, + 0x88, 0xee, 0x5e, 
0x1c, 0xad, 0xac, 0x03, 0x43, 0xcc, 0x5e, 0x8b, 0x33, 0x78, 0x32, 0xb0, 0x00, + 0x27, 0xe1, 0x00, 0xcc, 0x87, 0xdf, 0x63, 0x54, 0x17, 0x73, 0x40, 0x01, 0x9e, 0xc2, 0xa4, 0xeb, + 0x5e, 0x7f, 0x85, 0xe7, 0xec, 0x87, 0x62, 0xa2, 0x29, 0xa4, 0xe8, 0x14, 0xf8, 0x11, 0xa7, 0x61, + 0x82, 0x38, 0x8d, 0x9e, 0x6d, 0x51, 0x93, 0xe3, 0x2a, 0xb8, 0xff, 0xc4, 0x0b, 0xf5, 0x5d, 0x53, + 0xb2, 0x96, 0x95, 0xcb, 0x65, 0x99, 0x0a, 0xee, 0x1a, 0x2b, 0xaa, 0xf3, 0xee, 0xb1, 0xbe, 0xe2, + 0x4c, 0x5c, 0x71, 0xe9, 0xb6, 0xde, 0x59, 0xaa, 0x52, 0xf7, 0x8f, 0xab, 0xfd, 0x1d, 0x00, 0x00, + 0xff, 0xff, 0x13, 0x4f, 0x31, 0x07, 0x04, 0x03, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto new file mode 100644 index 0000000..8f8895b --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/a.proto @@ -0,0 +1,77 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package goproto.test.import_public.sub; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub"; + +import "import_public/sub/b.proto"; + +message M { + // Field using a type in the same Go package, but a different source file. 
+ optional M2 m2 = 1; + + oneof oneof_field { + int32 oneof_int32 = 2; + int64 oneof_int64 = 3; + } + + optional group Grouping = 4 { + optional string group_field = 5; + } + + optional string default_field = 6 [default="def"]; + + message Submessage { + enum Submessage_Subenum { + M_SUBMESSAGE_ZERO = 0; + } + + oneof submessage_oneof_field { + int32 submessage_oneof_int32 = 1; + int64 submessage_oneof_int64 = 2; + } + } + + enum Subenum { + M_ZERO = 0; + } +} + +enum E { + ZERO = 0; +} + +extend M2 { + optional string extension_field = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go new file mode 100644 index 0000000..cb4ea17 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.pb.go @@ -0,0 +1,80 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: import_public/sub/b.proto + +package sub + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_fc66afda3d7c2232, []int{0} +} + +var extRange_M2 = []proto.ExtensionRange{ + {Start: 1, End: 536870911}, +} + +func (*M2) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_M2 +} + +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (m *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(m, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "goproto.test.import_public.sub.M2") +} + +func init() { proto.RegisterFile("import_public/sub/b.proto", fileDescriptor_fc66afda3d7c2232) } + +var fileDescriptor_fc66afda3d7c2232 = []byte{ + // 132 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x89, 0x2f, 0x28, 0x4d, 0xca, 0xc9, 0x4c, 0xd6, 0x2f, 0x2e, 0x4d, 0xd2, 0x4f, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x4b, 0xcf, 0x07, 0x33, 0xf4, 0x4a, 0x52, 0x8b, 0x4b, + 0xf4, 0x50, 0xd4, 0xe9, 0x15, 0x97, 0x26, 0x29, 0xf1, 0x71, 0x31, 0xf9, 0x1a, 0x69, 0x71, 0x70, + 0x30, 0x0a, 0x34, 0x34, 0x34, 0x34, 0x30, 0x39, 0xb9, 0x46, 0x39, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x4d, 0x48, + 0x2a, 0x4d, 0x83, 0x30, 0x92, 0x75, 0xd3, 
0x53, 0xf3, 0x74, 0xd3, 0xf3, 0xf5, 0x41, 0x46, 0xa6, + 0x24, 0x96, 0x24, 0xea, 0x63, 0x58, 0x0f, 0x08, 0x00, 0x00, 0xff, 0xff, 0x87, 0x44, 0x22, 0x2d, + 0x92, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto new file mode 100644 index 0000000..0b1b27c --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub/b.proto @@ -0,0 +1,40 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package goproto.test.import_public.sub; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub"; + +message M2 { + extensions 1 to max; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go similarity index 69% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go index 156e078..e3cbc89 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/import_public_test.go @@ -29,42 +29,38 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -syntax = "proto2"; +// +build go1.9 -package imp; +package testdata -import "imp2.proto"; -import "imp3.proto"; +import ( + "testing" -message ImportedMessage { - required int64 field = 1; + mainpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public" + subpb "github.com/golang/protobuf/protoc-gen-go/testdata/import_public/sub" +) - // The forwarded getters for these fields are fiddly to get right. 
- optional ImportedMessage2 local_msg = 2; - optional ForeignImportedMessage foreign_msg = 3; // in imp3.proto - optional Owner enum_field = 4; - oneof union { - int32 state = 9; - } - - repeated string name = 5; - repeated Owner boss = 6; - repeated ImportedMessage2 memo = 7; - - map msg_map = 8; - - enum Owner { - DAVE = 1; - MIKE = 2; - } - - extensions 90 to 100; -} - -message ImportedMessage2 { -} - -message ImportedExtendable { - option message_set_wire_format = true; - extensions 100 to max; +func TestImportPublicLink(t *testing.T) { + // mainpb.[ME] should be interchangable with subpb.[ME]. + var _ mainpb.M = subpb.M{} + var _ mainpb.E = subpb.E(0) + _ = &mainpb.Public{ + M: &mainpb.M{}, + E: mainpb.E_ZERO.Enum(), + Local: &mainpb.Local{ + M: &mainpb.M{}, + E: mainpb.E_ZERO.Enum(), + }, + } + _ = &mainpb.Public{ + M: &subpb.M{}, + E: subpb.E_ZERO.Enum(), + Local: &mainpb.Local{ + M: &subpb.M{}, + E: subpb.E_ZERO.Enum(), + }, + } + _ = &mainpb.M{ + M2: &subpb.M2{}, + } } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go new file mode 100644 index 0000000..9f774fe --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.pb.go @@ -0,0 +1,69 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/fmt/m.proto + +package fmt + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M) Reset() { *m = M{} } +func (m *M) String() string { return proto.CompactTextString(m) } +func (*M) ProtoMessage() {} +func (*M) Descriptor() ([]byte, []int) { + return fileDescriptor_72c126fcd452e392, []int{0} +} + +func (m *M) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M.Unmarshal(m, b) +} +func (m *M) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M.Marshal(b, m, deterministic) +} +func (m *M) XXX_Merge(src proto.Message) { + xxx_messageInfo_M.Merge(m, src) +} +func (m *M) XXX_Size() int { + return xxx_messageInfo_M.Size(m) +} +func (m *M) XXX_DiscardUnknown() { + xxx_messageInfo_M.DiscardUnknown(m) +} + +var xxx_messageInfo_M proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M)(nil), "fmt.M") +} + +func init() { proto.RegisterFile("imports/fmt/m.proto", fileDescriptor_72c126fcd452e392) } + +var fileDescriptor_72c126fcd452e392 = []byte{ + // 109 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xce, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x4f, 0xcb, 0x2d, 0xd1, 0xcf, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, + 0x62, 0x4e, 0xcb, 0x2d, 0x51, 0x62, 0xe6, 0x62, 0xf4, 0x75, 0xb2, 0x8f, 0xb2, 0x4d, 0xcf, 0x2c, + 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x07, + 0x2b, 0x4a, 0x2a, 0x4d, 0x83, 0x30, 0x92, 0x75, 0xd3, 0x53, 0xf3, 0x74, 0xd3, 0xf3, 0xf5, 0x4b, + 0x52, 0x8b, 0x4b, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0x91, 0x8c, 0x4c, 0x62, 0x03, 0xab, 0x31, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0xc4, 0xc9, 0xee, 0xbe, 0x68, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto similarity index 89% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto index 58fc759..142d8cf 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt/m.proto @@ -1,6 +1,6 @@ // Go support for Protocol Buffers - Google's data interchange format // -// Copyright 2012 The Go Authors. All rights reserved. +// Copyright 2018 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without @@ -29,10 +29,7 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -syntax = "proto2"; - -package imp; - -message ForeignImportedMessage { - optional string tuber = 1; -} +syntax = "proto3"; +package fmt; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt"; +message M {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go new file mode 100644 index 0000000..1cec006 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: imports/test_a_1/m1.proto + +package test_a_1 + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type E1 int32 + +const ( + E1_E1_ZERO E1 = 0 +) + +var E1_name = map[int32]string{ + 0: "E1_ZERO", +} + +var E1_value = map[string]int32{ + "E1_ZERO": 0, +} + +func (x E1) String() string { + return proto.EnumName(E1_name, int32(x)) +} + +func (E1) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_c1091de3fa870a14, []int{0} +} + +type M1 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1) Reset() { *m = M1{} } +func (m *M1) String() string { return proto.CompactTextString(m) } +func (*M1) ProtoMessage() {} +func (*M1) Descriptor() ([]byte, []int) { + return fileDescriptor_c1091de3fa870a14, []int{0} +} + +func (m *M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1.Unmarshal(m, b) +} +func (m *M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1.Marshal(b, m, deterministic) +} +func (m *M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1.Merge(m, src) +} +func (m *M1) XXX_Size() int { + return xxx_messageInfo_M1.Size(m) +} +func (m *M1) XXX_DiscardUnknown() { + xxx_messageInfo_M1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1 proto.InternalMessageInfo + +type M1_1 struct { + M1 *M1 `protobuf:"bytes,1,opt,name=m1,proto3" json:"m1,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1_1) Reset() { *m = M1_1{} } +func (m *M1_1) String() string { return proto.CompactTextString(m) } +func (*M1_1) ProtoMessage() {} +func (*M1_1) Descriptor() ([]byte, []int) { + return fileDescriptor_c1091de3fa870a14, []int{1} +} + +func (m *M1_1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1_1.Unmarshal(m, b) +} +func (m *M1_1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1_1.Marshal(b, m, deterministic) +} +func (m *M1_1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1_1.Merge(m, src) +} +func (m *M1_1) XXX_Size() int { + return xxx_messageInfo_M1_1.Size(m) +} +func (m *M1_1) XXX_DiscardUnknown() { + xxx_messageInfo_M1_1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1_1 proto.InternalMessageInfo + +func (m *M1_1) GetM1() *M1 { + if m != nil { + return m.M1 + } + return nil +} + +func init() { + proto.RegisterEnum("test.a.E1", E1_name, E1_value) + proto.RegisterType((*M1)(nil), "test.a.M1") + proto.RegisterType((*M1_1)(nil), "test.a.M1_1") +} + +func init() { proto.RegisterFile("imports/test_a_1/m1.proto", fileDescriptor_c1091de3fa870a14) } + +var fileDescriptor_c1091de3fa870a14 = []byte{ + // 165 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd4, 0xcf, 0x35, 0xd4, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, 
+ 0x1a, 0x2a, 0x29, 0x71, 0xb1, 0xf8, 0x1a, 0xc6, 0x1b, 0x0a, 0x49, 0x71, 0x31, 0xe5, 0x1a, 0x4a, + 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x1b, 0x71, 0xe9, 0x41, 0x94, 0xe8, 0xf9, 0x1a, 0x06, 0x31, 0xe5, + 0x1a, 0x6a, 0x09, 0x72, 0x31, 0xb9, 0x1a, 0x0a, 0x71, 0x73, 0xb1, 0xbb, 0x1a, 0xc6, 0x47, 0xb9, + 0x06, 0xf9, 0x0b, 0x30, 0x38, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, + 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0xcd, 0x4f, 0x2a, 0x4d, 0x83, + 0x30, 0x92, 0x75, 0xd3, 0x53, 0xf3, 0x74, 0xd3, 0xf3, 0xc1, 0x4e, 0x48, 0x49, 0x2c, 0x49, 0xd4, + 0x47, 0x77, 0x53, 0x12, 0x1b, 0x58, 0xa1, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0xcc, 0xae, 0xc9, + 0xcd, 0xae, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto new file mode 100644 index 0000000..da54c1e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m1.proto @@ -0,0 +1,44 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1"; + +message M1 {} + +message M1_1 { + M1 m1 = 1; +} + +enum E1 { + E1_ZERO = 0; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go new file mode 100644 index 0000000..0563d58 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.pb.go @@ -0,0 +1,70 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: imports/test_a_1/m2.proto + +package test_a_1 + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_20cf27515c0d621c, []int{0} +} + +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (m *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(m, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "test.a.M2") +} + +func init() { proto.RegisterFile("imports/test_a_1/m2.proto", fileDescriptor_20cf27515c0d621c) } + +var fileDescriptor_20cf27515c0d621c = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd4, 0xcf, 0x35, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x1a, 0x39, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xe3, 0xe0, 0x7e, 0xc0, 0x77, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto new file mode 100644 index 0000000..49499dc --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1/m2.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1"; +message M2 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go new file mode 100644 index 0000000..cbc8430 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.pb.go @@ -0,0 +1,70 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_2/m3.proto + +package test_a_2 + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M3 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M3) Reset() { *m = M3{} } +func (m *M3) String() string { return proto.CompactTextString(m) } +func (*M3) ProtoMessage() {} +func (*M3) Descriptor() ([]byte, []int) { + return fileDescriptor_ff9d8f834875c9c5, []int{0} +} + +func (m *M3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M3.Unmarshal(m, b) +} +func (m *M3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M3.Marshal(b, m, deterministic) +} +func (m *M3) XXX_Merge(src proto.Message) { + xxx_messageInfo_M3.Merge(m, src) +} +func (m *M3) XXX_Size() int { + return xxx_messageInfo_M3.Size(m) +} +func (m *M3) XXX_DiscardUnknown() { + xxx_messageInfo_M3.DiscardUnknown(m) +} + +var xxx_messageInfo_M3 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M3)(nil), "test.a.M3") +} + +func init() { proto.RegisterFile("imports/test_a_2/m3.proto", fileDescriptor_ff9d8f834875c9c5) } + +var fileDescriptor_ff9d8f834875c9c5 = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd2, 0xcf, 0x35, 0xd6, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x1a, 0x3b, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x23, 0x86, 0x27, 0x47, 0x77, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto new file mode 100644 index 0000000..5e811ef --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m3.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2"; +message M3 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go new file mode 100644 index 0000000..2771b83 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.pb.go @@ -0,0 +1,70 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_a_2/m4.proto + +package test_a_2 + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M4 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M4) Reset() { *m = M4{} } +func (m *M4) String() string { return proto.CompactTextString(m) } +func (*M4) ProtoMessage() {} +func (*M4) Descriptor() ([]byte, []int) { + return fileDescriptor_fdd24f82f6c5a786, []int{0} +} + +func (m *M4) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M4.Unmarshal(m, b) +} +func (m *M4) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M4.Marshal(b, m, deterministic) +} +func (m *M4) XXX_Merge(src proto.Message) { + xxx_messageInfo_M4.Merge(m, src) +} +func (m *M4) XXX_Size() int { + return xxx_messageInfo_M4.Size(m) +} +func (m *M4) XXX_DiscardUnknown() { + xxx_messageInfo_M4.DiscardUnknown(m) +} + +var xxx_messageInfo_M4 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M4)(nil), "test.a.M4") +} + +func init() { proto.RegisterFile("imports/test_a_2/m4.proto", fileDescriptor_fdd24f82f6c5a786) } + +var fileDescriptor_fdd24f82f6c5a786 = []byte{ + // 114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8c, 0x37, 0xd2, 0xcf, 0x35, 0xd1, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x03, 0x09, 0xe9, 0x25, 0x2a, 0xb1, 0x70, 0x31, 0xf9, + 0x9a, 0x38, 0xb9, 0x44, 0x39, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, + 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x15, 0x26, 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, + 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0xb3, 0x52, 0x12, 0x4b, 0x12, 0xf5, 0xd1, 0x0d, 0x4f, + 0x62, 0x03, 0x2b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x58, 0xcb, 0x10, 0xc8, 0x77, 0x00, + 0x00, 0x00, +} diff --git 
a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto new file mode 100644 index 0000000..8f8fe3e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2/m4.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.a; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2"; +message M4 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go new file mode 100644 index 0000000..51592e7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.pb.go @@ -0,0 +1,70 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_b_1/m1.proto + +package beta + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M1 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M1) Reset() { *m = M1{} } +func (m *M1) String() string { return proto.CompactTextString(m) } +func (*M1) ProtoMessage() {} +func (*M1) Descriptor() ([]byte, []int) { + return fileDescriptor_7f49573d035512a8, []int{0} +} + +func (m *M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M1.Unmarshal(m, b) +} +func (m *M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M1.Marshal(b, m, deterministic) +} +func (m *M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_M1.Merge(m, src) +} +func (m *M1) XXX_Size() int { + return xxx_messageInfo_M1.Size(m) +} +func (m *M1) XXX_DiscardUnknown() { + xxx_messageInfo_M1.DiscardUnknown(m) +} + +var xxx_messageInfo_M1 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M1)(nil), "test.b.part1.M1") +} + +func init() { proto.RegisterFile("imports/test_b_1/m1.proto", fileDescriptor_7f49573d035512a8) } + +var fileDescriptor_7f49573d035512a8 = []byte{ + // 125 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8a, 0x37, 0xd4, 0xcf, 0x35, 0xd4, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x09, 0xe9, 0x25, 0xe9, 0x15, 0x24, 0x16, 0x95, + 0x18, 0x2a, 0xb1, 0x70, 0x31, 0xf9, 0x1a, 0x3a, 0x79, 0x46, 0xb9, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x95, 0x27, + 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0x13, 0x53, 0x12, + 0x4b, 0x12, 0xf5, 0xd1, 0xad, 0xb0, 0x4e, 0x4a, 0x2d, 0x49, 0x4c, 0x62, 0x03, 0xab, 0x36, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0x4a, 0xf1, 0x3b, 0x7f, 0x82, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto new file mode 100644 index 0000000..2c35ec4 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m1.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.b.part1; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1;beta"; +message M1 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go new file mode 100644 index 0000000..f31fb0d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.pb.go @@ -0,0 +1,70 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_b_1/m2.proto + +package beta + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type M2 struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *M2) Reset() { *m = M2{} } +func (m *M2) String() string { return proto.CompactTextString(m) } +func (*M2) ProtoMessage() {} +func (*M2) Descriptor() ([]byte, []int) { + return fileDescriptor_a1becddceeb586f2, []int{0} +} + +func (m *M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_M2.Unmarshal(m, b) +} +func (m *M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_M2.Marshal(b, m, deterministic) +} +func (m *M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_M2.Merge(m, src) +} +func (m *M2) XXX_Size() int { + return xxx_messageInfo_M2.Size(m) +} +func (m *M2) XXX_DiscardUnknown() { + xxx_messageInfo_M2.DiscardUnknown(m) +} + +var xxx_messageInfo_M2 proto.InternalMessageInfo + +func init() { + proto.RegisterType((*M2)(nil), "test.b.part2.M2") +} + +func init() { proto.RegisterFile("imports/test_b_1/m2.proto", fileDescriptor_a1becddceeb586f2) } + +var fileDescriptor_a1becddceeb586f2 = []byte{ + // 125 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcc, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x4f, 0x8a, 0x37, 0xd4, 0xcf, 0x35, 0xd2, + 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x09, 0xe9, 0x25, 0xe9, 0x15, 0x24, 0x16, 0x95, + 0x18, 0x29, 0xb1, 0x70, 0x31, 0xf9, 0x1a, 0x39, 0x79, 0x46, 0xb9, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0x83, 0x95, 0x27, + 0x95, 0xa6, 0x41, 0x18, 0xc9, 0xba, 0xe9, 0xa9, 0x79, 0xba, 0xe9, 0xf9, 0x60, 0x13, 0x53, 0x12, + 0x4b, 0x12, 0xf5, 0xd1, 0xad, 0xb0, 0x4e, 0x4a, 0x2d, 0x49, 0x4c, 0x62, 0x03, 0xab, 0x36, 0x06, + 0x04, 0x00, 0x00, 
0xff, 0xff, 0x44, 0x29, 0xbe, 0x6d, 0x82, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto new file mode 100644 index 0000000..13723be --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1/m2.proto @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package test.b.part2; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1;beta"; +message M2 {} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go new file mode 100644 index 0000000..1fef9ea --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.pb.go @@ -0,0 +1,81 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_a1m1.proto + +package imports + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type A1M1 struct { + F *test_a_1.M1 `protobuf:"bytes,1,opt,name=f,proto3" json:"f,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *A1M1) Reset() { *m = A1M1{} } +func (m *A1M1) String() string { return proto.CompactTextString(m) } +func (*A1M1) ProtoMessage() {} +func (*A1M1) Descriptor() ([]byte, []int) { + return fileDescriptor_3b904a47327455f3, []int{0} +} + +func (m *A1M1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_A1M1.Unmarshal(m, b) +} +func (m *A1M1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_A1M1.Marshal(b, m, deterministic) +} +func (m *A1M1) XXX_Merge(src proto.Message) { + xxx_messageInfo_A1M1.Merge(m, src) +} +func (m *A1M1) XXX_Size() int { + return xxx_messageInfo_A1M1.Size(m) +} +func (m *A1M1) XXX_DiscardUnknown() { + xxx_messageInfo_A1M1.DiscardUnknown(m) +} + +var xxx_messageInfo_A1M1 proto.InternalMessageInfo + +func (m *A1M1) GetF() *test_a_1.M1 { + if m != nil { + return m.F + } + return nil +} + +func init() { + proto.RegisterType((*A1M1)(nil), "test.A1M1") +} + +func init() { proto.RegisterFile("imports/test_import_a1m1.proto", fileDescriptor_3b904a47327455f3) } + +var fileDescriptor_3b904a47327455f3 = []byte{ + // 149 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcb, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x87, 0x70, 0xe2, 0x13, 0x0d, 0x73, 0x0d, + 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x58, 0x40, 0xe2, 0x52, 0x92, 0x28, 0xaa, 0x12, 0xe3, + 0x0d, 0xf5, 0x61, 0x0a, 0x94, 0x14, 0xb8, 0x58, 0x1c, 0x0d, 0x7d, 0x0d, 0x85, 0x24, 0xb8, 0x18, + 0xd3, 0x24, 0x18, 0x15, 0x18, 0x35, 0xb8, 0x8d, 0xb8, 0xf4, 0x40, 0xca, 0xf4, 0x12, 0xf5, 0x7c, + 0x0d, 0x83, 0x18, 0xd3, 0x9c, 0xac, 0xa3, 0x2c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, + 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xc1, 0x9a, 0x93, 0x4a, 0xd3, 0x20, + 0x8c, 0x64, 0xdd, 0xf4, 0xd4, 0x3c, 0xdd, 0xf4, 0x7c, 0xb0, 0xf9, 0x29, 0x89, 0x25, 0x89, 0xfa, + 0x50, 0x0b, 0x93, 0xd8, 0xc0, 0xf2, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x84, 0x2f, 0x18, + 0x23, 0xa8, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto new file mode 100644 index 0000000..abf07f2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m1.proto @@ -0,0 +1,42 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +import "imports/test_a_1/m1.proto"; + +message A1M1 { + test.a.M1 f = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go new file mode 100644 index 0000000..13d5dd6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.pb.go @@ -0,0 +1,81 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_a1m2.proto + +package imports + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type A1M2 struct { + F *test_a_1.M2 `protobuf:"bytes,1,opt,name=f,proto3" json:"f,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *A1M2) Reset() { *m = A1M2{} } +func (m *A1M2) String() string { return proto.CompactTextString(m) } +func (*A1M2) ProtoMessage() {} +func (*A1M2) Descriptor() ([]byte, []int) { + return fileDescriptor_bdb27b114687957d, []int{0} +} + +func (m *A1M2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_A1M2.Unmarshal(m, b) +} +func (m *A1M2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_A1M2.Marshal(b, m, deterministic) +} +func (m *A1M2) XXX_Merge(src proto.Message) { + xxx_messageInfo_A1M2.Merge(m, src) +} +func (m *A1M2) XXX_Size() int { + return xxx_messageInfo_A1M2.Size(m) +} +func (m *A1M2) XXX_DiscardUnknown() { + xxx_messageInfo_A1M2.DiscardUnknown(m) +} + +var xxx_messageInfo_A1M2 proto.InternalMessageInfo + +func (m *A1M2) GetF() *test_a_1.M2 { + if m != nil { + return m.F + } + return nil +} + +func init() { + proto.RegisterType((*A1M2)(nil), "test.A1M2") +} + +func init() { proto.RegisterFile("imports/test_import_a1m2.proto", fileDescriptor_bdb27b114687957d) } + +var fileDescriptor_bdb27b114687957d = []byte{ + // 149 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcb, 0xcc, 0x2d, 0xc8, + 0x2f, 0x2a, 0x29, 0xd6, 0x2f, 0x49, 0x2d, 0x2e, 0x89, 0x87, 0x70, 0xe2, 0x13, 0x0d, 0x73, 0x8d, + 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x58, 0x40, 0xe2, 0x52, 0x92, 0x28, 0xaa, 0x12, 0xe3, + 0x0d, 0xf5, 0x61, 0x0a, 0x94, 0x14, 0xb8, 0x58, 0x1c, 0x0d, 0x7d, 0x8d, 0x84, 0x24, 0xb8, 0x18, + 0xd3, 0x24, 0x18, 0x15, 0x18, 0x35, 0xb8, 0x8d, 0xb8, 0xf4, 0x40, 0xca, 0xf4, 0x12, 0xf5, 0x7c, + 0x8d, 0x82, 0x18, 0xd3, 0x9c, 0xac, 0xa3, 0x2c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, + 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xc1, 0x9a, 0x93, 0x4a, 0xd3, 0x20, + 0x8c, 0x64, 0xdd, 0xf4, 0xd4, 0x3c, 0xdd, 0xf4, 0x7c, 0xb0, 0xf9, 0x29, 0x89, 0x25, 0x89, 0xfa, + 0x50, 0x0b, 0x93, 0xd8, 0xc0, 0xf2, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x1f, 0x88, 0xfb, + 0xea, 0xa8, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto new file mode 100644 index 0000000..5c53950 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_a1m2.proto @@ -0,0 +1,42 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +import "imports/test_a_1/m2.proto"; + +message A1M2 { + test.a.M2 f = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go new file mode 100644 index 0000000..4670517 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: imports/test_import_all.proto + +package imports + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + fmt1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/fmt" + test_a_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_1" + test_a_2 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_a_2" + test_b_1 "github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_b_1" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type All struct { + Am1 *test_a_1.M1 `protobuf:"bytes,1,opt,name=am1,proto3" json:"am1,omitempty"` + Am2 *test_a_1.M2 `protobuf:"bytes,2,opt,name=am2,proto3" json:"am2,omitempty"` + Am3 *test_a_2.M3 `protobuf:"bytes,3,opt,name=am3,proto3" json:"am3,omitempty"` + Am4 *test_a_2.M4 `protobuf:"bytes,4,opt,name=am4,proto3" json:"am4,omitempty"` + Bm1 *test_b_1.M1 `protobuf:"bytes,5,opt,name=bm1,proto3" json:"bm1,omitempty"` + Bm2 *test_b_1.M2 `protobuf:"bytes,6,opt,name=bm2,proto3" json:"bm2,omitempty"` + Fmt *fmt1.M `protobuf:"bytes,7,opt,name=fmt,proto3" json:"fmt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *All) Reset() { *m = All{} } +func (m *All) String() string { return proto.CompactTextString(m) } +func (*All) ProtoMessage() {} +func (*All) Descriptor() ([]byte, []int) { + return fileDescriptor_324466f0afc16f77, []int{0} +} + +func (m *All) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_All.Unmarshal(m, b) +} +func (m *All) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_All.Marshal(b, m, deterministic) +} +func (m *All) XXX_Merge(src proto.Message) { + xxx_messageInfo_All.Merge(m, src) +} +func (m *All) XXX_Size() int { + return xxx_messageInfo_All.Size(m) +} +func (m *All) XXX_DiscardUnknown() { + xxx_messageInfo_All.DiscardUnknown(m) +} + +var xxx_messageInfo_All proto.InternalMessageInfo + +func (m *All) GetAm1() *test_a_1.M1 { + if m != nil { + return m.Am1 + } + return nil +} + +func (m *All) GetAm2() *test_a_1.M2 { + if m != nil { + return m.Am2 + } + return nil +} + +func (m *All) GetAm3() *test_a_2.M3 { + if m != nil { + return m.Am3 + } + return nil +} + +func (m *All) GetAm4() *test_a_2.M4 { + if m != nil { + return m.Am4 + } + return nil +} + +func (m *All) GetBm1() *test_b_1.M1 { + if m != nil { + return m.Bm1 + } + return nil +} + +func (m *All) GetBm2() *test_b_1.M2 { + if m != nil { + return m.Bm2 + } + return nil +} + +func (m *All) GetFmt() *fmt1.M { + if m != nil { + return m.Fmt + } + return nil +} + +func init() { + proto.RegisterType((*All)(nil), "test.All") +} + +func init() { proto.RegisterFile("imports/test_import_all.proto", fileDescriptor_324466f0afc16f77) } + +var fileDescriptor_324466f0afc16f77 = []byte{ + // 258 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0xd0, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x06, 0x60, 0x15, 0x97, 0x20, 0x99, 0x05, 0x85, 0xc5, 0x20, 0x90, 0x50, 0x27, 0x96, 0xda, + 0xb2, 0x9d, 0x05, 0x31, 0xc1, 0xde, 0xa5, 0x23, 0x4b, 0x64, 0x97, 0xc6, 0x54, 0xf2, 0xd5, 0x51, + 0x7a, 0x7d, 0x5e, 0x5e, 0x05, 0xd9, 0x07, 0x12, 0x84, 0x66, 0x4b, 0xfe, 0xef, 0xb7, 0xce, 0x3e, + 0x7e, 0xbf, 0x83, 0x3e, 0x0d, 0x78, 0x50, 0xb8, 0x3d, 0x60, 0x4b, 0x3f, 0xad, 0x8b, 0x51, 0xf6, + 0x43, 0xc2, 0x54, 0xcf, 0x73, 0x7c, 0x7b, 0xf3, 0xa7, 0xe4, 0x5a, 0xad, 0x40, 0x53, 0xe1, 0x14, + 0x99, 0x09, 0x32, 0x0a, 0xec, 0x34, 0x35, 0x27, 0xc9, 0x4f, 0xcf, 0xf2, 0xbf, 0x67, 0x5d, 0xff, + 0x50, 0x07, 0xa8, 0x80, 0xc2, 0xc5, 0xe7, 0x8c, 0xb3, 0x97, 0x18, 0xeb, 0x3b, 0xce, 0x1c, 0x68, + 0x31, 0x7b, 0x98, 0x3d, 0x5e, 0x1a, 0x2e, 0xf3, 0x69, 0xe9, 0xe4, 0x4a, 0xaf, 0x73, 0x4c, 0x6a, + 0xc4, 0xd9, 0x48, 0x4d, 0x56, 0x43, 0x6a, 0x05, 0x1b, 0xa9, 0xcd, 0x6a, 0x49, 0x1b, 0x31, 0x1f, + 0x69, 0x93, 0xb5, 0xa9, 0x17, 0x9c, 0x79, 0xd0, 0xe2, 0xbc, 0xe8, 0x15, 0xa9, 0x97, 0xbd, 0x1b, + 0x50, 0x97, 0xe9, 0x1e, 0x34, 
0x75, 0x8c, 0xa8, 0xfe, 0x77, 0x4c, 0xb9, 0x83, 0x07, 0x53, 0x0b, + 0xce, 0x3a, 0x40, 0x71, 0x51, 0x3a, 0x95, 0xec, 0x00, 0xe5, 0x6a, 0x9d, 0xa3, 0xd7, 0xe7, 0xb7, + 0xa7, 0xb0, 0xc3, 0x8f, 0xa3, 0x97, 0x9b, 0x04, 0x2a, 0xa4, 0xe8, 0xf6, 0x41, 0x95, 0xc7, 0xfb, + 0x63, 0x47, 0x1f, 0x9b, 0x65, 0xd8, 0xee, 0x97, 0x21, 0x95, 0xa5, 0xbd, 0x3b, 0x74, 0xea, 0x7b, + 0x55, 0xbe, 0x2a, 0x6e, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x95, 0x39, 0xa3, 0x82, 0x03, 0x02, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto new file mode 100644 index 0000000..582d722 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imports/test_import_all.proto @@ -0,0 +1,58 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2018 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package test; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/imports"; + +// test_a_1/m*.proto are in the same Go package and proto package. +// test_a_*/*.proto are in different Go packages, but the same proto package. +// test_b_1/*.proto are in the same Go package, but different proto packages. +// fmt/m.proto has a package name which conflicts with "fmt". 
+import "imports/test_a_1/m1.proto"; +import "imports/test_a_1/m2.proto"; +import "imports/test_a_2/m3.proto"; +import "imports/test_a_2/m4.proto"; +import "imports/test_b_1/m1.proto"; +import "imports/test_b_1/m2.proto"; +import "imports/fmt/m.proto"; + +message All { + test.a.M1 am1 = 1; + test.a.M2 am2 = 2; + test.a.M3 am3 = 3; + test.a.M4 am4 = 4; + test.b.part1.M1 bm1 = 5; + test.b.part2.M2 bm2 = 6; + fmt.M fmt = 7; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.pb.go new file mode 100644 index 0000000..c39404e --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.pb.go @@ -0,0 +1,103 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: issue780_oneof_conflict/test.proto + +package oneoftest + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Foo struct { + // Types that are valid to be assigned to Bar: + // *Foo_GetBar + Bar isFoo_Bar `protobuf_oneof:"bar"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Foo) Reset() { *m = Foo{} } +func (m *Foo) String() string { return proto.CompactTextString(m) } +func (*Foo) ProtoMessage() {} +func (*Foo) Descriptor() ([]byte, []int) { + return fileDescriptor_48462cafc802a68e, []int{0} +} + +func (m *Foo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Foo.Unmarshal(m, b) +} +func (m *Foo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Foo.Marshal(b, m, deterministic) +} +func (m *Foo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Foo.Merge(m, src) +} +func (m *Foo) XXX_Size() int { + return xxx_messageInfo_Foo.Size(m) +} +func (m *Foo) XXX_DiscardUnknown() { + xxx_messageInfo_Foo.DiscardUnknown(m) +} + +var xxx_messageInfo_Foo proto.InternalMessageInfo + +type isFoo_Bar interface { + isFoo_Bar() +} + +type Foo_GetBar struct { + GetBar string `protobuf:"bytes,1,opt,name=get_bar,json=getBar,oneof"` +} + +func (*Foo_GetBar) isFoo_Bar() {} + +func (m *Foo) GetBar() isFoo_Bar { + if m != nil { + return m.Bar + } + return nil +} + +func (m *Foo) GetGetBar() string { + if x, ok := m.GetBar().(*Foo_GetBar); ok { + return x.GetBar + } + return "" +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Foo) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Foo_GetBar)(nil), + } +} + +func init() { + proto.RegisterType((*Foo)(nil), "oneoftest.Foo") +} + +func init() { proto.RegisterFile("issue780_oneof_conflict/test.proto", fileDescriptor_48462cafc802a68e) } + +var fileDescriptor_48462cafc802a68e = []byte{ + // 107 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xca, 0x2c, 0x2e, 0x2e, + 0x4d, 0x35, 0xb7, 0x30, 0x88, 0xcf, 0xcf, 0x4b, 0xcd, 0x4f, 0x8b, 0x4f, 0xce, 0xcf, 0x4b, 0xcb, + 0xc9, 0x4c, 0x2e, 0xd1, 0x2f, 0x49, 0x2d, 0x2e, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, + 0x04, 0x4b, 0x81, 0x04, 0x94, 0xd4, 0xb9, 0x98, 0xdd, 0xf2, 0xf3, 0x85, 0x24, 0xb9, 0xd8, 0xd3, + 0x53, 0x4b, 0xe2, 0x93, 0x12, 0x8b, 0x24, 0x18, 0x15, 0x18, 0x35, 0x38, 0x3d, 0x18, 0x82, 0xd8, + 0xd2, 0x53, 0x4b, 0x9c, 0x12, 0x8b, 0x9c, 0x58, 0xb9, 0x98, 0x93, 0x12, 0x8b, 0x00, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x12, 0x66, 0x0c, 0x02, 0x58, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.proto new file mode 100644 index 0000000..07879c9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/issue780_oneof_conflict/test.proto @@ -0,0 +1,9 @@ +syntax = "proto2"; + +package oneoftest; + +message Foo { + oneof bar { + string get_bar = 1; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go index f9b5ccf..7ec1f2d 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go @@ -36,11 +36,13 @@ package testdata import ( "testing" - mytestpb "./my_test" + importspb "github.com/golang/protobuf/protoc-gen-go/testdata/imports" multipb "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + mytestpb "github.com/golang/protobuf/protoc-gen-go/testdata/my_test" ) func TestLink(t *testing.T) { _ = &multipb.Multi1{} _ = &mytestpb.Request{} + _ = &importspb.All{} } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go new file mode 100644 index 0000000..6ff8dd6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.pb.go @@ -0,0 +1,99 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi1.proto + +package multitest + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Multi1 struct { + Multi2 *Multi2 `protobuf:"bytes,1,req,name=multi2" json:"multi2,omitempty"` + Color *Multi2_Color `protobuf:"varint,2,opt,name=color,enum=multitest.Multi2_Color" json:"color,omitempty"` + HatType *Multi3_HatType `protobuf:"varint,3,opt,name=hat_type,json=hatType,enum=multitest.Multi3_HatType" json:"hat_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi1) Reset() { *m = Multi1{} } +func (m *Multi1) String() string { return proto.CompactTextString(m) } +func (*Multi1) ProtoMessage() {} +func (*Multi1) Descriptor() ([]byte, []int) { + return fileDescriptor_e0bffc140cd1b1d9, []int{0} +} + +func (m *Multi1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi1.Unmarshal(m, b) +} +func (m *Multi1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi1.Marshal(b, m, deterministic) +} +func (m *Multi1) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi1.Merge(m, src) +} +func (m *Multi1) XXX_Size() int { + return xxx_messageInfo_Multi1.Size(m) +} +func (m *Multi1) XXX_DiscardUnknown() { + xxx_messageInfo_Multi1.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi1 proto.InternalMessageInfo + +func (m *Multi1) GetMulti2() *Multi2 { + if m != nil { + return m.Multi2 + } + return nil +} + +func (m *Multi1) GetColor() Multi2_Color { + if m != nil && m.Color != nil { + return *m.Color + } + return Multi2_BLUE +} + +func (m *Multi1) GetHatType() Multi3_HatType { + if m != nil && m.HatType != nil { + return *m.HatType + } + return Multi3_FEDORA +} + +func init() { + proto.RegisterType((*Multi1)(nil), "multitest.Multi1") +} + +func init() { proto.RegisterFile("multi/multi1.proto", fileDescriptor_e0bffc140cd1b1d9) } + +var fileDescriptor_e0bffc140cd1b1d9 = []byte{ + // 200 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0x86, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x14, 0xb2, 0xb4, 0x11, 0x44, 0x1a, 0x45, 0xcc, 0x18, 0x22, 0xa6, 0x34, 0x83, + 0x91, 0x8b, 0xcd, 0x17, 0x6c, 0x86, 0x90, 0x26, 0x17, 0x1b, 0x44, 0xb9, 0x04, 0xa3, 0x02, 0x93, + 0x06, 0xb7, 0x91, 0xa0, 0x1e, 0xdc, 0x38, 0x3d, 0xb0, 0x12, 0xa3, 0x20, 0xa8, 0x02, 0x21, 0x5d, + 0x2e, 0xd6, 0xe4, 0xfc, 0x9c, 0xfc, 0x22, 0x09, 0x26, 0x05, 0x46, 0x0d, 0x3e, 0x23, 0x71, 0x0c, + 0x95, 0x7a, 0xce, 0x20, 0xe9, 0x20, 0x88, 0x2a, 0x21, 0x13, 0x2e, 0x8e, 0x8c, 0xc4, 0x92, 0xf8, + 0x92, 0xca, 0x82, 0x54, 0x09, 0x66, 0xb0, 0x0e, 0x49, 0x74, 0x1d, 0xc6, 0x7a, 0x1e, 0x89, 0x25, + 0x21, 0x95, 0x05, 0xa9, 0x41, 0xec, 0x19, 0x10, 0x86, 0x93, 0x73, 0x94, 0x63, 0x7a, 0x66, 0x49, + 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x3e, 0xd8, + 0xd5, 0x49, 0xa5, 0x69, 0x10, 0x46, 0xb2, 0x6e, 0x7a, 0x6a, 0x9e, 0x6e, 0x7a, 0xbe, 0x3e, 0xc8, + 0xa0, 0x94, 0xc4, 0x92, 0x44, 0x88, 0xe7, 0xac, 0xe1, 0x86, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x60, 0x7d, 0xfc, 0x9f, 0x27, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto index 0da6e0a..d3a3204 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto +++ 
b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto @@ -36,6 +36,8 @@ import "multi/multi3.proto"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi1 { required Multi2 multi2 = 1; optional Multi2.Color color = 2; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go new file mode 100644 index 0000000..2eb07ad --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi2.proto + +package multitest + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Multi2_Color int32 + +const ( + Multi2_BLUE Multi2_Color = 1 + Multi2_GREEN Multi2_Color = 2 + Multi2_RED Multi2_Color = 3 +) + +var Multi2_Color_name = map[int32]string{ + 1: "BLUE", + 2: "GREEN", + 3: "RED", +} + +var Multi2_Color_value = map[string]int32{ + "BLUE": 1, + "GREEN": 2, + "RED": 3, +} + +func (x Multi2_Color) Enum() *Multi2_Color { + p := new(Multi2_Color) + *p = x + return p +} + +func (x Multi2_Color) String() string { + return proto.EnumName(Multi2_Color_name, int32(x)) +} + +func (x *Multi2_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Multi2_Color_value, data, "Multi2_Color") + if err != nil { + return err + } + *x = Multi2_Color(value) + return nil +} + +func (Multi2_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_a2aebe588a0b2853, []int{0, 0} +} + +type Multi2 struct { + RequiredValue *int32 `protobuf:"varint,1,req,name=required_value,json=requiredValue" json:"required_value,omitempty"` + Color *Multi2_Color `protobuf:"varint,2,opt,name=color,enum=multitest.Multi2_Color" json:"color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi2) Reset() { *m = Multi2{} } +func (m *Multi2) String() string { return proto.CompactTextString(m) } +func (*Multi2) ProtoMessage() {} +func (*Multi2) Descriptor() ([]byte, []int) { + return fileDescriptor_a2aebe588a0b2853, []int{0} +} + +func (m *Multi2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi2.Unmarshal(m, b) +} +func (m *Multi2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi2.Marshal(b, m, deterministic) +} +func (m *Multi2) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi2.Merge(m, src) +} +func (m *Multi2) XXX_Size() int { + return xxx_messageInfo_Multi2.Size(m) +} +func (m *Multi2) XXX_DiscardUnknown() { + xxx_messageInfo_Multi2.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi2 proto.InternalMessageInfo + +func (m *Multi2) GetRequiredValue() int32 { + if m != nil && m.RequiredValue != nil { + return *m.RequiredValue + } + return 0 +} + +func (m *Multi2) GetColor() Multi2_Color { + if m != nil && m.Color != nil { + return *m.Color + } + return 
Multi2_BLUE +} + +func init() { + proto.RegisterEnum("multitest.Multi2_Color", Multi2_Color_name, Multi2_Color_value) + proto.RegisterType((*Multi2)(nil), "multitest.Multi2") +} + +func init() { proto.RegisterFile("multi/multi2.proto", fileDescriptor_a2aebe588a0b2853) } + +var fileDescriptor_a2aebe588a0b2853 = []byte{ + // 202 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0x46, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x52, 0x2b, 0x23, 0x17, 0x9b, 0x2f, 0x58, 0x4e, 0x48, 0x95, 0x8b, 0xaf, 0x28, + 0xb5, 0xb0, 0x34, 0xb3, 0x28, 0x35, 0x25, 0xbe, 0x2c, 0x31, 0xa7, 0x34, 0x55, 0x82, 0x51, 0x81, + 0x49, 0x83, 0x35, 0x88, 0x17, 0x26, 0x1a, 0x06, 0x12, 0x14, 0xd2, 0xe5, 0x62, 0x4d, 0xce, 0xcf, + 0xc9, 0x2f, 0x92, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x33, 0x12, 0xd7, 0x83, 0x1b, 0xa6, 0x07, 0x31, + 0x48, 0xcf, 0x19, 0x24, 0x1d, 0x04, 0x51, 0xa5, 0xa4, 0xca, 0xc5, 0x0a, 0xe6, 0x0b, 0x71, 0x70, + 0xb1, 0x38, 0xf9, 0x84, 0xba, 0x0a, 0x30, 0x0a, 0x71, 0x72, 0xb1, 0xba, 0x07, 0xb9, 0xba, 0xfa, + 0x09, 0x30, 0x09, 0xb1, 0x73, 0x31, 0x07, 0xb9, 0xba, 0x08, 0x30, 0x3b, 0x39, 0x47, 0x39, 0xa6, + 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, + 0xeb, 0x83, 0x5d, 0x9b, 0x54, 0x9a, 0x06, 0x61, 0x24, 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, + 0xeb, 0x83, 0xec, 0x4a, 0x49, 0x2c, 0x49, 0x84, 0x78, 0xca, 0x1a, 0x6e, 0x3f, 0x20, 0x00, 0x00, + 0xff, 0xff, 0x49, 0x3b, 0x52, 0x44, 0xec, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto index e6bfc71..ec5b431 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi2 { required int32 required_value = 1; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go new file mode 100644 index 0000000..9fb771f --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: multi/multi3.proto + +package multitest + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Multi3_HatType int32 + +const ( + Multi3_FEDORA Multi3_HatType = 1 + Multi3_FEZ Multi3_HatType = 2 +) + +var Multi3_HatType_name = map[int32]string{ + 1: "FEDORA", + 2: "FEZ", +} + +var Multi3_HatType_value = map[string]int32{ + "FEDORA": 1, + "FEZ": 2, +} + +func (x Multi3_HatType) Enum() *Multi3_HatType { + p := new(Multi3_HatType) + *p = x + return p +} + +func (x Multi3_HatType) String() string { + return proto.EnumName(Multi3_HatType_name, int32(x)) +} + +func (x *Multi3_HatType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Multi3_HatType_value, data, "Multi3_HatType") + if err != nil { + return err + } + *x = Multi3_HatType(value) + return nil +} + +func (Multi3_HatType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_580398fc0bbeeaa7, []int{0, 0} +} + +type Multi3 struct { + HatType *Multi3_HatType `protobuf:"varint,1,opt,name=hat_type,json=hatType,enum=multitest.Multi3_HatType" json:"hat_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Multi3) Reset() { *m = Multi3{} } +func (m *Multi3) String() string { return proto.CompactTextString(m) } +func (*Multi3) ProtoMessage() {} +func (*Multi3) Descriptor() ([]byte, []int) { + return fileDescriptor_580398fc0bbeeaa7, []int{0} +} + +func (m *Multi3) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Multi3.Unmarshal(m, b) +} +func (m *Multi3) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Multi3.Marshal(b, m, deterministic) +} +func (m *Multi3) XXX_Merge(src proto.Message) { + xxx_messageInfo_Multi3.Merge(m, src) +} +func (m *Multi3) XXX_Size() int { + return xxx_messageInfo_Multi3.Size(m) +} +func (m *Multi3) XXX_DiscardUnknown() { + xxx_messageInfo_Multi3.DiscardUnknown(m) +} + +var xxx_messageInfo_Multi3 proto.InternalMessageInfo + +func (m *Multi3) GetHatType() Multi3_HatType { + if m != nil && m.HatType != nil { + return *m.HatType + } + return Multi3_FEDORA +} + +func init() { + proto.RegisterEnum("multitest.Multi3_HatType", Multi3_HatType_name, Multi3_HatType_value) + proto.RegisterType((*Multi3)(nil), "multitest.Multi3") +} + +func init() { proto.RegisterFile("multi/multi3.proto", fileDescriptor_580398fc0bbeeaa7) } + +var fileDescriptor_580398fc0bbeeaa7 = []byte{ + // 170 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xca, 0x2d, 0xcd, 0x29, + 0xc9, 0xd4, 0x07, 0x93, 0xc6, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0x9c, 0x60, 0x5e, 0x49, + 0x6a, 0x71, 0x89, 0x52, 0x1c, 0x17, 0x9b, 0x2f, 0x58, 0x4a, 0xc8, 0x84, 0x8b, 0x23, 0x23, 0xb1, + 0x24, 0xbe, 0xa4, 0xb2, 0x20, 0x55, 0x82, 0x51, 0x81, 0x51, 0x83, 0xcf, 0x48, 0x52, 0x0f, 0xae, + 0x4e, 0x0f, 0xa2, 0x48, 0xcf, 0x23, 0xb1, 0x24, 0xa4, 0xb2, 0x20, 0x35, 0x88, 0x3d, 0x03, 0xc2, + 0x50, 0x92, 0xe3, 0x62, 0x87, 0x8a, 0x09, 0x71, 0x71, 0xb1, 0xb9, 0xb9, 0xba, 0xf8, 0x07, 0x39, + 0x0a, 0x30, 0x0a, 0xb1, 0x73, 0x31, 0xbb, 0xb9, 0x46, 0x09, 0x30, 0x39, 0x39, 0x47, 0x39, 0xa6, + 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, + 0xeb, 0x83, 0x5d, 0x91, 0x54, 0x9a, 0x06, 0x61, 0x24, 0xeb, 0xa6, 0xa7, 0xe6, 0xe9, 0xa6, 0xe7, + 0xeb, 0x83, 0x2c, 0x4a, 0x49, 0x2c, 0x49, 0x84, 0x38, 0xd6, 0x1a, 0x6e, 0x39, 0x20, 0x00, 0x00, + 0xff, 0xff, 0xd5, 0xa4, 0x1a, 0x0e, 0xc4, 0x00, 0x00, 0x00, +} diff --git 
a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto index 146c255..8690b88 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto @@ -33,6 +33,8 @@ syntax = "proto2"; package multitest; +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/multi;multitest"; + message Multi3 { enum HatType { FEDORA = 1; diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go index 1954e3f..92d456b 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go @@ -1,29 +1,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: my_test/test.proto -/* -Package my_test is a generated protocol buffer package. - -This package holds interesting messages. - -It is generated from these files: - my_test/test.proto - -It has these top-level messages: - Request - Reply - OtherBase - ReplyExtensions - OtherReplyExtensions - OldReply - Communique -*/ -package my_test - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" +// This package holds interesting messages. + +package test + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -34,7 +21,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type HatType int32 @@ -48,6 +35,7 @@ var HatType_name = map[int32]string{ 1: "FEDORA", 2: "FEZ", } + var HatType_value = map[string]int32{ "FEDORA": 1, "FEZ": 2, @@ -58,9 +46,11 @@ func (x HatType) Enum() *HatType { *p = x return p } + func (x HatType) String() string { return proto.EnumName(HatType_name, int32(x)) } + func (x *HatType) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(HatType_value, data, "HatType") if err != nil { @@ -70,6 +60,10 @@ func (x *HatType) UnmarshalJSON(data []byte) error { return nil } +func (HatType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{0} +} + // This enum represents days of the week. 
type Days int32 @@ -84,6 +78,7 @@ var Days_name = map[int32]string{ 2: "TUESDAY", // Duplicate value: 1: "LUNDI", } + var Days_value = map[string]int32{ "MONDAY": 1, "TUESDAY": 2, @@ -95,9 +90,11 @@ func (x Days) Enum() *Days { *p = x return p } + func (x Days) String() string { return proto.EnumName(Days_name, int32(x)) } + func (x *Days) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(Days_value, data, "Days") if err != nil { @@ -107,6 +104,10 @@ func (x *Days) UnmarshalJSON(data []byte) error { return nil } +func (Days) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{1} +} + type Request_Color int32 const ( @@ -120,6 +121,7 @@ var Request_Color_name = map[int32]string{ 1: "GREEN", 2: "BLUE", } + var Request_Color_value = map[string]int32{ "RED": 0, "GREEN": 1, @@ -131,9 +133,11 @@ func (x Request_Color) Enum() *Request_Color { *p = x return p } + func (x Request_Color) String() string { return proto.EnumName(Request_Color_name, int32(x)) } + func (x *Request_Color) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(Request_Color_value, data, "Request_Color") if err != nil { @@ -143,6 +147,10 @@ func (x *Request_Color) UnmarshalJSON(data []byte) error { return nil } +func (Request_Color) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{0, 0} +} + type Reply_Entry_Game int32 const ( @@ -154,6 +162,7 @@ var Reply_Entry_Game_name = map[int32]string{ 1: "FOOTBALL", 2: "TENNIS", } + var Reply_Entry_Game_value = map[string]int32{ "FOOTBALL": 1, "TENNIS": 2, @@ -164,9 +173,11 @@ func (x Reply_Entry_Game) Enum() *Reply_Entry_Game { *p = x return p } + func (x Reply_Entry_Game) String() string { return proto.EnumName(Reply_Entry_Game_name, int32(x)) } + func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(Reply_Entry_Game_value, data, "Reply_Entry_Game") if err != nil { @@ -176,6 +187,10 @@ func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { return nil } +func (Reply_Entry_Game) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{1, 0, 0} +} + // This is a message that might be sent somewhere. type Request struct { Key []int64 `protobuf:"varint,1,rep,name=key" json:"key,omitempty"` @@ -191,17 +206,55 @@ type Request struct { MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` // This field should not conflict with any getters. 
- GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` - XXX_unrecognized []byte `json:"-"` + GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` + FloatNinf *float32 `protobuf:"fixed32,20,opt,name=float_ninf,json=floatNinf,def=-inf" json:"float_ninf,omitempty"` + FloatPinf *float32 `protobuf:"fixed32,21,opt,name=float_pinf,json=floatPinf,def=inf" json:"float_pinf,omitempty"` + FloatExp *float32 `protobuf:"fixed32,22,opt,name=float_exp,json=floatExp,def=1e+09" json:"float_exp,omitempty"` + DoubleNinf *float64 `protobuf:"fixed64,23,opt,name=double_ninf,json=doubleNinf,def=-inf" json:"double_ninf,omitempty"` + DoublePinf *float64 `protobuf:"fixed64,24,opt,name=double_pinf,json=doublePinf,def=inf" json:"double_pinf,omitempty"` + DoubleExp *float64 `protobuf:"fixed64,25,opt,name=double_exp,json=doubleExp,def=1e+09" json:"double_exp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request) Reset() { *m = Request{} } func (m *Request) String() string { return proto.CompactTextString(m) } func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{0} +} + +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (m *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(m, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo const Default_Request_Hat HatType = HatType_FEDORA var Default_Request_Deadline float32 = float32(math.Inf(1)) +var Default_Request_FloatNinf float32 = float32(math.Inf(-1)) +var Default_Request_FloatPinf float32 = float32(math.Inf(1)) + +const Default_Request_FloatExp float32 = 1e+09 + +var Default_Request_DoubleNinf float64 = math.Inf(-1) +var Default_Request_DoublePinf float64 = math.Inf(1) + +const Default_Request_DoubleExp float64 = 1e+09 func (m *Request) GetKey() []int64 { if m != nil { @@ -266,14 +319,79 @@ func (m *Request) GetGetKey_() string { return "" } +func (m *Request) GetFloatNinf() float32 { + if m != nil && m.FloatNinf != nil { + return *m.FloatNinf + } + return Default_Request_FloatNinf +} + +func (m *Request) GetFloatPinf() float32 { + if m != nil && m.FloatPinf != nil { + return *m.FloatPinf + } + return Default_Request_FloatPinf +} + +func (m *Request) GetFloatExp() float32 { + if m != nil && m.FloatExp != nil { + return *m.FloatExp + } + return Default_Request_FloatExp +} + +func (m *Request) GetDoubleNinf() float64 { + if m != nil && m.DoubleNinf != nil { + return *m.DoubleNinf + } + return Default_Request_DoubleNinf +} + +func (m *Request) GetDoublePinf() float64 { + if m != nil && m.DoublePinf != nil { + return *m.DoublePinf + } + return Default_Request_DoublePinf +} + +func (m *Request) GetDoubleExp() float64 { + if m != nil && m.DoubleExp != nil { + return *m.DoubleExp + } + return Default_Request_DoubleExp +} + type Request_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` + GroupField *int32 
`protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } func (*Request_SomeGroup) ProtoMessage() {} +func (*Request_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{0, 0} +} + +func (m *Request_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request_SomeGroup.Unmarshal(m, b) +} +func (m *Request_SomeGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request_SomeGroup.Marshal(b, m, deterministic) +} +func (m *Request_SomeGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request_SomeGroup.Merge(m, src) +} +func (m *Request_SomeGroup) XXX_Size() int { + return xxx_messageInfo_Request_SomeGroup.Size(m) +} +func (m *Request_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_Request_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_Request_SomeGroup proto.InternalMessageInfo func (m *Request_SomeGroup) GetGroupField() int32 { if m != nil && m.GroupField != nil { @@ -285,22 +403,45 @@ func (m *Request_SomeGroup) GetGroupField() int32 { type Reply struct { Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Reply) Reset() { *m = Reply{} } func (m *Reply) String() string { return proto.CompactTextString(m) } func (*Reply) ProtoMessage() {} +func (*Reply) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{1} +} var extRange_Reply = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { return extRange_Reply } +func (m *Reply) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reply.Unmarshal(m, b) +} +func (m *Reply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reply.Marshal(b, m, deterministic) +} +func (m *Reply) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reply.Merge(m, src) +} +func (m *Reply) XXX_Size() int { + return xxx_messageInfo_Reply.Size(m) +} +func (m *Reply) XXX_DiscardUnknown() { + xxx_messageInfo_Reply.DiscardUnknown(m) +} + +var xxx_messageInfo_Reply proto.InternalMessageInfo + func (m *Reply) GetFound() []*Reply_Entry { if m != nil { return m.Found @@ -316,15 +457,38 @@ func (m *Reply) GetCompactKeys() []int32 { } type Reply_Entry struct { - KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` - Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` - XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` - XXX_unrecognized []byte `json:"-"` + KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` + Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` + 
XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } func (*Reply_Entry) ProtoMessage() {} +func (*Reply_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{1, 0} +} + +func (m *Reply_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reply_Entry.Unmarshal(m, b) +} +func (m *Reply_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reply_Entry.Marshal(b, m, deterministic) +} +func (m *Reply_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reply_Entry.Merge(m, src) +} +func (m *Reply_Entry) XXX_Size() int { + return xxx_messageInfo_Reply_Entry.Size(m) +} +func (m *Reply_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Reply_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Reply_Entry proto.InternalMessageInfo const Default_Reply_Entry_Value int64 = 7 @@ -350,23 +514,46 @@ func (m *Reply_Entry) GetXMyFieldName_2() int64 { } type OtherBase struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OtherBase) Reset() { *m = OtherBase{} } func (m *OtherBase) String() string { return proto.CompactTextString(m) } func (*OtherBase) ProtoMessage() {} +func (*OtherBase) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{2} +} var extRange_OtherBase = []proto.ExtensionRange{ - {100, 536870911}, + {Start: 100, End: 536870911}, } func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OtherBase } +func (m *OtherBase) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OtherBase.Unmarshal(m, b) +} +func (m *OtherBase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherBase.Marshal(b, m, deterministic) +} +func (m *OtherBase) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherBase.Merge(m, src) +} +func (m *OtherBase) XXX_Size() int { + return xxx_messageInfo_OtherBase.Size(m) +} +func (m *OtherBase) XXX_DiscardUnknown() { + xxx_messageInfo_OtherBase.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherBase proto.InternalMessageInfo + func (m *OtherBase) GetName() string { if m != nil && m.Name != nil { return *m.Name @@ -375,12 +562,35 @@ func (m *OtherBase) GetName() string { } type ReplyExtensions struct { - XXX_unrecognized []byte `json:"-"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } func (*ReplyExtensions) ProtoMessage() {} +func (*ReplyExtensions) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{3} +} + +func (m *ReplyExtensions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReplyExtensions.Unmarshal(m, b) +} +func (m *ReplyExtensions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReplyExtensions.Marshal(b, m, deterministic) +} +func (m *ReplyExtensions) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_ReplyExtensions.Merge(m, src) +} +func (m *ReplyExtensions) XXX_Size() int { + return xxx_messageInfo_ReplyExtensions.Size(m) +} +func (m *ReplyExtensions) XXX_DiscardUnknown() { + xxx_messageInfo_ReplyExtensions.DiscardUnknown(m) +} + +var xxx_messageInfo_ReplyExtensions proto.InternalMessageInfo var E_ReplyExtensions_Time = &proto.ExtensionDesc{ ExtendedType: (*Reply)(nil), @@ -410,13 +620,36 @@ var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ } type OtherReplyExtensions struct { - Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - XXX_unrecognized []byte `json:"-"` + Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } func (m *OtherReplyExtensions) String() string { return proto.CompactTextString(m) } func (*OtherReplyExtensions) ProtoMessage() {} +func (*OtherReplyExtensions) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{4} +} + +func (m *OtherReplyExtensions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OtherReplyExtensions.Unmarshal(m, b) +} +func (m *OtherReplyExtensions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OtherReplyExtensions.Marshal(b, m, deterministic) +} +func (m *OtherReplyExtensions) XXX_Merge(src proto.Message) { + xxx_messageInfo_OtherReplyExtensions.Merge(m, src) +} +func (m *OtherReplyExtensions) XXX_Size() int { + return xxx_messageInfo_OtherReplyExtensions.Size(m) +} +func (m *OtherReplyExtensions) XXX_DiscardUnknown() { + xxx_messageInfo_OtherReplyExtensions.DiscardUnknown(m) +} + +var xxx_messageInfo_OtherReplyExtensions proto.InternalMessageInfo func (m *OtherReplyExtensions) GetKey() int32 { if m != nil && m.Key != nil { @@ -426,39 +659,45 @@ func (m *OtherReplyExtensions) GetKey() int32 { } type OldReply struct { - proto.XXX_InternalExtensions `json:"-"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"` XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *OldReply) Reset() { *m = OldReply{} } func (m *OldReply) String() string { return proto.CompactTextString(m) } func (*OldReply) ProtoMessage() {} - -func (m *OldReply) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *OldReply) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} -func (m *OldReply) MarshalJSON() ([]byte, error) { - return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) -} -func (m *OldReply) UnmarshalJSON(buf []byte) error { - return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +func (*OldReply) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{5} } -// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*OldReply)(nil) -var _ proto.Unmarshaler = (*OldReply)(nil) - var extRange_OldReply = []proto.ExtensionRange{ - {100, 2147483646}, + {Start: 100, End: 2147483646}, } func (*OldReply) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OldReply } +func (m *OldReply) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OldReply.Unmarshal(m, b) +} +func (m *OldReply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_OldReply.Marshal(b, m, deterministic) +} +func (m *OldReply) XXX_Merge(src proto.Message) { + xxx_messageInfo_OldReply.Merge(m, src) +} +func (m *OldReply) XXX_Size() int { + return xxx_messageInfo_OldReply.Size(m) +} +func (m *OldReply) XXX_DiscardUnknown() { + xxx_messageInfo_OldReply.DiscardUnknown(m) +} + +var xxx_messageInfo_OldReply proto.InternalMessageInfo + type Communique struct { MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` // This is a oneof, called "union". @@ -474,13 +713,43 @@ type Communique struct { // *Communique_Delta_ // *Communique_Msg // *Communique_Somegroup - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique) Reset() { *m = Communique{} } func (m *Communique) String() string { return proto.CompactTextString(m) } func (*Communique) ProtoMessage() {} +func (*Communique) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{6} +} + +func (m *Communique) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique.Unmarshal(m, b) +} +func (m *Communique) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique.Marshal(b, m, deterministic) +} +func (m *Communique) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique.Merge(m, src) +} +func (m *Communique) XXX_Size() int { + return xxx_messageInfo_Communique.Size(m) +} +func (m *Communique) XXX_DiscardUnknown() { + xxx_messageInfo_Communique.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique proto.InternalMessageInfo + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} type isCommunique_Union interface { isCommunique_Union() @@ -489,43 +758,61 @@ type isCommunique_Union interface { type Communique_Number struct { Number int32 `protobuf:"varint,5,opt,name=number,oneof"` } + type Communique_Name struct { Name string `protobuf:"bytes,6,opt,name=name,oneof"` } + type Communique_Data struct { Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` } + type Communique_TempC struct { TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` } + type Communique_Height struct { Height float32 `protobuf:"fixed32,9,opt,name=height,oneof"` } + type Communique_Today struct { Today Days `protobuf:"varint,10,opt,name=today,enum=my.test.Days,oneof"` } + type Communique_Maybe struct { Maybe bool `protobuf:"varint,11,opt,name=maybe,oneof"` } + type Communique_Delta_ struct { Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` } + type Communique_Msg struct { - Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` + Msg *Reply `protobuf:"bytes,16,opt,name=msg,oneof"` } + type Communique_Somegroup struct { Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` } -func (*Communique_Number) isCommunique_Union() {} -func (*Communique_Name) isCommunique_Union() {} -func (*Communique_Data) isCommunique_Union() {} -func (*Communique_TempC) isCommunique_Union() {} -func (*Communique_Height) isCommunique_Union() {} -func (*Communique_Today) isCommunique_Union() {} -func (*Communique_Maybe) isCommunique_Union() {} -func (*Communique_Delta_) isCommunique_Union() {} -func (*Communique_Msg) isCommunique_Union() {} +func (*Communique_Number) 
isCommunique_Union() {} + +func (*Communique_Name) isCommunique_Union() {} + +func (*Communique_Data) isCommunique_Union() {} + +func (*Communique_TempC) isCommunique_Union() {} + +func (*Communique_Height) isCommunique_Union() {} + +func (*Communique_Today) isCommunique_Union() {} + +func (*Communique_Maybe) isCommunique_Union() {} + +func (*Communique_Delta_) isCommunique_Union() {} + +func (*Communique_Msg) isCommunique_Union() {} + func (*Communique_Somegroup) isCommunique_Union() {} func (m *Communique) GetUnion() isCommunique_Union { @@ -535,13 +822,6 @@ func (m *Communique) GetUnion() isCommunique_Union { return nil } -func (m *Communique) GetMakeMeCry() bool { - if m != nil && m.MakeMeCry != nil { - return *m.MakeMeCry - } - return false -} - func (m *Communique) GetNumber() int32 { if x, ok := m.GetUnion().(*Communique_Number); ok { return x.Number @@ -612,9 +892,9 @@ func (m *Communique) GetSomegroup() *Communique_SomeGroup { return nil } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ +// XXX_OneofWrappers is for the internal use of the proto package. +func (*Communique) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*Communique_Number)(nil), (*Communique_Name)(nil), (*Communique_Data)(nil), @@ -628,190 +908,37 @@ func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) er } } -func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - b.EncodeVarint(5<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Number)) - case *Communique_Name: - b.EncodeVarint(6<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Name) - case *Communique_Data: - b.EncodeVarint(7<<3 | proto.WireBytes) - b.EncodeRawBytes(x.Data) - case *Communique_TempC: - b.EncodeVarint(8<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.TempC)) - case *Communique_Height: - b.EncodeVarint(9<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.Height))) - case *Communique_Today: - b.EncodeVarint(10<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Today)) - case *Communique_Maybe: - t := uint64(0) - if x.Maybe { - t = 1 - } - b.EncodeVarint(11<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Communique_Delta_: - b.EncodeVarint(12<<3 | proto.WireVarint) - b.EncodeZigzag32(uint64(x.Delta)) - case *Communique_Msg: - b.EncodeVarint(13<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Msg); err != nil { - return err - } - case *Communique_Somegroup: - b.EncodeVarint(14<<3 | proto.WireStartGroup) - if err := b.Marshal(x.Somegroup); err != nil { - return err - } - b.EncodeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - return fmt.Errorf("Communique.Union has unexpected type %T", x) - } - return nil -} - -func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Communique) - switch tag { - case 5: // union.number - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Number{int32(x)} - return true, err - case 6: // union.name - if wire != proto.WireBytes { - return true, 
proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Communique_Name{x} - return true, err - case 7: // union.data - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Communique_Data{x} - return true, err - case 8: // union.temp_c - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Communique_TempC{math.Float64frombits(x)} - return true, err - case 9: // union.height - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Union = &Communique_Height{math.Float32frombits(uint32(x))} - return true, err - case 10: // union.today - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Today{Days(x)} - return true, err - case 11: // union.maybe - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Maybe{x != 0} - return true, err - case 12: // union.delta - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag32() - m.Union = &Communique_Delta_{int32(x)} - return true, err - case 13: // union.msg - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Reply) - err := b.DecodeMessage(msg) - m.Union = &Communique_Msg{msg} - return true, err - case 14: // union.somegroup - if wire != proto.WireStartGroup { - return true, proto.ErrInternalBadWireType - } - msg := new(Communique_SomeGroup) - err := b.DecodeGroup(msg) - m.Union = &Communique_Somegroup{msg} - return true, err - default: - return false, nil - } -} - -func _Communique_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - n += proto.SizeVarint(5<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Number)) - case *Communique_Name: - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Name))) - n += len(x.Name) - case *Communique_Data: - n += proto.SizeVarint(7<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Data))) - n += len(x.Data) - case *Communique_TempC: - n += proto.SizeVarint(8<<3 | proto.WireFixed64) - n += 8 - case *Communique_Height: - n += proto.SizeVarint(9<<3 | proto.WireFixed32) - n += 4 - case *Communique_Today: - n += proto.SizeVarint(10<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Today)) - case *Communique_Maybe: - n += proto.SizeVarint(11<<3 | proto.WireVarint) - n += 1 - case *Communique_Delta_: - n += proto.SizeVarint(12<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) - case *Communique_Msg: - s := proto.Size(x.Msg) - n += proto.SizeVarint(13<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Communique_Somegroup: - n += proto.SizeVarint(14<<3 | proto.WireStartGroup) - n += proto.Size(x.Somegroup) - n += proto.SizeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - type Communique_SomeGroup struct { - Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` - XXX_unrecognized []byte `json:"-"` + Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} } func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } func (*Communique_SomeGroup) ProtoMessage() {} +func (*Communique_SomeGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{6, 0} +} + +func (m *Communique_SomeGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique_SomeGroup.Unmarshal(m, b) +} +func (m *Communique_SomeGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique_SomeGroup.Marshal(b, m, deterministic) +} +func (m *Communique_SomeGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique_SomeGroup.Merge(m, src) +} +func (m *Communique_SomeGroup) XXX_Size() int { + return xxx_messageInfo_Communique_SomeGroup.Size(m) +} +func (m *Communique_SomeGroup) XXX_DiscardUnknown() { + xxx_messageInfo_Communique_SomeGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique_SomeGroup proto.InternalMessageInfo func (m *Communique_SomeGroup) GetMember() string { if m != nil && m.Member != nil { @@ -821,12 +948,35 @@ func (m *Communique_SomeGroup) GetMember() string { } type Communique_Delta struct { - XXX_unrecognized []byte `json:"-"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } func (*Communique_Delta) ProtoMessage() {} +func (*Communique_Delta) Descriptor() ([]byte, []int) { + return fileDescriptor_2c9b60a40d5131b9, []int{6, 1} +} + +func (m *Communique_Delta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Communique_Delta.Unmarshal(m, b) +} +func (m *Communique_Delta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Communique_Delta.Marshal(b, m, deterministic) +} +func (m *Communique_Delta) XXX_Merge(src proto.Message) { + xxx_messageInfo_Communique_Delta.Merge(m, src) +} +func (m *Communique_Delta) XXX_Size() int { + return xxx_messageInfo_Communique_Delta.Size(m) +} +func (m *Communique_Delta) XXX_DiscardUnknown() { + xxx_messageInfo_Communique_Delta.DiscardUnknown(m) +} + +var xxx_messageInfo_Communique_Delta proto.InternalMessageInfo var E_Tag = &proto.ExtensionDesc{ ExtendedType: (*Reply)(nil), @@ -847,24 +997,104 @@ var E_Donut = &proto.ExtensionDesc{ } func init() { + proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) + proto.RegisterEnum("my.test.Days", Days_name, Days_value) + proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) + proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) proto.RegisterType((*Request)(nil), "my.test.Request") + proto.RegisterMapType((map[int64]*Reply)(nil), "my.test.Request.MsgMappingEntry") + proto.RegisterMapType((map[int32]string)(nil), "my.test.Request.NameMappingEntry") proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") proto.RegisterType((*Reply)(nil), "my.test.Reply") proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") proto.RegisterType((*OtherBase)(nil), "my.test.OtherBase") + proto.RegisterExtension(E_ReplyExtensions_Time) + proto.RegisterExtension(E_ReplyExtensions_Carrot) + proto.RegisterExtension(E_ReplyExtensions_Donut) proto.RegisterType((*ReplyExtensions)(nil), 
"my.test.ReplyExtensions") proto.RegisterType((*OtherReplyExtensions)(nil), "my.test.OtherReplyExtensions") proto.RegisterType((*OldReply)(nil), "my.test.OldReply") proto.RegisterType((*Communique)(nil), "my.test.Communique") proto.RegisterType((*Communique_SomeGroup)(nil), "my.test.Communique.SomeGroup") proto.RegisterType((*Communique_Delta)(nil), "my.test.Communique.Delta") - proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) - proto.RegisterEnum("my.test.Days", Days_name, Days_value) - proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) - proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) - proto.RegisterExtension(E_ReplyExtensions_Time) - proto.RegisterExtension(E_ReplyExtensions_Carrot) - proto.RegisterExtension(E_ReplyExtensions_Donut) proto.RegisterExtension(E_Tag) proto.RegisterExtension(E_Donut) } + +func init() { proto.RegisterFile("my_test/test.proto", fileDescriptor_2c9b60a40d5131b9) } + +var fileDescriptor_2c9b60a40d5131b9 = []byte{ + // 1148 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x56, 0xcf, 0x6e, 0xdb, 0x46, + 0x13, 0x37, 0x49, 0x51, 0x7f, 0x46, 0xfe, 0x6c, 0x7e, 0x0b, 0xd7, 0x66, 0x55, 0x24, 0x61, 0x95, + 0xb8, 0x50, 0xdc, 0x46, 0x8e, 0xd5, 0x02, 0x4d, 0x55, 0x34, 0x88, 0x65, 0xc9, 0x71, 0x10, 0x5b, + 0x2e, 0x36, 0xce, 0xa1, 0xb9, 0x10, 0x6b, 0x69, 0x45, 0xb1, 0xd6, 0x92, 0x8c, 0xb8, 0x2c, 0xcc, + 0x9b, 0x9f, 0xa2, 0x7d, 0x8d, 0xde, 0xfb, 0x0c, 0x7d, 0x26, 0x17, 0x3b, 0xab, 0x48, 0xb2, 0x55, + 0x94, 0x07, 0x82, 0x33, 0xf3, 0x9b, 0xdf, 0xec, 0xce, 0xcc, 0xce, 0x12, 0x88, 0xc8, 0x7d, 0xc9, + 0x53, 0xb9, 0xaf, 0x5e, 0xcd, 0x64, 0x1a, 0xcb, 0x98, 0x94, 0x44, 0xde, 0x54, 0x62, 0x8d, 0x88, + 0x6c, 0x22, 0xc3, 0x7d, 0x7c, 0x1f, 0x68, 0x63, 0xfd, 0xef, 0x22, 0x94, 0x28, 0xff, 0x98, 0xf1, + 0x54, 0x12, 0x07, 0xac, 0x2b, 0x9e, 0xbb, 0x86, 0x67, 0x35, 0x2c, 0xaa, 0x3e, 0x49, 0x03, 0xac, + 0x71, 0xc6, 0x5d, 0xcb, 0x33, 0x1a, 0x1b, 0xad, 0xed, 0xe6, 0x8c, 0xa8, 0x39, 0x73, 0x68, 0x1e, + 0xc5, 0x93, 0x78, 0x4a, 0x15, 0x84, 0xec, 0x81, 0x35, 0x66, 0xd2, 0x2d, 0x20, 0xd2, 0x99, 0x23, + 0x4f, 0x98, 0xbc, 0xc8, 0x13, 0xde, 0x2e, 0x1e, 0xf7, 0xba, 0xe7, 0xf4, 0x90, 0x2a, 0x10, 0x79, + 0x04, 0xe5, 0x21, 0x67, 0xc3, 0x49, 0x18, 0x71, 0xb7, 0xe4, 0x19, 0x0d, 0xb3, 0x6d, 0x85, 0xd1, + 0x88, 0xce, 0x95, 0xe4, 0x05, 0x54, 0xd2, 0x58, 0xf0, 0x60, 0x1a, 0x67, 0x89, 0x5b, 0xf6, 0x8c, + 0x06, 0xb4, 0x6a, 0x2b, 0xc1, 0xdf, 0xc5, 0x82, 0xbf, 0x56, 0x08, 0xba, 0x00, 0x93, 0x2e, 0xac, + 0x47, 0x4c, 0x70, 0x5f, 0xb0, 0x24, 0x09, 0xa3, 0xc0, 0xdd, 0xf0, 0xac, 0x46, 0xb5, 0xf5, 0xe5, + 0x8a, 0x73, 0x9f, 0x09, 0x7e, 0xa6, 0x31, 0xbd, 0x48, 0x4e, 0x73, 0x5a, 0x8d, 0x16, 0x1a, 0x72, + 0x08, 0x55, 0x91, 0x06, 0x73, 0x92, 0x4d, 0x24, 0xf1, 0x56, 0x48, 0xce, 0xd2, 0xe0, 0x0e, 0x07, + 0x88, 0xb9, 0x82, 0x6c, 0x81, 0x3d, 0xe5, 0x29, 0x97, 0xee, 0xba, 0x67, 0x34, 0x6c, 0xaa, 0x05, + 0xb2, 0x03, 0xa5, 0x80, 0x4b, 0x5f, 0x65, 0xd9, 0xf1, 0x8c, 0x46, 0x85, 0x16, 0x03, 0x2e, 0xdf, + 0xf2, 0x9c, 0x3c, 0x06, 0x18, 0x4d, 0x62, 0x26, 0xfd, 0x28, 0x8c, 0x46, 0xee, 0x16, 0x26, 0xa5, + 0xf0, 0x4c, 0x65, 0xa5, 0x82, 0xfa, 0x7e, 0x18, 0x8d, 0x48, 0xfd, 0x13, 0x28, 0x51, 0xa0, 0xcf, + 0x16, 0x99, 0xd3, 0x98, 0x9f, 0x35, 0x46, 0x0b, 0x3e, 0xbf, 0x4e, 0xdc, 0x6d, 0x84, 0xd8, 0x07, + 0xfc, 0xeb, 0xe7, 0x3f, 0xd0, 0x32, 0xea, 0x7b, 0xd7, 0x09, 0xd9, 0x85, 0xea, 0x30, 0xce, 0x2e, + 0x27, 0x5c, 0x47, 0xdb, 0xf1, 0x8c, 0x86, 0x31, 0x8b, 0x06, 0xda, 0x80, 0xe1, 0x9e, 0xcc, 0x61, + 
0x18, 0xcf, 0x45, 0x98, 0xb5, 0x84, 0xc2, 0x80, 0x4f, 0x61, 0x26, 0x61, 0xc4, 0xcf, 0x11, 0x04, + 0x07, 0xcf, 0x3f, 0x3d, 0xb4, 0xa2, 0xad, 0xbd, 0xeb, 0xa4, 0xf6, 0x0d, 0x54, 0xe6, 0x45, 0x23, + 0x8f, 0xa0, 0x8a, 0x25, 0xf3, 0x47, 0x21, 0x9f, 0x0c, 0xdd, 0x0a, 0xa6, 0x09, 0x50, 0x75, 0xac, + 0x34, 0xb5, 0x97, 0xe0, 0xdc, 0xaf, 0xd2, 0xa2, 0x43, 0x15, 0x18, 0x3b, 0x74, 0x0b, 0xec, 0xdf, + 0xd8, 0x24, 0xe3, 0xae, 0x89, 0xf9, 0xd4, 0x42, 0xdb, 0x7c, 0x61, 0xd4, 0xce, 0x60, 0xf3, 0x5e, + 0x81, 0x96, 0xdd, 0x89, 0x76, 0x7f, 0xb2, 0xec, 0x5e, 0x6d, 0x6d, 0x2c, 0xd5, 0x38, 0x99, 0xe4, + 0x4b, 0x74, 0xf5, 0x5d, 0xb0, 0xb1, 0xdd, 0x49, 0x09, 0x2c, 0xda, 0xeb, 0x3a, 0x6b, 0xa4, 0x02, + 0xf6, 0x6b, 0xda, 0xeb, 0xf5, 0x1d, 0x83, 0x94, 0xa1, 0xd0, 0x39, 0x7d, 0xdf, 0x73, 0xcc, 0xfa, + 0x1f, 0x26, 0xd8, 0xe8, 0x4b, 0xf6, 0xc0, 0x1e, 0xc5, 0x59, 0x34, 0xc4, 0xf3, 0x54, 0x6d, 0x6d, + 0xdd, 0xa5, 0x6e, 0xea, 0x96, 0xd1, 0x10, 0xb2, 0x0b, 0xeb, 0x83, 0x58, 0x24, 0x6c, 0x80, 0xbd, + 0x91, 0xba, 0xa6, 0x67, 0x35, 0xec, 0x8e, 0xe9, 0x18, 0xb4, 0x3a, 0xd3, 0xbf, 0xe5, 0x79, 0x5a, + 0xfb, 0xd3, 0x00, 0x5b, 0xef, 0xa4, 0x0b, 0x8f, 0xae, 0x78, 0xee, 0xcb, 0xb1, 0x6a, 0x19, 0xce, + 0x87, 0xa9, 0x7f, 0xd0, 0xfa, 0xf6, 0xbb, 0x01, 0x13, 0x7c, 0xe2, 0x1f, 0xb1, 0xf4, 0x4d, 0x14, + 0xb8, 0x86, 0x67, 0x36, 0x2c, 0xfa, 0xc5, 0x15, 0xcf, 0x2f, 0xc6, 0x4c, 0xf6, 0x15, 0x68, 0x8e, + 0xd1, 0x10, 0xb2, 0xb3, 0xbc, 0x7b, 0xab, 0x6d, 0x7c, 0x3f, 0xdb, 0x30, 0xf9, 0x0a, 0x1c, 0x5f, + 0xe4, 0xba, 0x34, 0x3e, 0x1e, 0xa8, 0x16, 0x0e, 0x01, 0x8b, 0xae, 0x9f, 0xe5, 0x58, 0x1e, 0x55, + 0x9a, 0x56, 0xdd, 0x83, 0xc2, 0x6b, 0x26, 0x38, 0x59, 0x87, 0xf2, 0xf1, 0xf9, 0xf9, 0x45, 0xe7, + 0xf0, 0xf4, 0xd4, 0x31, 0x08, 0x40, 0xf1, 0xa2, 0xd7, 0xef, 0xbf, 0x79, 0xe7, 0x98, 0x7b, 0xe5, + 0xf2, 0xd0, 0xb9, 0xb9, 0xb9, 0xb9, 0x31, 0xeb, 0x4f, 0xa1, 0x72, 0x2e, 0xc7, 0x7c, 0xda, 0x61, + 0x29, 0x27, 0x04, 0x0a, 0x8a, 0x16, 0x4b, 0x51, 0xa1, 0xf8, 0xbd, 0x04, 0xfd, 0xcb, 0x80, 0x4d, + 0xcc, 0x52, 0xef, 0x5a, 0xf2, 0x28, 0x0d, 0xe3, 0x28, 0x6d, 0xd5, 0xa1, 0x20, 0x43, 0xc1, 0xc9, + 0xbd, 0x12, 0xb9, 0x5c, 0x75, 0x1c, 0x45, 0x5b, 0xeb, 0x15, 0x14, 0x07, 0x6c, 0x3a, 0x8d, 0xe5, + 0x0a, 0x2a, 0xc4, 0xf2, 0xba, 0x77, 0xb5, 0x0b, 0x76, 0x3a, 0xf3, 0x6b, 0x75, 0xc0, 0x1e, 0xc6, + 0x51, 0x26, 0x09, 0x99, 0x43, 0xe7, 0x8b, 0xc6, 0x50, 0xff, 0x45, 0xa2, 0x5d, 0xeb, 0x0d, 0xd8, + 0x42, 0x9f, 0x7b, 0xe6, 0xd5, 0xe6, 0xad, 0xbb, 0x50, 0x3e, 0x9f, 0x0c, 0x11, 0x87, 0xbb, 0xbf, + 0xbd, 0xbd, 0xbd, 0x2d, 0xb5, 0xcd, 0xb2, 0x51, 0xff, 0xdd, 0x02, 0x38, 0x8a, 0x85, 0xc8, 0xa2, + 0xf0, 0x63, 0xc6, 0xc9, 0x43, 0xa8, 0x0a, 0x76, 0xc5, 0x7d, 0xc1, 0xfd, 0xc1, 0x54, 0x53, 0x94, + 0x69, 0x45, 0xa9, 0xce, 0xf8, 0xd1, 0x34, 0x27, 0x2e, 0x14, 0xa3, 0x4c, 0x5c, 0xf2, 0xa9, 0x6b, + 0x2b, 0xf6, 0x93, 0x35, 0x3a, 0x93, 0xc9, 0xd6, 0x2c, 0xd1, 0x45, 0x95, 0xe8, 0x93, 0x35, 0x9d, + 0x6a, 0xa5, 0x1d, 0x32, 0xc9, 0x70, 0xfa, 0xae, 0x2b, 0xad, 0x92, 0xc8, 0x0e, 0x14, 0x25, 0x17, + 0x89, 0x3f, 0xc0, 0x99, 0x6b, 0x9c, 0xac, 0x51, 0x5b, 0xc9, 0x47, 0x8a, 0x7e, 0xcc, 0xc3, 0x60, + 0x2c, 0xf1, 0x98, 0x9a, 0x8a, 0x5e, 0xcb, 0x64, 0x17, 0x6c, 0x19, 0x0f, 0x59, 0xee, 0x02, 0x0e, + 0xfe, 0xff, 0xcd, 0x73, 0xd3, 0x65, 0x79, 0x8a, 0x04, 0xca, 0x4a, 0xb6, 0xc1, 0x16, 0x2c, 0xbf, + 0xe4, 0x6e, 0x55, 0xad, 0x5c, 0xe9, 0x51, 0x54, 0xfa, 0x21, 0x9f, 0x48, 0x86, 0x53, 0xf2, 0xff, + 0x4a, 0x8f, 0x22, 0xa9, 0x83, 0x25, 0xd2, 0x00, 0x67, 0xe4, 0xca, 0xa1, 0x3c, 0x59, 0xa3, 0xca, + 0x48, 0x7e, 0x5a, 0xbe, 0x24, 0x36, 0xf0, 0x92, 0x78, 0x30, 0x47, 0x2e, 0x72, 0xb7, 0xb8, 0x27, + 0x4e, 0xd6, 0x96, 0x6e, 
0x8a, 0xda, 0xe3, 0xe5, 0x61, 0xb4, 0x0d, 0x45, 0xc1, 0x31, 0x7f, 0x9b, + 0x7a, 0x2c, 0x6b, 0xa9, 0x56, 0x02, 0xbb, 0xab, 0x16, 0xd4, 0x29, 0x81, 0x9d, 0x45, 0x61, 0x1c, + 0xed, 0x3d, 0x84, 0xd2, 0xec, 0x4e, 0x53, 0x6d, 0xae, 0x6f, 0x35, 0xc7, 0x50, 0x43, 0xe1, 0xb8, + 0xf7, 0xc1, 0x31, 0xf7, 0x9a, 0x50, 0x50, 0x5b, 0x57, 0xc6, 0xb3, 0xf3, 0x7e, 0xf7, 0xf0, 0x17, + 0xc7, 0x20, 0x55, 0x28, 0x5d, 0xbc, 0xef, 0xbd, 0x53, 0x82, 0xa9, 0xa6, 0xc6, 0xe9, 0xfb, 0x7e, + 0xf7, 0x8d, 0x63, 0xd4, 0x4c, 0xc7, 0x68, 0x7b, 0x60, 0x49, 0x16, 0xac, 0xf4, 0x6b, 0x80, 0xcb, + 0x50, 0xa6, 0xf6, 0xd1, 0xa7, 0x96, 0xbc, 0x8f, 0xf9, 0x15, 0xb3, 0xf3, 0xe0, 0x6e, 0xa3, 0xfe, + 0x7b, 0x4f, 0x76, 0x5e, 0x7d, 0x78, 0x19, 0x84, 0x72, 0x9c, 0x5d, 0x36, 0x07, 0xb1, 0xd8, 0x0f, + 0xe2, 0x09, 0x8b, 0x82, 0x7d, 0xfc, 0x03, 0xb8, 0xcc, 0x46, 0xfa, 0x63, 0xf0, 0x2c, 0xe0, 0xd1, + 0xb3, 0x20, 0xc6, 0x5f, 0x07, 0xd5, 0x0f, 0xfb, 0xb3, 0x7f, 0x89, 0x1f, 0xd5, 0xeb, 0x9f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x31, 0xff, 0x4e, 0x30, 0x5a, 0x08, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden deleted file mode 100644 index 1954e3f..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden +++ /dev/null @@ -1,870 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: my_test/test.proto - -/* -Package my_test is a generated protocol buffer package. - -This package holds interesting messages. - -It is generated from these files: - my_test/test.proto - -It has these top-level messages: - Request - Reply - OtherBase - ReplyExtensions - OtherReplyExtensions - OldReply - Communique -*/ -package my_test - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type HatType int32 - -const ( - // deliberately skipping 0 - HatType_FEDORA HatType = 1 - HatType_FEZ HatType = 2 -) - -var HatType_name = map[int32]string{ - 1: "FEDORA", - 2: "FEZ", -} -var HatType_value = map[string]int32{ - "FEDORA": 1, - "FEZ": 2, -} - -func (x HatType) Enum() *HatType { - p := new(HatType) - *p = x - return p -} -func (x HatType) String() string { - return proto.EnumName(HatType_name, int32(x)) -} -func (x *HatType) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(HatType_value, data, "HatType") - if err != nil { - return err - } - *x = HatType(value) - return nil -} - -// This enum represents days of the week. 
-type Days int32 - -const ( - Days_MONDAY Days = 1 - Days_TUESDAY Days = 2 - Days_LUNDI Days = 1 -) - -var Days_name = map[int32]string{ - 1: "MONDAY", - 2: "TUESDAY", - // Duplicate value: 1: "LUNDI", -} -var Days_value = map[string]int32{ - "MONDAY": 1, - "TUESDAY": 2, - "LUNDI": 1, -} - -func (x Days) Enum() *Days { - p := new(Days) - *p = x - return p -} -func (x Days) String() string { - return proto.EnumName(Days_name, int32(x)) -} -func (x *Days) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Days_value, data, "Days") - if err != nil { - return err - } - *x = Days(value) - return nil -} - -type Request_Color int32 - -const ( - Request_RED Request_Color = 0 - Request_GREEN Request_Color = 1 - Request_BLUE Request_Color = 2 -) - -var Request_Color_name = map[int32]string{ - 0: "RED", - 1: "GREEN", - 2: "BLUE", -} -var Request_Color_value = map[string]int32{ - "RED": 0, - "GREEN": 1, - "BLUE": 2, -} - -func (x Request_Color) Enum() *Request_Color { - p := new(Request_Color) - *p = x - return p -} -func (x Request_Color) String() string { - return proto.EnumName(Request_Color_name, int32(x)) -} -func (x *Request_Color) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Request_Color_value, data, "Request_Color") - if err != nil { - return err - } - *x = Request_Color(value) - return nil -} - -type Reply_Entry_Game int32 - -const ( - Reply_Entry_FOOTBALL Reply_Entry_Game = 1 - Reply_Entry_TENNIS Reply_Entry_Game = 2 -) - -var Reply_Entry_Game_name = map[int32]string{ - 1: "FOOTBALL", - 2: "TENNIS", -} -var Reply_Entry_Game_value = map[string]int32{ - "FOOTBALL": 1, - "TENNIS": 2, -} - -func (x Reply_Entry_Game) Enum() *Reply_Entry_Game { - p := new(Reply_Entry_Game) - *p = x - return p -} -func (x Reply_Entry_Game) String() string { - return proto.EnumName(Reply_Entry_Game_name, int32(x)) -} -func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(Reply_Entry_Game_value, data, "Reply_Entry_Game") - if err != nil { - return err - } - *x = Reply_Entry_Game(value) - return nil -} - -// This is a message that might be sent somewhere. -type Request struct { - Key []int64 `protobuf:"varint,1,rep,name=key" json:"key,omitempty"` - // optional imp.ImportedMessage imported_message = 2; - Hue *Request_Color `protobuf:"varint,3,opt,name=hue,enum=my.test.Request_Color" json:"hue,omitempty"` - Hat *HatType `protobuf:"varint,4,opt,name=hat,enum=my.test.HatType,def=1" json:"hat,omitempty"` - // optional imp.ImportedMessage.Owner owner = 6; - Deadline *float32 `protobuf:"fixed32,7,opt,name=deadline,def=inf" json:"deadline,omitempty"` - Somegroup *Request_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` - // This is a map field. It will generate map[int32]string. - NameMapping map[int32]string `protobuf:"bytes,14,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - // This is a map field whose value type is a message. - MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` - // This field should not conflict with any getters. 
- GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Request) Reset() { *m = Request{} } -func (m *Request) String() string { return proto.CompactTextString(m) } -func (*Request) ProtoMessage() {} - -const Default_Request_Hat HatType = HatType_FEDORA - -var Default_Request_Deadline float32 = float32(math.Inf(1)) - -func (m *Request) GetKey() []int64 { - if m != nil { - return m.Key - } - return nil -} - -func (m *Request) GetHue() Request_Color { - if m != nil && m.Hue != nil { - return *m.Hue - } - return Request_RED -} - -func (m *Request) GetHat() HatType { - if m != nil && m.Hat != nil { - return *m.Hat - } - return Default_Request_Hat -} - -func (m *Request) GetDeadline() float32 { - if m != nil && m.Deadline != nil { - return *m.Deadline - } - return Default_Request_Deadline -} - -func (m *Request) GetSomegroup() *Request_SomeGroup { - if m != nil { - return m.Somegroup - } - return nil -} - -func (m *Request) GetNameMapping() map[int32]string { - if m != nil { - return m.NameMapping - } - return nil -} - -func (m *Request) GetMsgMapping() map[int64]*Reply { - if m != nil { - return m.MsgMapping - } - return nil -} - -func (m *Request) GetReset_() int32 { - if m != nil && m.Reset_ != nil { - return *m.Reset_ - } - return 0 -} - -func (m *Request) GetGetKey_() string { - if m != nil && m.GetKey_ != nil { - return *m.GetKey_ - } - return "" -} - -type Request_SomeGroup struct { - GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } -func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*Request_SomeGroup) ProtoMessage() {} - -func (m *Request_SomeGroup) GetGroupField() int32 { - if m != nil && m.GroupField != nil { - return *m.GroupField - } - return 0 -} - -type Reply struct { - Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` - CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Reply) Reset() { *m = Reply{} } -func (m *Reply) String() string { return proto.CompactTextString(m) } -func (*Reply) ProtoMessage() {} - -var extRange_Reply = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_Reply -} - -func (m *Reply) GetFound() []*Reply_Entry { - if m != nil { - return m.Found - } - return nil -} - -func (m *Reply) GetCompactKeys() []int32 { - if m != nil { - return m.CompactKeys - } - return nil -} - -type Reply_Entry struct { - KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` - Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` - XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } -func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } -func (*Reply_Entry) ProtoMessage() {} - -const Default_Reply_Entry_Value int64 = 7 - -func (m *Reply_Entry) GetKeyThatNeeds_1234Camel_CasIng() int64 { - if m != 
nil && m.KeyThatNeeds_1234Camel_CasIng != nil { - return *m.KeyThatNeeds_1234Camel_CasIng - } - return 0 -} - -func (m *Reply_Entry) GetValue() int64 { - if m != nil && m.Value != nil { - return *m.Value - } - return Default_Reply_Entry_Value -} - -func (m *Reply_Entry) GetXMyFieldName_2() int64 { - if m != nil && m.XMyFieldName_2 != nil { - return *m.XMyFieldName_2 - } - return 0 -} - -type OtherBase struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherBase) Reset() { *m = OtherBase{} } -func (m *OtherBase) String() string { return proto.CompactTextString(m) } -func (*OtherBase) ProtoMessage() {} - -var extRange_OtherBase = []proto.ExtensionRange{ - {100, 536870911}, -} - -func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OtherBase -} - -func (m *OtherBase) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -type ReplyExtensions struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } -func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } -func (*ReplyExtensions) ProtoMessage() {} - -var E_ReplyExtensions_Time = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*float64)(nil), - Field: 101, - Name: "my.test.ReplyExtensions.time", - Tag: "fixed64,101,opt,name=time", - Filename: "my_test/test.proto", -} - -var E_ReplyExtensions_Carrot = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*ReplyExtensions)(nil), - Field: 105, - Name: "my.test.ReplyExtensions.carrot", - Tag: "bytes,105,opt,name=carrot", - Filename: "my_test/test.proto", -} - -var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ - ExtendedType: (*OtherBase)(nil), - ExtensionType: (*ReplyExtensions)(nil), - Field: 101, - Name: "my.test.ReplyExtensions.donut", - Tag: "bytes,101,opt,name=donut", - Filename: "my_test/test.proto", -} - -type OtherReplyExtensions struct { - Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } -func (m *OtherReplyExtensions) String() string { return proto.CompactTextString(m) } -func (*OtherReplyExtensions) ProtoMessage() {} - -func (m *OtherReplyExtensions) GetKey() int32 { - if m != nil && m.Key != nil { - return *m.Key - } - return 0 -} - -type OldReply struct { - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OldReply) Reset() { *m = OldReply{} } -func (m *OldReply) String() string { return proto.CompactTextString(m) } -func (*OldReply) ProtoMessage() {} - -func (m *OldReply) Marshal() ([]byte, error) { - return proto.MarshalMessageSet(&m.XXX_InternalExtensions) -} -func (m *OldReply) Unmarshal(buf []byte) error { - return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) -} -func (m *OldReply) MarshalJSON() ([]byte, error) { - return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) -} -func (m *OldReply) UnmarshalJSON(buf []byte) error { - return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) -} - -// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler -var _ proto.Marshaler = (*OldReply)(nil) -var _ proto.Unmarshaler = (*OldReply)(nil) - -var extRange_OldReply = []proto.ExtensionRange{ - {100, 2147483646}, -} - -func (*OldReply) ExtensionRangeArray() 
[]proto.ExtensionRange { - return extRange_OldReply -} - -type Communique struct { - MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` - // This is a oneof, called "union". - // - // Types that are valid to be assigned to Union: - // *Communique_Number - // *Communique_Name - // *Communique_Data - // *Communique_TempC - // *Communique_Height - // *Communique_Today - // *Communique_Maybe - // *Communique_Delta_ - // *Communique_Msg - // *Communique_Somegroup - Union isCommunique_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique) Reset() { *m = Communique{} } -func (m *Communique) String() string { return proto.CompactTextString(m) } -func (*Communique) ProtoMessage() {} - -type isCommunique_Union interface { - isCommunique_Union() -} - -type Communique_Number struct { - Number int32 `protobuf:"varint,5,opt,name=number,oneof"` -} -type Communique_Name struct { - Name string `protobuf:"bytes,6,opt,name=name,oneof"` -} -type Communique_Data struct { - Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` -} -type Communique_TempC struct { - TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` -} -type Communique_Height struct { - Height float32 `protobuf:"fixed32,9,opt,name=height,oneof"` -} -type Communique_Today struct { - Today Days `protobuf:"varint,10,opt,name=today,enum=my.test.Days,oneof"` -} -type Communique_Maybe struct { - Maybe bool `protobuf:"varint,11,opt,name=maybe,oneof"` -} -type Communique_Delta_ struct { - Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` -} -type Communique_Msg struct { - Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` -} -type Communique_Somegroup struct { - Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` -} - -func (*Communique_Number) isCommunique_Union() {} -func (*Communique_Name) isCommunique_Union() {} -func (*Communique_Data) isCommunique_Union() {} -func (*Communique_TempC) isCommunique_Union() {} -func (*Communique_Height) isCommunique_Union() {} -func (*Communique_Today) isCommunique_Union() {} -func (*Communique_Maybe) isCommunique_Union() {} -func (*Communique_Delta_) isCommunique_Union() {} -func (*Communique_Msg) isCommunique_Union() {} -func (*Communique_Somegroup) isCommunique_Union() {} - -func (m *Communique) GetUnion() isCommunique_Union { - if m != nil { - return m.Union - } - return nil -} - -func (m *Communique) GetMakeMeCry() bool { - if m != nil && m.MakeMeCry != nil { - return *m.MakeMeCry - } - return false -} - -func (m *Communique) GetNumber() int32 { - if x, ok := m.GetUnion().(*Communique_Number); ok { - return x.Number - } - return 0 -} - -func (m *Communique) GetName() string { - if x, ok := m.GetUnion().(*Communique_Name); ok { - return x.Name - } - return "" -} - -func (m *Communique) GetData() []byte { - if x, ok := m.GetUnion().(*Communique_Data); ok { - return x.Data - } - return nil -} - -func (m *Communique) GetTempC() float64 { - if x, ok := m.GetUnion().(*Communique_TempC); ok { - return x.TempC - } - return 0 -} - -func (m *Communique) GetHeight() float32 { - if x, ok := m.GetUnion().(*Communique_Height); ok { - return x.Height - } - return 0 -} - -func (m *Communique) GetToday() Days { - if x, ok := m.GetUnion().(*Communique_Today); ok { - return x.Today - } - return Days_MONDAY -} - -func (m *Communique) GetMaybe() bool { - if x, ok := m.GetUnion().(*Communique_Maybe); ok { - return x.Maybe - } - return false -} - -func (m *Communique) GetDelta() 
int32 { - if x, ok := m.GetUnion().(*Communique_Delta_); ok { - return x.Delta - } - return 0 -} - -func (m *Communique) GetMsg() *Reply { - if x, ok := m.GetUnion().(*Communique_Msg); ok { - return x.Msg - } - return nil -} - -func (m *Communique) GetSomegroup() *Communique_SomeGroup { - if x, ok := m.GetUnion().(*Communique_Somegroup); ok { - return x.Somegroup - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ - (*Communique_Number)(nil), - (*Communique_Name)(nil), - (*Communique_Data)(nil), - (*Communique_TempC)(nil), - (*Communique_Height)(nil), - (*Communique_Today)(nil), - (*Communique_Maybe)(nil), - (*Communique_Delta_)(nil), - (*Communique_Msg)(nil), - (*Communique_Somegroup)(nil), - } -} - -func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - b.EncodeVarint(5<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Number)) - case *Communique_Name: - b.EncodeVarint(6<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Name) - case *Communique_Data: - b.EncodeVarint(7<<3 | proto.WireBytes) - b.EncodeRawBytes(x.Data) - case *Communique_TempC: - b.EncodeVarint(8<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.TempC)) - case *Communique_Height: - b.EncodeVarint(9<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.Height))) - case *Communique_Today: - b.EncodeVarint(10<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Today)) - case *Communique_Maybe: - t := uint64(0) - if x.Maybe { - t = 1 - } - b.EncodeVarint(11<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Communique_Delta_: - b.EncodeVarint(12<<3 | proto.WireVarint) - b.EncodeZigzag32(uint64(x.Delta)) - case *Communique_Msg: - b.EncodeVarint(13<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Msg); err != nil { - return err - } - case *Communique_Somegroup: - b.EncodeVarint(14<<3 | proto.WireStartGroup) - if err := b.Marshal(x.Somegroup); err != nil { - return err - } - b.EncodeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - return fmt.Errorf("Communique.Union has unexpected type %T", x) - } - return nil -} - -func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Communique) - switch tag { - case 5: // union.number - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Number{int32(x)} - return true, err - case 6: // union.name - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Union = &Communique_Name{x} - return true, err - case 7: // union.data - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Union = &Communique_Data{x} - return true, err - case 8: // union.temp_c - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Union = &Communique_TempC{math.Float64frombits(x)} - return true, err - case 9: // union.height - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := 
b.DecodeFixed32() - m.Union = &Communique_Height{math.Float32frombits(uint32(x))} - return true, err - case 10: // union.today - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Today{Days(x)} - return true, err - case 11: // union.maybe - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Union = &Communique_Maybe{x != 0} - return true, err - case 12: // union.delta - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeZigzag32() - m.Union = &Communique_Delta_{int32(x)} - return true, err - case 13: // union.msg - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Reply) - err := b.DecodeMessage(msg) - m.Union = &Communique_Msg{msg} - return true, err - case 14: // union.somegroup - if wire != proto.WireStartGroup { - return true, proto.ErrInternalBadWireType - } - msg := new(Communique_SomeGroup) - err := b.DecodeGroup(msg) - m.Union = &Communique_Somegroup{msg} - return true, err - default: - return false, nil - } -} - -func _Communique_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Communique) - // union - switch x := m.Union.(type) { - case *Communique_Number: - n += proto.SizeVarint(5<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Number)) - case *Communique_Name: - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Name))) - n += len(x.Name) - case *Communique_Data: - n += proto.SizeVarint(7<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Data))) - n += len(x.Data) - case *Communique_TempC: - n += proto.SizeVarint(8<<3 | proto.WireFixed64) - n += 8 - case *Communique_Height: - n += proto.SizeVarint(9<<3 | proto.WireFixed32) - n += 4 - case *Communique_Today: - n += proto.SizeVarint(10<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.Today)) - case *Communique_Maybe: - n += proto.SizeVarint(11<<3 | proto.WireVarint) - n += 1 - case *Communique_Delta_: - n += proto.SizeVarint(12<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) - case *Communique_Msg: - s := proto.Size(x.Msg) - n += proto.SizeVarint(13<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Communique_Somegroup: - n += proto.SizeVarint(14<<3 | proto.WireStartGroup) - n += proto.Size(x.Somegroup) - n += proto.SizeVarint(14<<3 | proto.WireEndGroup) - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type Communique_SomeGroup struct { - Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} } -func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } -func (*Communique_SomeGroup) ProtoMessage() {} - -func (m *Communique_SomeGroup) GetMember() string { - if m != nil && m.Member != nil { - return *m.Member - } - return "" -} - -type Communique_Delta struct { - XXX_unrecognized []byte `json:"-"` -} - -func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } -func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } -func (*Communique_Delta) ProtoMessage() {} - -var E_Tag = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*string)(nil), - Field: 103, - Name: "my.test.tag", - Tag: 
"bytes,103,opt,name=tag", - Filename: "my_test/test.proto", -} - -var E_Donut = &proto.ExtensionDesc{ - ExtendedType: (*Reply)(nil), - ExtensionType: (*OtherReplyExtensions)(nil), - Field: 106, - Name: "my.test.donut", - Tag: "bytes,106,opt,name=donut", - Filename: "my_test/test.proto", -} - -func init() { - proto.RegisterType((*Request)(nil), "my.test.Request") - proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") - proto.RegisterType((*Reply)(nil), "my.test.Reply") - proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") - proto.RegisterType((*OtherBase)(nil), "my.test.OtherBase") - proto.RegisterType((*ReplyExtensions)(nil), "my.test.ReplyExtensions") - proto.RegisterType((*OtherReplyExtensions)(nil), "my.test.OtherReplyExtensions") - proto.RegisterType((*OldReply)(nil), "my.test.OldReply") - proto.RegisterType((*Communique)(nil), "my.test.Communique") - proto.RegisterType((*Communique_SomeGroup)(nil), "my.test.Communique.SomeGroup") - proto.RegisterType((*Communique_Delta)(nil), "my.test.Communique.Delta") - proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) - proto.RegisterEnum("my.test.Days", Days_name, Days_value) - proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) - proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) - proto.RegisterExtension(E_ReplyExtensions_Time) - proto.RegisterExtension(E_ReplyExtensions_Carrot) - proto.RegisterExtension(E_ReplyExtensions_Donut) - proto.RegisterExtension(E_Tag) - proto.RegisterExtension(E_Donut) -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto index 8e70946..64fcdca 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto @@ -34,6 +34,8 @@ syntax = "proto2"; // This package holds interesting messages. package my.test; // dotted package name +option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/my_test;test"; + //import "imp.proto"; import "multi/multi1.proto"; // unused import @@ -83,6 +85,13 @@ message Request { optional int32 reset = 12; // This field should not conflict with any getters. optional string get_key = 16; + + optional float float_ninf = 20 [default=-inf]; + optional float float_pinf = 21 [default=inf]; + optional float float_exp = 22 [default=1e9]; + optional double double_ninf = 23 [default=-inf]; + optional double double_pinf = 24 [default=inf]; + optional double double_exp = 25 [default=1e9]; } message Reply { @@ -145,7 +154,7 @@ message Communique { Days today = 10; bool maybe = 11; sint32 delta = 12; // name will conflict with Delta below - Reply msg = 13; + Reply msg = 16; // requires two bytes to encode field tag group SomeGroup = 14 { optional string member = 15; } diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go new file mode 100644 index 0000000..a816962 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.pb.go @@ -0,0 +1,202 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: proto3/proto3.proto + +package proto3 + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Request_Flavour int32 + +const ( + Request_SWEET Request_Flavour = 0 + Request_SOUR Request_Flavour = 1 + Request_UMAMI Request_Flavour = 2 + Request_GOPHERLICIOUS Request_Flavour = 3 +) + +var Request_Flavour_name = map[int32]string{ + 0: "SWEET", + 1: "SOUR", + 2: "UMAMI", + 3: "GOPHERLICIOUS", +} + +var Request_Flavour_value = map[string]int32{ + "SWEET": 0, + "SOUR": 1, + "UMAMI": 2, + "GOPHERLICIOUS": 3, +} + +func (x Request_Flavour) String() string { + return proto.EnumName(Request_Flavour_name, int32(x)) +} + +func (Request_Flavour) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ab04eb4084a521db, []int{0, 0} +} + +type Request struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Key []int64 `protobuf:"varint,2,rep,packed,name=key,proto3" json:"key,omitempty"` + Taste Request_Flavour `protobuf:"varint,3,opt,name=taste,proto3,enum=proto3.Request_Flavour" json:"taste,omitempty"` + Book *Book `protobuf:"bytes,4,opt,name=book,proto3" json:"book,omitempty"` + Unpacked []int64 `protobuf:"varint,5,rep,name=unpacked,proto3" json:"unpacked,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_ab04eb4084a521db, []int{0} +} + +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (m *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(m, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo + +func (m *Request) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Request) GetKey() []int64 { + if m != nil { + return m.Key + } + return nil +} + +func (m *Request) GetTaste() Request_Flavour { + if m != nil { + return m.Taste + } + return Request_SWEET +} + +func (m *Request) GetBook() *Book { + if m != nil { + return m.Book + } + return nil +} + +func (m *Request) GetUnpacked() []int64 { + if m != nil { + return m.Unpacked + } + return nil +} + +type Book struct { + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + RawData []byte `protobuf:"bytes,2,opt,name=raw_data,json=rawData,proto3" json:"raw_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Book) Reset() { *m = Book{} } +func (m *Book) String() string { return proto.CompactTextString(m) } +func (*Book) ProtoMessage() {} +func (*Book) Descriptor() ([]byte, []int) { + return fileDescriptor_ab04eb4084a521db, []int{1} +} + +func (m *Book) XXX_Unmarshal(b []byte) error { + 
return xxx_messageInfo_Book.Unmarshal(m, b) +} +func (m *Book) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Book.Marshal(b, m, deterministic) +} +func (m *Book) XXX_Merge(src proto.Message) { + xxx_messageInfo_Book.Merge(m, src) +} +func (m *Book) XXX_Size() int { + return xxx_messageInfo_Book.Size(m) +} +func (m *Book) XXX_DiscardUnknown() { + xxx_messageInfo_Book.DiscardUnknown(m) +} + +var xxx_messageInfo_Book proto.InternalMessageInfo + +func (m *Book) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Book) GetRawData() []byte { + if m != nil { + return m.RawData + } + return nil +} + +func init() { + proto.RegisterEnum("proto3.Request_Flavour", Request_Flavour_name, Request_Flavour_value) + proto.RegisterType((*Request)(nil), "proto3.Request") + proto.RegisterType((*Book)(nil), "proto3.Book") +} + +func init() { proto.RegisterFile("proto3/proto3.proto", fileDescriptor_ab04eb4084a521db) } + +var fileDescriptor_ab04eb4084a521db = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x3c, 0x90, 0xcf, 0x4e, 0xf2, 0x40, + 0x14, 0xc5, 0x99, 0xfe, 0xf9, 0x80, 0xfb, 0xa1, 0x19, 0xaf, 0x26, 0x8e, 0x1b, 0x33, 0x61, 0xd5, + 0x0d, 0x25, 0xc1, 0x85, 0xc6, 0xb8, 0x11, 0x45, 0x25, 0x91, 0x60, 0x06, 0x89, 0x89, 0x1b, 0x33, + 0x85, 0xb1, 0x92, 0x42, 0x07, 0xcb, 0x54, 0xe2, 0xcb, 0xfa, 0x2c, 0xa6, 0x9d, 0xe2, 0xea, 0x9e, + 0x7b, 0xe7, 0xe4, 0x77, 0x32, 0x07, 0x0e, 0xd7, 0x99, 0x36, 0xfa, 0xac, 0x6b, 0x47, 0x58, 0x0e, + 0xfc, 0x67, 0xb7, 0xf6, 0x0f, 0x81, 0xba, 0x50, 0x9f, 0xb9, 0xda, 0x18, 0x44, 0xf0, 0x52, 0xb9, + 0x52, 0x8c, 0x70, 0x12, 0x34, 0x45, 0xa9, 0x91, 0x82, 0x9b, 0xa8, 0x6f, 0xe6, 0x70, 0x37, 0x70, + 0x45, 0x21, 0xb1, 0x03, 0xbe, 0x91, 0x1b, 0xa3, 0x98, 0xcb, 0x49, 0xb0, 0xdf, 0x3b, 0x0e, 0x2b, + 0x6e, 0x45, 0x09, 0xef, 0x96, 0xf2, 0x4b, 0xe7, 0x99, 0xb0, 0x2e, 0xe4, 0xe0, 0x45, 0x5a, 0x27, + 0xcc, 0xe3, 0x24, 0xf8, 0xdf, 0x6b, 0xed, 0xdc, 0x7d, 0xad, 0x13, 0x51, 0xbe, 0xe0, 0x29, 0x34, + 0xf2, 0x74, 0x2d, 0x67, 0x89, 0x9a, 0x33, 0xbf, 0xc8, 0xe9, 0x3b, 0xb4, 0x26, 0xfe, 0x6e, 0xed, + 0x2b, 0xa8, 0x57, 0x4c, 0x6c, 0x82, 0x3f, 0x79, 0x19, 0x0c, 0x9e, 0x69, 0x0d, 0x1b, 0xe0, 0x4d, + 0xc6, 0x53, 0x41, 0x49, 0x71, 0x9c, 0x8e, 0xae, 0x47, 0x43, 0xea, 0xe0, 0x01, 0xec, 0xdd, 0x8f, + 0x9f, 0x1e, 0x06, 0xe2, 0x71, 0x78, 0x33, 0x1c, 0x4f, 0x27, 0xd4, 0x6d, 0x9f, 0x83, 0x57, 0x64, + 0xe1, 0x11, 0xf8, 0x66, 0x61, 0x96, 0xbb, 0xdf, 0xd9, 0x05, 0x4f, 0xa0, 0x91, 0xc9, 0xed, 0xdb, + 0x5c, 0x1a, 0xc9, 0x1c, 0x4e, 0x82, 0x96, 0xa8, 0x67, 0x72, 0x7b, 0x2b, 0x8d, 0xec, 0x5f, 0xbe, + 0x5e, 0xc4, 0x0b, 0xf3, 0x91, 0x47, 0xe1, 0x4c, 0xaf, 0xba, 0xb1, 0x5e, 0xca, 0x34, 0xb6, 0x1d, + 0x46, 0xf9, 0xbb, 0x15, 0xb3, 0x4e, 0xac, 0xd2, 0x4e, 0xac, 0xbb, 0x46, 0x6d, 0x4c, 0xc1, 0xa8, + 0x3a, 0x8e, 0xaa, 0x76, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xec, 0x71, 0xee, 0xdb, 0x7b, 0x01, + 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto similarity index 96% rename from vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto rename to vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto index 869b9af..79954e4 100644 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3/proto3.proto @@ -33,6 +33,8 @@ syntax = "proto3"; package proto3; 
+option go_package = "github.com/golang/protobuf/protoc-gen-go/testdata/proto3"; + message Request { enum Flavour { SWEET = 0; diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go index b2af97f..70276e8 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any.go +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -130,10 +130,12 @@ func UnmarshalAny(any *any.Any, pb proto.Message) error { // Is returns true if any value contains a given message type. func Is(any *any.Any, pb proto.Message) bool { - aname, err := AnyMessageName(any) - if err != nil { + // The following is equivalent to AnyMessageName(any) == proto.MessageName(pb), + // but it avoids scanning TypeUrl for the slash. + if any == nil { return false } - - return aname == proto.MessageName(pb) + name := proto.MessageName(pb) + prefix := len(any.TypeUrl) - len(name) + return prefix >= 1 && any.TypeUrl[prefix-1] == '/' && any.TypeUrl[prefix:] == name } diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go index 1fbaa44..78ee523 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go @@ -1,20 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/any/any.proto +// source: google/protobuf/any.proto -/* -Package any is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/any/any.proto - -It has these top-level messages: - Any -*/ package any -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -25,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // `Any` contains an arbitrary serialized protocol buffer message along with a // URL that describes the type of the serialized message. @@ -62,6 +55,16 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // any.Unpack(foo) // ... // +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack // methods only use the fully qualified type name after the last '/' @@ -98,17 +101,18 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // } // type Any struct { - // A URL/resource name whose content describes the type of the - // serialized protocol buffer message. + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). 
// - // For URLs which use the scheme `http`, `https`, or no scheme, the - // following restrictions and interpretations apply: + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: // // * If no scheme is provided, `https` is assumed. - // * The last segment of the URL's path must represent the fully - // qualified name of the type (as in `path/google.protobuf.Duration`). - // The name should be in a canonical form (e.g., leading "." is - // not accepted). // * An HTTP GET on the URL must yield a [google.protobuf.Type][] // value in binary format, or produce an error. // * Applications are allowed to cache lookup results based on the @@ -117,19 +121,47 @@ type Any struct { // on changes to types. (Use versioned type names to manage // breaking changes.) // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. // - TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl" json:"type_url,omitempty"` + TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` // Must be a valid serialized protocol buffer of the above specified type. - Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Any) Reset() { *m = Any{} } +func (m *Any) String() string { return proto.CompactTextString(m) } +func (*Any) ProtoMessage() {} +func (*Any) Descriptor() ([]byte, []int) { + return fileDescriptor_b53526c13ae22eb4, []int{0} +} + +func (*Any) XXX_WellKnownType() string { return "Any" } + +func (m *Any) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Any.Unmarshal(m, b) +} +func (m *Any) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Any.Marshal(b, m, deterministic) +} +func (m *Any) XXX_Merge(src proto.Message) { + xxx_messageInfo_Any.Merge(m, src) +} +func (m *Any) XXX_Size() int { + return xxx_messageInfo_Any.Size(m) +} +func (m *Any) XXX_DiscardUnknown() { + xxx_messageInfo_Any.DiscardUnknown(m) } -func (m *Any) Reset() { *m = Any{} } -func (m *Any) String() string { return proto.CompactTextString(m) } -func (*Any) ProtoMessage() {} -func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Any) XXX_WellKnownType() string { return "Any" } +var xxx_messageInfo_Any proto.InternalMessageInfo func (m *Any) GetTypeUrl() string { if m != nil { @@ -149,20 +181,20 @@ func init() { proto.RegisterType((*Any)(nil), "google.protobuf.Any") } -func init() { proto.RegisterFile("github.com/golang/protobuf/ptypes/any/any.proto", fileDescriptor0) } +func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4) } -var fileDescriptor0 = []byte{ - // 184 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4f, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 
0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0xcc, - 0xab, 0x04, 0x61, 0x3d, 0xb0, 0xb8, 0x10, 0x7f, 0x7a, 0x7e, 0x7e, 0x7a, 0x4e, 0xaa, 0x1e, 0x4c, - 0x95, 0x92, 0x19, 0x17, 0xb3, 0x63, 0x5e, 0xa5, 0x90, 0x24, 0x17, 0x07, 0x48, 0x79, 0x7c, 0x69, - 0x51, 0x8e, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x3b, 0x88, 0x1f, 0x5a, 0x94, 0x23, 0x24, - 0xc2, 0xc5, 0x5a, 0x96, 0x98, 0x53, 0x9a, 0x2a, 0xc1, 0xa4, 0xc0, 0xa8, 0xc1, 0x13, 0x04, 0xe1, - 0x38, 0xe5, 0x73, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x19, 0xe7, 0xc4, 0xe1, 0x98, 0x57, 0x19, - 0x00, 0xe2, 0x04, 0x30, 0x46, 0xa9, 0x12, 0xe5, 0xb8, 0x45, 0x4c, 0xcc, 0xee, 0x01, 0x4e, 0xab, - 0x98, 0xe4, 0xdc, 0x21, 0x46, 0x05, 0x40, 0x95, 0xe8, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, - 0x97, 0xe7, 0x85, 0x80, 0x94, 0x26, 0xb1, 0x81, 0xf5, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, - 0x45, 0x1f, 0x1a, 0xf2, 0xf3, 0x00, 0x00, 0x00, +var fileDescriptor_b53526c13ae22eb4 = []byte{ + // 185 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4, + 0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a, + 0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46, + 0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7, + 0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xca, 0xe7, 0x12, 0x4e, 0xce, + 0xcf, 0xd5, 0x43, 0x33, 0xce, 0x89, 0xc3, 0x31, 0xaf, 0x32, 0x00, 0xc4, 0x09, 0x60, 0x8c, 0x52, + 0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, + 0x4b, 0x47, 0xb8, 0xa8, 0x00, 0x64, 0x7a, 0x31, 0xc8, 0x61, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, + 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x8c, 0x0a, 0x80, 0x2a, 0xd1, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, + 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0x29, 0x4d, 0x62, 0x03, 0xeb, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff, + 0xff, 0x13, 0xf8, 0xe8, 0x42, 0xdd, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.proto b/vendor/github.com/golang/protobuf/ptypes/any/any.proto index 9bd3f50..4932942 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.proto +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.proto @@ -74,6 +74,16 @@ option objc_class_prefix = "GPB"; // any.Unpack(foo) // ... // +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack // methods only use the fully qualified type name after the last '/' @@ -110,17 +120,18 @@ option objc_class_prefix = "GPB"; // } // message Any { - // A URL/resource name whose content describes the type of the - // serialized protocol buffer message. + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). 
// - // For URLs which use the scheme `http`, `https`, or no scheme, the - // following restrictions and interpretations apply: + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: // // * If no scheme is provided, `https` is assumed. - // * The last segment of the URL's path must represent the fully - // qualified name of the type (as in `path/google.protobuf.Duration`). - // The name should be in a canonical form (e.g., leading "." is - // not accepted). // * An HTTP GET on the URL must yield a [google.protobuf.Type][] // value in binary format, or produce an error. // * Applications are allowed to cache lookup results based on the @@ -129,6 +140,10 @@ message Any { // on changes to types. (Use versioned type names to manage // breaking changes.) // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. // diff --git a/vendor/github.com/golang/protobuf/ptypes/any_test.go b/vendor/github.com/golang/protobuf/ptypes/any_test.go index ed675b4..871c6de 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any_test.go +++ b/vendor/github.com/golang/protobuf/ptypes/any_test.go @@ -60,8 +60,13 @@ func TestIs(t *testing.T) { t.Fatal(err) } if Is(a, &pb.DescriptorProto{}) { + // No spurious match for message names of different length. t.Error("FileDescriptorProto is not a DescriptorProto, but Is says it is") } + if Is(a, &pb.EnumDescriptorProto{}) { + // No spurious match for message names of equal length. 
+ t.Error("FileDescriptorProto is not an EnumDescriptorProto, but Is says it is") + } if !Is(a, &pb.FileDescriptorProto{}) { t.Error("FileDescriptorProto is indeed a FileDescriptorProto, but Is says it is not") } @@ -75,6 +80,21 @@ func TestIsDifferentUrlPrefixes(t *testing.T) { } } +func TestIsCornerCases(t *testing.T) { + m := &pb.FileDescriptorProto{} + if Is(nil, m) { + t.Errorf("message with nil type url incorrectly claimed to be %q", proto.MessageName(m)) + } + noPrefix := &any.Any{TypeUrl: proto.MessageName(m)} + if Is(noPrefix, m) { + t.Errorf("message with type url %q incorrectly claimed to be %q", noPrefix.TypeUrl, proto.MessageName(m)) + } + shortPrefix := &any.Any{TypeUrl: "/" + proto.MessageName(m)} + if !Is(shortPrefix, m) { + t.Errorf("message with type url %q didn't satisfy Is for type %q", shortPrefix.TypeUrl, proto.MessageName(m)) + } +} + func TestUnmarshalDynamic(t *testing.T) { want := &pb.FileDescriptorProto{Name: proto.String("foo")} a, err := MarshalAny(want) @@ -111,3 +131,24 @@ func TestEmpty(t *testing.T) { t.Errorf("got no error for an attempt to create a message of type %q, which shouldn't be linked in", a.TypeUrl) } } + +func TestEmptyCornerCases(t *testing.T) { + _, err := Empty(nil) + if err == nil { + t.Error("expected Empty for nil to fail") + } + want := &pb.FileDescriptorProto{} + noPrefix := &any.Any{TypeUrl: proto.MessageName(want)} + _, err = Empty(noPrefix) + if err == nil { + t.Errorf("expected Empty for any type %q to fail", noPrefix.TypeUrl) + } + shortPrefix := &any.Any{TypeUrl: "/" + proto.MessageName(want)} + got, err := Empty(shortPrefix) + if err != nil { + t.Errorf("Empty for any type %q failed: %s", shortPrefix.TypeUrl, err) + } + if !proto.Equal(got, want) { + t.Errorf("Empty for any type %q differs, got %q, want %q", shortPrefix.TypeUrl, got, want) + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration.go b/vendor/github.com/golang/protobuf/ptypes/duration.go index 65cb0f8..26d1ca2 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration.go @@ -82,7 +82,7 @@ func Duration(p *durpb.Duration) (time.Duration, error) { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) } if p.Nanos != 0 { - d += time.Duration(p.Nanos) + d += time.Duration(p.Nanos) * time.Nanosecond if (d < 0) != (p.Nanos < 0) { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) } diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go index fe3350b..0d681ee 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go @@ -1,20 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/duration/duration.proto +// source: google/protobuf/duration.proto -/* -Package duration is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/duration/duration.proto - -It has these top-level messages: - Duration -*/ package duration -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -25,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. 
// A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // A Duration represents a signed, fixed-length span of time represented // as a count of seconds and fractions of seconds at nanosecond @@ -91,21 +84,45 @@ type Duration struct { // Signed seconds of the span of time. Must be from -315,576,000,000 // to +315,576,000,000 inclusive. Note: these bounds are computed from: // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"` + Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` // Signed fractions of a second at nanosecond resolution of the span // of time. Durations less than one second are represented with a 0 // `seconds` field and a positive or negative `nanos` field. For durations // of one second or more, a non-zero value for the `nanos` field must be // of the same sign as the `seconds` field. Must be from -999,999,999 // to +999,999,999 inclusive. - Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Duration) Reset() { *m = Duration{} } +func (m *Duration) String() string { return proto.CompactTextString(m) } +func (*Duration) ProtoMessage() {} +func (*Duration) Descriptor() ([]byte, []int) { + return fileDescriptor_23597b2ebd7ac6c5, []int{0} } -func (m *Duration) Reset() { *m = Duration{} } -func (m *Duration) String() string { return proto.CompactTextString(m) } -func (*Duration) ProtoMessage() {} -func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Duration) XXX_WellKnownType() string { return "Duration" } +func (*Duration) XXX_WellKnownType() string { return "Duration" } + +func (m *Duration) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Duration.Unmarshal(m, b) +} +func (m *Duration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Duration.Marshal(b, m, deterministic) +} +func (m *Duration) XXX_Merge(src proto.Message) { + xxx_messageInfo_Duration.Merge(m, src) +} +func (m *Duration) XXX_Size() int { + return xxx_messageInfo_Duration.Size(m) +} +func (m *Duration) XXX_DiscardUnknown() { + xxx_messageInfo_Duration.DiscardUnknown(m) +} + +var xxx_messageInfo_Duration proto.InternalMessageInfo func (m *Duration) GetSeconds() int64 { if m != nil { @@ -125,22 +142,20 @@ func init() { proto.RegisterType((*Duration)(nil), "google.protobuf.Duration") } -func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/duration/duration.proto", fileDescriptor0) -} +func init() { proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_23597b2ebd7ac6c5) } -var fileDescriptor0 = []byte{ - // 189 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0x29, - 0x2d, 0x4a, 0x2c, 0xc9, 0xcc, 0xcf, 0x83, 0x33, 0xf4, 0xc0, 0x2a, 0x84, 0xf8, 0xd3, 0xf3, 0xf3, - 0xd3, 0x73, 
0x52, 0xf5, 0x60, 0xea, 0x95, 0xac, 0xb8, 0x38, 0x5c, 0xa0, 0x4a, 0x84, 0x24, 0xb8, - 0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83, 0x60, - 0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xd6, - 0x20, 0x08, 0xc7, 0xa9, 0x86, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x48, 0x27, 0x5e, 0x98, - 0x81, 0x01, 0x20, 0x91, 0x00, 0xc6, 0x28, 0x2d, 0xe2, 0xdd, 0xfb, 0x83, 0x91, 0x71, 0x11, 0x13, - 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xb9, 0x01, 0x50, 0xa5, 0x7a, 0xe1, 0xa9, - 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x2d, 0x49, 0x6c, 0x60, 0x33, 0x8c, 0x01, - 0x01, 0x00, 0x00, 0xff, 0xff, 0x45, 0x5a, 0x81, 0x3d, 0x0e, 0x01, 0x00, 0x00, +var fileDescriptor_23597b2ebd7ac6c5 = []byte{ + // 190 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a, + 0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56, + 0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5, + 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e, + 0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0xc3, 0x25, 0x9c, 0x9c, + 0x9f, 0xab, 0x87, 0x66, 0xa4, 0x13, 0x2f, 0xcc, 0xc0, 0x00, 0x90, 0x48, 0x00, 0x63, 0x94, 0x56, + 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, + 0x3a, 0xc2, 0x7d, 0x05, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x70, 0x67, 0xfe, 0x60, 0x64, 0x5c, 0xc4, + 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, 0x00, 0x54, 0xa9, 0x5e, 0x78, + 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, 0x12, 0x1b, 0xd8, 0x0c, 0x63, + 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x30, 0xff, 0xf3, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go index ae15941..b4eb03e 100644 --- a/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go @@ -1,20 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/empty/empty.proto +// source: google/protobuf/empty.proto -/* -Package empty is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/empty/empty.proto - -It has these top-level messages: - Empty -*/ package empty -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -25,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // A generic empty message that you can re-use to avoid defining duplicated // empty messages in your APIs. 
A typical example is to use it as the request @@ -37,32 +30,54 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // // The JSON representation for `Empty` is empty JSON object `{}`. type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Empty) Reset() { *m = Empty{} } -func (m *Empty) String() string { return proto.CompactTextString(m) } -func (*Empty) ProtoMessage() {} -func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Empty) XXX_WellKnownType() string { return "Empty" } +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_900544acb223d5b8, []int{0} +} -func init() { - proto.RegisterType((*Empty)(nil), "google.protobuf.Empty") +func (*Empty) XXX_WellKnownType() string { return "Empty" } + +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (m *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(m, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) } +var xxx_messageInfo_Empty proto.InternalMessageInfo + func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/empty/empty.proto", fileDescriptor0) + proto.RegisterType((*Empty)(nil), "google.protobuf.Empty") } -var fileDescriptor0 = []byte{ - // 147 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4e, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0xcd, - 0x2d, 0x28, 0xa9, 0x84, 0x90, 0x7a, 0x60, 0x39, 0x21, 0xfe, 0xf4, 0xfc, 0xfc, 0xf4, 0x9c, 0x54, - 0x3d, 0x98, 0x4a, 0x25, 0x76, 0x2e, 0x56, 0x57, 0x90, 0xbc, 0x53, 0x19, 0x97, 0x70, 0x72, 0x7e, - 0xae, 0x1e, 0x9a, 0xbc, 0x13, 0x17, 0x58, 0x36, 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x52, 0x27, 0xd2, - 0xce, 0x1f, 0x8c, 0x8c, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x4c, - 0x0c, 0x80, 0xaa, 0xd3, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0xa9, - 0x4f, 0x62, 0x03, 0x1b, 0x60, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x6e, 0x8e, 0x0a, 0x06, 0xcf, - 0x00, 0x00, 0x00, +func init() { proto.RegisterFile("google/protobuf/empty.proto", fileDescriptor_900544acb223d5b8) } + +var fileDescriptor_900544acb223d5b8 = []byte{ + // 148 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcd, 0x2d, 0x28, + 0xa9, 0xd4, 0x03, 0x73, 0x85, 0xf8, 0x21, 0x92, 0x7a, 0x30, 0x49, 0x25, 0x76, 0x2e, 0x56, 0x57, + 0x90, 0xbc, 0x53, 0x19, 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0xbc, 0x13, 0x17, 0x58, 0x36, + 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x52, 0x4f, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, + 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0x47, 0x58, 0x53, 0x50, 0x52, 0x59, 0x90, 0x5a, 0x0c, + 0xb1, 0xed, 0x07, 0x23, 0xe3, 0x22, 0x26, 0x66, 0xf7, 
0x00, 0xa7, 0x55, 0x4c, 0x72, 0xee, 0x10, + 0x13, 0x03, 0xa0, 0xea, 0xf4, 0xc2, 0x53, 0x73, 0x72, 0xbc, 0xf3, 0xf2, 0xcb, 0xf3, 0x42, 0x40, + 0xea, 0x93, 0xd8, 0xc0, 0x06, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x64, 0xd4, 0xb3, 0xa6, + 0xb7, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/regen.sh b/vendor/github.com/golang/protobuf/ptypes/regen.sh deleted file mode 100755 index 2a5b4e8..0000000 --- a/vendor/github.com/golang/protobuf/ptypes/regen.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash -e -# -# This script fetches and rebuilds the "well-known types" protocol buffers. -# To run this you will need protoc and goprotobuf installed; -# see https://github.com/golang/protobuf for instructions. -# You also need Go and Git installed. - -PKG=github.com/golang/protobuf/ptypes -UPSTREAM=https://github.com/google/protobuf -UPSTREAM_SUBDIR=src/google/protobuf -PROTO_FILES=' - any.proto - duration.proto - empty.proto - struct.proto - timestamp.proto - wrappers.proto -' - -function die() { - echo 1>&2 $* - exit 1 -} - -# Sanity check that the right tools are accessible. -for tool in go git protoc protoc-gen-go; do - q=$(which $tool) || die "didn't find $tool" - echo 1>&2 "$tool: $q" -done - -tmpdir=$(mktemp -d -t regen-wkt.XXXXXX) -trap 'rm -rf $tmpdir' EXIT - -echo -n 1>&2 "finding package dir... " -pkgdir=$(go list -f '{{.Dir}}' $PKG) -echo 1>&2 $pkgdir -base=$(echo $pkgdir | sed "s,/$PKG\$,,") -echo 1>&2 "base: $base" -cd $base - -echo 1>&2 "fetching latest protos... " -git clone -q $UPSTREAM $tmpdir -# Pass 1: build mapping from upstream filename to our filename. -declare -A filename_map -for f in $(cd $PKG && find * -name '*.proto'); do - echo -n 1>&2 "looking for latest version of $f... " - up=$(cd $tmpdir/$UPSTREAM_SUBDIR && find * -name $(basename $f) | grep -v /testdata/) - echo 1>&2 $up - if [ $(echo $up | wc -w) != "1" ]; then - die "not exactly one match" - fi - filename_map[$up]=$f -done -# Pass 2: copy files -for up in "${!filename_map[@]}"; do - f=${filename_map[$up]} - shortname=$(basename $f | sed 's,\.proto$,,') - cp $tmpdir/$UPSTREAM_SUBDIR/$up $PKG/$f -done - -# Run protoc once per package. -for dir in $(find $PKG -name '*.proto' | xargs dirname | sort | uniq); do - echo 1>&2 "* $dir" - protoc --go_out=. $dir/*.proto -done -echo 1>&2 "All OK" diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go index 35a8ec5..33daa73 100644 --- a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go @@ -1,22 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/struct/struct.proto +// source: google/protobuf/struct.proto -/* -Package structpb is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/struct/struct.proto - -It has these top-level messages: - Struct - Value - ListValue -*/ package structpb -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -27,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. @@ -43,6 +34,7 @@ const ( var NullValue_name = map[int32]string{ 0: "NULL_VALUE", } + var NullValue_value = map[string]int32{ "NULL_VALUE": 0, } @@ -50,8 +42,12 @@ var NullValue_value = map[string]int32{ func (x NullValue) String() string { return proto.EnumName(NullValue_name, int32(x)) } -func (NullValue) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (NullValue) XXX_WellKnownType() string { return "NullValue" } + +func (NullValue) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_df322afd6c9fb402, []int{0} +} + +func (NullValue) XXX_WellKnownType() string { return "NullValue" } // `Struct` represents a structured data value, consisting of fields // which map to dynamically typed values. In some languages, `Struct` @@ -63,14 +59,38 @@ func (NullValue) XXX_WellKnownType() string { return "NullValue" } // The JSON representation for `Struct` is JSON object. type Struct struct { // Unordered map of dynamically typed values. - Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Struct) Reset() { *m = Struct{} } -func (m *Struct) String() string { return proto.CompactTextString(m) } -func (*Struct) ProtoMessage() {} -func (*Struct) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Struct) XXX_WellKnownType() string { return "Struct" } +func (m *Struct) Reset() { *m = Struct{} } +func (m *Struct) String() string { return proto.CompactTextString(m) } +func (*Struct) ProtoMessage() {} +func (*Struct) Descriptor() ([]byte, []int) { + return fileDescriptor_df322afd6c9fb402, []int{0} +} + +func (*Struct) XXX_WellKnownType() string { return "Struct" } + +func (m *Struct) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Struct.Unmarshal(m, b) +} +func (m *Struct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Struct.Marshal(b, m, deterministic) +} +func (m *Struct) XXX_Merge(src proto.Message) { + xxx_messageInfo_Struct.Merge(m, src) +} +func (m *Struct) XXX_Size() int { + return xxx_messageInfo_Struct.Size(m) +} +func (m *Struct) XXX_DiscardUnknown() { + xxx_messageInfo_Struct.DiscardUnknown(m) +} + +var xxx_messageInfo_Struct proto.InternalMessageInfo func (m *Struct) GetFields() map[string]*Value { if m != nil { @@ -95,44 +115,78 @@ type Value struct { // *Value_BoolValue // *Value_StructValue // *Value_ListValue - Kind isValue_Kind `protobuf_oneof:"kind"` + Kind isValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_df322afd6c9fb402, []int{1} +} + +func (*Value) XXX_WellKnownType() string { return "Value" } + +func (m 
*Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (m *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(m, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) } -func (m *Value) Reset() { *m = Value{} } -func (m *Value) String() string { return proto.CompactTextString(m) } -func (*Value) ProtoMessage() {} -func (*Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } -func (*Value) XXX_WellKnownType() string { return "Value" } +var xxx_messageInfo_Value proto.InternalMessageInfo type isValue_Kind interface { isValue_Kind() } type Value_NullValue struct { - NullValue NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,enum=google.protobuf.NullValue,oneof"` + NullValue NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` } + type Value_NumberValue struct { - NumberValue float64 `protobuf:"fixed64,2,opt,name=number_value,json=numberValue,oneof"` + NumberValue float64 `protobuf:"fixed64,2,opt,name=number_value,json=numberValue,proto3,oneof"` } + type Value_StringValue struct { - StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,oneof"` + StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,proto3,oneof"` } + type Value_BoolValue struct { - BoolValue bool `protobuf:"varint,4,opt,name=bool_value,json=boolValue,oneof"` + BoolValue bool `protobuf:"varint,4,opt,name=bool_value,json=boolValue,proto3,oneof"` } + type Value_StructValue struct { - StructValue *Struct `protobuf:"bytes,5,opt,name=struct_value,json=structValue,oneof"` + StructValue *Struct `protobuf:"bytes,5,opt,name=struct_value,json=structValue,proto3,oneof"` } + type Value_ListValue struct { - ListValue *ListValue `protobuf:"bytes,6,opt,name=list_value,json=listValue,oneof"` + ListValue *ListValue `protobuf:"bytes,6,opt,name=list_value,json=listValue,proto3,oneof"` } -func (*Value_NullValue) isValue_Kind() {} +func (*Value_NullValue) isValue_Kind() {} + func (*Value_NumberValue) isValue_Kind() {} + func (*Value_StringValue) isValue_Kind() {} -func (*Value_BoolValue) isValue_Kind() {} + +func (*Value_BoolValue) isValue_Kind() {} + func (*Value_StructValue) isValue_Kind() {} -func (*Value_ListValue) isValue_Kind() {} + +func (*Value_ListValue) isValue_Kind() {} func (m *Value) GetKind() isValue_Kind { if m != nil { @@ -183,9 +237,9 @@ func (m *Value) GetListValue() *ListValue { return nil } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Value) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*Value_NullValue)(nil), (*Value_NumberValue)(nil), (*Value_StringValue)(nil), @@ -195,142 +249,43 @@ func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, } } -func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Value) - // kind - switch x := m.Kind.(type) { - case *Value_NullValue: - b.EncodeVarint(1<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.NullValue)) - case *Value_NumberValue: - b.EncodeVarint(2<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.NumberValue)) - case *Value_StringValue: - b.EncodeVarint(3<<3 | proto.WireBytes) - b.EncodeStringBytes(x.StringValue) - case *Value_BoolValue: - t := uint64(0) - if x.BoolValue { - t = 1 - } - b.EncodeVarint(4<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Value_StructValue: - b.EncodeVarint(5<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.StructValue); err != nil { - return err - } - case *Value_ListValue: - b.EncodeVarint(6<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.ListValue); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Value.Kind has unexpected type %T", x) - } - return nil -} - -func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Value) - switch tag { - case 1: // kind.null_value - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Kind = &Value_NullValue{NullValue(x)} - return true, err - case 2: // kind.number_value - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Kind = &Value_NumberValue{math.Float64frombits(x)} - return true, err - case 3: // kind.string_value - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Kind = &Value_StringValue{x} - return true, err - case 4: // kind.bool_value - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Kind = &Value_BoolValue{x != 0} - return true, err - case 5: // kind.struct_value - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Struct) - err := b.DecodeMessage(msg) - m.Kind = &Value_StructValue{msg} - return true, err - case 6: // kind.list_value - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(ListValue) - err := b.DecodeMessage(msg) - m.Kind = &Value_ListValue{msg} - return true, err - default: - return false, nil - } -} - -func _Value_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Value) - // kind - switch x := m.Kind.(type) { - case *Value_NullValue: - n += proto.SizeVarint(1<<3 | proto.WireVarint) - n += proto.SizeVarint(uint64(x.NullValue)) - case *Value_NumberValue: - n += proto.SizeVarint(2<<3 | proto.WireFixed64) - n += 8 - case *Value_StringValue: - n += proto.SizeVarint(3<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.StringValue))) - n += len(x.StringValue) - case *Value_BoolValue: - n += proto.SizeVarint(4<<3 | proto.WireVarint) - n += 1 - case *Value_StructValue: - s := proto.Size(x.StructValue) - n += proto.SizeVarint(5<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case *Value_ListValue: - s := proto.Size(x.ListValue) - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: 
unexpected type %T in oneof", x)) - } - return n -} - // `ListValue` is a wrapper around a repeated field of values. // // The JSON representation for `ListValue` is JSON array. type ListValue struct { // Repeated field of dynamically typed values. - Values []*Value `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"` + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListValue) Reset() { *m = ListValue{} } +func (m *ListValue) String() string { return proto.CompactTextString(m) } +func (*ListValue) ProtoMessage() {} +func (*ListValue) Descriptor() ([]byte, []int) { + return fileDescriptor_df322afd6c9fb402, []int{2} +} + +func (*ListValue) XXX_WellKnownType() string { return "ListValue" } + +func (m *ListValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListValue.Unmarshal(m, b) +} +func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListValue.Marshal(b, m, deterministic) +} +func (m *ListValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListValue.Merge(m, src) +} +func (m *ListValue) XXX_Size() int { + return xxx_messageInfo_ListValue.Size(m) +} +func (m *ListValue) XXX_DiscardUnknown() { + xxx_messageInfo_ListValue.DiscardUnknown(m) } -func (m *ListValue) Reset() { *m = ListValue{} } -func (m *ListValue) String() string { return proto.CompactTextString(m) } -func (*ListValue) ProtoMessage() {} -func (*ListValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (*ListValue) XXX_WellKnownType() string { return "ListValue" } +var xxx_messageInfo_ListValue proto.InternalMessageInfo func (m *ListValue) GetValues() []*Value { if m != nil { @@ -340,43 +295,42 @@ func (m *ListValue) GetValues() []*Value { } func init() { + proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value) proto.RegisterType((*Struct)(nil), "google.protobuf.Struct") + proto.RegisterMapType((map[string]*Value)(nil), "google.protobuf.Struct.FieldsEntry") proto.RegisterType((*Value)(nil), "google.protobuf.Value") proto.RegisterType((*ListValue)(nil), "google.protobuf.ListValue") - proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value) } -func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/struct/struct.proto", fileDescriptor0) -} +func init() { proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor_df322afd6c9fb402) } -var fileDescriptor0 = []byte{ +var fileDescriptor_df322afd6c9fb402 = []byte{ // 417 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x41, 0x8b, 0xd3, 0x40, - 0x14, 0x80, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 0xb4, 0xac, 0xa0, 0xa1, 0x7b, 0x09, - 0x22, 0x09, 0x56, 0x04, 0x31, 0x5e, 0x0c, 0xac, 0xbb, 0x60, 0x58, 0x62, 0x74, 0x57, 0xf0, 0x52, - 0x9a, 0x34, 0x8d, 0xa1, 0xd3, 0x99, 0x90, 0xcc, 0x28, 0x3d, 0xfa, 0x2f, 0x3c, 0x7b, 0xf4, 0xe8, - 0xaf, 0xf3, 0x28, 0x33, 0x93, 0x44, 0x69, 0x29, 0x78, 0x9a, 0xbe, 0x37, 0xdf, 0xfb, 0xe6, 0xbd, - 0xd7, 0xc0, 0xf3, 0xb2, 0xe2, 0x9f, 0x45, 0xe6, 0xe7, 0x6c, 0x13, 0x94, 0x8c, 0x2c, 0x68, 0x19, - 0xd4, 0x0d, 0xe3, 0x2c, 0x13, 0xab, 0xa0, 0xe6, 0xdb, 0xba, 0x68, 0x83, 0x96, 0x37, 0x22, 0xe7, - 0xdd, 0xe1, 0xab, 0x5b, 0x7c, 0xa7, 0x64, 0xac, 0x24, 0x85, 0xdf, 0xb3, 0xd3, 0xef, 0x08, 0xac, - 0xf7, 0x8a, 0xc0, 0x21, 0x58, 0xab, 0xaa, 0x20, 0xcb, 0x76, 0x82, 0x5c, 0xd3, 
0x73, 0x66, 0x67, - 0xfe, 0x0e, 0xec, 0x6b, 0xd0, 0x7f, 0xa3, 0xa8, 0x73, 0xca, 0x9b, 0x6d, 0xda, 0x95, 0x9c, 0xbe, - 0x03, 0xe7, 0x9f, 0x34, 0x3e, 0x01, 0x73, 0x5d, 0x6c, 0x27, 0xc8, 0x45, 0x9e, 0x9d, 0xca, 0x9f, - 0xf8, 0x09, 0x8c, 0xbf, 0x2c, 0x88, 0x28, 0x26, 0x86, 0x8b, 0x3c, 0x67, 0x76, 0x6f, 0x4f, 0x7e, - 0x23, 0x6f, 0x53, 0x0d, 0xbd, 0x34, 0x5e, 0xa0, 0xe9, 0x2f, 0x03, 0xc6, 0x2a, 0x89, 0x43, 0x00, - 0x2a, 0x08, 0x99, 0x6b, 0x81, 0x94, 0x1e, 0xcf, 0x4e, 0xf7, 0x04, 0x57, 0x82, 0x10, 0xc5, 0x5f, - 0x8e, 0x52, 0x9b, 0xf6, 0x01, 0x3e, 0x83, 0xdb, 0x54, 0x6c, 0xb2, 0xa2, 0x99, 0xff, 0x7d, 0x1f, - 0x5d, 0x8e, 0x52, 0x47, 0x67, 0x07, 0xa8, 0xe5, 0x4d, 0x45, 0xcb, 0x0e, 0x32, 0x65, 0xe3, 0x12, - 0xd2, 0x59, 0x0d, 0x3d, 0x02, 0xc8, 0x18, 0xeb, 0xdb, 0x38, 0x72, 0x91, 0x77, 0x4b, 0x3e, 0x25, - 0x73, 0x1a, 0x78, 0xa5, 0x2c, 0x22, 0xe7, 0x1d, 0x32, 0x56, 0xa3, 0xde, 0x3f, 0xb0, 0xc7, 0x4e, - 0x2f, 0x72, 0x3e, 0x4c, 0x49, 0xaa, 0xb6, 0xaf, 0xb5, 0x54, 0xed, 0xfe, 0x94, 0x71, 0xd5, 0xf2, - 0x61, 0x4a, 0xd2, 0x07, 0x91, 0x05, 0x47, 0xeb, 0x8a, 0x2e, 0xa7, 0x21, 0xd8, 0x03, 0x81, 0x7d, - 0xb0, 0x94, 0xac, 0xff, 0x47, 0x0f, 0x2d, 0xbd, 0xa3, 0x1e, 0x3f, 0x00, 0x7b, 0x58, 0x22, 0x3e, - 0x06, 0xb8, 0xba, 0x8e, 0xe3, 0xf9, 0xcd, 0xeb, 0xf8, 0xfa, 0xfc, 0x64, 0x14, 0x7d, 0x43, 0x70, - 0x37, 0x67, 0x9b, 0x5d, 0x45, 0xe4, 0xe8, 0x69, 0x12, 0x19, 0x27, 0xe8, 0xd3, 0xd3, 0xff, 0xfd, - 0x30, 0x43, 0x7d, 0xd4, 0xd9, 0x6f, 0x84, 0x7e, 0x18, 0xe6, 0x45, 0x12, 0xfd, 0x34, 0x1e, 0x5e, - 0x68, 0x79, 0xd2, 0xf7, 0xf7, 0xb1, 0x20, 0xe4, 0x2d, 0x65, 0x5f, 0xe9, 0x07, 0x59, 0x99, 0x59, - 0x4a, 0xf5, 0xec, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x9b, 0x6e, 0x5d, 0x3c, 0xfe, 0x02, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 0xb4, 0xac, 0xa2, 0xa1, 0x7b, 0x09, + 0x22, 0x29, 0xd6, 0x8b, 0x18, 0x2f, 0x06, 0xd6, 0x5d, 0x30, 0x2c, 0x31, 0xba, 0x15, 0xbc, 0x94, + 0x26, 0x4d, 0x63, 0xe8, 0x74, 0x26, 0x24, 0x33, 0x4a, 0x8f, 0x7e, 0x0b, 0xcf, 0x1e, 0x3d, 0xfa, + 0xe9, 0x3c, 0xca, 0xcc, 0x24, 0xa9, 0xb4, 0xf4, 0x94, 0xbc, 0xf7, 0x7e, 0xef, 0x3f, 0xef, 0xff, + 0x66, 0xe0, 0x71, 0xc1, 0x58, 0x41, 0xf2, 0x49, 0x55, 0x33, 0xce, 0x52, 0xb1, 0x9a, 0x34, 0xbc, + 0x16, 0x19, 0xf7, 0x55, 0x8c, 0xef, 0xe9, 0xaa, 0xdf, 0x55, 0xc7, 0x3f, 0x11, 0x58, 0x1f, 0x15, + 0x81, 0x03, 0xb0, 0x56, 0x65, 0x4e, 0x96, 0xcd, 0x08, 0xb9, 0xa6, 0xe7, 0x4c, 0x2f, 0xfc, 0x3d, + 0xd8, 0xd7, 0xa0, 0xff, 0x4e, 0x51, 0x97, 0x94, 0xd7, 0xdb, 0xa4, 0x6d, 0x39, 0xff, 0x00, 0xce, + 0x7f, 0x69, 0x7c, 0x06, 0xe6, 0x3a, 0xdf, 0x8e, 0x90, 0x8b, 0x3c, 0x3b, 0x91, 0xbf, 0xf8, 0x39, + 0x0c, 0xbf, 0x2d, 0x88, 0xc8, 0x47, 0x86, 0x8b, 0x3c, 0x67, 0xfa, 0xe0, 0x40, 0x7c, 0x26, 0xab, + 0x89, 0x86, 0x5e, 0x1b, 0xaf, 0xd0, 0xf8, 0x8f, 0x01, 0x43, 0x95, 0xc4, 0x01, 0x00, 0x15, 0x84, + 0xcc, 0xb5, 0x80, 0x14, 0x3d, 0x9d, 0x9e, 0x1f, 0x08, 0xdc, 0x08, 0x42, 0x14, 0x7f, 0x3d, 0x48, + 0x6c, 0xda, 0x05, 0xf8, 0x02, 0xee, 0x52, 0xb1, 0x49, 0xf3, 0x7a, 0xbe, 0x3b, 0x1f, 0x5d, 0x0f, + 0x12, 0x47, 0x67, 0x7b, 0xa8, 0xe1, 0x75, 0x49, 0x8b, 0x16, 0x32, 0xe5, 0xe0, 0x12, 0xd2, 0x59, + 0x0d, 0x3d, 0x05, 0x48, 0x19, 0xeb, 0xc6, 0x38, 0x71, 0x91, 0x77, 0x47, 0x1e, 0x25, 0x73, 0x1a, + 0x78, 0xa3, 0x54, 0x44, 0xc6, 0x5b, 0x64, 0xa8, 0xac, 0x3e, 0x3c, 0xb2, 0xc7, 0x56, 0x5e, 0x64, + 0xbc, 0x77, 0x49, 0xca, 0xa6, 0xeb, 0xb5, 0x54, 0xef, 0xa1, 0xcb, 0xa8, 0x6c, 0x78, 0xef, 0x92, + 0x74, 0x41, 0x68, 0xc1, 0xc9, 0xba, 0xa4, 0xcb, 0x71, 0x00, 0x76, 0x4f, 0x60, 0x1f, 0x2c, 0x25, + 0xd6, 
0xdd, 0xe8, 0xb1, 0xa5, 0xb7, 0xd4, 0xb3, 0x47, 0x60, 0xf7, 0x4b, 0xc4, 0xa7, 0x00, 0x37, + 0xb7, 0x51, 0x34, 0x9f, 0xbd, 0x8d, 0x6e, 0x2f, 0xcf, 0x06, 0xe1, 0x0f, 0x04, 0xf7, 0x33, 0xb6, + 0xd9, 0x97, 0x08, 0x1d, 0xed, 0x26, 0x96, 0x71, 0x8c, 0xbe, 0xbc, 0x28, 0x4a, 0xfe, 0x55, 0xa4, + 0x7e, 0xc6, 0x36, 0x93, 0x82, 0x91, 0x05, 0x2d, 0x76, 0x4f, 0xb1, 0xe2, 0xdb, 0x2a, 0x6f, 0xda, + 0x17, 0x19, 0xe8, 0x4f, 0x95, 0xfe, 0x45, 0xe8, 0x97, 0x61, 0x5e, 0xc5, 0xe1, 0x6f, 0xe3, 0xc9, + 0x95, 0x16, 0x8f, 0xbb, 0xf9, 0x3e, 0xe7, 0x84, 0xbc, 0xa7, 0xec, 0x3b, 0xfd, 0x24, 0x3b, 0x53, + 0x4b, 0x49, 0xbd, 0xfc, 0x17, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x1b, 0x59, 0xf8, 0xe5, 0x02, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go index 47f10db..8da0df0 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -111,11 +111,9 @@ func TimestampNow() *tspb.Timestamp { // TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. // It returns an error if the resulting Timestamp is invalid. func TimestampProto(t time.Time) (*tspb.Timestamp, error) { - seconds := t.Unix() - nanos := int32(t.Sub(time.Unix(seconds, 0))) ts := &tspb.Timestamp{ - Seconds: seconds, - Nanos: nanos, + Seconds: t.Unix(), + Nanos: int32(t.Nanosecond()), } if err := validateTimestamp(ts); err != nil { return nil, err diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go index 3b76261..31cd846 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go @@ -1,20 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/timestamp/timestamp.proto +// source: google/protobuf/timestamp.proto -/* -Package timestamp is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/timestamp/timestamp.proto - -It has these top-level messages: - Timestamp -*/ package timestamp -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -25,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // A Timestamp represents a point in time independent of any time zone // or calendar, represented as seconds and fractions of seconds at @@ -90,7 +83,9 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional // seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), // are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -// is required, though only UTC (as indicated by "Z") is presently supported. +// is required. 
A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). // // For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past // 01:30 UTC on January 15, 2017. @@ -101,27 +96,51 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) // with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one // can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) -// to obtain a formatter capable of generating timestamps in this format. +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime-- +// ) to obtain a formatter capable of generating timestamps in this format. // // type Timestamp struct { // Represents seconds of UTC time since Unix epoch // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to // 9999-12-31T23:59:59Z inclusive. - Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"` + Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` // Non-negative fractions of a second at nanosecond resolution. Negative // second values with fractions must still have non-negative nanos values // that count forward in time. Must be from 0 to 999,999,999 // inclusive. - Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` + Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Timestamp) Reset() { *m = Timestamp{} } +func (m *Timestamp) String() string { return proto.CompactTextString(m) } +func (*Timestamp) ProtoMessage() {} +func (*Timestamp) Descriptor() ([]byte, []int) { + return fileDescriptor_292007bbfe81227e, []int{0} } -func (m *Timestamp) Reset() { *m = Timestamp{} } -func (m *Timestamp) String() string { return proto.CompactTextString(m) } -func (*Timestamp) ProtoMessage() {} -func (*Timestamp) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } +func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } + +func (m *Timestamp) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Timestamp.Unmarshal(m, b) +} +func (m *Timestamp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Timestamp.Marshal(b, m, deterministic) +} +func (m *Timestamp) XXX_Merge(src proto.Message) { + xxx_messageInfo_Timestamp.Merge(m, src) +} +func (m *Timestamp) XXX_Size() int { + return xxx_messageInfo_Timestamp.Size(m) +} +func (m *Timestamp) XXX_DiscardUnknown() { + xxx_messageInfo_Timestamp.DiscardUnknown(m) +} + +var xxx_messageInfo_Timestamp proto.InternalMessageInfo func (m *Timestamp) GetSeconds() int64 { if m != nil { @@ -141,22 +160,20 @@ func init() { proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp") } -func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/timestamp/timestamp.proto", fileDescriptor0) -} +func init() { proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_292007bbfe81227e) } -var fileDescriptor0 = []byte{ - // 190 bytes of a gzipped FileDescriptorProto - 0x1f, 
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2f, 0xc9, - 0xcc, 0x4d, 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0x40, 0xb0, 0xf4, 0xc0, 0x6a, 0x84, 0xf8, 0xd3, 0xf3, - 0xf3, 0xd3, 0x73, 0x52, 0xf5, 0x60, 0x3a, 0x94, 0xac, 0xb9, 0x38, 0x43, 0x60, 0x6a, 0x84, 0x24, - 0xb8, 0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83, - 0x60, 0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, - 0xd6, 0x20, 0x08, 0xc7, 0xa9, 0x8e, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x4c, 0x27, 0x3e, - 0xb8, 0x89, 0x01, 0x20, 0xa1, 0x00, 0xc6, 0x28, 0x6d, 0x12, 0xdc, 0xfc, 0x83, 0x91, 0x71, 0x11, - 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, 0xe1, - 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, 0x8c, - 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6b, 0x59, 0x0a, 0x4d, 0x13, 0x01, 0x00, 0x00, +var fileDescriptor_292007bbfe81227e = []byte{ + // 191 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d, + 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28, + 0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5, + 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89, + 0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x1d, 0x97, 0x70, + 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0x99, 0x4e, 0x7c, 0x70, 0x13, 0x03, 0x40, 0x42, 0x01, 0x8c, 0x51, + 0xda, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89, + 0x79, 0xe9, 0x08, 0x27, 0x16, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x23, 0x5c, 0xfa, 0x83, 0x91, 0x71, + 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, + 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, + 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x77, 0x4a, 0x07, 0xf7, 0x00, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto index b7cbd17..eafb3fa 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto @@ -103,7 +103,9 @@ option objc_class_prefix = "GPB"; // {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional // seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), // are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -// is required, though only UTC (as indicated by "Z") is presently supported. +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). // // For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past // 01:30 UTC on January 15, 2017. 
@@ -114,8 +116,8 @@ option objc_class_prefix = "GPB"; // to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) // with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one // can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) -// to obtain a formatter capable of generating timestamps in this format. +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime-- +// ) to obtain a formatter capable of generating timestamps in this format. // // message Timestamp { diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go index 1328bd2..add19a1 100644 --- a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go @@ -1,28 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: github.com/golang/protobuf/ptypes/wrappers/wrappers.proto - -/* -Package wrappers is a generated protocol buffer package. - -It is generated from these files: - github.com/golang/protobuf/ptypes/wrappers/wrappers.proto - -It has these top-level messages: - DoubleValue - FloatValue - Int64Value - UInt64Value - Int32Value - UInt32Value - BoolValue - StringValue - BytesValue -*/ +// source: google/protobuf/wrappers.proto + package wrappers -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -33,21 +18,45 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Wrapper message for `double`. // // The JSON representation for `DoubleValue` is JSON number. type DoubleValue struct { // The double value. 
- Value float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DoubleValue) Reset() { *m = DoubleValue{} } +func (m *DoubleValue) String() string { return proto.CompactTextString(m) } +func (*DoubleValue) ProtoMessage() {} +func (*DoubleValue) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{0} } -func (m *DoubleValue) Reset() { *m = DoubleValue{} } -func (m *DoubleValue) String() string { return proto.CompactTextString(m) } -func (*DoubleValue) ProtoMessage() {} -func (*DoubleValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" } +func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" } + +func (m *DoubleValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleValue.Unmarshal(m, b) +} +func (m *DoubleValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleValue.Marshal(b, m, deterministic) +} +func (m *DoubleValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleValue.Merge(m, src) +} +func (m *DoubleValue) XXX_Size() int { + return xxx_messageInfo_DoubleValue.Size(m) +} +func (m *DoubleValue) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleValue.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleValue proto.InternalMessageInfo func (m *DoubleValue) GetValue() float64 { if m != nil { @@ -61,14 +70,38 @@ func (m *DoubleValue) GetValue() float64 { // The JSON representation for `FloatValue` is JSON number. type FloatValue struct { // The float value. - Value float32 `protobuf:"fixed32,1,opt,name=value" json:"value,omitempty"` + Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *FloatValue) Reset() { *m = FloatValue{} } -func (m *FloatValue) String() string { return proto.CompactTextString(m) } -func (*FloatValue) ProtoMessage() {} -func (*FloatValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } -func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" } +func (m *FloatValue) Reset() { *m = FloatValue{} } +func (m *FloatValue) String() string { return proto.CompactTextString(m) } +func (*FloatValue) ProtoMessage() {} +func (*FloatValue) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{1} +} + +func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" } + +func (m *FloatValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatValue.Unmarshal(m, b) +} +func (m *FloatValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FloatValue.Marshal(b, m, deterministic) +} +func (m *FloatValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatValue.Merge(m, src) +} +func (m *FloatValue) XXX_Size() int { + return xxx_messageInfo_FloatValue.Size(m) +} +func (m *FloatValue) XXX_DiscardUnknown() { + xxx_messageInfo_FloatValue.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatValue proto.InternalMessageInfo func (m *FloatValue) GetValue() float32 { if m != nil { @@ -82,14 +115,38 @@ func (m *FloatValue) GetValue() float32 { // The JSON representation for `Int64Value` is JSON string. 
type Int64Value struct { // The int64 value. - Value int64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int64Value) Reset() { *m = Int64Value{} } +func (m *Int64Value) String() string { return proto.CompactTextString(m) } +func (*Int64Value) ProtoMessage() {} +func (*Int64Value) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{2} } -func (m *Int64Value) Reset() { *m = Int64Value{} } -func (m *Int64Value) String() string { return proto.CompactTextString(m) } -func (*Int64Value) ProtoMessage() {} -func (*Int64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" } +func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" } + +func (m *Int64Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int64Value.Unmarshal(m, b) +} +func (m *Int64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int64Value.Marshal(b, m, deterministic) +} +func (m *Int64Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64Value.Merge(m, src) +} +func (m *Int64Value) XXX_Size() int { + return xxx_messageInfo_Int64Value.Size(m) +} +func (m *Int64Value) XXX_DiscardUnknown() { + xxx_messageInfo_Int64Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Int64Value proto.InternalMessageInfo func (m *Int64Value) GetValue() int64 { if m != nil { @@ -103,14 +160,38 @@ func (m *Int64Value) GetValue() int64 { // The JSON representation for `UInt64Value` is JSON string. type UInt64Value struct { // The uint64 value. - Value uint64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UInt64Value) Reset() { *m = UInt64Value{} } +func (m *UInt64Value) String() string { return proto.CompactTextString(m) } +func (*UInt64Value) ProtoMessage() {} +func (*UInt64Value) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{3} } -func (m *UInt64Value) Reset() { *m = UInt64Value{} } -func (m *UInt64Value) String() string { return proto.CompactTextString(m) } -func (*UInt64Value) ProtoMessage() {} -func (*UInt64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } -func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" } +func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" } + +func (m *UInt64Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt64Value.Unmarshal(m, b) +} +func (m *UInt64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt64Value.Marshal(b, m, deterministic) +} +func (m *UInt64Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt64Value.Merge(m, src) +} +func (m *UInt64Value) XXX_Size() int { + return xxx_messageInfo_UInt64Value.Size(m) +} +func (m *UInt64Value) XXX_DiscardUnknown() { + xxx_messageInfo_UInt64Value.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt64Value proto.InternalMessageInfo func (m *UInt64Value) GetValue() uint64 { if m != nil { @@ -124,14 +205,38 @@ func (m *UInt64Value) GetValue() uint64 { // The JSON representation for `Int32Value` is JSON number. 
type Int32Value struct { // The int32 value. - Value int32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *Int32Value) Reset() { *m = Int32Value{} } -func (m *Int32Value) String() string { return proto.CompactTextString(m) } -func (*Int32Value) ProtoMessage() {} -func (*Int32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } -func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" } +func (m *Int32Value) Reset() { *m = Int32Value{} } +func (m *Int32Value) String() string { return proto.CompactTextString(m) } +func (*Int32Value) ProtoMessage() {} +func (*Int32Value) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{4} +} + +func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" } + +func (m *Int32Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int32Value.Unmarshal(m, b) +} +func (m *Int32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int32Value.Marshal(b, m, deterministic) +} +func (m *Int32Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int32Value.Merge(m, src) +} +func (m *Int32Value) XXX_Size() int { + return xxx_messageInfo_Int32Value.Size(m) +} +func (m *Int32Value) XXX_DiscardUnknown() { + xxx_messageInfo_Int32Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Int32Value proto.InternalMessageInfo func (m *Int32Value) GetValue() int32 { if m != nil { @@ -145,14 +250,38 @@ func (m *Int32Value) GetValue() int32 { // The JSON representation for `UInt32Value` is JSON number. type UInt32Value struct { // The uint32 value. 
- Value uint32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UInt32Value) Reset() { *m = UInt32Value{} } +func (m *UInt32Value) String() string { return proto.CompactTextString(m) } +func (*UInt32Value) ProtoMessage() {} +func (*UInt32Value) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{5} } -func (m *UInt32Value) Reset() { *m = UInt32Value{} } -func (m *UInt32Value) String() string { return proto.CompactTextString(m) } -func (*UInt32Value) ProtoMessage() {} -func (*UInt32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } -func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" } +func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" } + +func (m *UInt32Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt32Value.Unmarshal(m, b) +} +func (m *UInt32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt32Value.Marshal(b, m, deterministic) +} +func (m *UInt32Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt32Value.Merge(m, src) +} +func (m *UInt32Value) XXX_Size() int { + return xxx_messageInfo_UInt32Value.Size(m) +} +func (m *UInt32Value) XXX_DiscardUnknown() { + xxx_messageInfo_UInt32Value.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt32Value proto.InternalMessageInfo func (m *UInt32Value) GetValue() uint32 { if m != nil { @@ -166,14 +295,38 @@ func (m *UInt32Value) GetValue() uint32 { // The JSON representation for `BoolValue` is JSON `true` and `false`. type BoolValue struct { // The bool value. - Value bool `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` + Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BoolValue) Reset() { *m = BoolValue{} } -func (m *BoolValue) String() string { return proto.CompactTextString(m) } -func (*BoolValue) ProtoMessage() {} -func (*BoolValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } -func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" } +func (m *BoolValue) Reset() { *m = BoolValue{} } +func (m *BoolValue) String() string { return proto.CompactTextString(m) } +func (*BoolValue) ProtoMessage() {} +func (*BoolValue) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{6} +} + +func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" } + +func (m *BoolValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoolValue.Unmarshal(m, b) +} +func (m *BoolValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoolValue.Marshal(b, m, deterministic) +} +func (m *BoolValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoolValue.Merge(m, src) +} +func (m *BoolValue) XXX_Size() int { + return xxx_messageInfo_BoolValue.Size(m) +} +func (m *BoolValue) XXX_DiscardUnknown() { + xxx_messageInfo_BoolValue.DiscardUnknown(m) +} + +var xxx_messageInfo_BoolValue proto.InternalMessageInfo func (m *BoolValue) GetValue() bool { if m != nil { @@ -187,14 +340,38 @@ func (m *BoolValue) GetValue() bool { // The JSON representation for `StringValue` is JSON string. type StringValue struct { // The string value. 
- Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringValue) Reset() { *m = StringValue{} } +func (m *StringValue) String() string { return proto.CompactTextString(m) } +func (*StringValue) ProtoMessage() {} +func (*StringValue) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{7} +} + +func (*StringValue) XXX_WellKnownType() string { return "StringValue" } + +func (m *StringValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringValue.Unmarshal(m, b) +} +func (m *StringValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringValue.Marshal(b, m, deterministic) +} +func (m *StringValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringValue.Merge(m, src) +} +func (m *StringValue) XXX_Size() int { + return xxx_messageInfo_StringValue.Size(m) +} +func (m *StringValue) XXX_DiscardUnknown() { + xxx_messageInfo_StringValue.DiscardUnknown(m) } -func (m *StringValue) Reset() { *m = StringValue{} } -func (m *StringValue) String() string { return proto.CompactTextString(m) } -func (*StringValue) ProtoMessage() {} -func (*StringValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } -func (*StringValue) XXX_WellKnownType() string { return "StringValue" } +var xxx_messageInfo_StringValue proto.InternalMessageInfo func (m *StringValue) GetValue() string { if m != nil { @@ -208,14 +385,38 @@ func (m *StringValue) GetValue() string { // The JSON representation for `BytesValue` is JSON string. type BytesValue struct { // The bytes value. 
- Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } -func (m *BytesValue) Reset() { *m = BytesValue{} } -func (m *BytesValue) String() string { return proto.CompactTextString(m) } -func (*BytesValue) ProtoMessage() {} -func (*BytesValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } -func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" } +func (m *BytesValue) Reset() { *m = BytesValue{} } +func (m *BytesValue) String() string { return proto.CompactTextString(m) } +func (*BytesValue) ProtoMessage() {} +func (*BytesValue) Descriptor() ([]byte, []int) { + return fileDescriptor_5377b62bda767935, []int{8} +} + +func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" } + +func (m *BytesValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BytesValue.Unmarshal(m, b) +} +func (m *BytesValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BytesValue.Marshal(b, m, deterministic) +} +func (m *BytesValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_BytesValue.Merge(m, src) +} +func (m *BytesValue) XXX_Size() int { + return xxx_messageInfo_BytesValue.Size(m) +} +func (m *BytesValue) XXX_DiscardUnknown() { + xxx_messageInfo_BytesValue.DiscardUnknown(m) +} + +var xxx_messageInfo_BytesValue proto.InternalMessageInfo func (m *BytesValue) GetValue() []byte { if m != nil { @@ -236,27 +437,25 @@ func init() { proto.RegisterType((*BytesValue)(nil), "google.protobuf.BytesValue") } -func init() { - proto.RegisterFile("github.com/golang/protobuf/ptypes/wrappers/wrappers.proto", fileDescriptor0) -} - -var fileDescriptor0 = []byte{ - // 257 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28, - 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2f, 0x2f, - 0x4a, 0x2c, 0x28, 0x48, 0x2d, 0x42, 0x30, 0xf4, 0xc0, 0x2a, 0x84, 0xf8, 0xd3, 0xf3, 0xf3, 0xd3, - 0x73, 0x52, 0xf5, 0x60, 0xea, 0x95, 0x94, 0xb9, 0xb8, 0x5d, 0xf2, 0x4b, 0x93, 0x72, 0x52, 0xc3, - 0x12, 0x73, 0x4a, 0x53, 0x85, 0x44, 0xb8, 0x58, 0xcb, 0x40, 0x0c, 0x09, 0x46, 0x05, 0x46, 0x0d, - 0xc6, 0x20, 0x08, 0x47, 0x49, 0x89, 0x8b, 0xcb, 0x2d, 0x27, 0x3f, 0xb1, 0x04, 0x8b, 0x1a, 0x26, - 0x24, 0x35, 0x9e, 0x79, 0x25, 0x66, 0x26, 0x58, 0xd4, 0x30, 0xc3, 0xd4, 0x28, 0x73, 0x71, 0x87, - 0xe2, 0x52, 0xc4, 0x82, 0x6a, 0x90, 0xb1, 0x11, 0x16, 0x35, 0xac, 0x68, 0x06, 0x61, 0x55, 0xc4, - 0x0b, 0x53, 0xa4, 0xc8, 0xc5, 0xe9, 0x94, 0x9f, 0x9f, 0x83, 0x45, 0x09, 0x07, 0x92, 0x39, 0xc1, - 0x25, 0x45, 0x99, 0x79, 0xe9, 0x58, 0x14, 0x71, 0x22, 0x39, 0xc8, 0xa9, 0xb2, 0x24, 0xb5, 0x18, - 0x8b, 0x1a, 0x1e, 0xa8, 0x1a, 0xa7, 0x1a, 0x2e, 0xe1, 0xe4, 0xfc, 0x5c, 0x3d, 0xb4, 0xd0, 0x75, - 0xe2, 0x0d, 0x87, 0x06, 0x7f, 0x00, 0x48, 0x24, 0x80, 0x31, 0x4a, 0x8b, 0xf8, 0xa8, 0xfb, 0xc1, - 0xc8, 0xb8, 0x88, 0x89, 0xd9, 0x3d, 0xc0, 0x69, 0x15, 0x93, 0x9c, 0x3b, 0xc4, 0xdc, 0x00, 0xa8, - 0x52, 0xbd, 0xf0, 0xd4, 0x9c, 0x1c, 0xef, 0xbc, 0xfc, 0xf2, 0xbc, 0x10, 0x90, 0x96, 0x24, 0x36, - 0xb0, 0x19, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xee, 0x36, 0x8d, 0xd8, 0x19, 0x02, 0x00, - 0x00, +func init() { proto.RegisterFile("google/protobuf/wrappers.proto", 
fileDescriptor_5377b62bda767935) } + +var fileDescriptor_5377b62bda767935 = []byte{ + // 259 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c, + 0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca, + 0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c, + 0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5, + 0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13, + 0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8, + 0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca, + 0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a, + 0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x0d, + 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x5a, 0xe8, 0x3a, 0xf1, 0x86, 0x43, 0x83, 0x3f, 0x00, 0x24, + 0x12, 0xc0, 0x18, 0xa5, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x9f, + 0x9e, 0x9f, 0x93, 0x98, 0x97, 0x8e, 0x88, 0xaa, 0x82, 0x92, 0xca, 0x82, 0xd4, 0x62, 0x78, 0x8c, + 0xfd, 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, + 0x00, 0x54, 0xa9, 0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, + 0x12, 0x1b, 0xd8, 0x0c, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x19, 0x6c, 0xb9, 0xb8, 0xfe, + 0x01, 0x00, 0x00, } diff --git a/vendor/github.com/golang/protobuf/regenerate.sh b/vendor/github.com/golang/protobuf/regenerate.sh new file mode 100755 index 0000000..db0a0d6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/regenerate.sh @@ -0,0 +1,52 @@ +#!/bin/bash + +set -e + +# Install the working tree's protoc-gen-gen in a tempdir. +tmpdir=$(mktemp -d -t regen-wkt.XXXXXX) +trap 'rm -rf $tmpdir' EXIT +mkdir -p $tmpdir/bin +PATH=$tmpdir/bin:$PATH +GOBIN=$tmpdir/bin go install ./protoc-gen-go + +# Public imports require at least Go 1.9. +supportTypeAliases="" +if go list -f '{{context.ReleaseTags}}' runtime | grep -q go1.9; then + supportTypeAliases=1 +fi + +# Generate various test protos. +PROTO_DIRS=( + jsonpb/jsonpb_test_proto + proto + protoc-gen-go/testdata +) +for dir in ${PROTO_DIRS[@]}; do + for p in `find $dir -name "*.proto"`; do + if [[ $p == */import_public/* && ! $supportTypeAliases ]]; then + echo "# $p (skipped)" + continue; + fi + echo "# $p" + protoc -I$dir --go_out=plugins=grpc,paths=source_relative:$dir $p + done +done + +# Deriving the location of the source protos from the path to the +# protoc binary may be a bit odd, but this is what protoc itself does. +PROTO_INCLUDE=$(dirname $(dirname $(which protoc)))/include + +# Well-known types. +WKT_PROTOS=(any duration empty struct timestamp wrappers) +for p in ${WKT_PROTOS[@]}; do + echo "# google/protobuf/$p.proto" + protoc --go_out=paths=source_relative:$tmpdir google/protobuf/$p.proto + cp $tmpdir/google/protobuf/$p.pb.go ptypes/$p + cp $PROTO_INCLUDE/google/protobuf/$p.proto ptypes/$p +done + +# descriptor.proto. 
+echo "# google/protobuf/descriptor.proto" +protoc --go_out=paths=source_relative:$tmpdir google/protobuf/descriptor.proto +cp $tmpdir/google/protobuf/descriptor.pb.go protoc-gen-go/descriptor +cp $PROTO_INCLUDE/google/protobuf/descriptor.proto protoc-gen-go/descriptor diff --git a/vendor/github.com/google/go-github/.codecov.yml b/vendor/github.com/google/go-github/.codecov.yml new file mode 100644 index 0000000..746db53 --- /dev/null +++ b/vendor/github.com/google/go-github/.codecov.yml @@ -0,0 +1,3 @@ +ignore: + # ignore auto-generated code + - "github/github-accessors.go" diff --git a/vendor/github.com/google/go-github/.gitignore b/vendor/github.com/google/go-github/.gitignore index 9ed3b07..3515c4b 100644 --- a/vendor/github.com/google/go-github/.gitignore +++ b/vendor/github.com/google/go-github/.gitignore @@ -1 +1,2 @@ *.test +coverage.out diff --git a/vendor/github.com/google/go-github/.travis.yml b/vendor/github.com/google/go-github/.travis.yml index b82916a..a72785e 100644 --- a/vendor/github.com/google/go-github/.travis.yml +++ b/vendor/github.com/google/go-github/.travis.yml @@ -1,18 +1,30 @@ -sudo: false language: go + go: - - 1.7.x - - 1.8.x + - "1.x" + - "1.11.x" - master + matrix: allow_failures: - go: master fast_finish: true -install: - - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). + +cache: + directories: + - $HOME/.cache/go-build + - $HOME/gopath/pkg/mod + +env: + global: + - GO111MODULE=on + script: - - go get -t -v ./... - diff -u <(echo -n) <(gofmt -d -s .) - go generate -x ./... && git diff --exit-code; code=$?; git checkout -- .; (exit $code) # Check that go generate ./... produces a zero diff; clean up any changes afterwards. - - go tool vet . - - go test -v -race ./... + - go vet ./... + - go test -v -race -coverprofile=coverage.txt -covermode=atomic ./... + - go test -v -tags=integration -run=^$ ./test/integration # Check that integration test builds successfully, but don't run any of the tests (they hit live GitHub API). + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/google/go-github/AUTHORS b/vendor/github.com/google/go-github/AUTHORS index f521b6d..1bde5b1 100644 --- a/vendor/github.com/google/go-github/AUTHORS +++ b/vendor/github.com/google/go-github/AUTHORS @@ -8,119 +8,208 @@ # Authors who wish to be recognized in this file should add themselves (or # their employer, as appropriate). 
+178inaba Abhinav Gupta +Ahmed Hagy Ainsley Chong Akeda Bagus +Akhil Mohan Alec Thomas +Aleks Clark Alex Bramley Alexander Harkness +Allen Sun Amey Sakhadeo Andreas Garnæs Andrew Ryabchun +Andy Grunwald Andy Hume Andy Lindeman Anshuman Bhartiya +Antoine +Antoine Pelisse +Anubha Kushwaha +appilon +Aravind +Arda Kuyumcu Arıl Bozoluk Austin Dizzy +Ben Batha +Benjamen Keroack Beshr Kayali Beyang Liu Billy Lynch Björn Häuser Brad Harris +Brad Moylan Bradley Falzon +Brandon Cook Brian Egizi Bryan Boreham Cami Diez Carlos Alexandro Becker chandresh-pancholi +Charles Fenwick Elliott Charlie Yan +Chris King Chris Roche Chris Schaefer +Christoph Sassenberg Colin Misare Craig Peterson Cristian Maglie Daehyeok Mun Daniel Leavitt +Daniel Nilsson Dave Du Cros Dave Henderson David Deng +David Jannotta +Davide Zipeto +Dennis Webb +Dhi Aurrahman Diego Lapiduz Dmitri Shuralyov dmnlk Don Petersen Doug Turner +Drew Fradette +Eli Uriegas +Elliott Beach +Emerson Wood +eperm +Erick Fejta erwinvaneyk Fabrice +Felix Geisendörfer Filippo Valsorda Florian Forster +Francesc Gil Francis Fredrik Jönsson Garrett Squire +George Kontridze Georgy Buranov Gnahz Google Inc. +Grachev Mikhail griffin_stewie +Guillaume Jacquet Guz Alexander +Guðmundur Bjarni Ólafsson Hanno Hecker Hari haran +haya14busa Huy Tr huydx i2bskn Isao Jonas isqua Jameel Haffejee +Jan Kosecki +Javier Campanini +Jens Rantil +Jeremy Morris +Jesse Newland Jihoon Chung +Jimmi Dyson +Joan Saum Joe Tsai +John Barton John Engelman +Jordan Sussman +Joshua Bezaleel Abednego +JP Phillips +jpbelanger-mtl Juan Basso +Julien Garcia Gonzalez Julien Rostand Justin Abrahms +Jusung Lee jzhoucliqr +Katrina Owen +Kautilya Tripathi < tripathi.kautilya@gmail.com> +Kautilya Tripathi Keita Urashima Kevin Burke Konrad Malawski Kookheon Kwon Krzysztof Kowalczyk +Kshitij Saraogi kyokomi +Lovro Mažgon Lucas Alcantara Luke Evers +Luke Kysow Luke Roberts Luke Young Maksim Zhylinski Martin-Louis Bright +Marwan Sulaiman Mat Geist +Matt Matt Brender +Matt Gaunt Matt Landis Maxime Bury +Michael Spiegel Michael Tiller Michał Glapa Nathan VanBenschoten +Navaneeth Suresh Neil O'Toole Nick Miyake Nick Spragg +Nikhita Raghunath +Noah Zoschke +ns-cweber +Oleg Kovalov Ondřej Kupka +Palash Nigam Panagiotis Moustafellos +Parham Alvani Parker Moore +parkhyukjun89 +Pavel Shtanko +Pete Wagner +Petr Shevtsov Pierre Carrier Piotr Zurek Quinn Slack Rackspace US, Inc. +Radek Simko +Radliński Ignacy +Rajendra arora +RaviTeja Pothana rc1140 Red Hat, Inc. Rob Figueiredo +Rohit Upadhyay Ronak Jain Ruben Vereecken +Ryan Leung Ryan Lower Sahil Dua saisi +Sam Minnée +Sandeep Sukhani Sander van Harmelen +Sanket Payghan +Sarasa Kisaragi Sean Wang +Sebastian Mandrean +Sebastian Mæland Pedersen +Sergey Romanov Sevki +Shagun Khemka +shakeelrao Shawn Catanzarite Shawn Smith sona-tar SoundCloud, Ltd. +Sridhar Mocherla Stian Eikeland +Tasya Aditya Rukmana Thomas Bruyelle Timothée Peignier Trey Tacon @@ -128,8 +217,11 @@ ttacon Varadarajan Aravamudhan Victor Castell Victor Vrantchan +Vlad Ungureanu +Wasim Thabraze Will Maier William Bailey +xibz Yann Malet Yannick Utard Yicheng Qin diff --git a/vendor/github.com/google/go-github/CONTRIBUTING.md b/vendor/github.com/google/go-github/CONTRIBUTING.md index 68a23f0..e2a2b28 100644 --- a/vendor/github.com/google/go-github/CONTRIBUTING.md +++ b/vendor/github.com/google/go-github/CONTRIBUTING.md @@ -17,6 +17,18 @@ You generally only need to submit a CLA once, so if you've already submitted one again. 
+## Reporting issues ## + +Bugs, feature requests, and development-related questions should be directed to +our [GitHub issue tracker](https://github.com/google/go-github/issues). If +reporting a bug, please try and provide as much context as possible such as +your operating system, Go version, and anything else that might be relevant to +the bug. For feature requests, please explain what you're trying to do, and +how the requested feature would help you do that. + +Security related bugs can either be reported in the issue tracker, or if they +are more sensitive, emailed to . + ## Submitting a patch ## 1. It's generally best to start by opening a new issue describing the bug or @@ -55,7 +67,7 @@ again. [forking]: https://help.github.com/articles/fork-a-repo [golint]: https://github.com/golang/lint -[golint readme]: https://github.com/golang/lint/blob/master/README +[golint readme]: https://github.com/golang/lint/blob/master/README.md [gocov]: https://github.com/axw/gocov [gocov-html]: https://github.com/matm/gocov-html [well-formed commit messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html diff --git a/vendor/github.com/google/go-github/LICENSE b/vendor/github.com/google/go-github/LICENSE index 53d5374..28b6486 100644 --- a/vendor/github.com/google/go-github/LICENSE +++ b/vendor/github.com/google/go-github/LICENSE @@ -25,317 +25,3 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - ----------- - -Some documentation is taken from the GitHub Developer site -, which is available under the following Creative -Commons Attribution 3.0 License. This applies only to the go-github source -code and would not apply to any compiled binaries. - -THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE -COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY -COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS -AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. - -BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE -TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY -BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS -CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND -CONDITIONS. - -1. Definitions - - a. "Adaptation" means a work based upon the Work, or upon the Work and - other pre-existing works, such as a translation, adaptation, - derivative work, arrangement of music or other alterations of a - literary or artistic work, or phonogram or performance and includes - cinematographic adaptations or any other form in which the Work may be - recast, transformed, or adapted including in any form recognizably - derived from the original, except that a work that constitutes a - Collection will not be considered an Adaptation for the purpose of - this License. For the avoidance of doubt, where the Work is a musical - work, performance or phonogram, the synchronization of the Work in - timed-relation with a moving image ("synching") will be considered an - Adaptation for the purpose of this License. - b. 
"Collection" means a collection of literary or artistic works, such as - encyclopedias and anthologies, or performances, phonograms or - broadcasts, or other works or subject matter other than works listed - in Section 1(f) below, which, by reason of the selection and - arrangement of their contents, constitute intellectual creations, in - which the Work is included in its entirety in unmodified form along - with one or more other contributions, each constituting separate and - independent works in themselves, which together are assembled into a - collective whole. A work that constitutes a Collection will not be - considered an Adaptation (as defined above) for the purposes of this - License. - c. "Distribute" means to make available to the public the original and - copies of the Work or Adaptation, as appropriate, through sale or - other transfer of ownership. - d. "Licensor" means the individual, individuals, entity or entities that - offer(s) the Work under the terms of this License. - e. "Original Author" means, in the case of a literary or artistic work, - the individual, individuals, entity or entities who created the Work - or if no individual or entity can be identified, the publisher; and in - addition (i) in the case of a performance the actors, singers, - musicians, dancers, and other persons who act, sing, deliver, declaim, - play in, interpret or otherwise perform literary or artistic works or - expressions of folklore; (ii) in the case of a phonogram the producer - being the person or legal entity who first fixes the sounds of a - performance or other sounds; and, (iii) in the case of broadcasts, the - organization that transmits the broadcast. - f. "Work" means the literary and/or artistic work offered under the terms - of this License including without limitation any production in the - literary, scientific and artistic domain, whatever may be the mode or - form of its expression including digital form, such as a book, - pamphlet and other writing; a lecture, address, sermon or other work - of the same nature; a dramatic or dramatico-musical work; a - choreographic work or entertainment in dumb show; a musical - composition with or without words; a cinematographic work to which are - assimilated works expressed by a process analogous to cinematography; - a work of drawing, painting, architecture, sculpture, engraving or - lithography; a photographic work to which are assimilated works - expressed by a process analogous to photography; a work of applied - art; an illustration, map, plan, sketch or three-dimensional work - relative to geography, topography, architecture or science; a - performance; a broadcast; a phonogram; a compilation of data to the - extent it is protected as a copyrightable work; or a work performed by - a variety or circus performer to the extent it is not otherwise - considered a literary or artistic work. - g. "You" means an individual or entity exercising rights under this - License who has not previously violated the terms of this License with - respect to the Work, or who has received express permission from the - Licensor to exercise rights under this License despite a previous - violation. - h. 
"Publicly Perform" means to perform public recitations of the Work and - to communicate to the public those public recitations, by any means or - process, including by wire or wireless means or public digital - performances; to make available to the public Works in such a way that - members of the public may access these Works from a place and at a - place individually chosen by them; to perform the Work to the public - by any means or process and the communication to the public of the - performances of the Work, including by public digital performance; to - broadcast and rebroadcast the Work by any means including signs, - sounds or images. - i. "Reproduce" means to make copies of the Work by any means including - without limitation by sound or visual recordings and the right of - fixation and reproducing fixations of the Work, including storage of a - protected performance or phonogram in digital form or other electronic - medium. - -2. Fair Dealing Rights. Nothing in this License is intended to reduce, -limit, or restrict any uses free from copyright or rights arising from -limitations or exceptions that are provided for in connection with the -copyright protection under copyright law or other applicable laws. - -3. License Grant. Subject to the terms and conditions of this License, -Licensor hereby grants You a worldwide, royalty-free, non-exclusive, -perpetual (for the duration of the applicable copyright) license to -exercise the rights in the Work as stated below: - - a. to Reproduce the Work, to incorporate the Work into one or more - Collections, and to Reproduce the Work as incorporated in the - Collections; - b. to create and Reproduce Adaptations provided that any such Adaptation, - including any translation in any medium, takes reasonable steps to - clearly label, demarcate or otherwise identify that changes were made - to the original Work. For example, a translation could be marked "The - original work was translated from English to Spanish," or a - modification could indicate "The original work has been modified."; - c. to Distribute and Publicly Perform the Work including as incorporated - in Collections; and, - d. to Distribute and Publicly Perform Adaptations. - e. For the avoidance of doubt: - - i. Non-waivable Compulsory License Schemes. In those jurisdictions in - which the right to collect royalties through any statutory or - compulsory licensing scheme cannot be waived, the Licensor - reserves the exclusive right to collect such royalties for any - exercise by You of the rights granted under this License; - ii. Waivable Compulsory License Schemes. In those jurisdictions in - which the right to collect royalties through any statutory or - compulsory licensing scheme can be waived, the Licensor waives the - exclusive right to collect such royalties for any exercise by You - of the rights granted under this License; and, - iii. Voluntary License Schemes. The Licensor waives the right to - collect royalties, whether individually or, in the event that the - Licensor is a member of a collecting society that administers - voluntary licensing schemes, via that society, from any exercise - by You of the rights granted under this License. - -The above rights may be exercised in all media and formats whether now -known or hereafter devised. The above rights include the right to make -such modifications as are technically necessary to exercise the rights in -other media and formats. Subject to Section 8(f), all rights not expressly -granted by Licensor are hereby reserved. 
- -4. Restrictions. The license granted in Section 3 above is expressly made -subject to and limited by the following restrictions: - - a. You may Distribute or Publicly Perform the Work only under the terms - of this License. You must include a copy of, or the Uniform Resource - Identifier (URI) for, this License with every copy of the Work You - Distribute or Publicly Perform. You may not offer or impose any terms - on the Work that restrict the terms of this License or the ability of - the recipient of the Work to exercise the rights granted to that - recipient under the terms of the License. You may not sublicense the - Work. You must keep intact all notices that refer to this License and - to the disclaimer of warranties with every copy of the Work You - Distribute or Publicly Perform. When You Distribute or Publicly - Perform the Work, You may not impose any effective technological - measures on the Work that restrict the ability of a recipient of the - Work from You to exercise the rights granted to that recipient under - the terms of the License. This Section 4(a) applies to the Work as - incorporated in a Collection, but this does not require the Collection - apart from the Work itself to be made subject to the terms of this - License. If You create a Collection, upon notice from any Licensor You - must, to the extent practicable, remove from the Collection any credit - as required by Section 4(b), as requested. If You create an - Adaptation, upon notice from any Licensor You must, to the extent - practicable, remove from the Adaptation any credit as required by - Section 4(b), as requested. - b. If You Distribute, or Publicly Perform the Work or any Adaptations or - Collections, You must, unless a request has been made pursuant to - Section 4(a), keep intact all copyright notices for the Work and - provide, reasonable to the medium or means You are utilizing: (i) the - name of the Original Author (or pseudonym, if applicable) if supplied, - and/or if the Original Author and/or Licensor designate another party - or parties (e.g., a sponsor institute, publishing entity, journal) for - attribution ("Attribution Parties") in Licensor's copyright notice, - terms of service or by other reasonable means, the name of such party - or parties; (ii) the title of the Work if supplied; (iii) to the - extent reasonably practicable, the URI, if any, that Licensor - specifies to be associated with the Work, unless such URI does not - refer to the copyright notice or licensing information for the Work; - and (iv) , consistent with Section 3(b), in the case of an Adaptation, - a credit identifying the use of the Work in the Adaptation (e.g., - "French translation of the Work by Original Author," or "Screenplay - based on original Work by Original Author"). The credit required by - this Section 4 (b) may be implemented in any reasonable manner; - provided, however, that in the case of a Adaptation or Collection, at - a minimum such credit will appear, if a credit for all contributing - authors of the Adaptation or Collection appears, then as part of these - credits and in a manner at least as prominent as the credits for the - other contributing authors. 
For the avoidance of doubt, You may only - use the credit required by this Section for the purpose of attribution - in the manner set out above and, by exercising Your rights under this - License, You may not implicitly or explicitly assert or imply any - connection with, sponsorship or endorsement by the Original Author, - Licensor and/or Attribution Parties, as appropriate, of You or Your - use of the Work, without the separate, express prior written - permission of the Original Author, Licensor and/or Attribution - Parties. - c. Except as otherwise agreed in writing by the Licensor or as may be - otherwise permitted by applicable law, if You Reproduce, Distribute or - Publicly Perform the Work either by itself or as part of any - Adaptations or Collections, You must not distort, mutilate, modify or - take other derogatory action in relation to the Work which would be - prejudicial to the Original Author's honor or reputation. Licensor - agrees that in those jurisdictions (e.g. Japan), in which any exercise - of the right granted in Section 3(b) of this License (the right to - make Adaptations) would be deemed to be a distortion, mutilation, - modification or other derogatory action prejudicial to the Original - Author's honor and reputation, the Licensor will waive or not assert, - as appropriate, this Section, to the fullest extent permitted by the - applicable national law, to enable You to reasonably exercise Your - right under Section 3(b) of this License (right to make Adaptations) - but not otherwise. - -5. Representations, Warranties and Disclaimer - -UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR -OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY -KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, -INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, -FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF -LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, -WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION -OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. - -6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE -LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR -ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES -ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS -BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. Termination - - a. This License and the rights granted hereunder will terminate - automatically upon any breach by You of the terms of this License. - Individuals or entities who have received Adaptations or Collections - from You under this License, however, will not have their licenses - terminated provided such individuals or entities remain in full - compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will - survive any termination of this License. - b. Subject to the above terms and conditions, the license granted here is - perpetual (for the duration of the applicable copyright in the Work). 
- Notwithstanding the above, Licensor reserves the right to release the - Work under different license terms or to stop distributing the Work at - any time; provided, however that any such election will not serve to - withdraw this License (or any other license that has been, or is - required to be, granted under the terms of this License), and this - License will continue in full force and effect unless terminated as - stated above. - -8. Miscellaneous - - a. Each time You Distribute or Publicly Perform the Work or a Collection, - the Licensor offers to the recipient a license to the Work on the same - terms and conditions as the license granted to You under this License. - b. Each time You Distribute or Publicly Perform an Adaptation, Licensor - offers to the recipient a license to the original Work on the same - terms and conditions as the license granted to You under this License. - c. If any provision of this License is invalid or unenforceable under - applicable law, it shall not affect the validity or enforceability of - the remainder of the terms of this License, and without further action - by the parties to this agreement, such provision shall be reformed to - the minimum extent necessary to make such provision valid and - enforceable. - d. No term or provision of this License shall be deemed waived and no - breach consented to unless such waiver or consent shall be in writing - and signed by the party to be charged with such waiver or consent. - e. This License constitutes the entire agreement between the parties with - respect to the Work licensed here. There are no understandings, - agreements or representations with respect to the Work not specified - here. Licensor shall not be bound by any additional provisions that - may appear in any communication from You. This License may not be - modified without the mutual written agreement of the Licensor and You. - f. The rights granted under, and the subject matter referenced, in this - License were drafted utilizing the terminology of the Berne Convention - for the Protection of Literary and Artistic Works (as amended on - September 28, 1979), the Rome Convention of 1961, the WIPO Copyright - Treaty of 1996, the WIPO Performances and Phonograms Treaty of 1996 - and the Universal Copyright Convention (as revised on July 24, 1971). - These rights and subject matter take effect in the relevant - jurisdiction in which the License terms are sought to be enforced - according to the corresponding provisions of the implementation of - those treaty provisions in the applicable national law. If the - standard suite of rights granted under applicable copyright law - includes additional rights not granted under this License, such - additional rights are deemed to be included in the License; this - License is not intended to restrict the license of any rights under - applicable law. - - -Creative Commons Notice - - Creative Commons is not a party to this License, and makes no warranty - whatsoever in connection with the Work. Creative Commons will not be - liable to You or any party on any legal theory for any damages - whatsoever, including without limitation any general, special, - incidental or consequential damages arising in connection to this - license. Notwithstanding the foregoing two (2) sentences, if Creative - Commons has expressly identified itself as the Licensor hereunder, it - shall have all rights and obligations of Licensor. 
- - Except for the limited purpose of indicating to the public that the - Work is licensed under the CCPL, Creative Commons does not authorize - the use by either party of the trademark "Creative Commons" or any - related trademark or logo of Creative Commons without the prior - written consent of Creative Commons. Any permitted use will be in - compliance with Creative Commons' then-current trademark usage - guidelines, as may be published on its website or otherwise made - available upon request from time to time. For the avoidance of doubt, - this trademark restriction does not form part of this License. - - Creative Commons may be contacted at http://creativecommons.org/. diff --git a/vendor/github.com/google/go-github/README.md b/vendor/github.com/google/go-github/README.md index 9c12172..b9d2011 100644 --- a/vendor/github.com/google/go-github/README.md +++ b/vendor/github.com/google/go-github/README.md @@ -1,18 +1,24 @@ # go-github # -go-github is a Go client library for accessing the [GitHub API][]. +[![GoDoc](https://godoc.org/github.com/google/go-github/github?status.svg)](https://godoc.org/github.com/google/go-github/github) [![Build Status](https://travis-ci.org/google/go-github.svg?branch=master)](https://travis-ci.org/google/go-github) [![Test Coverage](https://codecov.io/gh/google/go-github/branch/master/graph/badge.svg)](https://codecov.io/gh/google/go-github) [![Discuss at go-github@googlegroups.com](https://img.shields.io/badge/discuss-go--github%40googlegroups.com-blue.svg)](https://groups.google.com/group/go-github) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/796/badge)](https://bestpractices.coreinfrastructure.org/projects/796) -**Documentation:** [![GoDoc](https://godoc.org/github.com/google/go-github/github?status.svg)](https://godoc.org/github.com/google/go-github/github) -**Mailing List:** [go-github@googlegroups.com](https://groups.google.com/group/go-github) -**Build Status:** [![Build Status](https://travis-ci.org/google/go-github.svg?branch=master)](https://travis-ci.org/google/go-github) -**Test Coverage:** [![Test Coverage](https://coveralls.io/repos/google/go-github/badge.svg?branch=master)](https://coveralls.io/r/google/go-github?branch=master) ([gocov report](https://drone.io/github.com/google/go-github/files/coverage.html)) +go-github is a Go client library for accessing the [GitHub API v3][]. -go-github requires Go version 1.7 or greater. +Currently, **go-github requires Go version 1.9 or greater**. go-github tracks +[Go's version support policy][support-policy]. We do our best not to break +older versions of Go if we don't have to, but due to tooling constraints, we +don't always test older versions. + +[support-policy]: https://golang.org/doc/devel/release.html#policy + +If you're interested in using the [GraphQL API v4][], the recommended library is +[shurcooL/githubv4][]. ## Usage ## ```go -import "github.com/google/go-github/github" +import "github.com/google/go-github/v24/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) +import "github.com/google/go-github/github" // with go modules disabled ``` Construct a new GitHub client, then use the various services on the client to @@ -22,7 +28,7 @@ access different parts of the GitHub API. 
For example: client := github.NewClient(nil) // list all organizations for user "willnorris" -orgs, _, err := client.Organizations.List(ctx, "willnorris", nil) +orgs, _, err := client.Organizations.List(context.Background(), "willnorris", nil) ``` Some API methods have optional parameters that can be passed. For example: @@ -32,13 +38,21 @@ client := github.NewClient(nil) // list public repositories for org "github" opt := &github.RepositoryListByOrgOptions{Type: "public"} -repos, _, err := client.Repositories.ListByOrg(ctx, "github", opt) +repos, _, err := client.Repositories.ListByOrg(context.Background(), "github", opt) ``` The services of a client divide the API into logical chunks and correspond to the structure of the GitHub API documentation at https://developer.github.com/v3/. +NOTE: Using the [context](https://godoc.org/context) package, one can easily +pass cancelation signals and deadlines to various services of the client for +handling a request. In case there is no context available, then `context.Background()` +can be used as a starting point. + +For more sample code snippets, head over to the +[example](https://github.com/google/go-github/tree/master/example) directory. + ### Authentication ### The go-github library does not directly handle authentication. Instead, when @@ -52,16 +66,16 @@ API token][]), you can use it with the oauth2 library using: import "golang.org/x/oauth2" func main() { - ctx := context.Background() - ts := oauth2.StaticTokenSource( - &oauth2.Token{AccessToken: "... your access token ..."}, - ) - tc := oauth2.NewClient(ctx, ts) + ctx := context.Background() + ts := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: "... your access token ..."}, + ) + tc := oauth2.NewClient(ctx, ts) - client := github.NewClient(tc) + client := github.NewClient(tc) - // list all repositories for the authenticated user - repos, _, err := client.Repositories.List(ctx, "", nil) + // list all repositories for the authenticated user + repos, _, err := client.Repositories.List(ctx, "", nil) } ``` @@ -81,16 +95,16 @@ package. import "github.com/bradleyfalzon/ghinstallation" func main() { - // Wrap the shared transport for use with the integration ID 1 authenticating with installation ID 99. - itr, err := ghinstallation.NewKeyFromFile(http.DefaultTransport, 1, 99, "2016-10-19.private-key.pem") - if err != nil { - // Handle error. - } + // Wrap the shared transport for use with the integration ID 1 authenticating with installation ID 99. + itr, err := ghinstallation.NewKeyFromFile(http.DefaultTransport, 1, 99, "2016-10-19.private-key.pem") + if err != nil { + // Handle error. + } - // Use installation transport with client. - client := github.NewClient(&http.Client{Transport: itr}) + // Use installation transport with client. + client := github.NewClient(&http.Client{Transport: itr}) - // Use client... + // Use client... } ``` @@ -98,9 +112,11 @@ func main() { GitHub imposes a rate limit on all API clients. Unauthenticated clients are limited to 60 requests per hour, while authenticated clients can make up to -5,000 requests per hour. To receive the higher rate limit when making calls -that are not issued on behalf of a user, use the -`UnauthenticatedRateLimitedTransport`. +5,000 requests per hour. The Search API has a custom rate limit. Unauthenticated +clients are limited to 10 requests per minute, while authenticated clients +can make up to 30 requests per minute. 
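
*(Editorial aside, not part of the patch: to tie together the context note and the rate-limit discussion in the README hunk above, here is a minimal sketch. It assumes the unversioned import path from the Usage section shown earlier and the exported `Response.Rate` field described above; treat it as illustrative rather than as code added by this diff.)*

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/google/go-github/github" // unversioned path, i.e. "with go modules disabled" form
)

func main() {
	client := github.NewClient(nil)

	// Deadlines and cancelation propagate through the context passed to each call.
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Every call also returns a *github.Response; its Rate field reflects the
	// rate-limit headers of that specific request.
	_, resp, err := client.Organizations.List(ctx, "willnorris", nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("rate limit: %d/%d remaining, resets at %v\n",
		resp.Rate.Remaining, resp.Rate.Limit, resp.Rate.Reset)
}
```
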
To receive the higher rate limit when +making calls that are not issued on behalf of a user, +use `UnauthenticatedRateLimitedTransport`. The returned `Response.Rate` value contains the rate limit information from the most recent API call. If a recent enough response isn't @@ -197,39 +213,54 @@ for { For complete usage of go-github, see the full [package docs][]. -[GitHub API]: https://developer.github.com/v3/ +[GitHub API v3]: https://developer.github.com/v3/ [oauth2]: https://github.com/golang/oauth2 [oauth2 docs]: https://godoc.org/golang.org/x/oauth2 [personal API token]: https://github.com/blog/1509-personal-api-tokens [package docs]: https://godoc.org/github.com/google/go-github/github +[GraphQL API v4]: https://developer.github.com/v4/ +[shurcooL/githubv4]: https://github.com/shurcooL/githubv4 ### Integration Tests ### -You can run integration tests from the `tests` directory. See the integration tests [README](tests/README.md). +You can run integration tests from the `test` directory. See the integration tests [README](test/README.md). + ## Roadmap ## This library is being initially developed for an internal application at Google, so API methods will likely be implemented in the order that they are needed by that application. You can track the status of implementation in -[this Google spreadsheet][roadmap]. Eventually, I would like to cover the entire -GitHub API, so contributions are of course [always welcome][contributing]. The -calling pattern is pretty well established, so adding new methods is relatively -straightforward. +[this Google spreadsheet][roadmap]. [roadmap]: https://docs.google.com/spreadsheet/ccc?key=0ApoVX4GOiXr-dGNKN1pObFh6ek1DR2FKUjBNZ1FmaEE&usp=sharing -[contributing]: CONTRIBUTING.md - - -## Google App Engine ## -Go on App Engine Classic (which as of this writing uses Go 1.6) can not use -the `"context"` import and still relies on `"golang.org/x/net/context"`. -As a result, if you wish to continue to use `go-github` on App Engine Classic, -you will need to rewrite all the `"context"` imports using the following command: - - `gofmt -w -r '"context" -> "golang.org/x/net/context"' *.go` - -See `with_appengine.go` for more details. +## Contributing ## +I would like to cover the entire GitHub API and contributions are of course always welcome. The +calling pattern is pretty well established, so adding new methods is relatively +straightforward. See [`CONTRIBUTING.md`](CONTRIBUTING.md) for details. + +## Versioning ## + +In general, go-github follows [semver](https://semver.org/) as closely as we +can for tagging releases of the package. For self-contained libraries, the +application of semantic versioning is relatively straightforward and generally +understood. But because go-github is a client library for the GitHub API, which +itself changes behavior, and because we are typically pretty aggressive about +implementing preview features of the GitHub API, we've adopted the following +versioning policy: + +* We increment the **major version** with any incompatible change to + non-preview functionality, including changes to the exported Go API surface + or behavior of the API. +* We increment the **minor version** with any backwards-compatible changes to + functionality, as well as any changes to preview functionality in the GitHub + API. GitHub makes no guarantee about the stability of preview functionality, + so neither do we consider it a stable part of the go-github API. +* We increment the **patch version** with any backwards-compatible bug fixes. 
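
*(Editorial aside, not part of the patch: to make the major-version policy in the list above concrete, note that with Go modules the major version is carried in the import path, so a breaking release means changing the path explicitly rather than silently picking up new behavior. A small sketch reusing the `v24` path already shown in the Usage section; the `Zen` call is just a convenient unauthenticated endpoint for checking the wiring.)*

```go
package main

import (
	"context"
	"fmt"
	"log"

	// The /v24 element is the major version from the policy above; moving to a
	// new major release means updating this path (and go.mod) deliberately.
	"github.com/google/go-github/v24/github"
)

func main() {
	client := github.NewClient(nil)

	// Zen returns a random GitHub design philosophy string; handy as a smoke test.
	zen, _, err := client.Zen(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(zen)
}
```
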
+ +Preview functionality may take the form of entire methods or simply additional +data returned from an otherwise non-preview method. Refer to the GitHub API +documentation for details on preview functionality. ## License ## diff --git a/vendor/github.com/google/go-github/example/appengine/app.go b/vendor/github.com/google/go-github/example/appengine/app.go new file mode 100644 index 0000000..b9a66e0 --- /dev/null +++ b/vendor/github.com/google/go-github/example/appengine/app.go @@ -0,0 +1,49 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package demo provides an app that shows how to use the github package on +// Google App Engine. +package demo + +import ( + "fmt" + "net/http" + "os" + + "github.com/google/go-github/v24/github" + "golang.org/x/oauth2" + "google.golang.org/appengine" + "google.golang.org/appengine/log" +) + +func init() { + http.HandleFunc("/", handler) +} + +func handler(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/" { + http.NotFound(w, r) + return + } + + ctx := appengine.NewContext(r) + ts := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: os.Getenv("GITHUB_AUTH_TOKEN")}, + ) + tc := oauth2.NewClient(ctx, ts) + client := github.NewClient(tc) + + commits, _, err := client.Repositories.ListCommits(ctx, "google", "go-github", nil) + if err != nil { + log.Errorf(ctx, "ListCommits: %v", err) + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + for _, commit := range commits { + fmt.Fprintln(w, commit.GetHTMLURL()) + } +} diff --git a/vendor/github.com/google/go-github/example/appengine/app.yaml b/vendor/github.com/google/go-github/example/appengine/app.yaml new file mode 100644 index 0000000..dca235f --- /dev/null +++ b/vendor/github.com/google/go-github/example/appengine/app.yaml @@ -0,0 +1,14 @@ +# Copyright 2017 The go-github AUTHORS. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file. + +runtime: go +api_version: go1 + +handlers: +- url: /.* + script: _go_app + +env_variables: + GITHUB_AUTH_TOKEN: "-your-auth-token-here-" diff --git a/vendor/github.com/google/go-github/examples/basicauth/main.go b/vendor/github.com/google/go-github/example/basicauth/main.go similarity index 96% rename from vendor/github.com/google/go-github/examples/basicauth/main.go rename to vendor/github.com/google/go-github/example/basicauth/main.go index f67c3d6..a622d71 100644 --- a/vendor/github.com/google/go-github/examples/basicauth/main.go +++ b/vendor/github.com/google/go-github/example/basicauth/main.go @@ -16,7 +16,7 @@ import ( "strings" "syscall" - "github.com/google/go-github/github" + "github.com/google/go-github/v24/github" "golang.org/x/crypto/ssh/terminal" ) diff --git a/vendor/github.com/google/go-github/example/commitpr/main.go b/vendor/github.com/google/go-github/example/commitpr/main.go new file mode 100644 index 0000000..c96f4aa --- /dev/null +++ b/vendor/github.com/google/go-github/example/commitpr/main.go @@ -0,0 +1,216 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The commitpr command utilizes go-github as a CLI tool for +// pushing files to a branch and creating a pull request from it. 
+// It takes an auth token as an environment variable and creates +// the commit and the PR under the account affiliated with that token. +// +// The purpose of this example is to show how to use refs, trees and commits to +// create commits and pull requests. +// +// Note, if you want to push a single file, you probably prefer to use the +// content API. An example is available here: +// https://godoc.org/github.com/google/go-github/github#example-RepositoriesService-CreateFile +// +// Note, for this to work at least 1 commit is needed, so you if you use this +// after creating a repository you might want to make sure you set `AutoInit` to +// `true`. +package main + +import ( + "context" + "errors" + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "strings" + "time" + + "github.com/google/go-github/v24/github" + "golang.org/x/oauth2" +) + +var ( + sourceOwner = flag.String("source-owner", "", "Name of the owner (user or org) of the repo to create the commit in.") + sourceRepo = flag.String("source-repo", "", "Name of repo to create the commit in.") + commitMessage = flag.String("commit-message", "", "Content of the commit message.") + commitBranch = flag.String("commit-branch", "", "Name of branch to create the commit in. If it does not already exists, it will be created using the `base-branch` parameter") + baseBranch = flag.String("base-branch", "master", "Name of branch to create the `commit-branch` from.") + prRepoOwner = flag.String("merge-repo-owner", "", "Name of the owner (user or org) of the repo to create the PR against. If not specified, the value of the `-source-owner` flag will be used.") + prRepo = flag.String("merge-repo", "", "Name of repo to create the PR against. If not specified, the value of the `-source-repo` flag will be used.") + prBranch = flag.String("merge-branch", "master", "Name of branch to create the PR against (the one you want to merge your branch in via the PR).") + prSubject = flag.String("pr-title", "", "Title of the pull request. If not specified, no pull request will be created.") + prDescription = flag.String("pr-text", "", "Text to put in the description of the pull request.") + sourceFiles = flag.String("files", "", `Comma-separated list of files to commit and their location. +The local file is separated by its target location by a semi-colon. +If the file should be in the same location with the same name, you can just put the file name and omit the repetition. +Example: README.md,main.go:github/examples/commitpr/main.go`) + authorName = flag.String("author-name", "", "Name of the author of the commit.") + authorEmail = flag.String("author-email", "", "Email of the author of the commit.") +) + +var client *github.Client +var ctx = context.Background() + +// getRef returns the commit branch reference object if it exists or creates it +// from the base branch before returning it. +func getRef() (ref *github.Reference, err error) { + if ref, _, err = client.Git.GetRef(ctx, *sourceOwner, *sourceRepo, "refs/heads/"+*commitBranch); err == nil { + return ref, nil + } + + // We consider that an error means the branch has not been found and needs to + // be created. 
+ if *commitBranch == *baseBranch { + return nil, errors.New("The commit branch does not exist but `-base-branch` is the same as `-commit-branch`") + } + + if *baseBranch == "" { + return nil, errors.New("The `-base-branch` should not be set to an empty string when the branch specified by `-commit-branch` does not exists") + } + + var baseRef *github.Reference + if baseRef, _, err = client.Git.GetRef(ctx, *sourceOwner, *sourceRepo, "refs/heads/"+*baseBranch); err != nil { + return nil, err + } + newRef := &github.Reference{Ref: github.String("refs/heads/" + *commitBranch), Object: &github.GitObject{SHA: baseRef.Object.SHA}} + ref, _, err = client.Git.CreateRef(ctx, *sourceOwner, *sourceRepo, newRef) + return ref, err +} + +// getTree generates the tree to commit based on the given files and the commit +// of the ref you got in getRef. +func getTree(ref *github.Reference) (tree *github.Tree, err error) { + // Create a tree with what to commit. + entries := []github.TreeEntry{} + + // Load each file into the tree. + for _, fileArg := range strings.Split(*sourceFiles, ",") { + file, content, err := getFileContent(fileArg) + if err != nil { + return nil, err + } + entries = append(entries, github.TreeEntry{Path: github.String(file), Type: github.String("blob"), Content: github.String(string(content)), Mode: github.String("100644")}) + } + + tree, _, err = client.Git.CreateTree(ctx, *sourceOwner, *sourceRepo, *ref.Object.SHA, entries) + return tree, err +} + +// getFileContent loads the local content of a file and return the target name +// of the file in the target repository and its contents. +func getFileContent(fileArg string) (targetName string, b []byte, err error) { + var localFile string + files := strings.Split(fileArg, ":") + switch { + case len(files) < 1: + return "", nil, errors.New("empty `-files` parameter") + case len(files) == 1: + localFile = files[0] + targetName = files[0] + default: + localFile = files[0] + targetName = files[1] + } + + b, err = ioutil.ReadFile(localFile) + return targetName, b, err +} + +// createCommit creates the commit in the given reference using the given tree. +func pushCommit(ref *github.Reference, tree *github.Tree) (err error) { + // Get the parent commit to attach the commit to. + parent, _, err := client.Repositories.GetCommit(ctx, *sourceOwner, *sourceRepo, *ref.Object.SHA) + if err != nil { + return err + } + // This is not always populated, but is needed. + parent.Commit.SHA = parent.SHA + + // Create the commit using the tree. + date := time.Now() + author := &github.CommitAuthor{Date: &date, Name: authorName, Email: authorEmail} + commit := &github.Commit{Author: author, Message: commitMessage, Tree: tree, Parents: []github.Commit{*parent.Commit}} + newCommit, _, err := client.Git.CreateCommit(ctx, *sourceOwner, *sourceRepo, commit) + if err != nil { + return err + } + + // Attach the commit to the master branch. + ref.Object.SHA = newCommit.SHA + _, _, err = client.Git.UpdateRef(ctx, *sourceOwner, *sourceRepo, ref, false) + return err +} + +// createPR creates a pull request. 
Based on: https://godoc.org/github.com/google/go-github/github#example-PullRequestsService-Create +func createPR() (err error) { + if *prSubject == "" { + return errors.New("missing `-pr-title` flag; skipping PR creation") + } + + if *prRepoOwner != "" && *prRepoOwner != *sourceOwner { + *commitBranch = fmt.Sprintf("%s:%s", *sourceOwner, *commitBranch) + } else { + prRepoOwner = sourceOwner + } + + if *prRepo == "" { + prRepo = sourceRepo + } + + newPR := &github.NewPullRequest{ + Title: prSubject, + Head: commitBranch, + Base: prBranch, + Body: prDescription, + MaintainerCanModify: github.Bool(true), + } + + pr, _, err := client.PullRequests.Create(ctx, *prRepoOwner, *prRepo, newPR) + if err != nil { + return err + } + + fmt.Printf("PR created: %s\n", pr.GetHTMLURL()) + return nil +} + +func main() { + flag.Parse() + token := os.Getenv("GITHUB_AUTH_TOKEN") + if token == "" { + log.Fatal("Unauthorized: No token present") + } + if *sourceOwner == "" || *sourceRepo == "" || *commitBranch == "" || *sourceFiles == "" || *authorName == "" || *authorEmail == "" { + log.Fatal("You need to specify a non-empty value for the flags `-source-owner`, `-source-repo`, `-commit-branch`, `-files`, `-author-name` and `-author-email`") + } + ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token}) + tc := oauth2.NewClient(ctx, ts) + client = github.NewClient(tc) + + ref, err := getRef() + if err != nil { + log.Fatalf("Unable to get/create the commit reference: %s\n", err) + } + if ref == nil { + log.Fatalf("No error where returned but the reference is nil") + } + + tree, err := getTree(ref) + if err != nil { + log.Fatalf("Unable to create the tree based on the provided files: %s\n", err) + } + + if err := pushCommit(ref, tree); err != nil { + log.Fatalf("Unable to create the commit: %s\n", err) + } + + if err := createPR(); err != nil { + log.Fatalf("Error while creating the pull request: %s", err) + } +} diff --git a/vendor/github.com/google/go-github/example/migrations/main.go b/vendor/github.com/google/go-github/example/migrations/main.go new file mode 100644 index 0000000..f53278b --- /dev/null +++ b/vendor/github.com/google/go-github/example/migrations/main.go @@ -0,0 +1,41 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// migrations demonstrates the functionality of +// the user data migration API for the authenticated GitHub +// user and lists all the user migrations. +package main + +import ( + "context" + "fmt" + + "github.com/google/go-github/v24/github" + "golang.org/x/oauth2" +) + +func fetchAllUserMigrations() ([]*github.UserMigration, error) { + ctx := context.Background() + ts := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: ""}, + ) + tc := oauth2.NewClient(ctx, ts) + client := github.NewClient(tc) + + migrations, _, err := client.Migrations.ListUserMigrations(ctx) + return migrations, err +} + +func main() { + migrations, err := fetchAllUserMigrations() + if err != nil { + fmt.Printf("Error %v\n", err) + return + } + + for i, m := range migrations { + fmt.Printf("%v. %v", i+1, m.GetID()) + } +} diff --git a/vendor/github.com/google/go-github/example/newrepo/main.go b/vendor/github.com/google/go-github/example/newrepo/main.go new file mode 100644 index 0000000..df4327c --- /dev/null +++ b/vendor/github.com/google/go-github/example/newrepo/main.go @@ -0,0 +1,49 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. 
+// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The newrepo command utilizes go-github as a cli tool for +// creating new repositories. It takes an auth token as +// an enviroment variable and creates the new repo under +// the account affiliated with that token. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "os" + + "github.com/google/go-github/v24/github" + "golang.org/x/oauth2" +) + +var ( + name = flag.String("name", "", "Name of repo to create in authenticated user's GitHub account.") + description = flag.String("description", "", "Description of created repo.") + private = flag.Bool("private", false, "Will created repo be private.") +) + +func main() { + flag.Parse() + token := os.Getenv("GITHUB_AUTH_TOKEN") + if token == "" { + log.Fatal("Unauthorized: No token present") + } + if *name == "" { + log.Fatal("No name: New repos must be given a name") + } + ctx := context.Background() + ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token}) + tc := oauth2.NewClient(ctx, ts) + client := github.NewClient(tc) + + r := &github.Repository{Name: name, Private: private, Description: description} + repo, _, err := client.Repositories.Create(ctx, "", r) + if err != nil { + log.Fatal(err) + } + fmt.Printf("Successfully created new repo: %v\n", repo.GetName()) +} diff --git a/vendor/github.com/google/go-github/example/simple/main.go b/vendor/github.com/google/go-github/example/simple/main.go new file mode 100644 index 0000000..24e33bb --- /dev/null +++ b/vendor/github.com/google/go-github/example/simple/main.go @@ -0,0 +1,40 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The simple command demonstrates a simple functionality which +// prompts the user for a GitHub username and lists all the public +// organization memberships of the specified username. +package main + +import ( + "context" + "fmt" + + "github.com/google/go-github/v24/github" +) + +// Fetch all the public organizations' membership of a user. +// +func FetchOrganizations(username string) ([]*github.Organization, error) { + client := github.NewClient(nil) + orgs, _, err := client.Organizations.List(context.Background(), username, nil) + return orgs, err +} + +func main() { + var username string + fmt.Print("Enter GitHub username: ") + fmt.Scanf("%s", &username) + + organizations, err := FetchOrganizations(username) + if err != nil { + fmt.Printf("Error: %v\n", err) + return + } + + for i, organization := range organizations { + fmt.Printf("%v. %v\n", i+1, organization.GetLogin()) + } +} diff --git a/vendor/github.com/google/go-github/github/activity_events.go b/vendor/github.com/google/go-github/github/activity_events.go index 6b35056..1754b78 100644 --- a/vendor/github.com/google/go-github/github/activity_events.go +++ b/vendor/github.com/google/go-github/github/activity_events.go @@ -7,116 +7,9 @@ package github import ( "context" - "encoding/json" "fmt" - "time" ) -// Event represents a GitHub event. 
-type Event struct { - Type *string `json:"type,omitempty"` - Public *bool `json:"public"` - RawPayload *json.RawMessage `json:"payload,omitempty"` - Repo *Repository `json:"repo,omitempty"` - Actor *User `json:"actor,omitempty"` - Org *Organization `json:"org,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - ID *string `json:"id,omitempty"` -} - -func (e Event) String() string { - return Stringify(e) -} - -// ParsePayload parses the event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -func (e *Event) ParsePayload() (payload interface{}, err error) { - switch *e.Type { - case "CommitCommentEvent": - payload = &CommitCommentEvent{} - case "CreateEvent": - payload = &CreateEvent{} - case "DeleteEvent": - payload = &DeleteEvent{} - case "DeploymentEvent": - payload = &DeploymentEvent{} - case "DeploymentStatusEvent": - payload = &DeploymentStatusEvent{} - case "ForkEvent": - payload = &ForkEvent{} - case "GollumEvent": - payload = &GollumEvent{} - case "InstallationEvent": - payload = &InstallationEvent{} - case "InstallationRepositoriesEvent": - payload = &InstallationRepositoriesEvent{} - case "IssueCommentEvent": - payload = &IssueCommentEvent{} - case "IssuesEvent": - payload = &IssuesEvent{} - case "LabelEvent": - payload = &LabelEvent{} - case "MemberEvent": - payload = &MemberEvent{} - case "MembershipEvent": - payload = &MembershipEvent{} - case "MilestoneEvent": - payload = &MilestoneEvent{} - case "OrganizationEvent": - payload = &OrganizationEvent{} - case "OrgBlockEvent": - payload = &OrgBlockEvent{} - case "PageBuildEvent": - payload = &PageBuildEvent{} - case "PingEvent": - payload = &PingEvent{} - case "ProjectEvent": - payload = &ProjectEvent{} - case "ProjectCardEvent": - payload = &ProjectCardEvent{} - case "ProjectColumnEvent": - payload = &ProjectColumnEvent{} - case "PublicEvent": - payload = &PublicEvent{} - case "PullRequestEvent": - payload = &PullRequestEvent{} - case "PullRequestReviewEvent": - payload = &PullRequestReviewEvent{} - case "PullRequestReviewCommentEvent": - payload = &PullRequestReviewCommentEvent{} - case "PushEvent": - payload = &PushEvent{} - case "ReleaseEvent": - payload = &ReleaseEvent{} - case "RepositoryEvent": - payload = &RepositoryEvent{} - case "StatusEvent": - payload = &StatusEvent{} - case "TeamEvent": - payload = &TeamEvent{} - case "TeamAddEvent": - payload = &TeamAddEvent{} - case "WatchEvent": - payload = &WatchEvent{} - } - err = json.Unmarshal(*e.RawPayload, &payload) - return payload, err -} - -// Payload returns the parsed event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -// -// Deprecated: Use ParsePayload instead, which returns an error -// rather than panics if JSON unmarshaling raw payload fails. -func (e *Event) Payload() (payload interface{}) { - var err error - payload, err = e.ParsePayload() - if err != nil { - panic(err) - } - return payload -} - // ListEvents drinks from the firehose of all public events across GitHub. 
// // GitHub API docs: https://developer.github.com/v3/activity/events/#list-public-events diff --git a/vendor/github.com/google/go-github/github/activity_events_test.go b/vendor/github.com/google/go-github/github/activity_events_test.go index 2b59d11..0b01c4a 100644 --- a/vendor/github.com/google/go-github/github/activity_events_test.go +++ b/vendor/github.com/google/go-github/github/activity_events_test.go @@ -15,7 +15,7 @@ import ( ) func TestActivityService_ListEvents(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/events", func(w http.ResponseWriter, r *http.Request) { @@ -39,7 +39,7 @@ func TestActivityService_ListEvents(t *testing.T) { } func TestActivityService_ListRepositoryEvents(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/events", func(w http.ResponseWriter, r *http.Request) { @@ -63,12 +63,15 @@ func TestActivityService_ListRepositoryEvents(t *testing.T) { } func TestActivityService_ListRepositoryEvents_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListRepositoryEvents(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestActivityService_ListIssueEventsForRepository(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/events", func(w http.ResponseWriter, r *http.Request) { @@ -85,19 +88,22 @@ func TestActivityService_ListIssueEventsForRepository(t *testing.T) { t.Errorf("Activities.ListIssueEventsForRepository returned error: %v", err) } - want := []*IssueEvent{{ID: Int(1)}, {ID: Int(2)}} + want := []*IssueEvent{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(events, want) { t.Errorf("Activities.ListIssueEventsForRepository returned %+v, want %+v", events, want) } } func TestActivityService_ListIssueEventsForRepository_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListIssueEventsForRepository(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestActivityService_ListEventsForRepoNetwork(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/networks/o/r/events", func(w http.ResponseWriter, r *http.Request) { @@ -121,12 +127,15 @@ func TestActivityService_ListEventsForRepoNetwork(t *testing.T) { } func TestActivityService_ListEventsForRepoNetwork_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListEventsForRepoNetwork(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestActivityService_ListEventsForOrganization(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/events", func(w http.ResponseWriter, r *http.Request) { @@ -150,12 +159,15 @@ func TestActivityService_ListEventsForOrganization(t *testing.T) { } func TestActivityService_ListEventsForOrganization_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListEventsForOrganization(context.Background(), "%", nil) testURLParseError(t, err) } func TestActivityService_ListEventsPerformedByUser_all(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/events", func(w http.ResponseWriter, r *http.Request) { @@ -179,7 +191,7 @@ func TestActivityService_ListEventsPerformedByUser_all(t 
*testing.T) { } func TestActivityService_ListEventsPerformedByUser_publicOnly(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/events/public", func(w http.ResponseWriter, r *http.Request) { @@ -199,12 +211,15 @@ func TestActivityService_ListEventsPerformedByUser_publicOnly(t *testing.T) { } func TestActivityService_ListEventsPerformedByUser_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListEventsPerformedByUser(context.Background(), "%", false, nil) testURLParseError(t, err) } func TestActivityService_ListEventsReceivedByUser_all(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/received_events", func(w http.ResponseWriter, r *http.Request) { @@ -228,7 +243,7 @@ func TestActivityService_ListEventsReceivedByUser_all(t *testing.T) { } func TestActivityService_ListEventsReceivedByUser_publicOnly(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/received_events/public", func(w http.ResponseWriter, r *http.Request) { @@ -248,12 +263,15 @@ func TestActivityService_ListEventsReceivedByUser_publicOnly(t *testing.T) { } func TestActivityService_ListEventsReceivedByUser_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListEventsReceivedByUser(context.Background(), "%", false, nil) testURLParseError(t, err) } func TestActivityService_ListUserEventsForOrganization(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/events/orgs/o", func(w http.ResponseWriter, r *http.Request) { @@ -283,7 +301,7 @@ func TestActivityService_EventParsePayload_typed(t *testing.T) { t.Fatalf("Unmarshal Event returned error: %v", err) } - want := &PushEvent{PushID: Int(1)} + want := &PushEvent{PushID: Int64(1)} got, err := event.ParsePayload() if err != nil { t.Fatalf("ParsePayload returned unexpected error: %v", err) @@ -320,7 +338,7 @@ func TestActivityService_EventParsePayload_installation(t *testing.T) { t.Fatalf("Unmarshal Event returned error: %v", err) } - want := &PullRequestEvent{Installation: &Installation{ID: Int(1)}} + want := &PullRequestEvent{Installation: &Installation{ID: Int64(1)}} got, err := event.ParsePayload() if err != nil { t.Fatalf("ParsePayload returned unexpected error: %v", err) diff --git a/vendor/github.com/google/go-github/github/activity_notifications_test.go b/vendor/github.com/google/go-github/github/activity_notifications_test.go index cab8f94..a36b253 100644 --- a/vendor/github.com/google/go-github/github/activity_notifications_test.go +++ b/vendor/github.com/google/go-github/github/activity_notifications_test.go @@ -16,7 +16,7 @@ import ( ) func TestActivityService_ListNotification(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications", func(w http.ResponseWriter, r *http.Request) { @@ -34,8 +34,8 @@ func TestActivityService_ListNotification(t *testing.T) { opt := &NotificationListOptions{ All: true, Participating: true, - Since: time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC), - Before: time.Date(2007, 03, 04, 15, 04, 05, 0, time.UTC), + Since: time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC), + Before: time.Date(2007, time.March, 04, 15, 04, 05, 0, time.UTC), } notifications, _, err := client.Activity.ListNotifications(context.Background(), opt) if err != nil { @@ -49,7 
+49,7 @@ func TestActivityService_ListNotification(t *testing.T) { } func TestActivityService_ListRepositoryNotification(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/notifications", func(w http.ResponseWriter, r *http.Request) { @@ -69,7 +69,7 @@ func TestActivityService_ListRepositoryNotification(t *testing.T) { } func TestActivityService_MarkNotificationsRead(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications", func(w http.ResponseWriter, r *http.Request) { @@ -80,14 +80,14 @@ func TestActivityService_MarkNotificationsRead(t *testing.T) { w.WriteHeader(http.StatusResetContent) }) - _, err := client.Activity.MarkNotificationsRead(context.Background(), time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC)) + _, err := client.Activity.MarkNotificationsRead(context.Background(), time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC)) if err != nil { t.Errorf("Activity.MarkNotificationsRead returned error: %v", err) } } func TestActivityService_MarkRepositoryNotificationsRead(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/notifications", func(w http.ResponseWriter, r *http.Request) { @@ -98,14 +98,14 @@ func TestActivityService_MarkRepositoryNotificationsRead(t *testing.T) { w.WriteHeader(http.StatusResetContent) }) - _, err := client.Activity.MarkRepositoryNotificationsRead(context.Background(), "o", "r", time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC)) + _, err := client.Activity.MarkRepositoryNotificationsRead(context.Background(), "o", "r", time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC)) if err != nil { t.Errorf("Activity.MarkRepositoryNotificationsRead returned error: %v", err) } } func TestActivityService_GetThread(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications/threads/1", func(w http.ResponseWriter, r *http.Request) { @@ -125,7 +125,7 @@ func TestActivityService_GetThread(t *testing.T) { } func TestActivityService_MarkThreadRead(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications/threads/1", func(w http.ResponseWriter, r *http.Request) { @@ -140,7 +140,7 @@ func TestActivityService_MarkThreadRead(t *testing.T) { } func TestActivityService_GetThreadSubscription(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications/threads/1/subscription", func(w http.ResponseWriter, r *http.Request) { @@ -160,7 +160,7 @@ func TestActivityService_GetThreadSubscription(t *testing.T) { } func TestActivityService_SetThreadSubscription(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Subscription{Subscribed: Bool(true)} @@ -189,7 +189,7 @@ func TestActivityService_SetThreadSubscription(t *testing.T) { } func TestActivityService_DeleteThreadSubscription(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/notifications/threads/1/subscription", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/activity_star.go b/vendor/github.com/google/go-github/github/activity_star.go index d5b0671..5ae5c10 100644 --- a/vendor/github.com/google/go-github/github/activity_star.go +++ b/vendor/github.com/google/go-github/github/activity_star.go @@ -8,6 +8,7 @@ package github import ( "context" "fmt" + "strings" ) // 
StarredRepository is returned by ListStarred. @@ -84,8 +85,9 @@ func (s *ActivityService) ListStarred(ctx context.Context, user string, opt *Act return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeStarringPreview) + // TODO: remove custom Accept header when APIs fully launch + acceptHeaders := []string{mediaTypeStarringPreview, mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var repos []*StarredRepository resp, err := s.client.Do(ctx, req, &repos) diff --git a/vendor/github.com/google/go-github/github/activity_star_test.go b/vendor/github.com/google/go-github/github/activity_star_test.go index 542bfc6..8a12247 100644 --- a/vendor/github.com/google/go-github/github/activity_star_test.go +++ b/vendor/github.com/google/go-github/github/activity_star_test.go @@ -10,12 +10,13 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" "time" ) func TestActivityService_ListStargazers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stargazers", func(w http.ResponseWriter, r *http.Request) { @@ -33,19 +34,19 @@ func TestActivityService_ListStargazers(t *testing.T) { t.Errorf("Activity.ListStargazers returned error: %v", err) } - want := []*Stargazer{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, User: &User{ID: Int(1)}}} + want := []*Stargazer{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, User: &User{ID: Int64(1)}}} if !reflect.DeepEqual(stargazers, want) { t.Errorf("Activity.ListStargazers returned %+v, want %+v", stargazers, want) } } func TestActivityService_ListStarred_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/starred", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeStarringPreview) + testHeader(t, r, "Accept", strings.Join([]string{mediaTypeStarringPreview, mediaTypeTopicsPreview}, ", ")) fmt.Fprint(w, `[{"starred_at":"2002-02-10T15:30:00Z","repo":{"id":1}}]`) }) @@ -54,19 +55,19 @@ func TestActivityService_ListStarred_authenticatedUser(t *testing.T) { t.Errorf("Activity.ListStarred returned error: %v", err) } - want := []*StarredRepository{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, Repository: &Repository{ID: Int(1)}}} + want := []*StarredRepository{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, Repository: &Repository{ID: Int64(1)}}} if !reflect.DeepEqual(repos, want) { t.Errorf("Activity.ListStarred returned %+v, want %+v", repos, want) } } func TestActivityService_ListStarred_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/starred", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeStarringPreview) + testHeader(t, r, "Accept", strings.Join([]string{mediaTypeStarringPreview, mediaTypeTopicsPreview}, ", ")) testFormValues(t, r, values{ "sort": "created", "direction": "asc", @@ -81,19 +82,22 @@ func TestActivityService_ListStarred_specifiedUser(t *testing.T) { t.Errorf("Activity.ListStarred returned error: %v", err) } - want := []*StarredRepository{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, Repository: &Repository{ID: Int(2)}}} + want := 
[]*StarredRepository{{StarredAt: &Timestamp{time.Date(2002, time.February, 10, 15, 30, 0, 0, time.UTC)}, Repository: &Repository{ID: Int64(2)}}} if !reflect.DeepEqual(repos, want) { t.Errorf("Activity.ListStarred returned %+v, want %+v", repos, want) } } func TestActivityService_ListStarred_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.ListStarred(context.Background(), "%", nil) testURLParseError(t, err) } func TestActivityService_IsStarred_hasStar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/starred/o/r", func(w http.ResponseWriter, r *http.Request) { @@ -111,7 +115,7 @@ func TestActivityService_IsStarred_hasStar(t *testing.T) { } func TestActivityService_IsStarred_noStar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/starred/o/r", func(w http.ResponseWriter, r *http.Request) { @@ -129,12 +133,15 @@ func TestActivityService_IsStarred_noStar(t *testing.T) { } func TestActivityService_IsStarred_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Activity.IsStarred(context.Background(), "%", "%") testURLParseError(t, err) } func TestActivityService_Star(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/starred/o/r", func(w http.ResponseWriter, r *http.Request) { @@ -148,12 +155,15 @@ func TestActivityService_Star(t *testing.T) { } func TestActivityService_Star_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Activity.Star(context.Background(), "%", "%") testURLParseError(t, err) } func TestActivityService_Unstar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/starred/o/r", func(w http.ResponseWriter, r *http.Request) { @@ -167,6 +177,9 @@ func TestActivityService_Unstar(t *testing.T) { } func TestActivityService_Unstar_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Activity.Unstar(context.Background(), "%", "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/activity_test.go b/vendor/github.com/google/go-github/github/activity_test.go index 98567a2..92b42d3 100644 --- a/vendor/github.com/google/go-github/github/activity_test.go +++ b/vendor/github.com/google/go-github/github/activity_test.go @@ -13,7 +13,7 @@ import ( ) func TestActivityService_List(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/feeds", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/activity_watching_test.go b/vendor/github.com/google/go-github/github/activity_watching_test.go index d765f67..b017d8d 100644 --- a/vendor/github.com/google/go-github/github/activity_watching_test.go +++ b/vendor/github.com/google/go-github/github/activity_watching_test.go @@ -15,7 +15,7 @@ import ( ) func TestActivityService_ListWatchers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/subscribers", func(w http.ResponseWriter, r *http.Request) { @@ -32,14 +32,14 @@ func TestActivityService_ListWatchers(t *testing.T) { t.Errorf("Activity.ListWatchers returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(watchers, want) { t.Errorf("Activity.ListWatchers 
returned %+v, want %+v", watchers, want) } } func TestActivityService_ListWatched_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/subscriptions", func(w http.ResponseWriter, r *http.Request) { @@ -55,14 +55,14 @@ func TestActivityService_ListWatched_authenticatedUser(t *testing.T) { t.Errorf("Activity.ListWatched returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(watched, want) { t.Errorf("Activity.ListWatched returned %+v, want %+v", watched, want) } } func TestActivityService_ListWatched_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/subscriptions", func(w http.ResponseWriter, r *http.Request) { @@ -78,14 +78,14 @@ func TestActivityService_ListWatched_specifiedUser(t *testing.T) { t.Errorf("Activity.ListWatched returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(watched, want) { t.Errorf("Activity.ListWatched returned %+v, want %+v", watched, want) } } func TestActivityService_GetRepositorySubscription_true(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/subscription", func(w http.ResponseWriter, r *http.Request) { @@ -105,7 +105,7 @@ func TestActivityService_GetRepositorySubscription_true(t *testing.T) { } func TestActivityService_GetRepositorySubscription_false(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/subscription", func(w http.ResponseWriter, r *http.Request) { @@ -125,7 +125,7 @@ func TestActivityService_GetRepositorySubscription_false(t *testing.T) { } func TestActivityService_GetRepositorySubscription_error(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/subscription", func(w http.ResponseWriter, r *http.Request) { @@ -140,7 +140,7 @@ func TestActivityService_GetRepositorySubscription_error(t *testing.T) { } func TestActivityService_SetRepositorySubscription(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Subscription{Subscribed: Bool(true)} @@ -169,7 +169,7 @@ func TestActivityService_SetRepositorySubscription(t *testing.T) { } func TestActivityService_DeleteRepositorySubscription(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/subscription", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/admin.go b/vendor/github.com/google/go-github/github/admin.go index d0f055b..2d96733 100644 --- a/vendor/github.com/google/go-github/github/admin.go +++ b/vendor/github.com/google/go-github/github/admin.go @@ -19,7 +19,7 @@ type AdminService service // TeamLDAPMapping represents the mapping between a GitHub team and an LDAP group. type TeamLDAPMapping struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` LDAPDN *string `json:"ldap_dn,omitempty"` URL *string `json:"url,omitempty"` Name *string `json:"name,omitempty"` @@ -38,7 +38,7 @@ func (m TeamLDAPMapping) String() string { // UserLDAPMapping represents the mapping between a GitHub user and an LDAP user. 
type UserLDAPMapping struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` LDAPDN *string `json:"ldap_dn,omitempty"` Login *string `json:"login,omitempty"` AvatarURL *string `json:"avatar_url,omitempty"` @@ -84,7 +84,7 @@ func (s *AdminService) UpdateUserLDAPMapping(ctx context.Context, user string, m // UpdateTeamLDAPMapping updates the mapping between a GitHub team and an LDAP group. // // GitHub API docs: https://developer.github.com/v3/enterprise/ldap/#update-ldap-mapping-for-a-team -func (s *AdminService) UpdateTeamLDAPMapping(ctx context.Context, team int, mapping *TeamLDAPMapping) (*TeamLDAPMapping, *Response, error) { +func (s *AdminService) UpdateTeamLDAPMapping(ctx context.Context, team int64, mapping *TeamLDAPMapping) (*TeamLDAPMapping, *Response, error) { u := fmt.Sprintf("admin/ldap/teams/%v/mapping", team) req, err := s.client.NewRequest("PATCH", u, mapping) if err != nil { diff --git a/vendor/github.com/google/go-github/github/admin_stats.go b/vendor/github.com/google/go-github/github/admin_stats.go new file mode 100644 index 0000000..b5645f8 --- /dev/null +++ b/vendor/github.com/google/go-github/github/admin_stats.go @@ -0,0 +1,171 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// AdminStats represents a variety of stats of a GitHub Enterprise +// installation. +type AdminStats struct { + Issues *IssueStats `json:"issues,omitempty"` + Hooks *HookStats `json:"hooks,omitempty"` + Milestones *MilestoneStats `json:"milestones,omitempty"` + Orgs *OrgStats `json:"orgs,omitempty"` + Comments *CommentStats `json:"comments,omitempty"` + Pages *PageStats `json:"pages,omitempty"` + Users *UserStats `json:"users,omitempty"` + Gists *GistStats `json:"gists,omitempty"` + Pulls *PullStats `json:"pulls,omitempty"` + Repos *RepoStats `json:"repos,omitempty"` +} + +func (s AdminStats) String() string { + return Stringify(s) +} + +// IssueStats represents the number of total, open and closed issues. +type IssueStats struct { + TotalIssues *int `json:"total_issues,omitempty"` + OpenIssues *int `json:"open_issues,omitempty"` + ClosedIssues *int `json:"closed_issues,omitempty"` +} + +func (s IssueStats) String() string { + return Stringify(s) +} + +// HookStats represents the number of total, active and inactive hooks. +type HookStats struct { + TotalHooks *int `json:"total_hooks,omitempty"` + ActiveHooks *int `json:"active_hooks,omitempty"` + InactiveHooks *int `json:"inactive_hooks,omitempty"` +} + +func (s HookStats) String() string { + return Stringify(s) +} + +// MilestoneStats represents the number of total, open and closed milestones. +type MilestoneStats struct { + TotalMilestones *int `json:"total_milestones,omitempty"` + OpenMilestones *int `json:"open_milestones,omitempty"` + ClosedMilestones *int `json:"closed_milestones,omitempty"` +} + +func (s MilestoneStats) String() string { + return Stringify(s) +} + +// OrgStats represents the number of total, disabled organizations and the team +// and team member count.
+type OrgStats struct { + TotalOrgs *int `json:"total_orgs,omitempty"` + DisabledOrgs *int `json:"disabled_orgs,omitempty"` + TotalTeams *int `json:"total_teams,omitempty"` + TotalTeamMembers *int `json:"total_team_members,omitempty"` +} + +func (s OrgStats) String() string { + return Stringify(s) +} + +// CommentStats represents the number of total comments on commits, gists, issues +// and pull requests. +type CommentStats struct { + TotalCommitComments *int `json:"total_commit_comments,omitempty"` + TotalGistComments *int `json:"total_gist_comments,omitempty"` + TotalIssueComments *int `json:"total_issue_comments,omitempty"` + TotalPullRequestComments *int `json:"total_pull_request_comments,omitempty"` +} + +func (s CommentStats) String() string { + return Stringify(s) +} + +// PageStats represents the total number of GitHub Pages. +type PageStats struct { + TotalPages *int `json:"total_pages,omitempty"` +} + +func (s PageStats) String() string { + return Stringify(s) +} + +// UserStats represents the number of total, admin and suspended users. +type UserStats struct { + TotalUsers *int `json:"total_users,omitempty"` + AdminUsers *int `json:"admin_users,omitempty"` + SuspendedUsers *int `json:"suspended_users,omitempty"` +} + +func (s UserStats) String() string { + return Stringify(s) +} + +// GistStats represents the number of total, private and public gists. +type GistStats struct { + TotalGists *int `json:"total_gists,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` +} + +func (s GistStats) String() string { + return Stringify(s) +} + +// PullStats represents the number of total, merged, mergeable and unmergeable +// pull requests. +type PullStats struct { + TotalPulls *int `json:"total_pulls,omitempty"` + MergedPulls *int `json:"merged_pulls,omitempty"` + MergablePulls *int `json:"mergeable_pulls,omitempty"` + UnmergablePulls *int `json:"unmergeable_pulls,omitempty"` +} + +func (s PullStats) String() string { + return Stringify(s) +} + +// RepoStats represents the number of total, root, fork, organization repositories +// together with the total number of pushes and wikis. +type RepoStats struct { + TotalRepos *int `json:"total_repos,omitempty"` + RootRepos *int `json:"root_repos,omitempty"` + ForkRepos *int `json:"fork_repos,omitempty"` + OrgRepos *int `json:"org_repos,omitempty"` + TotalPushes *int `json:"total_pushes,omitempty"` + TotalWikis *int `json:"total_wikis,omitempty"` +} + +func (s RepoStats) String() string { + return Stringify(s) +} + +// GetAdminStats returns a variety of metrics about a GitHub Enterprise +// installation. +// +// Please note that this is only available to site administrators, +// otherwise it will error with a 404 not found (instead of 401 or 403).
+// +// GitHub API docs: https://developer.github.com/v3/enterprise-admin/admin_stats/ +func (s *AdminService) GetAdminStats(ctx context.Context) (*AdminStats, *Response, error) { + u := fmt.Sprintf("enterprise/stats/all") + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + m := new(AdminStats) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/admin_stats_test.go b/vendor/github.com/google/go-github/github/admin_stats_test.go new file mode 100644 index 0000000..9433adc --- /dev/null +++ b/vendor/github.com/google/go-github/github/admin_stats_test.go @@ -0,0 +1,142 @@ +package github + +import ( + "context" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestAdminService_GetAdminStats(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/enterprise/stats/all", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + + fmt.Fprint(w, ` +{ + "repos": { + "total_repos": 212, + "root_repos": 194, + "fork_repos": 18, + "org_repos": 51, + "total_pushes": 3082, + "total_wikis": 15 + }, + "hooks": { + "total_hooks": 27, + "active_hooks": 23, + "inactive_hooks": 4 + }, + "pages": { + "total_pages": 36 + }, + "orgs": { + "total_orgs": 33, + "disabled_orgs": 0, + "total_teams": 60, + "total_team_members": 314 + }, + "users": { + "total_users": 254, + "admin_users": 45, + "suspended_users": 21 + }, + "pulls": { + "total_pulls": 86, + "merged_pulls": 60, + "mergeable_pulls": 21, + "unmergeable_pulls": 3 + }, + "issues": { + "total_issues": 179, + "open_issues": 83, + "closed_issues": 96 + }, + "milestones": { + "total_milestones": 7, + "open_milestones": 6, + "closed_milestones": 1 + }, + "gists": { + "total_gists": 178, + "private_gists": 151, + "public_gists": 25 + }, + "comments": { + "total_commit_comments": 6, + "total_gist_comments": 28, + "total_issue_comments": 366, + "total_pull_request_comments": 30 + } +} +`) + }) + + stats, _, err := client.Admin.GetAdminStats(context.Background()) + if err != nil { + t.Errorf("AdminService.GetAdminStats returned error: %v", err) + } + + want := &AdminStats{ + Repos: &RepoStats{ + TotalRepos: Int(212), + RootRepos: Int(194), + ForkRepos: Int(18), + OrgRepos: Int(51), + TotalPushes: Int(3082), + TotalWikis: Int(15), + }, + Hooks: &HookStats{ + TotalHooks: Int(27), + ActiveHooks: Int(23), + InactiveHooks: Int(4), + }, + Pages: &PageStats{ + TotalPages: Int(36), + }, + Orgs: &OrgStats{ + TotalOrgs: Int(33), + DisabledOrgs: Int(0), + TotalTeams: Int(60), + TotalTeamMembers: Int(314), + }, + Users: &UserStats{ + TotalUsers: Int(254), + AdminUsers: Int(45), + SuspendedUsers: Int(21), + }, + Pulls: &PullStats{ + TotalPulls: Int(86), + MergedPulls: Int(60), + MergablePulls: Int(21), + UnmergablePulls: Int(3), + }, + Issues: &IssueStats{ + TotalIssues: Int(179), + OpenIssues: Int(83), + ClosedIssues: Int(96), + }, + Milestones: &MilestoneStats{ + TotalMilestones: Int(7), + OpenMilestones: Int(6), + ClosedMilestones: Int(1), + }, + Gists: &GistStats{ + TotalGists: Int(178), + PrivateGists: Int(151), + PublicGists: Int(25), + }, + Comments: &CommentStats{ + TotalCommitComments: Int(6), + TotalGistComments: Int(28), + TotalIssueComments: Int(366), + TotalPullRequestComments: Int(30), + }, + } + if !reflect.DeepEqual(stats, want) { + t.Errorf("AdminService.GetAdminStats returned %+v, want %+v", stats, want) + } +} diff --git 
a/vendor/github.com/google/go-github/github/admin_test.go b/vendor/github.com/google/go-github/github/admin_test.go index 32a870e..8979686 100644 --- a/vendor/github.com/google/go-github/github/admin_test.go +++ b/vendor/github.com/google/go-github/github/admin_test.go @@ -15,7 +15,7 @@ import ( ) func TestAdminService_UpdateUserLDAPMapping(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &UserLDAPMapping{ @@ -39,7 +39,7 @@ func TestAdminService_UpdateUserLDAPMapping(t *testing.T) { } want := &UserLDAPMapping{ - ID: Int(1), + ID: Int64(1), LDAPDN: String("uid=asdf,ou=users,dc=github,dc=com"), } if !reflect.DeepEqual(mapping, want) { @@ -48,7 +48,7 @@ func TestAdminService_UpdateUserLDAPMapping(t *testing.T) { } func TestAdminService_UpdateTeamLDAPMapping(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &TeamLDAPMapping{ @@ -72,7 +72,7 @@ func TestAdminService_UpdateTeamLDAPMapping(t *testing.T) { } want := &TeamLDAPMapping{ - ID: Int(1), + ID: Int64(1), LDAPDN: String("cn=Enterprise Ops,ou=teams,dc=github,dc=com"), } if !reflect.DeepEqual(mapping, want) { diff --git a/vendor/github.com/google/go-github/github/apps.go b/vendor/github.com/google/go-github/github/apps.go index ff33893..ae3aabd 100644 --- a/vendor/github.com/google/go-github/github/apps.go +++ b/vendor/github.com/google/go-github/github/apps.go @@ -5,7 +5,11 @@ package github -import "context" +import ( + "context" + "fmt" + "time" +) // AppsService provides access to the installation related functions // in the GitHub API. @@ -13,6 +17,88 @@ import "context" // GitHub API docs: https://developer.github.com/v3/apps/ type AppsService service +// App represents a GitHub App. +type App struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + ExternalURL *string `json:"external_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} + +// InstallationToken represents an installation token. +type InstallationToken struct { + Token *string `json:"token,omitempty"` + ExpiresAt *time.Time `json:"expires_at,omitempty"` +} + +// InstallationPermissions lists the permissions for metadata, contents, issues and single file for an installation. +type InstallationPermissions struct { + Metadata *string `json:"metadata,omitempty"` + Contents *string `json:"contents,omitempty"` + Issues *string `json:"issues,omitempty"` + SingleFile *string `json:"single_file,omitempty"` +} + +// Installation represents a GitHub Apps installation. 
+type Installation struct { + ID *int64 `json:"id,omitempty"` + AppID *int64 `json:"app_id,omitempty"` + TargetID *int64 `json:"target_id,omitempty"` + Account *User `json:"account,omitempty"` + AccessTokensURL *string `json:"access_tokens_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + TargetType *string `json:"target_type,omitempty"` + SingleFileName *string `json:"single_file_name,omitempty"` + RepositorySelection *string `json:"repository_selection,omitempty"` + Events []string `json:"events,omitempty"` + Permissions *InstallationPermissions `json:"permissions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +func (i Installation) String() string { + return Stringify(i) +} + +// Get a single GitHub App. Passing the empty string will get +// the authenticated GitHub App. +// +// Note: appSlug is just the URL-friendly name of your GitHub App. +// You can find this on the settings page for your GitHub App +// (e.g., https://github.com/settings/apps/:app_slug). +// +// GitHub API docs: https://developer.github.com/v3/apps/#get-a-single-github-app +func (s *AppsService) Get(ctx context.Context, appSlug string) (*App, *Response, error) { + var u string + if appSlug != "" { + u = fmt.Sprintf("apps/%v", appSlug) + } else { + u = "app" + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + app := new(App) + resp, err := s.client.Do(ctx, req, app) + if err != nil { + return nil, resp, err + } + + return app, resp, nil +} + // ListInstallations lists the installations that the current GitHub App has. // // GitHub API docs: https://developer.github.com/v3/apps/#find-installations @@ -38,3 +124,107 @@ func (s *AppsService) ListInstallations(ctx context.Context, opt *ListOptions) ( return i, resp, nil } + +// GetInstallation returns the specified installation. +// +// GitHub API docs: https://developer.github.com/v3/apps/#get-a-single-installation +func (s *AppsService) GetInstallation(ctx context.Context, id int64) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("app/installations/%v", id)) +} + +// ListUserInstallations lists installations that are accessible to the authenticated user. +// +// GitHub API docs: https://developer.github.com/v3/apps/#list-installations-for-user +func (s *AppsService) ListUserInstallations(ctx context.Context, opt *ListOptions) ([]*Installation, *Response, error) { + u, err := addOptions("user/installations", opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + var i struct { + Installations []*Installation `json:"installations"` + } + resp, err := s.client.Do(ctx, req, &i) + if err != nil { + return nil, resp, err + } + + return i.Installations, resp, nil +} + +// CreateInstallationToken creates a new installation token. 
+// +// GitHub API docs: https://developer.github.com/v3/apps/#create-a-new-installation-token +func (s *AppsService) CreateInstallationToken(ctx context.Context, id int64) (*InstallationToken, *Response, error) { + u := fmt.Sprintf("app/installations/%v/access_tokens", id) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + t := new(InstallationToken) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// FindOrganizationInstallation finds the organization's installation information. +// +// GitHub API docs: https://developer.github.com/v3/apps/#find-organization-installation +func (s *AppsService) FindOrganizationInstallation(ctx context.Context, org string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("orgs/%v/installation", org)) +} + +// FindRepositoryInstallation finds the repository's installation information. +// +// GitHub API docs: https://developer.github.com/v3/apps/#find-repository-installation +func (s *AppsService) FindRepositoryInstallation(ctx context.Context, owner, repo string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("repos/%v/%v/installation", owner, repo)) +} + +// FindRepositoryInstallationByID finds the repository's installation information. +// +// Note: FindRepositoryInstallationByID uses the undocumented GitHub API endpoint /repositories/:id/installation. +func (s *AppsService) FindRepositoryInstallationByID(ctx context.Context, id int64) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("repositories/%d/installation", id)) +} + +// FindUserInstallation finds the user's installation information. +// +// GitHub API docs: https://developer.github.com/v3/apps/#find-repository-installation +func (s *AppsService) FindUserInstallation(ctx context.Context, user string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("users/%v/installation", user)) +} + +func (s *AppsService) getInstallation(ctx context.Context, url string) (*Installation, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + i := new(Installation) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/apps_installation.go b/vendor/github.com/google/go-github/github/apps_installation.go index 6a27799..09d4043 100644 --- a/vendor/github.com/google/go-github/github/apps_installation.go +++ b/vendor/github.com/google/go-github/github/apps_installation.go @@ -5,20 +5,10 @@ package github -import "context" - -// Installation represents a GitHub Apps installation. -type Installation struct { - ID *int `json:"id,omitempty"` - Account *User `json:"account,omitempty"` - AccessTokensURL *string `json:"access_tokens_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -func (i Installation) String() string { - return Stringify(i) -} +import ( + "context" + "fmt" +) // ListRepos lists the repositories that are accessible to the authenticated installation. 
// @@ -47,3 +37,67 @@ func (s *AppsService) ListRepos(ctx context.Context, opt *ListOptions) ([]*Repos return r.Repositories, resp, nil } + +// ListUserRepos lists repositories that are accessible +// to the authenticated user for an installation. +// +// GitHub API docs: https://developer.github.com/v3/apps/installations/#list-repositories-accessible-to-the-user-for-an-installation +func (s *AppsService) ListUserRepos(ctx context.Context, id int64, opt *ListOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories", id) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + var r struct { + Repositories []*Repository `json:"repositories"` + } + resp, err := s.client.Do(ctx, req, &r) + if err != nil { + return nil, resp, err + } + + return r.Repositories, resp, nil +} + +// AddRepository adds a single repository to an installation. +// +// GitHub API docs: https://developer.github.com/v3/apps/installations/#add-repository-to-installation +func (s *AppsService) AddRepository(ctx context.Context, instID, repoID int64) (*Repository, *Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, nil, err + } + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// RemoveRepository removes a single repository from an installation. 
+// +// GitHub docs: https://developer.github.com/v3/apps/installations/#remove-repository-from-installation +func (s *AppsService) RemoveRepository(ctx context.Context, instID, repoID int64) (*Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + req.Header.Set("Accept", mediaTypeIntegrationPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/apps_installation_test.go b/vendor/github.com/google/go-github/github/apps_installation_test.go index 462d15a..745e1e5 100644 --- a/vendor/github.com/google/go-github/github/apps_installation_test.go +++ b/vendor/github.com/google/go-github/github/apps_installation_test.go @@ -14,7 +14,7 @@ import ( ) func TestAppsService_ListRepos(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/installation/repositories", func(w http.ResponseWriter, r *http.Request) { @@ -33,8 +33,71 @@ func TestAppsService_ListRepos(t *testing.T) { t.Errorf("Apps.ListRepos returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(repositories, want) { t.Errorf("Apps.ListRepos returned %+v, want %+v", repositories, want) } } + +func TestAppsService_ListUserRepos(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/installations/1/repositories", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + testFormValues(t, r, values{ + "page": "1", + "per_page": "2", + }) + fmt.Fprint(w, `{"repositories": [{"id":1}]}`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + repositories, _, err := client.Apps.ListUserRepos(context.Background(), 1, opt) + if err != nil { + t.Errorf("Apps.ListUserRepos returned error: %v", err) + } + + want := []*Repository{{ID: Int64(1)}} + if !reflect.DeepEqual(repositories, want) { + t.Errorf("Apps.ListUserRepos returned %+v, want %+v", repositories, want) + } +} + +func TestAppsService_AddRepository(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/installations/1/repositories/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1,"name":"n","description":"d","owner":{"login":"l"},"license":{"key":"mit"}}`) + }) + + repo, _, err := client.Apps.AddRepository(context.Background(), 1, 1) + if err != nil { + t.Errorf("Apps.AddRepository returned error: %v", err) + } + + want := &Repository{ID: Int64(1), Name: String("n"), Description: String("d"), Owner: &User{Login: String("l")}, License: &License{Key: String("mit")}} + if !reflect.DeepEqual(repo, want) { + t.Errorf("AddRepository returned %+v, want %+v", repo, want) + } +} + +func TestAppsService_RemoveRepository(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/installations/1/repositories/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Apps.RemoveRepository(context.Background(), 1, 1) + if err != nil { + t.Errorf("Apps.RemoveRepository returned error: %v", err) + } +} diff --git 
a/vendor/github.com/google/go-github/github/apps_marketplace.go b/vendor/github.com/google/go-github/github/apps_marketplace.go new file mode 100644 index 0000000..6dd568a --- /dev/null +++ b/vendor/github.com/google/go-github/github/apps_marketplace.go @@ -0,0 +1,183 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// MarketplaceService handles communication with the marketplace related +// methods of the GitHub API. +// +// GitHub API docs: https://developer.github.com/v3/apps/marketplace/ +type MarketplaceService struct { + client *Client + // Stubbed controls whether endpoints that return stubbed data are used + // instead of production endpoints. Stubbed data is fake data that's useful + // for testing your GitHub Apps. Stubbed data is hard-coded and will not + // change based on actual subscriptions. + // + // GitHub API docs: https://developer.github.com/v3/apps/marketplace/ + Stubbed bool +} + +// MarketplacePlan represents a GitHub Apps Marketplace Listing Plan. +type MarketplacePlan struct { + URL *string `json:"url,omitempty"` + AccountsURL *string `json:"accounts_url,omitempty"` + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + MonthlyPriceInCents *int `json:"monthly_price_in_cents,omitempty"` + YearlyPriceInCents *int `json:"yearly_price_in_cents,omitempty"` + // The pricing model for this listing. Can be one of "flat-rate", "per-unit", or "free". + PriceModel *string `json:"price_model,omitempty"` + UnitName *string `json:"unit_name,omitempty"` + Bullets *[]string `json:"bullets,omitempty"` + // State can be one of the values "draft" or "published". + State *string `json:"state,omitempty"` + HasFreeTrial *bool `json:"has_free_trial,omitempty"` +} + +// MarketplacePurchase represents a GitHub Apps Marketplace Purchase. +type MarketplacePurchase struct { + // BillingCycle can be one of the values "yearly", "monthly" or nil. + BillingCycle *string `json:"billing_cycle,omitempty"` + NextBillingDate *Timestamp `json:"next_billing_date,omitempty"` + UnitCount *int `json:"unit_count,omitempty"` + Plan *MarketplacePlan `json:"plan,omitempty"` + Account *MarketplacePlanAccount `json:"account,omitempty"` + OnFreeTrial *bool `json:"on_free_trial,omitempty"` + FreeTrialEndsOn *Timestamp `json:"free_trial_ends_on,omitempty"` +} + +// MarketplacePendingChange represents a pending change to a GitHub Apps Marketplace Plan. +type MarketplacePendingChange struct { + EffectiveDate *Timestamp `json:"effective_date,omitempty"` + UnitCount *int `json:"unit_count,omitempty"` + ID *int64 `json:"id,omitempty"` + Plan *MarketplacePlan `json:"plan,omitempty"` +} + +// MarketplacePlanAccount represents a GitHub Account (user or organization) on a specific plan. +type MarketplacePlanAccount struct { + URL *string `json:"url,omitempty"` + Type *string `json:"type,omitempty"` + ID *int64 `json:"id,omitempty"` + Login *string `json:"login,omitempty"` + Email *string `json:"email,omitempty"` + OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` + MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` + MarketplacePendingChange *MarketplacePendingChange `json:"marketplace_pending_change,omitempty"` +} + +// ListPlans lists all plans for your Marketplace listing. 
+// +// GitHub API docs: https://developer.github.com/v3/apps/marketplace/#list-all-plans-for-your-marketplace-listing +func (s *MarketplaceService) ListPlans(ctx context.Context, opt *ListOptions) ([]*MarketplacePlan, *Response, error) { + uri := s.marketplaceURI("plans") + u, err := addOptions(uri, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var plans []*MarketplacePlan + resp, err := s.client.Do(ctx, req, &plans) + if err != nil { + return nil, resp, err + } + + return plans, resp, nil +} + +// ListPlanAccountsForPlan lists all GitHub accounts (user or organization) on a specific plan. +// +// GitHub API docs: https://developer.github.com/v3/apps/marketplace/#list-all-github-accounts-user-or-organization-on-a-specific-plan +func (s *MarketplaceService) ListPlanAccountsForPlan(ctx context.Context, planID int64, opt *ListOptions) ([]*MarketplacePlanAccount, *Response, error) { + uri := s.marketplaceURI(fmt.Sprintf("plans/%v/accounts", planID)) + u, err := addOptions(uri, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var accounts []*MarketplacePlanAccount + resp, err := s.client.Do(ctx, req, &accounts) + if err != nil { + return nil, resp, err + } + + return accounts, resp, nil +} + +// ListPlanAccountsForAccount lists all GitHub accounts (user or organization) associated with an account. +// +// GitHub API docs: https://developer.github.com/v3/apps/marketplace/#check-if-a-github-account-is-associated-with-any-marketplace-listing +func (s *MarketplaceService) ListPlanAccountsForAccount(ctx context.Context, accountID int64, opt *ListOptions) ([]*MarketplacePlanAccount, *Response, error) { + uri := s.marketplaceURI(fmt.Sprintf("accounts/%v", accountID)) + u, err := addOptions(uri, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var accounts []*MarketplacePlanAccount + resp, err := s.client.Do(ctx, req, &accounts) + if err != nil { + return nil, resp, err + } + + return accounts, resp, nil +} + +// ListMarketplacePurchasesForUser lists all GitHub marketplace purchases made by a user. 
+// +// GitHub API docs: https://developer.github.com/v3/apps/marketplace/#get-a-users-marketplace-purchases +func (s *MarketplaceService) ListMarketplacePurchasesForUser(ctx context.Context, opt *ListOptions) ([]*MarketplacePurchase, *Response, error) { + uri := "user/marketplace_purchases" + if s.Stubbed { + uri = "user/marketplace_purchases/stubbed" + } + + u, err := addOptions(uri, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var purchases []*MarketplacePurchase + resp, err := s.client.Do(ctx, req, &purchases) + if err != nil { + return nil, resp, err + } + return purchases, resp, nil +} + +func (s *MarketplaceService) marketplaceURI(endpoint string) string { + url := "marketplace_listing" + if s.Stubbed { + url = "marketplace_listing/stubbed" + } + return url + "/" + endpoint +} diff --git a/vendor/github.com/google/go-github/github/apps_marketplace_test.go b/vendor/github.com/google/go-github/github/apps_marketplace_test.go new file mode 100644 index 0000000..e293c83 --- /dev/null +++ b/vendor/github.com/google/go-github/github/apps_marketplace_test.go @@ -0,0 +1,194 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestMarketplaceService_ListPlans(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/plans", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{ + "page": "1", + "per_page": "2", + }) + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = false + plans, _, err := client.Marketplace.ListPlans(context.Background(), opt) + if err != nil { + t.Errorf("Marketplace.ListPlans returned error: %v", err) + } + + want := []*MarketplacePlan{{ID: Int64(1)}} + if !reflect.DeepEqual(plans, want) { + t.Errorf("Marketplace.ListPlans returned %+v, want %+v", plans, want) + } +} + +func TestMarketplaceService_Stubbed_ListPlans(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/stubbed/plans", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = true + plans, _, err := client.Marketplace.ListPlans(context.Background(), opt) + if err != nil { + t.Errorf("Marketplace.ListPlans (Stubbed) returned error: %v", err) + } + + want := []*MarketplacePlan{{ID: Int64(1)}} + if !reflect.DeepEqual(plans, want) { + t.Errorf("Marketplace.ListPlans (Stubbed) returned %+v, want %+v", plans, want) + } +} + +func TestMarketplaceService_ListPlanAccountsForPlan(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/plans/1/accounts", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = false + accounts, _, err := client.Marketplace.ListPlanAccountsForPlan(context.Background(), 1, opt) + if err != nil { + t.Errorf("Marketplace.ListPlanAccountsForPlan returned error: %v", err) + } + + want := []*MarketplacePlanAccount{{ID: Int64(1)}} + if 
!reflect.DeepEqual(accounts, want) { + t.Errorf("Marketplace.ListPlanAccountsForPlan returned %+v, want %+v", accounts, want) + } +} + +func TestMarketplaceService_Stubbed_ListPlanAccountsForPlan(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/stubbed/plans/1/accounts", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = true + accounts, _, err := client.Marketplace.ListPlanAccountsForPlan(context.Background(), 1, opt) + if err != nil { + t.Errorf("Marketplace.ListPlanAccountsForPlan (Stubbed) returned error: %v", err) + } + + want := []*MarketplacePlanAccount{{ID: Int64(1)}} + if !reflect.DeepEqual(accounts, want) { + t.Errorf("Marketplace.ListPlanAccountsForPlan (Stubbed) returned %+v, want %+v", accounts, want) + } +} + +func TestMarketplaceService_ListPlanAccountsForAccount(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/accounts/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"id":1, "marketplace_pending_change": {"id": 77}}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = false + accounts, _, err := client.Marketplace.ListPlanAccountsForAccount(context.Background(), 1, opt) + if err != nil { + t.Errorf("Marketplace.ListPlanAccountsForAccount returned error: %v", err) + } + + want := []*MarketplacePlanAccount{{ID: Int64(1), MarketplacePendingChange: &MarketplacePendingChange{ID: Int64(77)}}} + if !reflect.DeepEqual(accounts, want) { + t.Errorf("Marketplace.ListPlanAccountsForAccount returned %+v, want %+v", accounts, want) + } +} + +func TestMarketplaceService_Stubbed_ListPlanAccountsForAccount(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/marketplace_listing/stubbed/accounts/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = true + accounts, _, err := client.Marketplace.ListPlanAccountsForAccount(context.Background(), 1, opt) + if err != nil { + t.Errorf("Marketplace.ListPlanAccountsForAccount (Stubbed) returned error: %v", err) + } + + want := []*MarketplacePlanAccount{{ID: Int64(1)}} + if !reflect.DeepEqual(accounts, want) { + t.Errorf("Marketplace.ListPlanAccountsForAccount (Stubbed) returned %+v, want %+v", accounts, want) + } +} + +func TestMarketplaceService_ListMarketplacePurchasesForUser(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/marketplace_purchases", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"billing_cycle":"monthly"}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = false + purchases, _, err := client.Marketplace.ListMarketplacePurchasesForUser(context.Background(), opt) + if err != nil { + t.Errorf("Marketplace.ListMarketplacePurchasesForUser returned error: %v", err) + } + + want := []*MarketplacePurchase{{BillingCycle: String("monthly")}} + if !reflect.DeepEqual(purchases, want) { + t.Errorf("Marketplace.ListMarketplacePurchasesForUser returned %+v, want %+v", purchases, want) + } +} + +func TestMarketplaceService_Stubbed_ListMarketplacePurchasesForUser(t *testing.T) { + client, mux, _, teardown := setup() + 
defer teardown() + + mux.HandleFunc("/user/marketplace_purchases/stubbed", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `[{"billing_cycle":"monthly"}]`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + client.Marketplace.Stubbed = true + purchases, _, err := client.Marketplace.ListMarketplacePurchasesForUser(context.Background(), opt) + if err != nil { + t.Errorf("Marketplace.ListMarketplacePurchasesForUser returned error: %v", err) + } + + want := []*MarketplacePurchase{{BillingCycle: String("monthly")}} + if !reflect.DeepEqual(purchases, want) { + t.Errorf("Marketplace.ListMarketplacePurchasesForUser returned %+v, want %+v", purchases, want) + } +} diff --git a/vendor/github.com/google/go-github/github/apps_test.go b/vendor/github.com/google/go-github/github/apps_test.go index f847b84..a594eab 100644 --- a/vendor/github.com/google/go-github/github/apps_test.go +++ b/vendor/github.com/google/go-github/github/apps_test.go @@ -11,10 +11,53 @@ import ( "net/http" "reflect" "testing" + "time" ) +func TestAppsService_Get_authenticatedApp(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/app", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1}`) + }) + + app, _, err := client.Apps.Get(context.Background(), "") + if err != nil { + t.Errorf("Apps.Get returned error: %v", err) + } + + want := &App{ID: Int64(1)} + if !reflect.DeepEqual(app, want) { + t.Errorf("Apps.Get returned %+v, want %+v", app, want) + } +} + +func TestAppsService_Get_specifiedApp(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/apps/a", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"html_url":"https://github.com/apps/a"}`) + }) + + app, _, err := client.Apps.Get(context.Background(), "a") + if err != nil { + t.Errorf("Apps.Get returned error: %v", err) + } + + want := &App{HTMLURL: String("https://github.com/apps/a")} + if !reflect.DeepEqual(app, want) { + t.Errorf("Apps.Get returned %+v, want %+v", *app.HTMLURL, *want.HTMLURL) + } +} + func TestAppsService_ListInstallations(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/app/installations", func(w http.ResponseWriter, r *http.Request) { @@ -24,7 +67,26 @@ func TestAppsService_ListInstallations(t *testing.T) { "page": "1", "per_page": "2", }) - fmt.Fprint(w, `[{"id":1}]`) + fmt.Fprint(w, `[{ + "id":1, + "app_id":1, + "target_id":1, + "target_type": "Organization", + "permissions": { + "metadata": "read", + "contents": "read", + "issues": "write", + "single_file": "write" + }, + "events": [ + "push", + "pull_request" + ], + "single_file_name": "config.yml", + "repository_selection": "selected", + "created_at": "2018-01-01T00:00:00Z", + "updated_at": "2018-01-01T00:00:00Z"}]`, + ) }) opt := &ListOptions{Page: 1, PerPage: 2} @@ -33,8 +95,176 @@ func TestAppsService_ListInstallations(t *testing.T) { t.Errorf("Apps.ListInstallations returned error: %v", err) } - want := []*Installation{{ID: Int(1)}} + date := Timestamp{Time: time.Date(2018, time.January, 1, 0, 0, 0, 0, time.UTC)} + want := []*Installation{{ + ID: Int64(1), + AppID: Int64(1), + TargetID: Int64(1), + TargetType: String("Organization"), + SingleFileName: String("config.yml"), + RepositorySelection: String("selected"), + 
Permissions: &InstallationPermissions{ + Metadata: String("read"), + Contents: String("read"), + Issues: String("write"), + SingleFile: String("write")}, + Events: []string{"push", "pull_request"}, + CreatedAt: &date, + UpdatedAt: &date, + }} if !reflect.DeepEqual(installations, want) { t.Errorf("Apps.ListInstallations returned %+v, want %+v", installations, want) } } + +func TestAppsService_GetInstallation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/app/installations/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1, "app_id":1, "target_id":1, "target_type": "Organization"}`) + }) + + installation, _, err := client.Apps.GetInstallation(context.Background(), 1) + if err != nil { + t.Errorf("Apps.GetInstallation returned error: %v", err) + } + + want := &Installation{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("Organization")} + if !reflect.DeepEqual(installation, want) { + t.Errorf("Apps.GetInstallation returned %+v, want %+v", installation, want) + } +} + +func TestAppsService_ListUserInstallations(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/installations", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + testFormValues(t, r, values{ + "page": "1", + "per_page": "2", + }) + fmt.Fprint(w, `{"installations":[{"id":1, "app_id":1, "target_id":1, "target_type": "Organization"}]}`) + }) + + opt := &ListOptions{Page: 1, PerPage: 2} + installations, _, err := client.Apps.ListUserInstallations(context.Background(), opt) + if err != nil { + t.Errorf("Apps.ListUserInstallations returned error: %v", err) + } + + want := []*Installation{{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("Organization")}} + if !reflect.DeepEqual(installations, want) { + t.Errorf("Apps.ListUserInstallations returned %+v, want %+v", installations, want) + } +} + +func TestAppsService_CreateInstallationToken(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/app/installations/1/access_tokens", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"token":"t"}`) + }) + + token, _, err := client.Apps.CreateInstallationToken(context.Background(), 1) + if err != nil { + t.Errorf("Apps.CreateInstallationToken returned error: %v", err) + } + + want := &InstallationToken{Token: String("t")} + if !reflect.DeepEqual(token, want) { + t.Errorf("Apps.CreateInstallationToken returned %+v, want %+v", token, want) + } +} + +func TestAppsService_FindOrganizationInstallation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/orgs/o/installation", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1, "app_id":1, "target_id":1, "target_type": "Organization"}`) + }) + + installation, _, err := client.Apps.FindOrganizationInstallation(context.Background(), "o") + if err != nil { + t.Errorf("Apps.FindOrganizationInstallation returned error: %v", err) + } + + want := &Installation{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("Organization")} + if !reflect.DeepEqual(installation, want) { + 
t.Errorf("Apps.FindOrganizationInstallation returned %+v, want %+v", installation, want) + } +} + +func TestAppsService_FindRepositoryInstallation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/installation", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1, "app_id":1, "target_id":1, "target_type": "Organization"}`) + }) + + installation, _, err := client.Apps.FindRepositoryInstallation(context.Background(), "o", "r") + if err != nil { + t.Errorf("Apps.FindRepositoryInstallation returned error: %v", err) + } + + want := &Installation{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("Organization")} + if !reflect.DeepEqual(installation, want) { + t.Errorf("Apps.FindRepositoryInstallation returned %+v, want %+v", installation, want) + } +} + +func TestAppsService_FindRepositoryInstallationByID(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repositories/1/installation", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1, "app_id":1, "target_id":1, "target_type": "Organization"}`) + }) + + installation, _, err := client.Apps.FindRepositoryInstallationByID(context.Background(), 1) + if err != nil { + t.Errorf("Apps.FindRepositoryInstallationByID returned error: %v", err) + } + + want := &Installation{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("Organization")} + if !reflect.DeepEqual(installation, want) { + t.Errorf("Apps.FindRepositoryInstallationByID returned %+v, want %+v", installation, want) + } +} + +func TestAppsService_FindUserInstallation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/users/u/installation", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeIntegrationPreview) + fmt.Fprint(w, `{"id":1, "app_id":1, "target_id":1, "target_type": "User"}`) + }) + + installation, _, err := client.Apps.FindUserInstallation(context.Background(), "u") + if err != nil { + t.Errorf("Apps.FindUserInstallation returned error: %v", err) + } + + want := &Installation{ID: Int64(1), AppID: Int64(1), TargetID: Int64(1), TargetType: String("User")} + if !reflect.DeepEqual(installation, want) { + t.Errorf("Apps.FindUserInstallation returned %+v, want %+v", installation, want) + } +} diff --git a/vendor/github.com/google/go-github/github/authorizations.go b/vendor/github.com/google/go-github/github/authorizations.go index d87adc0..8ad1432 100644 --- a/vendor/github.com/google/go-github/github/authorizations.go +++ b/vendor/github.com/google/go-github/github/authorizations.go @@ -54,7 +54,7 @@ type AuthorizationsService service // Authorization represents an individual GitHub authorization. 
type Authorization struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` Scopes []Scope `json:"scopes,omitempty"` Token *string `json:"token,omitempty"` @@ -63,7 +63,7 @@ type Authorization struct { App *AuthorizationApp `json:"app,omitempty"` Note *string `json:"note,omitempty"` NoteURL *string `json:"note_url,omitempty"` - UpdateAt *Timestamp `json:"updated_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` Fingerprint *string `json:"fingerprint,omitempty"` @@ -88,7 +88,7 @@ func (a AuthorizationApp) String() string { // Grant represents an OAuth application that has been granted access to an account. type Grant struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` App *AuthorizationApp `json:"app,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` @@ -160,7 +160,7 @@ func (s *AuthorizationsService) List(ctx context.Context, opt *ListOptions) ([]* // Get a single authorization. // // GitHub API docs: https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization -func (s *AuthorizationsService) Get(ctx context.Context, id int) (*Authorization, *Response, error) { +func (s *AuthorizationsService) Get(ctx context.Context, id int64) (*Authorization, *Response, error) { u := fmt.Sprintf("authorizations/%d", id) req, err := s.client.NewRequest("GET", u, nil) @@ -234,7 +234,7 @@ func (s *AuthorizationsService) GetOrCreateForApp(ctx context.Context, clientID // Edit a single authorization. // // GitHub API docs: https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization -func (s *AuthorizationsService) Edit(ctx context.Context, id int, auth *AuthorizationUpdateRequest) (*Authorization, *Response, error) { +func (s *AuthorizationsService) Edit(ctx context.Context, id int64, auth *AuthorizationUpdateRequest) (*Authorization, *Response, error) { u := fmt.Sprintf("authorizations/%d", id) req, err := s.client.NewRequest("PATCH", u, auth) @@ -254,7 +254,7 @@ func (s *AuthorizationsService) Edit(ctx context.Context, id int, auth *Authoriz // Delete a single authorization. // // GitHub API docs: https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization -func (s *AuthorizationsService) Delete(ctx context.Context, id int) (*Response, error) { +func (s *AuthorizationsService) Delete(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("authorizations/%d", id) req, err := s.client.NewRequest("DELETE", u, nil) @@ -366,7 +366,7 @@ func (s *AuthorizationsService) ListGrants(ctx context.Context, opt *ListOptions // GetGrant gets a single OAuth application grant. // // GitHub API docs: https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant -func (s *AuthorizationsService) GetGrant(ctx context.Context, id int) (*Grant, *Response, error) { +func (s *AuthorizationsService) GetGrant(ctx context.Context, id int64) (*Grant, *Response, error) { u := fmt.Sprintf("applications/grants/%d", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -387,7 +387,7 @@ func (s *AuthorizationsService) GetGrant(ctx context.Context, id int) (*Grant, * // the user. 
// // GitHub API docs: https://developer.github.com/v3/oauth_authorizations/#delete-a-grant -func (s *AuthorizationsService) DeleteGrant(ctx context.Context, id int) (*Response, error) { +func (s *AuthorizationsService) DeleteGrant(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("applications/grants/%d", id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { @@ -403,7 +403,7 @@ func (s *AuthorizationsService) DeleteGrant(ctx context.Context, id int) (*Respo // you can e.g. create or delete a user's public SSH key. NOTE: creating a // new token automatically revokes an existing one. // -// GitHub API docs: https://developer.github.com/enterprise/2.5/v3/users/administration/#create-an-impersonation-oauth-token +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#create-an-impersonation-oauth-token func (s *AuthorizationsService) CreateImpersonation(ctx context.Context, username string, authReq *AuthorizationRequest) (*Authorization, *Response, error) { u := fmt.Sprintf("admin/users/%v/authorizations", username) req, err := s.client.NewRequest("POST", u, authReq) @@ -423,7 +423,7 @@ func (s *AuthorizationsService) CreateImpersonation(ctx context.Context, usernam // // NOTE: there can be only one at a time. // -// GitHub API docs: https://developer.github.com/enterprise/2.5/v3/users/administration/#delete-an-impersonation-oauth-token +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#delete-an-impersonation-oauth-token func (s *AuthorizationsService) DeleteImpersonation(ctx context.Context, username string) (*Response, error) { u := fmt.Sprintf("admin/users/%v/authorizations", username) req, err := s.client.NewRequest("DELETE", u, nil) diff --git a/vendor/github.com/google/go-github/github/authorizations_test.go b/vendor/github.com/google/go-github/github/authorizations_test.go index 61bf212..faf2574 100644 --- a/vendor/github.com/google/go-github/github/authorizations_test.go +++ b/vendor/github.com/google/go-github/github/authorizations_test.go @@ -15,7 +15,7 @@ import ( ) func TestAuthorizationsService_List(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/authorizations", func(w http.ResponseWriter, r *http.Request) { @@ -30,14 +30,14 @@ func TestAuthorizationsService_List(t *testing.T) { t.Errorf("Authorizations.List returned error: %v", err) } - want := []*Authorization{{ID: Int(1)}} + want := []*Authorization{{ID: Int64(1)}} if !reflect.DeepEqual(got, want) { t.Errorf("Authorizations.List returned %+v, want %+v", *got[0].ID, *want[0].ID) } } func TestAuthorizationsService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/authorizations/1", func(w http.ResponseWriter, r *http.Request) { @@ -50,14 +50,14 @@ func TestAuthorizationsService_Get(t *testing.T) { t.Errorf("Authorizations.Get returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorizations.Get returned auth %+v, want %+v", got, want) } } func TestAuthorizationsService_Create(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &AuthorizationRequest{ @@ -81,14 +81,14 @@ func TestAuthorizationsService_Create(t *testing.T) { t.Errorf("Authorizations.Create returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { 
t.Errorf("Authorization.Create returned %+v, want %+v", got, want) } } func TestAuthorizationsService_GetOrCreateForApp(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &AuthorizationRequest{ @@ -112,14 +112,14 @@ func TestAuthorizationsService_GetOrCreateForApp(t *testing.T) { t.Errorf("Authorizations.GetOrCreateForApp returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorization.GetOrCreateForApp returned %+v, want %+v", got, want) } } func TestAuthorizationsService_GetOrCreateForApp_Fingerprint(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &AuthorizationRequest{ @@ -144,14 +144,14 @@ func TestAuthorizationsService_GetOrCreateForApp_Fingerprint(t *testing.T) { t.Errorf("Authorizations.GetOrCreateForApp returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorization.GetOrCreateForApp returned %+v, want %+v", got, want) } } func TestAuthorizationsService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &AuthorizationUpdateRequest{ @@ -175,14 +175,14 @@ func TestAuthorizationsService_Edit(t *testing.T) { t.Errorf("Authorizations.Edit returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorization.Update returned %+v, want %+v", got, want) } } func TestAuthorizationsService_Delete(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/authorizations/1", func(w http.ResponseWriter, r *http.Request) { @@ -197,7 +197,7 @@ func TestAuthorizationsService_Delete(t *testing.T) { } func TestAuthorizationsService_Check(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/id/tokens/t", func(w http.ResponseWriter, r *http.Request) { @@ -210,14 +210,14 @@ func TestAuthorizationsService_Check(t *testing.T) { t.Errorf("Authorizations.Check returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorizations.Check returned auth %+v, want %+v", got, want) } } func TestAuthorizationsService_Reset(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/id/tokens/t", func(w http.ResponseWriter, r *http.Request) { @@ -230,14 +230,14 @@ func TestAuthorizationsService_Reset(t *testing.T) { t.Errorf("Authorizations.Reset returned error: %v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorizations.Reset returned auth %+v, want %+v", got, want) } } func TestAuthorizationsService_Revoke(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/id/tokens/t", func(w http.ResponseWriter, r *http.Request) { @@ -252,7 +252,7 @@ func TestAuthorizationsService_Revoke(t *testing.T) { } func TestListGrants(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/grants", func(w http.ResponseWriter, r *http.Request) { @@ -265,14 +265,14 @@ func TestListGrants(t *testing.T) { t.Errorf("OAuthAuthorizations.ListGrants returned error: %v", err) } - want := []*Grant{{ID: 
Int(1)}} + want := []*Grant{{ID: Int64(1)}} if !reflect.DeepEqual(got, want) { t.Errorf("OAuthAuthorizations.ListGrants = %+v, want %+v", got, want) } } func TestListGrants_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/grants", func(w http.ResponseWriter, r *http.Request) { @@ -290,7 +290,7 @@ func TestListGrants_withOptions(t *testing.T) { } func TestGetGrant(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/grants/1", func(w http.ResponseWriter, r *http.Request) { @@ -303,14 +303,14 @@ func TestGetGrant(t *testing.T) { t.Errorf("OAuthAuthorizations.GetGrant returned error: %v", err) } - want := &Grant{ID: Int(1)} + want := &Grant{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("OAuthAuthorizations.GetGrant = %+v, want %+v", got, want) } } func TestDeleteGrant(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/applications/grants/1", func(w http.ResponseWriter, r *http.Request) { @@ -324,7 +324,7 @@ func TestDeleteGrant(t *testing.T) { } func TestAuthorizationsService_CreateImpersonation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/admin/users/u/authorizations", func(w http.ResponseWriter, r *http.Request) { @@ -338,14 +338,14 @@ func TestAuthorizationsService_CreateImpersonation(t *testing.T) { t.Errorf("Authorizations.CreateImpersonation returned error: %+v", err) } - want := &Authorization{ID: Int(1)} + want := &Authorization{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Authorizations.CreateImpersonation returned %+v, want %+v", *got.ID, *want.ID) } } func TestAuthorizationsService_DeleteImpersonation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/admin/users/u/authorizations", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/checks.go b/vendor/github.com/google/go-github/github/checks.go new file mode 100644 index 0000000..67222d8 --- /dev/null +++ b/vendor/github.com/google/go-github/github/checks.go @@ -0,0 +1,435 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/url" +) + +// ChecksService provides access to the Checks API in the +// GitHub API. +// +// GitHub API docs: https://developer.github.com/v3/checks/ +type ChecksService service + +// CheckRun represents a GitHub check run on a repository associated with a GitHub app. 
+type CheckRun struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + ExternalID *string `json:"external_id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + DetailsURL *string `json:"details_url,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + CompletedAt *Timestamp `json:"completed_at,omitempty"` + Output *CheckRunOutput `json:"output,omitempty"` + Name *string `json:"name,omitempty"` + CheckSuite *CheckSuite `json:"check_suite,omitempty"` + App *App `json:"app,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` +} + +// CheckRunOutput represents the output of a CheckRun. +type CheckRunOutput struct { + Title *string `json:"title,omitempty"` + Summary *string `json:"summary,omitempty"` + Text *string `json:"text,omitempty"` + AnnotationsCount *int `json:"annotations_count,omitempty"` + AnnotationsURL *string `json:"annotations_url,omitempty"` + Annotations []*CheckRunAnnotation `json:"annotations,omitempty"` + Images []*CheckRunImage `json:"images,omitempty"` +} + +// CheckRunAnnotation represents an annotation object for a CheckRun output. +type CheckRunAnnotation struct { + Path *string `json:"path,omitempty"` + BlobHRef *string `json:"blob_href,omitempty"` + StartLine *int `json:"start_line,omitempty"` + EndLine *int `json:"end_line,omitempty"` + AnnotationLevel *string `json:"annotation_level,omitempty"` + Message *string `json:"message,omitempty"` + Title *string `json:"title,omitempty"` + RawDetails *string `json:"raw_details,omitempty"` +} + +// CheckRunImage represents an image object for a CheckRun output. +type CheckRunImage struct { + Alt *string `json:"alt,omitempty"` + ImageURL *string `json:"image_url,omitempty"` + Caption *string `json:"caption,omitempty"` +} + +// CheckSuite represents a suite of check runs. +type CheckSuite struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + URL *string `json:"url,omitempty"` + BeforeSHA *string `json:"before,omitempty"` + AfterSHA *string `json:"after,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + App *App `json:"app,omitempty"` + Repository *Repository `json:"repository,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` + + // The following fields are only populated by Webhook events. + HeadCommit *Commit `json:"head_commit,omitempty"` +} + +func (c CheckRun) String() string { + return Stringify(c) +} + +func (c CheckSuite) String() string { + return Stringify(c) +} + +// GetCheckRun gets a check-run for a repository. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#get-a-single-check-run +func (s *ChecksService) GetCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// GetCheckSuite gets a single check suite. 
+// +// GitHub API docs: https://developer.github.com/v3/checks/suites/#get-a-single-check-suite +func (s *ChecksService) GetCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*CheckSuite, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v", owner, repo, checkSuiteID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkSuite := new(CheckSuite) + resp, err := s.client.Do(ctx, req, checkSuite) + if err != nil { + return nil, resp, err + } + + return checkSuite, resp, nil +} + +// CreateCheckRunOptions sets up parameters needed to create a CheckRun. +type CreateCheckRunOptions struct { + Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) + HeadBranch string `json:"head_branch"` // The name of the branch to perform a check against. (Required.) + HeadSHA string `json:"head_sha"` // The SHA of the commit. (Required.) + DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) + ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) + Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) + Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) + StartedAt *Timestamp `json:"started_at,omitempty"` // The time that the check run began. (Optional.) + CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) + Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) + Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) +} + +// CheckRunAction exposes further actions the integrator can perform, which a user may trigger. +type CheckRunAction struct { + Label string `json:"label"` // The text to be displayed on a button in the web UI. The maximum size is 20 characters. (Required.) + Description string `json:"description"` // A short explanation of what this action would do. The maximum size is 40 characters. (Required.) + Identifier string `json:"identifier"` // A reference for the action on the integrator's system. The maximum size is 20 characters. (Required.) +} + +// CreateCheckRun creates a check run for repository. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#create-a-check-run +func (s *ChecksService) CreateCheckRun(ctx context.Context, owner, repo string, opt CreateCheckRunOptions) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs", owner, repo) + req, err := s.client.NewRequest("POST", u, opt) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// UpdateCheckRunOptions sets up parameters needed to update a CheckRun. +type UpdateCheckRunOptions struct { + Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) 
+ HeadBranch *string `json:"head_branch,omitempty"` // The name of the branch to perform a check against. (Optional.) + HeadSHA *string `json:"head_sha,omitempty"` // The SHA of the commit. (Optional.) + DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) + ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) + Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) + Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) + CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) + Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) + Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) +} + +// UpdateCheckRun updates a check run for a specific commit in a repository. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#update-a-check-run +func (s *ChecksService) UpdateCheckRun(ctx context.Context, owner, repo string, checkRunID int64, opt UpdateCheckRunOptions) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) + req, err := s.client.NewRequest("PATCH", u, opt) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// ListCheckRunAnnotations lists the annotations for a check run. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#list-annotations-for-a-check-run +func (s *ChecksService) ListCheckRunAnnotations(ctx context.Context, owner, repo string, checkRunID int64, opt *ListOptions) ([]*CheckRunAnnotation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v/annotations", owner, repo, checkRunID) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunAnnotations []*CheckRunAnnotation + resp, err := s.client.Do(ctx, req, &checkRunAnnotations) + if err != nil { + return nil, resp, err + } + + return checkRunAnnotations, resp, nil +} + +// ListCheckRunsOptions represents parameters to list check runs. +type ListCheckRunsOptions struct { + CheckName *string `url:"check_name,omitempty"` // Returns check runs with the specified name. + Status *string `url:"status,omitempty"` // Returns check runs with the specified status. Can be one of "queued", "in_progress", or "completed". + Filter *string `url:"filter,omitempty"` // Filters check runs by their completed_at timestamp. Can be one of "latest" (returning the most recent check runs) or "all". Default: "latest" + + ListOptions +} + +// ListCheckRunsResults represents the result of a check run list. 
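A brief sketch of how the check-run endpoints above might be driven end to end, assuming an authenticated *github.Client; the owner, repo, branch, and SHA values are placeholders:

    import (
        "context"
        "time"

        "github.com/google/go-github/github"
    )

    // reportCheck opens a check run for a commit, then marks it completed with a
    // conclusion and a short output summary, using the option types defined above.
    func reportCheck(ctx context.Context, client *github.Client, owner, repo, branch, sha string) error {
        run, _, err := client.Checks.CreateCheckRun(ctx, owner, repo, github.CreateCheckRunOptions{
            Name:       "code-coverage",
            HeadBranch: branch,
            HeadSHA:    sha,
            Status:     github.String("in_progress"),
            StartedAt:  &github.Timestamp{Time: time.Now()},
        })
        if err != nil {
            return err
        }
        _, _, err = client.Checks.UpdateCheckRun(ctx, owner, repo, *run.ID, github.UpdateCheckRunOptions{
            Name:        "code-coverage",
            Status:      github.String("completed"),
            Conclusion:  github.String("success"),
            CompletedAt: &github.Timestamp{Time: time.Now()},
            Output: &github.CheckRunOutput{
                Title:   github.String("Coverage report"),
                Summary: github.String("All checks passed."),
            },
        })
        return err
    }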
+type ListCheckRunsResults struct { + Total *int `json:"total_count,omitempty"` + CheckRuns []*CheckRun `json:"check_runs,omitempty"` +} + +// ListCheckRunsForRef lists check runs for a specific ref. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#list-check-runs-for-a-specific-ref +func (s *ChecksService) ListCheckRunsForRef(ctx context.Context, owner, repo, ref string, opt *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/check-runs", owner, repo, url.QueryEscape(ref)) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunResults *ListCheckRunsResults + resp, err := s.client.Do(ctx, req, &checkRunResults) + if err != nil { + return nil, resp, err + } + + return checkRunResults, resp, nil +} + +// ListCheckRunsCheckSuite lists check runs for a check suite. +// +// GitHub API docs: https://developer.github.com/v3/checks/runs/#list-check-runs-in-a-check-suite +func (s *ChecksService) ListCheckRunsCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64, opt *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v/check-runs", owner, repo, checkSuiteID) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunResults *ListCheckRunsResults + resp, err := s.client.Do(ctx, req, &checkRunResults) + if err != nil { + return nil, resp, err + } + + return checkRunResults, resp, nil +} + +// ListCheckSuiteOptions represents parameters to list check suites. +type ListCheckSuiteOptions struct { + CheckName *string `url:"check_name,omitempty"` // Filters checks suites by the name of the check run. + AppID *int `url:"app_id,omitempty"` // Filters check suites by GitHub App id. + + ListOptions +} + +// ListCheckSuiteResults represents the result of a check run list. +type ListCheckSuiteResults struct { + Total *int `json:"total_count,omitempty"` + CheckSuites []*CheckSuite `json:"check_suites,omitempty"` +} + +// ListCheckSuitesForRef lists check suite for a specific ref. +// +// GitHub API docs: https://developer.github.com/v3/checks/suites/#list-check-suites-for-a-specific-ref +func (s *ChecksService) ListCheckSuitesForRef(ctx context.Context, owner, repo, ref string, opt *ListCheckSuiteOptions) (*ListCheckSuiteResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/check-suites", owner, repo, url.QueryEscape(ref)) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkSuiteResults *ListCheckSuiteResults + resp, err := s.client.Do(ctx, req, &checkSuiteResults) + if err != nil { + return nil, resp, err + } + + return checkSuiteResults, resp, nil +} + +// AutoTriggerCheck enables or disables automatic creation of CheckSuite events upon pushes to the repository. +type AutoTriggerCheck struct { + AppID *int64 `json:"app_id,omitempty"` // The id of the GitHub App. (Required.) 
+ Setting *bool `json:"setting,omitempty"` // Set to "true" to enable automatic creation of CheckSuite events upon pushes to the repository, or "false" to disable them. Default: "true" (Required.) +} + +// CheckSuitePreferenceOptions set options for check suite preferences for a repository. +type CheckSuitePreferenceOptions struct { + PreferenceList *PreferenceList `json:"auto_trigger_checks,omitempty"` // A list of auto trigger checks that can be set for a check suite in a repository. +} + +// CheckSuitePreferenceResults represents the results of the preference set operation. +type CheckSuitePreferenceResults struct { + Preferences *PreferenceList `json:"preferences,omitempty"` + Repository *Repository `json:"repository,omitempty"` +} + +// PreferenceList represents a list of auto trigger checks for repository +type PreferenceList struct { + AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. +} + +// SetCheckSuitePreferences changes the default automatic flow when creating check suites. +// +// GitHub API docs: https://developer.github.com/v3/checks/suites/#set-preferences-for-check-suites-on-a-repository +func (s *ChecksService) SetCheckSuitePreferences(ctx context.Context, owner, repo string, opt CheckSuitePreferenceOptions) (*CheckSuitePreferenceResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/preferences", owner, repo) + req, err := s.client.NewRequest("PATCH", u, opt) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkSuitePrefResults *CheckSuitePreferenceResults + resp, err := s.client.Do(ctx, req, &checkSuitePrefResults) + if err != nil { + return nil, resp, err + } + + return checkSuitePrefResults, resp, nil +} + +// CreateCheckSuiteOptions sets up parameters to manually create a check suites +type CreateCheckSuiteOptions struct { + HeadSHA string `json:"head_sha"` // The sha of the head commit. (Required.) + HeadBranch *string `json:"head_branch,omitempty"` // The name of the head branch where the code changes are implemented. +} + +// CreateCheckSuite manually creates a check suite for a repository. +// +// GitHub API docs: https://developer.github.com/v3/checks/suites/#create-a-check-suite +func (s *ChecksService) CreateCheckSuite(ctx context.Context, owner, repo string, opt CreateCheckSuiteOptions) (*CheckSuite, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites", owner, repo) + req, err := s.client.NewRequest("POST", u, opt) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkSuite := new(CheckSuite) + resp, err := s.client.Do(ctx, req, checkSuite) + if err != nil { + return nil, resp, err + } + + return checkSuite, resp, nil +} + +// ReRequestCheckSuite triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. 
+// +// GitHub API docs: https://developer.github.com/v3/checks/suites/#rerequest-check-suite +func (s *ChecksService) ReRequestCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v/rerequest", owner, repo, checkSuiteID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + resp, err := s.client.Do(ctx, req, nil) + return resp, err +} diff --git a/vendor/github.com/google/go-github/github/checks_test.go b/vendor/github.com/google/go-github/github/checks_test.go new file mode 100644 index 0000000..3170f70 --- /dev/null +++ b/vendor/github.com/google/go-github/github/checks_test.go @@ -0,0 +1,479 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "reflect" + "testing" + "time" +) + +func TestChecksService_GetCheckRun(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-runs/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{ + "id": 1, + "name":"testCheckRun", + "status": "completed", + "conclusion": "neutral", + "started_at": "2018-05-04T01:14:52Z", + "completed_at": "2018-05-04T01:14:52Z"}`) + }) + checkRun, _, err := client.Checks.GetCheckRun(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("Checks.GetCheckRun return error: %v", err) + } + startedAt, _ := time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + completeAt, _ := time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + + want := &CheckRun{ + ID: Int64(1), + Status: String("completed"), + Conclusion: String("neutral"), + StartedAt: &Timestamp{startedAt}, + CompletedAt: &Timestamp{completeAt}, + Name: String("testCheckRun"), + } + if !reflect.DeepEqual(checkRun, want) { + t.Errorf("Checks.GetCheckRun return %+v, want %+v", checkRun, want) + } +} + +func TestChecksService_GetCheckSuite(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-suites/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{ + "id": 1, + "head_branch":"master", + "head_sha": "deadbeef", + "conclusion": "neutral", + "before": "deadbeefb", + "after": "deadbeefa", + "status": "completed"}`) + }) + checkSuite, _, err := client.Checks.GetCheckSuite(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("Checks.GetCheckSuite return error: %v", err) + } + want := &CheckSuite{ + ID: Int64(1), + HeadBranch: String("master"), + HeadSHA: String("deadbeef"), + AfterSHA: String("deadbeefa"), + BeforeSHA: String("deadbeefb"), + Status: String("completed"), + Conclusion: String("neutral"), + } + if !reflect.DeepEqual(checkSuite, want) { + t.Errorf("Checks.GetCheckSuite return %+v, want %+v", checkSuite, want) + } +} + +func TestChecksService_CreateCheckRun(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-runs", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{ + "id": 1, + "name":"testCreateCheckRun", + "head_sha":"deadbeef", 
+ "status": "in_progress", + "conclusion": null, + "started_at": "2018-05-04T01:14:52Z", + "completed_at": null, + "output":{"title": "Mighty test report", "summary":"", "text":""}}`) + }) + startedAt, _ := time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + checkRunOpt := CreateCheckRunOptions{ + HeadBranch: "master", + Name: "testCreateCheckRun", + HeadSHA: "deadbeef", + Status: String("in_progress"), + StartedAt: &Timestamp{startedAt}, + Output: &CheckRunOutput{ + Title: String("Mighty test report"), + Summary: String(""), + Text: String(""), + }, + } + + checkRun, _, err := client.Checks.CreateCheckRun(context.Background(), "o", "r", checkRunOpt) + if err != nil { + t.Errorf("Checks.CreateCheckRun return error: %v", err) + } + + want := &CheckRun{ + ID: Int64(1), + Status: String("in_progress"), + StartedAt: &Timestamp{startedAt}, + HeadSHA: String("deadbeef"), + Name: String("testCreateCheckRun"), + Output: &CheckRunOutput{ + Title: String("Mighty test report"), + Summary: String(""), + Text: String(""), + }, + } + if !reflect.DeepEqual(checkRun, want) { + t.Errorf("Checks.CreateCheckRun return %+v, want %+v", checkRun, want) + } +} + +func TestChecksService_ListCheckRunAnnotations(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-runs/1/annotations", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + testFormValues(t, r, values{ + "page": "1", + }) + fmt.Fprint(w, `[{ + "path": "README.md", + "blob_href": "https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/README.md", + "start_line": 2, + "end_line": 2, + "annotation_level": "warning", + "message": "Check your spelling for 'banaas'.", + "title": "Spell check", + "raw_details": "Do you mean 'bananas' or 'banana'?"}]`, + ) + }) + + checkRunAnnotations, _, err := client.Checks.ListCheckRunAnnotations(context.Background(), "o", "r", 1, &ListOptions{Page: 1}) + if err != nil { + t.Errorf("Checks.ListCheckRunAnnotations return error: %v", err) + } + + want := []*CheckRunAnnotation{{ + Path: String("README.md"), + BlobHRef: String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/README.md"), + StartLine: Int(2), + EndLine: Int(2), + AnnotationLevel: String("warning"), + Message: String("Check your spelling for 'banaas'."), + RawDetails: String("Do you mean 'bananas' or 'banana'?"), + Title: String("Spell check"), + }} + + if !reflect.DeepEqual(checkRunAnnotations, want) { + t.Errorf("Checks.ListCheckRunAnnotations returned %+v, want %+v", checkRunAnnotations, want) + } +} + +func TestChecksService_UpdateCheckRun(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-runs/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{ + "id": 1, + "name":"testUpdateCheckRun", + "head_sha":"deadbeef", + "status": "completed", + "conclusion": "neutral", + "started_at": "2018-05-04T01:14:52Z", + "completed_at": "2018-05-04T01:14:52Z", + "output":{"title": "Mighty test report", "summary":"There are 0 failures, 2 warnings and 1 notice", "text":"You may have misspelled some words."}}`) + }) + startedAt, _ := time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + updateCheckRunOpt := UpdateCheckRunOptions{ + HeadBranch: String("master"), + Name: "testUpdateCheckRun", + HeadSHA: String("deadbeef"), + 
Status: String("completed"), + CompletedAt: &Timestamp{startedAt}, + Output: &CheckRunOutput{ + Title: String("Mighty test report"), + Summary: String("There are 0 failures, 2 warnings and 1 notice"), + Text: String("You may have misspelled some words."), + }, + } + + checkRun, _, err := client.Checks.UpdateCheckRun(context.Background(), "o", "r", 1, updateCheckRunOpt) + if err != nil { + t.Errorf("Checks.UpdateCheckRun return error: %v", err) + } + + want := &CheckRun{ + ID: Int64(1), + Status: String("completed"), + StartedAt: &Timestamp{startedAt}, + CompletedAt: &Timestamp{startedAt}, + Conclusion: String("neutral"), + HeadSHA: String("deadbeef"), + Name: String("testUpdateCheckRun"), + Output: &CheckRunOutput{ + Title: String("Mighty test report"), + Summary: String("There are 0 failures, 2 warnings and 1 notice"), + Text: String("You may have misspelled some words."), + }, + } + if !reflect.DeepEqual(checkRun, want) { + t.Errorf("Checks.UpdateCheckRun return %+v, want %+v", checkRun, want) + } +} + +func TestChecksService_ListCheckRunsForRef(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/commits/master/check-runs", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + testFormValues(t, r, values{ + "check_name": "testing", + "page": "1", + "status": "completed", + "filter": "all", + }) + fmt.Fprint(w, `{"total_count":1, + "check_runs": [{ + "id": 1, + "head_sha": "deadbeef", + "status": "completed", + "conclusion": "neutral", + "started_at": "2018-05-04T01:14:52Z", + "completed_at": "2018-05-04T01:14:52Z"}]}`, + ) + }) + + opt := &ListCheckRunsOptions{ + CheckName: String("testing"), + Status: String("completed"), + Filter: String("all"), + ListOptions: ListOptions{Page: 1}, + } + checkRuns, _, err := client.Checks.ListCheckRunsForRef(context.Background(), "o", "r", "master", opt) + if err != nil { + t.Errorf("Checks.ListCheckRunsForRef return error: %v", err) + } + startedAt, _ := time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + want := &ListCheckRunsResults{ + Total: Int(1), + CheckRuns: []*CheckRun{{ + ID: Int64(1), + Status: String("completed"), + StartedAt: &Timestamp{startedAt}, + CompletedAt: &Timestamp{startedAt}, + Conclusion: String("neutral"), + HeadSHA: String("deadbeef"), + }}, + } + + if !reflect.DeepEqual(checkRuns, want) { + t.Errorf("Checks.ListCheckRunsForRef returned %+v, want %+v", checkRuns, want) + } +} + +func TestChecksService_ListCheckRunsCheckSuite(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-suites/1/check-runs", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + testFormValues(t, r, values{ + "check_name": "testing", + "page": "1", + "status": "completed", + "filter": "all", + }) + fmt.Fprint(w, `{"total_count":1, + "check_runs": [{ + "id": 1, + "head_sha": "deadbeef", + "status": "completed", + "conclusion": "neutral", + "started_at": "2018-05-04T01:14:52Z", + "completed_at": "2018-05-04T01:14:52Z"}]}`, + ) + }) + + opt := &ListCheckRunsOptions{ + CheckName: String("testing"), + Status: String("completed"), + Filter: String("all"), + ListOptions: ListOptions{Page: 1}, + } + checkRuns, _, err := client.Checks.ListCheckRunsCheckSuite(context.Background(), "o", "r", 1, opt) + if err != nil { + t.Errorf("Checks.ListCheckRunsCheckSuite return error: %v", err) + } + startedAt, _ := 
time.Parse(time.RFC3339, "2018-05-04T01:14:52Z") + want := &ListCheckRunsResults{ + Total: Int(1), + CheckRuns: []*CheckRun{{ + ID: Int64(1), + Status: String("completed"), + StartedAt: &Timestamp{startedAt}, + CompletedAt: &Timestamp{startedAt}, + Conclusion: String("neutral"), + HeadSHA: String("deadbeef"), + }}, + } + + if !reflect.DeepEqual(checkRuns, want) { + t.Errorf("Checks.ListCheckRunsCheckSuite returned %+v, want %+v", checkRuns, want) + } +} + +func TestChecksService_ListCheckSuiteForRef(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/commits/master/check-suites", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + testFormValues(t, r, values{ + "check_name": "testing", + "page": "1", + "app_id": "2", + }) + fmt.Fprint(w, `{"total_count":1, + "check_suites": [{ + "id": 1, + "head_sha": "deadbeef", + "head_branch": "master", + "status": "completed", + "conclusion": "neutral", + "before": "deadbeefb", + "after": "deadbeefa"}]}`, + ) + }) + + opt := &ListCheckSuiteOptions{ + CheckName: String("testing"), + AppID: Int(2), + ListOptions: ListOptions{Page: 1}, + } + checkSuites, _, err := client.Checks.ListCheckSuitesForRef(context.Background(), "o", "r", "master", opt) + if err != nil { + t.Errorf("Checks.ListCheckSuitesForRef return error: %v", err) + } + want := &ListCheckSuiteResults{ + Total: Int(1), + CheckSuites: []*CheckSuite{{ + ID: Int64(1), + Status: String("completed"), + Conclusion: String("neutral"), + HeadSHA: String("deadbeef"), + HeadBranch: String("master"), + BeforeSHA: String("deadbeefb"), + AfterSHA: String("deadbeefa"), + }}, + } + + if !reflect.DeepEqual(checkSuites, want) { + t.Errorf("Checks.ListCheckSuitesForRef returned %+v, want %+v", checkSuites, want) + } +} + +func TestChecksService_SetCheckSuitePreferences(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-suites/preferences", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{"preferences":{"auto_trigger_checks":[{"app_id": 2,"setting": false}]}}`) + }) + p := &PreferenceList{ + AutoTriggerChecks: []*AutoTriggerCheck{{ + AppID: Int64(2), + Setting: Bool(false), + }}, + } + opt := CheckSuitePreferenceOptions{PreferenceList: p} + prefResults, _, err := client.Checks.SetCheckSuitePreferences(context.Background(), "o", "r", opt) + if err != nil { + t.Errorf("Checks.SetCheckSuitePreferences return error: %v", err) + } + + want := &CheckSuitePreferenceResults{ + Preferences: p, + } + + if !reflect.DeepEqual(prefResults, want) { + t.Errorf("Checks.SetCheckSuitePreferences return %+v, want %+v", prefResults, want) + } +} + +func TestChecksService_CreateCheckSuite(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-suites", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + fmt.Fprint(w, `{ + "id": 2, + "head_branch":"master", + "head_sha":"deadbeef", + "status": "completed", + "conclusion": "neutral", + "before": "deadbeefb", + "after": "deadbeefa"}`) + }) + + checkSuiteOpt := CreateCheckSuiteOptions{ + HeadSHA: "deadbeef", + HeadBranch: String("master"), + } + + checkSuite, _, err := client.Checks.CreateCheckSuite(context.Background(), "o", "r", checkSuiteOpt) + if err != nil { + 
t.Errorf("Checks.CreateCheckSuite return error: %v", err) + } + + want := &CheckSuite{ + ID: Int64(2), + Status: String("completed"), + HeadSHA: String("deadbeef"), + HeadBranch: String("master"), + Conclusion: String("neutral"), + BeforeSHA: String("deadbeefb"), + AfterSHA: String("deadbeefa"), + } + if !reflect.DeepEqual(checkSuite, want) { + t.Errorf("Checks.CreateCheckSuite return %+v, want %+v", checkSuite, want) + } +} + +func TestChecksService_ReRequestCheckSuite(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/check-suites/1/rerequest", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeCheckRunsPreview) + w.WriteHeader(http.StatusCreated) + }) + resp, err := client.Checks.ReRequestCheckSuite(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("Checks.ReRequestCheckSuite return error: %v", err) + } + if got, want := resp.StatusCode, http.StatusCreated; got != want { + t.Errorf("Checks.ReRequestCheckSuite = %v, want %v", got, want) + } +} diff --git a/vendor/github.com/google/go-github/github/doc.go b/vendor/github.com/google/go-github/github/doc.go index abbcf64..21dc231 100644 --- a/vendor/github.com/google/go-github/github/doc.go +++ b/vendor/github.com/google/go-github/github/doc.go @@ -8,7 +8,8 @@ Package github provides a client for using the GitHub API. Usage: - import "github.com/google/go-github/github" + import "github.com/google/go-github/v24/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) + import "github.com/google/go-github/github" // with go modules disabled Construct a new GitHub client, then use the various services on the client to access different parts of the GitHub API. For example: @@ -30,6 +31,13 @@ The services of a client divide the API into logical chunks and correspond to the structure of the GitHub API documentation at https://developer.github.com/v3/. +NOTE: Using the https://godoc.org/context package, one can easily +pass cancelation signals and deadlines to various services of the client for +handling a request. In case there is no context available, then context.Background() +can be used as a starting point. + +For more sample code snippets, head over to the https://github.com/google/go-github/tree/master/example directory. + Authentication The go-github library does not directly handle authentication. Instead, when @@ -85,9 +93,11 @@ Rate Limiting GitHub imposes a rate limit on all API clients. Unauthenticated clients are limited to 60 requests per hour, while authenticated clients can make up to -5,000 requests per hour. To receive the higher rate limit when making calls -that are not issued on behalf of a user, use the -UnauthenticatedRateLimitedTransport. +5,000 requests per hour. The Search API has a custom rate limit. Unauthenticated +clients are limited to 10 requests per minute, while authenticated clients +can make up to 30 requests per minute. To receive the higher rate limit when +making calls that are not issued on behalf of a user, +use UnauthenticatedRateLimitedTransport. The returned Response.Rate value contains the rate limit information from the most recent API call. If a recent enough response isn't @@ -174,16 +184,5 @@ github.Response struct. opt.Page = resp.NextPage } -Google App Engine - -Go on App Engine Classic (which as of this writing uses Go 1.6) can not use -the "context" import and still relies on "golang.org/x/net/context". 
-As a result, if you wish to continue to use "go-github" on App Engine Classic, -you will need to rewrite all the "context" imports using the following command: - - gofmt -w -r '"context" -> "golang.org/x/net/context"' *.go - -See "with_appengine.go" for more details. - */ package github diff --git a/vendor/github.com/google/go-github/github/event.go b/vendor/github.com/google/go-github/github/event.go new file mode 100644 index 0000000..04c8845 --- /dev/null +++ b/vendor/github.com/google/go-github/github/event.go @@ -0,0 +1,126 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "encoding/json" + "time" +) + +// Event represents a GitHub event. +type Event struct { + Type *string `json:"type,omitempty"` + Public *bool `json:"public,omitempty"` + RawPayload *json.RawMessage `json:"payload,omitempty"` + Repo *Repository `json:"repo,omitempty"` + Actor *User `json:"actor,omitempty"` + Org *Organization `json:"org,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + ID *string `json:"id,omitempty"` +} + +func (e Event) String() string { + return Stringify(e) +} + +// ParsePayload parses the event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +func (e *Event) ParsePayload() (payload interface{}, err error) { + switch *e.Type { + case "CheckRunEvent": + payload = &CheckRunEvent{} + case "CheckSuiteEvent": + payload = &CheckSuiteEvent{} + case "CommitCommentEvent": + payload = &CommitCommentEvent{} + case "CreateEvent": + payload = &CreateEvent{} + case "DeleteEvent": + payload = &DeleteEvent{} + case "DeploymentEvent": + payload = &DeploymentEvent{} + case "DeploymentStatusEvent": + payload = &DeploymentStatusEvent{} + case "ForkEvent": + payload = &ForkEvent{} + case "GitHubAppAuthorizationEvent": + payload = &GitHubAppAuthorizationEvent{} + case "GollumEvent": + payload = &GollumEvent{} + case "InstallationEvent": + payload = &InstallationEvent{} + case "InstallationRepositoriesEvent": + payload = &InstallationRepositoriesEvent{} + case "IssueCommentEvent": + payload = &IssueCommentEvent{} + case "IssuesEvent": + payload = &IssuesEvent{} + case "LabelEvent": + payload = &LabelEvent{} + case "MarketplacePurchaseEvent": + payload = &MarketplacePurchaseEvent{} + case "MemberEvent": + payload = &MemberEvent{} + case "MembershipEvent": + payload = &MembershipEvent{} + case "MilestoneEvent": + payload = &MilestoneEvent{} + case "OrganizationEvent": + payload = &OrganizationEvent{} + case "OrgBlockEvent": + payload = &OrgBlockEvent{} + case "PageBuildEvent": + payload = &PageBuildEvent{} + case "PingEvent": + payload = &PingEvent{} + case "ProjectEvent": + payload = &ProjectEvent{} + case "ProjectCardEvent": + payload = &ProjectCardEvent{} + case "ProjectColumnEvent": + payload = &ProjectColumnEvent{} + case "PublicEvent": + payload = &PublicEvent{} + case "PullRequestEvent": + payload = &PullRequestEvent{} + case "PullRequestReviewEvent": + payload = &PullRequestReviewEvent{} + case "PullRequestReviewCommentEvent": + payload = &PullRequestReviewCommentEvent{} + case "PushEvent": + payload = &PushEvent{} + case "ReleaseEvent": + payload = &ReleaseEvent{} + case "RepositoryEvent": + payload = &RepositoryEvent{} + case "RepositoryVulnerabilityAlertEvent": + payload = &RepositoryVulnerabilityAlertEvent{} + case "StatusEvent": + payload = &StatusEvent{} + case "TeamEvent": + payload 
= &TeamEvent{} + case "TeamAddEvent": + payload = &TeamAddEvent{} + case "WatchEvent": + payload = &WatchEvent{} + } + err = json.Unmarshal(*e.RawPayload, &payload) + return payload, err +} + +// Payload returns the parsed event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +// +// Deprecated: Use ParsePayload instead, which returns an error +// rather than panics if JSON unmarshaling raw payload fails. +func (e *Event) Payload() (payload interface{}) { + var err error + payload, err = e.ParsePayload() + if err != nil { + panic(err) + } + return payload +} diff --git a/vendor/github.com/google/go-github/github/event_types.go b/vendor/github.com/google/go-github/github/event_types.go index c6b29b9..27498a9 100644 --- a/vendor/github.com/google/go-github/github/event_types.go +++ b/vendor/github.com/google/go-github/github/event_types.go @@ -7,6 +7,47 @@ package github +// RequestedAction is included in a CheckRunEvent when a user has invoked an action, +// i.e. when the CheckRunEvent's Action field is "requested_action". +type RequestedAction struct { + Identifier string `json:"identifier"` // The integrator reference of the action requested by the user. +} + +// CheckRunEvent is triggered when a check run is "created", "updated", or "re-requested". +// The Webhook event name is "check_run". +// +// GitHub API docs: https://developer.github.com/v3/activity/events/types/#checkrunevent +type CheckRunEvent struct { + CheckRun *CheckRun `json:"check_run,omitempty"` + // The action performed. Can be "created", "updated", "rerequested" or "requested_action". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The action requested by the user. Populated when the Action is "requested_action". + RequestedAction *RequestedAction `json:"requested_action,omitempty"` // +} + +// CheckSuiteEvent is triggered when a check suite is "completed", "requested", or "re-requested". +// The Webhook event name is "check_suite". +// +// GitHub API docs: https://developer.github.com/v3/activity/events/types/#checksuiteevent +type CheckSuiteEvent struct { + CheckSuite *CheckSuite `json:"check_suite,omitempty"` + // The action performed. Can be "completed", "requested" or "re-requested". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + // CommitCommentEvent is triggered when a commit comment is created. // The Webhook event name is "commit_comment". // @@ -107,6 +148,18 @@ type ForkEvent struct { Installation *Installation `json:"installation,omitempty"` } +// GitHubAppAuthorizationEvent is triggered when a user's authorization for a +// GitHub Application is revoked. +// +// GitHub API docs: https://developer.github.com/v3/activity/events/types/#githubappauthorizationevent +type GitHubAppAuthorizationEvent struct { + // The action performed. Can be "revoked". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. 
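Editorial aside, not part of the vendored patch: the new event.go above routes every activity event through Event.ParsePayload, which picks a concrete payload struct from Event.Type before unmarshaling the raw JSON. A minimal usage sketch under the v24 import path shown in doc.go; the repository name is a placeholder, and Activity.ListRepositoryEvents is just one way to obtain events.

package github_test

import (
	"context"
	"fmt"

	"github.com/google/go-github/v24/github"
)

func ExampleEvent_ParsePayload() {
	client := github.NewClient(nil)

	// Fetch some events; any source of *github.Event works the same way.
	events, _, err := client.Activity.ListRepositoryEvents(context.Background(), "google", "go-github", nil)
	if err != nil {
		fmt.Println(err)
		return
	}

	for _, e := range events {
		payload, err := e.ParsePayload()
		if err != nil {
			fmt.Printf("cannot parse %q payload: %v\n", e.GetType(), err)
			continue
		}
		// ParsePayload returns one of the concrete types from the switch above.
		switch p := payload.(type) {
		case *github.PushEvent:
			fmt.Printf("push to %s\n", p.GetRef())
		case *github.IssuesEvent:
			fmt.Printf("issue %q was %s\n", p.GetIssue().GetTitle(), p.GetAction())
		default:
			fmt.Printf("unhandled payload type %T\n", p)
		}
	}
}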
+ Sender *User `json:"sender,omitempty"` +} + // Page represents a single Wiki page. type Page struct { PageName *string `json:"page_name,omitempty"` @@ -194,6 +247,7 @@ type TeamChange struct { type InstallationEvent struct { // The action that was performed. Can be either "created" or "deleted". Action *string `json:"action,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` Sender *User `json:"sender,omitempty"` Installation *Installation `json:"installation,omitempty"` } @@ -268,6 +322,24 @@ type LabelEvent struct { Installation *Installation `json:"installation,omitempty"` } +// MarketplacePurchaseEvent is triggered when a user purchases, cancels, or changes +// their GitHub Marketplace plan. +// Webhook event name "marketplace_purchase". +// +// Github API docs: https://developer.github.com/v3/activity/events/types/#marketplacepurchaseevent +type MarketplacePurchaseEvent struct { + // Action is the action that was performed. Possible values are: + // "purchased", "cancelled", "pending_change", "pending_change_cancelled", "changed". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + EffectiveDate *Timestamp `json:"effective_date,omitempty"` + MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` + PreviousMarketplacePurchase *MarketplacePurchase `json:"previous_marketplace_purchase,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + // MemberEvent is triggered when a user is added as a collaborator to a repository. // The Webhook event name is "member". // @@ -374,7 +446,7 @@ type PageBuildEvent struct { Build *PagesBuild `json:"build,omitempty"` // The following fields are only populated by Webhook events. - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Repo *Repository `json:"repository,omitempty"` Sender *User `json:"sender,omitempty"` Installation *Installation `json:"installation,omitempty"` @@ -387,7 +459,7 @@ type PingEvent struct { // Random string of GitHub zen. Zen *string `json:"zen,omitempty"` // The ID of the webhook that triggered the ping. - HookID *int `json:"hook_id,omitempty"` + HookID *int64 `json:"hook_id,omitempty"` // The webhook configuration. Hook *Hook `json:"hook,omitempty"` Installation *Installation `json:"installation,omitempty"` @@ -416,7 +488,7 @@ type ProjectEvent struct { type ProjectCardEvent struct { Action *string `json:"action,omitempty"` Changes *ProjectCardChange `json:"changes,omitempty"` - AfterID *int `json:"after_id,omitempty"` + AfterID *int64 `json:"after_id,omitempty"` ProjectCard *ProjectCard `json:"project_card,omitempty"` // The following fields are only populated by Webhook events. @@ -433,7 +505,7 @@ type ProjectCardEvent struct { type ProjectColumnEvent struct { Action *string `json:"action,omitempty"` Changes *ProjectColumnChange `json:"changes,omitempty"` - AfterID *int `json:"after_id,omitempty"` + AfterID *int64 `json:"after_id,omitempty"` ProjectColumn *ProjectColumn `json:"project_column,omitempty"` // The following fields are only populated by Webhook events. @@ -461,20 +533,31 @@ type PublicEvent struct { // // GitHub API docs: https://developer.github.com/v3/activity/events/types/#pullrequestevent type PullRequestEvent struct { - // Action is the action that was performed. Possible values are: "assigned", - // "unassigned", "labeled", "unlabeled", "opened", "closed", or "reopened", - // "synchronize", "edited". 
If the action is "closed" and the merged key is false, + // Action is the action that was performed. Possible values are: + // "assigned", "unassigned", "review_requested", "review_request_removed", "labeled", "unlabeled", + // "opened", "closed", "ready_for_review", "reopened", "synchronize", "edited". + // If the action is "closed" and the merged key is false, // the pull request was closed with unmerged commits. If the action is "closed" // and the merged key is true, the pull request was merged. Action *string `json:"action,omitempty"` + Assignee *User `json:"assignee,omitempty"` Number *int `json:"number,omitempty"` PullRequest *PullRequest `json:"pull_request,omitempty"` // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` + Changes *EditChange `json:"changes,omitempty"` + // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. + // A request affecting multiple reviewers at once is split into multiple + // such event deliveries, each with a single, different RequestedReviewer. + RequestedReviewer *User `json:"requested_reviewer,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` } // PullRequestReviewEvent is triggered when a review is submitted on a pull @@ -521,7 +604,7 @@ type PullRequestReviewCommentEvent struct { // // GitHub API docs: https://developer.github.com/v3/activity/events/types/#pushevent type PushEvent struct { - PushID *int `json:"push_id,omitempty"` + PushID *int64 `json:"push_id,omitempty"` Head *string `json:"head,omitempty"` Ref *string `json:"ref,omitempty"` Size *int `json:"size,omitempty"` @@ -573,38 +656,39 @@ func (p PushEventCommit) String() string { // PushEventRepository represents the repo object in a PushEvent payload. 
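Editorial aside, not part of the vendored patch: the expanded PullRequestEvent above adds RequestedReviewer and Label and notes that a request naming several reviewers fans out into one delivery per reviewer. A hedged sketch of a webhook endpoint consuming those fields; it assumes the library's ValidatePayload, WebHookType and ParseWebHook helpers (defined elsewhere in the package, not in this hunk), and the secret and port are placeholders.

package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/google/go-github/v24/github"
)

// prHandler reacts to pull_request deliveries using the fields added above.
func prHandler(w http.ResponseWriter, r *http.Request) {
	payload, err := github.ValidatePayload(r, []byte("my-webhook-secret"))
	if err != nil {
		http.Error(w, "bad signature", http.StatusForbidden)
		return
	}
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, "bad payload", http.StatusBadRequest)
		return
	}
	pr, ok := event.(*github.PullRequestEvent)
	if !ok {
		return
	}
	switch pr.GetAction() {
	case "review_requested":
		// One delivery arrives per requested reviewer, as the comment above notes.
		log.Printf("review requested from %s", pr.GetRequestedReviewer().GetLogin())
	case "labeled":
		log.Printf("labeled %s", pr.GetLabel().GetName())
	}
	fmt.Fprint(w, "ok")
}

func main() {
	http.HandleFunc("/webhook", prHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}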
type PushEventRepository struct { - ID *int `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Owner *PushEventRepoOwner `json:"owner,omitempty"` - Private *bool `json:"private,omitempty"` - Description *string `json:"description,omitempty"` - Fork *bool `json:"fork,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Homepage *string `json:"homepage,omitempty"` - Size *int `json:"size,omitempty"` - StargazersCount *int `json:"stargazers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` - Language *string `json:"language,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Organization *string `json:"organization,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + FullName *string `json:"full_name,omitempty"` + Owner *User `json:"owner,omitempty"` + Private *bool `json:"private,omitempty"` + Description *string `json:"description,omitempty"` + Fork *bool `json:"fork,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PushedAt *Timestamp `json:"pushed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Homepage *string `json:"homepage,omitempty"` + Size *int `json:"size,omitempty"` + StargazersCount *int `json:"stargazers_count,omitempty"` + WatchersCount *int `json:"watchers_count,omitempty"` + Language *string `json:"language,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasDownloads *bool `json:"has_downloads,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + HasPages *bool `json:"has_pages,omitempty"` + ForksCount *int `json:"forks_count,omitempty"` + OpenIssuesCount *int `json:"open_issues_count,omitempty"` + DefaultBranch *string `json:"default_branch,omitempty"` + MasterBranch *string `json:"master_branch,omitempty"` + Organization *string `json:"organization,omitempty"` + URL *string `json:"url,omitempty"` + ArchiveURL *string `json:"archive_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + StatusesURL *string `json:"statuses_url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + SSHURL *string `json:"ssh_url,omitempty"` + CloneURL *string `json:"clone_url,omitempty"` + SVNURL *string `json:"svn_url,omitempty"` } // PushEventRepoOwner is a basic representation of user/org in a PushEvent payload. @@ -647,6 +731,27 @@ type RepositoryEvent struct { Installation *Installation `json:"installation,omitempty"` } +// RepositoryVulnerabilityAlertEvent is triggered when a security alert is created, dismissed, or resolved. 
+// +// GitHub API docs: https://developer.github.com/v3/activity/events/types/#repositoryvulnerabilityalertevent +type RepositoryVulnerabilityAlertEvent struct { + // Action is the action that was performed. This can be: "create", "dismiss", "resolve". + Action *string `json:"action,omitempty"` + + //The security alert of the vulnerable dependency. + Alert *struct { + ID *int64 `json:"id,omitempty"` + AffectedRange *string `json:"affected_range,omitempty"` + AffectedPackageName *string `json:"affected_package_name,omitempty"` + ExternalReference *string `json:"external_reference,omitempty"` + ExternalIdentifier *string `json:"external_identifier,omitempty"` + FixedIn *string `json:"fixed_in,omitempty"` + Dismisser *User `json:"dismisser,omitempty"` + DismissReason *string `json:"dismiss_reason,omitempty"` + DismissedAt *Timestamp `json:"dismissed_at,omitempty"` + } `json:"alert,omitempty"` +} + // StatusEvent is triggered when the status of a Git commit changes. // The Webhook event name is "status". // @@ -663,7 +768,7 @@ type StatusEvent struct { Branches []*Branch `json:"branches,omitempty"` // The following fields are only populated by Webhook events. - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Name *string `json:"name,omitempty"` Context *string `json:"context,omitempty"` Commit *RepositoryCommit `json:"commit,omitempty"` diff --git a/vendor/github.com/google/go-github/github/examples_test.go b/vendor/github.com/google/go-github/github/examples_test.go index f09d650..7c0eda2 100644 --- a/vendor/github.com/google/go-github/github/examples_test.go +++ b/vendor/github.com/google/go-github/github/examples_test.go @@ -3,6 +3,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// These examples are inlined in godoc. + package github_test import ( @@ -10,7 +12,7 @@ import ( "fmt" "log" - "github.com/google/go-github/github" + "github.com/google/go-github/v24/github" ) func ExampleClient_Markdown() { @@ -59,6 +61,35 @@ func ExampleRepositoriesService_List() { fmt.Printf("Recently updated repositories by %q: %v", user, github.Stringify(repos)) } +func ExampleRepositoriesService_CreateFile() { + // In this example we're creating a new file in a repository using the + // Contents API. Only 1 file per commit can be managed through that API. + + // Note that authentication is needed here as you are performing a modification + // so you will need to modify the example to provide an oauth client to + // github.NewClient() instead of nil. See the following documentation for more + // information on how to authenticate with the client: + // https://godoc.org/github.com/google/go-github/github#hdr-Authentication + client := github.NewClient(nil) + + ctx := context.Background() + fileContent := []byte("This is the content of my file\nand the 2nd line of it") + + // Note: the file needs to be absent from the repository as you are not + // specifying a SHA reference here. 
+ opts := &github.RepositoryContentFileOptions{ + Message: github.String("This is my commit message"), + Content: fileContent, + Branch: github.String("master"), + Committer: &github.CommitAuthor{Name: github.String("FirstName LastName"), Email: github.String("user@example.com")}, + } + _, _, err := client.Repositories.CreateFile(ctx, "myOrganization", "myRepository", "myNewFile.md", opts) + if err != nil { + fmt.Println(err) + return + } +} + func ExampleUsersService_ListAll() { client := github.NewClient(nil) opts := &github.UserListOptions{} @@ -74,3 +105,66 @@ func ExampleUsersService_ListAll() { // Process users... } } + +func ExamplePullRequestsService_Create() { + // In this example we're creating a PR and displaying the HTML url at the end. + + // Note that authentication is needed here as you are performing a modification + // so you will need to modify the example to provide an oauth client to + // github.NewClient() instead of nil. See the following documentation for more + // information on how to authenticate with the client: + // https://godoc.org/github.com/google/go-github/github#hdr-Authentication + client := github.NewClient(nil) + + newPR := &github.NewPullRequest{ + Title: github.String("My awesome pull request"), + Head: github.String("branch_to_merge"), + Base: github.String("master"), + Body: github.String("This is the description of the PR created with the package `github.com/google/go-github/github`"), + MaintainerCanModify: github.Bool(true), + } + + pr, _, err := client.PullRequests.Create(context.Background(), "myOrganization", "myRepository", newPR) + if err != nil { + fmt.Println(err) + return + } + + fmt.Printf("PR created: %s\n", pr.GetHTMLURL()) +} + +func ExampleTeamsService_ListTeams() { + // This example shows how to get a team ID corresponding to a given team name. + + // Note that authentication is needed here as you are performing a lookup on + // an organization's administrative configuration, so you will need to modify + // the example to provide an oauth client to github.NewClient() instead of nil. + // See the following documentation for more information on how to authenticate + // with the client: + // https://godoc.org/github.com/google/go-github/github#hdr-Authentication + client := github.NewClient(nil) + + teamName := "Developers team" + ctx := context.Background() + opts := &github.ListOptions{} + + for { + teams, resp, err := client.Teams.ListTeams(ctx, "myOrganization", opts) + if err != nil { + fmt.Println(err) + return + } + for _, t := range teams { + if t.GetName() == teamName { + fmt.Printf("Team %q has ID %d\n", teamName, t.GetID()) + return + } + } + if resp.NextPage == 0 { + break + } + opts.Page = resp.NextPage + } + + fmt.Printf("Team %q was not found\n", teamName) +} diff --git a/vendor/github.com/google/go-github/github/gen-accessors.go b/vendor/github.com/google/go-github/github/gen-accessors.go index 131c56c..fe92206 100644 --- a/vendor/github.com/google/go-github/github/gen-accessors.go +++ b/vendor/github.com/google/go-github/github/gen-accessors.go @@ -25,7 +25,6 @@ import ( "sort" "strings" "text/template" - "time" ) const ( @@ -37,8 +36,8 @@ var ( sourceTmpl = template.Must(template.New("source").Parse(source)) - // blacklist lists which "struct.method" combos to not generate. - blacklist = map[string]bool{ + // blacklistStructMethod lists "struct.method" combos to skip. 
+ blacklistStructMethod = map[string]bool{ "RepositoryContent.GetContent": true, "Client.GetBaseURL": true, "Client.GetUploadURL": true, @@ -46,6 +45,10 @@ var ( "RateLimitError.GetResponse": true, "AbuseRateLimitError.GetResponse": true, } + // blacklistStruct lists structs to skip. + blacklistStruct = map[string]bool{ + "Client": true, + } ) func logf(fmt string, args ...interface{}) { @@ -67,7 +70,7 @@ func main() { for pkgName, pkg := range pkgs { t := &templateData{ filename: pkgName + fileSuffix, - Year: time.Now().Year(), + Year: 2017, Package: pkgName, Imports: map[string]string{}, } @@ -95,6 +98,16 @@ func (t *templateData) processAST(f *ast.File) error { if !ok { continue } + // Skip unexported identifiers. + if !ts.Name.IsExported() { + logf("Struct %v is unexported; skipping.", ts.Name) + continue + } + // Check if the struct is blacklisted. + if blacklistStruct[ts.Name.Name] { + logf("Struct %v is blacklisted; skipping.", ts.Name) + continue + } st, ok := ts.Type.(*ast.StructType) if !ok { continue @@ -106,8 +119,14 @@ func (t *templateData) processAST(f *ast.File) error { } fieldName := field.Names[0] - if key := fmt.Sprintf("%v.Get%v", ts.Name, fieldName); blacklist[key] { - logf("Method %v blacklisted; skipping.", key) + // Skip unexported identifiers. + if !fieldName.IsExported() { + logf("Field %v is unexported; skipping.", fieldName) + continue + } + // Check if "struct.method" is blacklisted. + if key := fmt.Sprintf("%v.Get%v", ts.Name, fieldName); blacklistStructMethod[key] { + logf("Method %v is blacklisted; skipping.", key) continue } @@ -139,7 +158,7 @@ func (t *templateData) dump() error { return nil } - // Sort getters by ReceiverType.FieldName + // Sort getters by ReceiverType.FieldName. sort.Sort(byName(t.Getters)) var buf bytes.Buffer @@ -155,7 +174,7 @@ func (t *templateData) dump() error { return ioutil.WriteFile(t.filename, clean, 0644) } -func newGetter(receiverType, fieldName, fieldType, zeroValue string) *getter { +func newGetter(receiverType, fieldName, fieldType, zeroValue string, namedStruct bool) *getter { return &getter{ sortVal: strings.ToLower(receiverType) + "." + strings.ToLower(fieldName), ReceiverVar: strings.ToLower(receiverType[:1]), @@ -163,6 +182,7 @@ func newGetter(receiverType, fieldName, fieldType, zeroValue string) *getter { FieldName: fieldName, FieldType: fieldType, ZeroValue: zeroValue, + NamedStruct: namedStruct, } } @@ -176,13 +196,14 @@ func (t *templateData) addArrayType(x *ast.ArrayType, receiverType, fieldName st return } - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, "[]"+eltType, "nil")) + t.Getters = append(t.Getters, newGetter(receiverType, fieldName, "[]"+eltType, "nil", false)) } func (t *templateData) addIdent(x *ast.Ident, receiverType, fieldName string) { var zeroValue string + var namedStruct = false switch x.String() { - case "int": + case "int", "int64": zeroValue = "0" case "string": zeroValue = `""` @@ -190,11 +211,12 @@ func (t *templateData) addIdent(x *ast.Ident, receiverType, fieldName string) { zeroValue = "false" case "Timestamp": zeroValue = "Timestamp{}" - default: // other structs handled by their receivers directly. 
- return + default: + zeroValue = "nil" + namedStruct = true } - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, x.String(), zeroValue)) + t.Getters = append(t.Getters, newGetter(receiverType, fieldName, x.String(), zeroValue, namedStruct)) } func (t *templateData) addMapType(x *ast.MapType, receiverType, fieldName string) { @@ -218,11 +240,11 @@ func (t *templateData) addMapType(x *ast.MapType, receiverType, fieldName string fieldType := fmt.Sprintf("map[%v]%v", keyType, valueType) zeroValue := fmt.Sprintf("map[%v]%v{}", keyType, valueType) - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue)) + t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false)) } func (t *templateData) addSelectorExpr(x *ast.SelectorExpr, receiverType, fieldName string) { - if strings.ToLower(fieldName[:1]) == fieldName[:1] { // non-exported field + if strings.ToLower(fieldName[:1]) == fieldName[:1] { // Non-exported field. return } @@ -243,7 +265,7 @@ func (t *templateData) addSelectorExpr(x *ast.SelectorExpr, receiverType, fieldN if xX == "time" && x.Sel.Name == "Duration" { zeroValue = "0" } - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue)) + t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false)) default: logf("addSelectorExpr: xX %q, type %q, field %q: unknown x=%+v; skipping.", xX, receiverType, fieldName, x) } @@ -258,12 +280,13 @@ type templateData struct { } type getter struct { - sortVal string // lower-case version of "ReceiverType.FieldName" - ReceiverVar string // the one-letter variable name to match the ReceiverType + sortVal string // Lower-case version of "ReceiverType.FieldName". + ReceiverVar string // The one-letter variable name to match the ReceiverType. ReceiverType string FieldName string FieldType string ZeroValue string + NamedStruct bool // Getter for named struct. } type byName []*getter @@ -288,6 +311,15 @@ import ( ) {{end}} {{range .Getters}} +{{if .NamedStruct}} +// Get{{.FieldName}} returns the {{.FieldName}} field. +func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() *{{.FieldType}} { + if {{.ReceiverVar}} == nil { + return {{.ZeroValue}} + } + return {{.ReceiverVar}}.{{.FieldName}} +} +{{else}} // Get{{.FieldName}} returns the {{.FieldName}} field if it's non-nil, zero value otherwise. 
func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() {{.FieldType}} { if {{.ReceiverVar}} == nil || {{.ReceiverVar}}.{{.FieldName}} == nil { @@ -296,4 +328,5 @@ func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() {{.FieldType}} { return *{{.ReceiverVar}}.{{.FieldName}} } {{end}} +{{end}} ` diff --git a/vendor/github.com/google/go-github/github/gists.go b/vendor/github.com/google/go-github/github/gists.go index 15276ea..15e0bc2 100644 --- a/vendor/github.com/google/go-github/github/gists.go +++ b/vendor/github.com/google/go-github/github/gists.go @@ -30,6 +30,7 @@ type Gist struct { GitPushURL *string `json:"git_push_url,omitempty"` CreatedAt *time.Time `json:"created_at,omitempty"` UpdatedAt *time.Time `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (g Gist) String() string { @@ -60,6 +61,7 @@ type GistCommit struct { User *User `json:"user,omitempty"` ChangeStatus *CommitStats `json:"change_status,omitempty"` CommittedAt *Timestamp `json:"committed_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (gc GistCommit) String() string { @@ -73,6 +75,7 @@ type GistFork struct { ID *string `json:"id,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (gf GistFork) String() string { @@ -175,6 +178,7 @@ func (s *GistsService) Get(ctx context.Context, id string) (*Gist, *Response, er if err != nil { return nil, nil, err } + gist := new(Gist) resp, err := s.client.Do(ctx, req, gist) if err != nil { @@ -193,6 +197,7 @@ func (s *GistsService) GetRevision(ctx context.Context, id, sha string) (*Gist, if err != nil { return nil, nil, err } + gist := new(Gist) resp, err := s.client.Do(ctx, req, gist) if err != nil { @@ -211,6 +216,7 @@ func (s *GistsService) Create(ctx context.Context, gist *Gist) (*Gist, *Response if err != nil { return nil, nil, err } + g := new(Gist) resp, err := s.client.Do(ctx, req, g) if err != nil { @@ -229,6 +235,7 @@ func (s *GistsService) Edit(ctx context.Context, id string, gist *Gist) (*Gist, if err != nil { return nil, nil, err } + g := new(Gist) resp, err := s.client.Do(ctx, req, g) if err != nil { diff --git a/vendor/github.com/google/go-github/github/gists_comments.go b/vendor/github.com/google/go-github/github/gists_comments.go index 2d07223..d5322e3 100644 --- a/vendor/github.com/google/go-github/github/gists_comments.go +++ b/vendor/github.com/google/go-github/github/gists_comments.go @@ -13,7 +13,7 @@ import ( // GistComment represents a Gist comment. type GistComment struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` Body *string `json:"body,omitempty"` User *User `json:"user,omitempty"` @@ -51,7 +51,7 @@ func (s *GistsService) ListComments(ctx context.Context, gistID string, opt *Lis // GetComment retrieves a single comment from a gist. // // GitHub API docs: https://developer.github.com/v3/gists/comments/#get-a-single-comment -func (s *GistsService) GetComment(ctx context.Context, gistID string, commentID int) (*GistComment, *Response, error) { +func (s *GistsService) GetComment(ctx context.Context, gistID string, commentID int64) (*GistComment, *Response, error) { u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -89,7 +89,7 @@ func (s *GistsService) CreateComment(ctx context.Context, gistID string, comment // EditComment edits an existing gist comment. 
// // GitHub API docs: https://developer.github.com/v3/gists/comments/#edit-a-comment -func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID int, comment *GistComment) (*GistComment, *Response, error) { +func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID int64, comment *GistComment) (*GistComment, *Response, error) { u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) req, err := s.client.NewRequest("PATCH", u, comment) if err != nil { @@ -108,7 +108,7 @@ func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID // DeleteComment deletes a gist comment. // // GitHub API docs: https://developer.github.com/v3/gists/comments/#delete-a-comment -func (s *GistsService) DeleteComment(ctx context.Context, gistID string, commentID int) (*Response, error) { +func (s *GistsService) DeleteComment(ctx context.Context, gistID string, commentID int64) (*Response, error) { u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/gists_comments_test.go b/vendor/github.com/google/go-github/github/gists_comments_test.go index 4b68413..d2f5a95 100644 --- a/vendor/github.com/google/go-github/github/gists_comments_test.go +++ b/vendor/github.com/google/go-github/github/gists_comments_test.go @@ -15,7 +15,7 @@ import ( ) func TestGistsService_ListComments(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/comments", func(w http.ResponseWriter, r *http.Request) { @@ -30,19 +30,22 @@ func TestGistsService_ListComments(t *testing.T) { t.Errorf("Gists.Comments returned error: %v", err) } - want := []*GistComment{{ID: Int(1)}} + want := []*GistComment{{ID: Int64(1)}} if !reflect.DeepEqual(comments, want) { t.Errorf("Gists.ListComments returned %+v, want %+v", comments, want) } } func TestGistsService_ListComments_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.ListComments(context.Background(), "%", nil) testURLParseError(t, err) } func TestGistsService_GetComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/comments/2", func(w http.ResponseWriter, r *http.Request) { @@ -55,22 +58,25 @@ func TestGistsService_GetComment(t *testing.T) { t.Errorf("Gists.GetComment returned error: %v", err) } - want := &GistComment{ID: Int(1)} + want := &GistComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Gists.GetComment returned %+v, want %+v", comment, want) } } func TestGistsService_GetComment_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.GetComment(context.Background(), "%", 1) testURLParseError(t, err) } func TestGistsService_CreateComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &GistComment{ID: Int(1), Body: String("b")} + input := &GistComment{ID: Int64(1), Body: String("b")} mux.HandleFunc("/gists/1/comments", func(w http.ResponseWriter, r *http.Request) { v := new(GistComment) @@ -89,22 +95,25 @@ func TestGistsService_CreateComment(t *testing.T) { t.Errorf("Gists.CreateComment returned error: %v", err) } - want := &GistComment{ID: Int(1)} + want := &GistComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Gists.CreateComment returned %+v, want %+v", comment, want) } } func 
TestGistsService_CreateComment_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.CreateComment(context.Background(), "%", nil) testURLParseError(t, err) } func TestGistsService_EditComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &GistComment{ID: Int(1), Body: String("b")} + input := &GistComment{ID: Int64(1), Body: String("b")} mux.HandleFunc("/gists/1/comments/2", func(w http.ResponseWriter, r *http.Request) { v := new(GistComment) @@ -123,19 +132,22 @@ func TestGistsService_EditComment(t *testing.T) { t.Errorf("Gists.EditComment returned error: %v", err) } - want := &GistComment{ID: Int(1)} + want := &GistComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Gists.EditComment returned %+v, want %+v", comment, want) } } func TestGistsService_EditComment_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.EditComment(context.Background(), "%", 1, nil) testURLParseError(t, err) } func TestGistsService_DeleteComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/comments/2", func(w http.ResponseWriter, r *http.Request) { @@ -149,6 +161,9 @@ func TestGistsService_DeleteComment(t *testing.T) { } func TestGistsService_DeleteComment_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Gists.DeleteComment(context.Background(), "%", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/gists_test.go b/vendor/github.com/google/go-github/github/gists_test.go index 6065f27..3d31c9a 100644 --- a/vendor/github.com/google/go-github/github/gists_test.go +++ b/vendor/github.com/google/go-github/github/gists_test.go @@ -16,7 +16,7 @@ import ( ) func TestGistsService_List_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() since := "2013-01-01T00:00:00Z" @@ -42,7 +42,7 @@ func TestGistsService_List_specifiedUser(t *testing.T) { } func TestGistsService_List_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists", func(w http.ResponseWriter, r *http.Request) { @@ -62,12 +62,15 @@ func TestGistsService_List_authenticatedUser(t *testing.T) { } func TestGistsService_List_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.List(context.Background(), "%", nil) testURLParseError(t, err) } func TestGistsService_ListAll(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() since := "2013-01-01T00:00:00Z" @@ -93,7 +96,7 @@ func TestGistsService_ListAll(t *testing.T) { } func TestGistsService_ListStarred(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() since := "2013-01-01T00:00:00Z" @@ -119,7 +122,7 @@ func TestGistsService_ListStarred(t *testing.T) { } func TestGistsService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1", func(w http.ResponseWriter, r *http.Request) { @@ -139,12 +142,15 @@ func TestGistsService_Get(t *testing.T) { } func TestGistsService_Get_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.Get(context.Background(), "%") testURLParseError(t, err) } func TestGistsService_GetRevision(t *testing.T) { - setup() + client, mux, _, teardown := 
setup() defer teardown() mux.HandleFunc("/gists/1/s", func(w http.ResponseWriter, r *http.Request) { @@ -164,12 +170,15 @@ func TestGistsService_GetRevision(t *testing.T) { } func TestGistsService_GetRevision_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.GetRevision(context.Background(), "%", "%") testURLParseError(t, err) } func TestGistsService_Create(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Gist{ @@ -222,7 +231,7 @@ func TestGistsService_Create(t *testing.T) { } func TestGistsService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Gist{ @@ -278,12 +287,15 @@ func TestGistsService_Edit(t *testing.T) { } func TestGistsService_Edit_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.Edit(context.Background(), "%", nil) testURLParseError(t, err) } func TestGistsService_ListCommits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/commits", func(w http.ResponseWriter, r *http.Request) { @@ -316,8 +328,8 @@ func TestGistsService_ListCommits(t *testing.T) { want := []*GistCommit{{ URL: String("https://api.github.com/gists/1/1"), Version: String("1"), - User: &User{ID: Int(1)}, - CommittedAt: &Timestamp{time.Date(2010, 1, 1, 00, 00, 00, 0, time.UTC)}, + User: &User{ID: Int64(1)}, + CommittedAt: &Timestamp{time.Date(2010, time.January, 1, 00, 00, 00, 0, time.UTC)}, ChangeStatus: &CommitStats{ Additions: Int(180), Deletions: Int(0), @@ -330,7 +342,7 @@ func TestGistsService_ListCommits(t *testing.T) { } func TestGistsService_ListCommits_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/commits", func(w http.ResponseWriter, r *http.Request) { @@ -348,7 +360,7 @@ func TestGistsService_ListCommits_withOptions(t *testing.T) { } func TestGistsService_Delete(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1", func(w http.ResponseWriter, r *http.Request) { @@ -362,12 +374,15 @@ func TestGistsService_Delete(t *testing.T) { } func TestGistsService_Delete_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Gists.Delete(context.Background(), "%") testURLParseError(t, err) } func TestGistsService_Star(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/star", func(w http.ResponseWriter, r *http.Request) { @@ -381,12 +396,15 @@ func TestGistsService_Star(t *testing.T) { } func TestGistsService_Star_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Gists.Star(context.Background(), "%") testURLParseError(t, err) } func TestGistsService_Unstar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/star", func(w http.ResponseWriter, r *http.Request) { @@ -400,12 +418,15 @@ func TestGistsService_Unstar(t *testing.T) { } func TestGistsService_Unstar_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Gists.Unstar(context.Background(), "%") testURLParseError(t, err) } func TestGistsService_IsStarred_hasStar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/star", func(w http.ResponseWriter, r *http.Request) { @@ -423,7 
+444,7 @@ func TestGistsService_IsStarred_hasStar(t *testing.T) { } func TestGistsService_IsStarred_noStar(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/star", func(w http.ResponseWriter, r *http.Request) { @@ -441,12 +462,15 @@ func TestGistsService_IsStarred_noStar(t *testing.T) { } func TestGistsService_IsStarred_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.IsStarred(context.Background(), "%") testURLParseError(t, err) } func TestGistsService_Fork(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/forks", func(w http.ResponseWriter, r *http.Request) { @@ -466,7 +490,7 @@ func TestGistsService_Fork(t *testing.T) { } func TestGistsService_ListForks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gists/1/forks", func(w http.ResponseWriter, r *http.Request) { @@ -492,9 +516,9 @@ func TestGistsService_ListForks(t *testing.T) { want := []*GistFork{{ URL: String("https://api.github.com/gists/1"), ID: String("1"), - User: &User{ID: Int(1)}, - CreatedAt: &Timestamp{time.Date(2010, 1, 1, 00, 00, 00, 0, time.UTC)}, - UpdatedAt: &Timestamp{time.Date(2013, 1, 1, 00, 00, 00, 0, time.UTC)}}} + User: &User{ID: Int64(1)}, + CreatedAt: &Timestamp{time.Date(2010, time.January, 1, 00, 00, 00, 0, time.UTC)}, + UpdatedAt: &Timestamp{time.Date(2013, time.January, 1, 00, 00, 00, 0, time.UTC)}}} if !reflect.DeepEqual(gistForks, want) { t.Errorf("Gists.ListForks returned %+v, want %+v", gistForks, want) @@ -502,6 +526,9 @@ func TestGistsService_ListForks(t *testing.T) { } func TestGistsService_Fork_invalidID(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gists.Fork(context.Background(), "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/git_blobs.go b/vendor/github.com/google/go-github/github/git_blobs.go index 67ea74a..70aee14 100644 --- a/vendor/github.com/google/go-github/github/git_blobs.go +++ b/vendor/github.com/google/go-github/github/git_blobs.go @@ -6,6 +6,7 @@ package github import ( + "bytes" "context" "fmt" ) @@ -17,9 +18,10 @@ type Blob struct { SHA *string `json:"sha,omitempty"` Size *int `json:"size,omitempty"` URL *string `json:"url,omitempty"` + NodeID *string `json:"node_id,omitempty"` } -// GetBlob fetchs a blob from a repo given a SHA. +// GetBlob fetches a blob from a repo given a SHA. // // GitHub API docs: https://developer.github.com/v3/git/blobs/#get-a-blob func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha string) (*Blob, *Response, error) { @@ -34,6 +36,23 @@ func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha return blob, resp, err } +// GetBlobRaw fetches a blob's contents from a repo. +// Unlike GetBlob, it returns the raw bytes rather than the base64-encoded data. +// +// GitHub API docs: https://developer.github.com/v3/git/blobs/#get-a-blob +func (s *GitService) GetBlobRaw(ctx context.Context, owner, repo, sha string) ([]byte, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + req.Header.Set("Accept", "application/vnd.github.v3.raw") + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + return buf.Bytes(), resp, err +} + // CreateBlob creates a blob object. 
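Editorial aside, not part of the vendored patch: GetBlobRaw above is a convenience over GetBlob that requests the raw media type, so callers get the blob bytes without decoding base64 content themselves. A minimal sketch; owner, repository and SHA are placeholders.

package github_test

import (
	"context"
	"fmt"

	"github.com/google/go-github/v24/github"
)

func ExampleGitService_GetBlobRaw() {
	client := github.NewClient(nil)

	// Unlike GetBlob, the returned slice is the file content itself,
	// not a base64-encoded Blob struct.
	raw, _, err := client.Git.GetBlobRaw(context.Background(), "myOrganization", "myRepository", "6dcb09b5b57875f334f61aebed695e2e4193db5e")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("fetched %d bytes of blob content\n", len(raw))
}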
// // GitHub API docs: https://developer.github.com/v3/git/blobs/#create-a-blob diff --git a/vendor/github.com/google/go-github/github/git_blobs_test.go b/vendor/github.com/google/go-github/github/git_blobs_test.go index 00861bb..9945d35 100644 --- a/vendor/github.com/google/go-github/github/git_blobs_test.go +++ b/vendor/github.com/google/go-github/github/git_blobs_test.go @@ -6,6 +6,7 @@ package github import ( + "bytes" "context" "encoding/json" "fmt" @@ -15,13 +16,12 @@ import ( ) func TestGitService_GetBlob(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/blobs/s", func(w http.ResponseWriter, r *http.Request) { - if m := "GET"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "GET") + fmt.Fprint(w, `{ "sha": "s", "content": "blob content" @@ -44,12 +44,37 @@ func TestGitService_GetBlob(t *testing.T) { } func TestGitService_GetBlob_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Git.GetBlob(context.Background(), "%", "%", "%") testURLParseError(t, err) } +func TestGitService_GetBlobRaw(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/git/blobs/s", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", "application/vnd.github.v3.raw") + + fmt.Fprint(w, `raw contents here`) + }) + + blob, _, err := client.Git.GetBlobRaw(context.Background(), "o", "r", "s") + if err != nil { + t.Errorf("Git.GetBlobRaw returned error: %v", err) + } + + want := []byte("raw contents here") + if !bytes.Equal(blob, want) { + t.Errorf("GetBlobRaw returned %q, want %q", blob, want) + } +} + func TestGitService_CreateBlob(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Blob{ @@ -63,9 +88,7 @@ func TestGitService_CreateBlob(t *testing.T) { v := new(Blob) json.NewDecoder(r.Body).Decode(v) - if m := "POST"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "POST") want := input if !reflect.DeepEqual(v, want) { @@ -93,6 +116,9 @@ func TestGitService_CreateBlob(t *testing.T) { } func TestGitService_CreateBlob_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Git.CreateBlob(context.Background(), "%", "%", &Blob{}) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/git_commits.go b/vendor/github.com/google/go-github/github/git_commits.go index 3c49a8a..9dfd3af 100644 --- a/vendor/github.com/google/go-github/github/git_commits.go +++ b/vendor/github.com/google/go-github/github/git_commits.go @@ -31,6 +31,7 @@ type Commit struct { HTMLURL *string `json:"html_url,omitempty"` URL *string `json:"url,omitempty"` Verification *SignatureVerification `json:"verification,omitempty"` + NodeID *string `json:"node_id,omitempty"` // CommentCount is the number of GitHub comments on the commit. This // is only populated for requests that fetch GitHub data like @@ -57,7 +58,7 @@ func (c CommitAuthor) String() string { return Stringify(c) } -// GetCommit fetchs the Commit object for a given SHA. +// GetCommit fetches the Commit object for a given SHA. 
// // GitHub API docs: https://developer.github.com/v3/git/commits/#get-a-commit func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, sha string) (*Commit, *Response, error) { @@ -67,9 +68,6 @@ func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGitSigningPreview) - c := new(Commit) resp, err := s.client.Do(ctx, req, c) if err != nil { @@ -86,6 +84,7 @@ type createCommit struct { Message *string `json:"message,omitempty"` Tree *string `json:"tree,omitempty"` Parents []string `json:"parents,omitempty"` + Signature *string `json:"signature,omitempty"` } // CreateCommit creates a new commit in a repository. @@ -117,6 +116,9 @@ func (s *GitService) CreateCommit(ctx context.Context, owner string, repo string if commit.Tree != nil { body.Tree = commit.Tree.SHA } + if commit.Verification != nil { + body.Signature = commit.Verification.Signature + } req, err := s.client.NewRequest("POST", u, body) if err != nil { diff --git a/vendor/github.com/google/go-github/github/git_commits_test.go b/vendor/github.com/google/go-github/github/git_commits_test.go index cd39232..83f5640 100644 --- a/vendor/github.com/google/go-github/github/git_commits_test.go +++ b/vendor/github.com/google/go-github/github/git_commits_test.go @@ -15,12 +15,11 @@ import ( ) func TestGitService_GetCommit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/commits/s", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) fmt.Fprint(w, `{"sha":"s","message":"m","author":{"name":"n"}}`) }) @@ -36,12 +35,15 @@ func TestGitService_GetCommit(t *testing.T) { } func TestGitService_GetCommit_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Git.GetCommit(context.Background(), "%", "%", "%") testURLParseError(t, err) } func TestGitService_CreateCommit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Commit{ @@ -78,7 +80,54 @@ func TestGitService_CreateCommit(t *testing.T) { } } +func TestGitService_CreateSignedCommit(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + signature := "----- BEGIN PGP SIGNATURE -----\n\naaaa\naaaa\n----- END PGP SIGNATURE -----" + + input := &Commit{ + Message: String("m"), + Tree: &Tree{SHA: String("t")}, + Parents: []Commit{{SHA: String("p")}}, + Verification: &SignatureVerification{ + Signature: String(signature), + }, + } + + mux.HandleFunc("/repos/o/r/git/commits", func(w http.ResponseWriter, r *http.Request) { + v := new(createCommit) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + + want := &createCommit{ + Message: input.Message, + Tree: String("t"), + Parents: []string{"p"}, + Signature: String(signature), + } + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) + } + fmt.Fprint(w, `{"sha":"s"}`) + }) + + commit, _, err := client.Git.CreateCommit(context.Background(), "o", "r", input) + if err != nil { + t.Errorf("Git.CreateCommit returned error: %v", err) + } + + want := &Commit{SHA: String("s")} + if !reflect.DeepEqual(commit, want) { + t.Errorf("Git.CreateCommit returned %+v, want %+v", commit, want) + } +} + func TestGitService_CreateCommit_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer 
teardown() + _, _, err := client.Git.CreateCommit(context.Background(), "%", "%", &Commit{}) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/git_refs.go b/vendor/github.com/google/go-github/github/git_refs.go index e78fdc6..3f381d5 100644 --- a/vendor/github.com/google/go-github/github/git_refs.go +++ b/vendor/github.com/google/go-github/github/git_refs.go @@ -10,6 +10,7 @@ import ( "encoding/json" "errors" "fmt" + "net/url" "strings" ) @@ -18,6 +19,7 @@ type Reference struct { Ref *string `json:"ref"` URL *string `json:"url"` Object *GitObject `json:"object"` + NodeID *string `json:"node_id,omitempty"` } func (r Reference) String() string { @@ -56,7 +58,7 @@ type updateRefRequest struct { // GitHub API docs: https://developer.github.com/v3/git/refs/#get-a-reference func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref string) (*Reference, *Response, error) { ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, url.QueryEscape(ref)) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err @@ -87,7 +89,7 @@ func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref // GitHub API docs: https://developer.github.com/v3/git/refs/#get-a-reference func (s *GitService) GetRefs(ctx context.Context, owner string, repo string, ref string) ([]*Reference, *Response, error) { ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, url.QueryEscape(ref)) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err @@ -207,7 +209,7 @@ func (s *GitService) UpdateRef(ctx context.Context, owner string, repo string, r // GitHub API docs: https://developer.github.com/v3/git/refs/#delete-a-reference func (s *GitService) DeleteRef(ctx context.Context, owner string, repo string, ref string) (*Response, error) { ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, url.QueryEscape(ref)) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err diff --git a/vendor/github.com/google/go-github/github/git_refs_test.go b/vendor/github.com/google/go-github/github/git_refs_test.go index 62025bf..3cae3c0 100644 --- a/vendor/github.com/google/go-github/github/git_refs_test.go +++ b/vendor/github.com/google/go-github/github/git_refs_test.go @@ -15,7 +15,7 @@ import ( ) func TestGitService_GetRef_singleRef(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { @@ -57,7 +57,7 @@ func TestGitService_GetRef_singleRef(t *testing.T) { } func TestGitService_GetRef_multipleRefs(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { @@ -95,7 +95,7 @@ func TestGitService_GetRef_multipleRefs(t *testing.T) { } func TestGitService_GetRefs_singleRef(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { @@ -138,7 +138,7 @@ func TestGitService_GetRefs_singleRef(t *testing.T) { } func 
TestGitService_GetRefs_multipleRefs(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { @@ -188,7 +188,7 @@ func TestGitService_GetRefs_multipleRefs(t *testing.T) { // TestGitService_GetRefs_noRefs tests for behaviour resulting from an unexpected GH response. This should never actually happen. func TestGitService_GetRefs_noRefs(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { @@ -205,7 +205,7 @@ func TestGitService_GetRefs_noRefs(t *testing.T) { } func TestGitService_ListRefs(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs", func(w http.ResponseWriter, r *http.Request) { @@ -264,7 +264,7 @@ func TestGitService_ListRefs(t *testing.T) { } func TestGitService_ListRefs_options(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/t", func(w http.ResponseWriter, r *http.Request) { @@ -286,7 +286,7 @@ func TestGitService_ListRefs_options(t *testing.T) { } func TestGitService_CreateRef(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() args := &createRefRequest{ @@ -350,7 +350,7 @@ func TestGitService_CreateRef(t *testing.T) { } func TestGitService_UpdateRef(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() args := &updateRefRequest{ @@ -410,7 +410,7 @@ func TestGitService_UpdateRef(t *testing.T) { } func TestGitService_DeleteRef(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/refs/heads/b", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/git_tags.go b/vendor/github.com/google/go-github/github/git_tags.go index 08df3d3..abdbde6 100644 --- a/vendor/github.com/google/go-github/github/git_tags.go +++ b/vendor/github.com/google/go-github/github/git_tags.go @@ -19,6 +19,7 @@ type Tag struct { Tagger *CommitAuthor `json:"tagger,omitempty"` Object *GitObject `json:"object,omitempty"` Verification *SignatureVerification `json:"verification,omitempty"` + NodeID *string `json:"node_id,omitempty"` } // createTagRequest represents the body of a CreateTag request. This is mostly @@ -32,7 +33,7 @@ type createTagRequest struct { Tagger *CommitAuthor `json:"tagger,omitempty"` } -// GetTag fetchs a tag from a repo given a SHA. +// GetTag fetches a tag from a repo given a SHA. // // GitHub API docs: https://developer.github.com/v3/git/tags/#get-a-tag func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha string) (*Tag, *Response, error) { @@ -42,9 +43,6 @@ func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGitSigningPreview) - tag := new(Tag) resp, err := s.client.Do(ctx, req, tag) return tag, resp, err diff --git a/vendor/github.com/google/go-github/github/git_tags_test.go b/vendor/github.com/google/go-github/github/git_tags_test.go index 7031fe8..9623952 100644 --- a/vendor/github.com/google/go-github/github/git_tags_test.go +++ b/vendor/github.com/google/go-github/github/git_tags_test.go @@ -15,13 +15,11 @@ import ( ) func TestGitService_GetTag(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/tags/s", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) - fmt.Fprint(w, `{"tag": "t"}`) }) @@ -37,7 +35,7 @@ func TestGitService_GetTag(t *testing.T) { } func TestGitService_CreateTag(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &createTagRequest{Tag: String("t"), Object: String("s")} diff --git a/vendor/github.com/google/go-github/github/git_trees.go b/vendor/github.com/google/go-github/github/git_trees.go index 4d6809a..4bc2913 100644 --- a/vendor/github.com/google/go-github/github/git_trees.go +++ b/vendor/github.com/google/go-github/github/git_trees.go @@ -14,6 +14,12 @@ import ( type Tree struct { SHA *string `json:"sha,omitempty"` Entries []TreeEntry `json:"tree,omitempty"` + + // Truncated is true if the number of items in the tree + // exceeded GitHub's maximum limit and the Entries were truncated + // in the response. Only populated for requests that fetch + // trees like Git.GetTree. + Truncated *bool `json:"truncated,omitempty"` } func (t Tree) String() string { diff --git a/vendor/github.com/google/go-github/github/git_trees_test.go b/vendor/github.com/google/go-github/github/git_trees_test.go index 29728ee..1f7024f 100644 --- a/vendor/github.com/google/go-github/github/git_trees_test.go +++ b/vendor/github.com/google/go-github/github/git_trees_test.go @@ -15,16 +15,15 @@ import ( ) func TestGitService_GetTree(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/trees/s", func(w http.ResponseWriter, r *http.Request) { - if m := "GET"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "GET") fmt.Fprint(w, `{ "sha": "s", - "tree": [ { "type": "blob" } ] + "tree": [ { "type": "blob" } ], + "truncated": true }`) }) @@ -40,6 +39,7 @@ func TestGitService_GetTree(t *testing.T) { Type: String("blob"), }, }, + Truncated: Bool(true), } if !reflect.DeepEqual(*tree, want) { t.Errorf("Tree.Get returned %+v, want %+v", *tree, want) @@ -47,12 +47,15 @@ func TestGitService_GetTree(t *testing.T) { } func TestGitService_GetTree_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Git.GetTree(context.Background(), "%", "%", "%", false) testURLParseError(t, err) } func TestGitService_CreateTree(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []TreeEntry{ @@ -68,9 +71,7 @@ func TestGitService_CreateTree(t *testing.T) { v := new(createTree) json.NewDecoder(r.Body).Decode(v) - if m := "POST"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "POST") want := &createTree{ BaseTree: "b", @@ -110,6 +111,7 @@ func TestGitService_CreateTree(t *testing.T) { SHA: String("7c258a9869f33c1e1e1f74fbb32f07c86cb5a75b"), }, }, + nil, } if !reflect.DeepEqual(*tree, 
want) { @@ -118,7 +120,7 @@ func TestGitService_CreateTree(t *testing.T) { } func TestGitService_CreateTree_Content(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []TreeEntry{ @@ -133,9 +135,7 @@ func TestGitService_CreateTree_Content(t *testing.T) { v := new(createTree) json.NewDecoder(r.Body).Decode(v) - if m := "POST"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "POST") want := &createTree{ BaseTree: "b", @@ -178,6 +178,7 @@ func TestGitService_CreateTree_Content(t *testing.T) { URL: String("https://api.github.com/repos/o/r/git/blobs/aad8feacf6f8063150476a7b2bd9770f2794c08b"), }, }, + nil, } if !reflect.DeepEqual(*tree, want) { @@ -186,6 +187,9 @@ func TestGitService_CreateTree_Content(t *testing.T) { } func TestGitService_CreateTree_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Git.CreateTree(context.Background(), "%", "%", "", nil) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/github-accessors.go b/vendor/github.com/google/go-github/github/github-accessors.go index 5368001..1001559 100644 --- a/vendor/github.com/google/go-github/github/github-accessors.go +++ b/vendor/github.com/google/go-github/github/github-accessors.go @@ -28,6 +28,86 @@ func (a *AdminEnforcement) GetURL() string { return *a.URL } +// GetComments returns the Comments field. +func (a *AdminStats) GetComments() *CommentStats { + if a == nil { + return nil + } + return a.Comments +} + +// GetGists returns the Gists field. +func (a *AdminStats) GetGists() *GistStats { + if a == nil { + return nil + } + return a.Gists +} + +// GetHooks returns the Hooks field. +func (a *AdminStats) GetHooks() *HookStats { + if a == nil { + return nil + } + return a.Hooks +} + +// GetIssues returns the Issues field. +func (a *AdminStats) GetIssues() *IssueStats { + if a == nil { + return nil + } + return a.Issues +} + +// GetMilestones returns the Milestones field. +func (a *AdminStats) GetMilestones() *MilestoneStats { + if a == nil { + return nil + } + return a.Milestones +} + +// GetOrgs returns the Orgs field. +func (a *AdminStats) GetOrgs() *OrgStats { + if a == nil { + return nil + } + return a.Orgs +} + +// GetPages returns the Pages field. +func (a *AdminStats) GetPages() *PageStats { + if a == nil { + return nil + } + return a.Pages +} + +// GetPulls returns the Pulls field. +func (a *AdminStats) GetPulls() *PullStats { + if a == nil { + return nil + } + return a.Pulls +} + +// GetRepos returns the Repos field. +func (a *AdminStats) GetRepos() *RepoStats { + if a == nil { + return nil + } + return a.Repos +} + +// GetUsers returns the Users field. +func (a *AdminStats) GetUsers() *UserStats { + if a == nil { + return nil + } + return a.Users +} + // GetVerifiablePasswordAuthentication returns the VerifiablePasswordAuthentication field if it's non-nil, zero value otherwise. func (a *APIMeta) GetVerifiablePasswordAuthentication() bool { if a == nil || a.VerifiablePasswordAuthentication == nil { @@ -36,6 +116,86 @@ func (a *APIMeta) GetVerifiablePasswordAuthentication() bool { return *a.VerifiablePasswordAuthentication } +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *App) GetCreatedAt() time.Time { + if a == nil || a.CreatedAt == nil { + return time.Time{} + } + return *a.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
+func (a *App) GetDescription() string { + if a == nil || a.Description == nil { + return "" + } + return *a.Description +} + +// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. +func (a *App) GetExternalURL() string { + if a == nil || a.ExternalURL == nil { + return "" + } + return *a.ExternalURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (a *App) GetHTMLURL() string { + if a == nil || a.HTMLURL == nil { + return "" + } + return *a.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *App) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *App) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (a *App) GetNodeID() string { + if a == nil || a.NodeID == nil { + return "" + } + return *a.NodeID +} + +// GetOwner returns the Owner field. +func (a *App) GetOwner() *User { + if a == nil { + return nil + } + return a.Owner +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *App) GetUpdatedAt() time.Time { + if a == nil || a.UpdatedAt == nil { + return time.Time{} + } + return *a.UpdatedAt +} + +// GetApp returns the App field. +func (a *Authorization) GetApp() *AuthorizationApp { + if a == nil { + return nil + } + return a.App +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (a *Authorization) GetCreatedAt() Timestamp { if a == nil || a.CreatedAt == nil { @@ -61,7 +221,7 @@ func (a *Authorization) GetHashedToken() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Authorization) GetID() int { +func (a *Authorization) GetID() int64 { if a == nil || a.ID == nil { return 0 } @@ -100,12 +260,12 @@ func (a *Authorization) GetTokenLastEight() string { return *a.TokenLastEight } -// GetUpdateAt returns the UpdateAt field if it's non-nil, zero value otherwise. -func (a *Authorization) GetUpdateAt() Timestamp { - if a == nil || a.UpdateAt == nil { +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *Authorization) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { return Timestamp{} } - return *a.UpdateAt + return *a.UpdatedAt } // GetURL returns the URL field if it's non-nil, zero value otherwise. @@ -116,6 +276,14 @@ func (a *Authorization) GetURL() string { return *a.URL } +// GetUser returns the User field. +func (a *Authorization) GetUser() *User { + if a == nil { + return nil + } + return a.User +} + // GetClientID returns the ClientID field if it's non-nil, zero value otherwise. func (a *AuthorizationApp) GetClientID() string { if a == nil || a.ClientID == nil { @@ -204,6 +372,22 @@ func (a *AuthorizationUpdateRequest) GetNoteURL() string { return *a.NoteURL } +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (a *AutoTriggerCheck) GetAppID() int64 { + if a == nil || a.AppID == nil { + return 0 + } + return *a.AppID +} + +// GetSetting returns the Setting field if it's non-nil, zero value otherwise. +func (a *AutoTriggerCheck) GetSetting() bool { + if a == nil || a.Setting == nil { + return false + } + return *a.Setting +} + // GetContent returns the Content field if it's non-nil, zero value otherwise. 
func (b *Blob) GetContent() string { if b == nil || b.Content == nil { @@ -220,6 +404,14 @@ func (b *Blob) GetEncoding() string { return *b.Encoding } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (b *Blob) GetNodeID() string { + if b == nil || b.NodeID == nil { + return "" + } + return *b.NodeID +} + // GetSHA returns the SHA field if it's non-nil, zero value otherwise. func (b *Blob) GetSHA() string { if b == nil || b.SHA == nil { @@ -244,6 +436,14 @@ func (b *Blob) GetURL() string { return *b.URL } +// GetCommit returns the Commit field. +func (b *Branch) GetCommit() *RepositoryCommit { + if b == nil { + return nil + } + return b.Commit +} + // GetName returns the Name field if it's non-nil, zero value otherwise. func (b *Branch) GetName() string { if b == nil || b.Name == nil { @@ -260,3128 +460,6360 @@ func (b *Branch) GetProtected() bool { return *b.Protected } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetBody() string { - if c == nil || c.Body == nil { - return "" +// GetApp returns the App field. +func (c *CheckRun) GetApp() *App { + if c == nil { + return nil } - return *c.Body + return c.App } -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetKey() string { - if c == nil || c.Key == nil { - return "" +// GetCheckSuite returns the CheckSuite field. +func (c *CheckRun) GetCheckSuite() *CheckSuite { + if c == nil { + return nil } - return *c.Key + return c.CheckSuite } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetName() string { - if c == nil || c.Name == nil { - return "" +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetCompletedAt() Timestamp { + if c == nil || c.CompletedAt == nil { + return Timestamp{} } - return *c.Name + return *c.CompletedAt } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetURL() string { - if c == nil || c.URL == nil { +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetConclusion() string { + if c == nil || c.Conclusion == nil { return "" } - return *c.URL + return *c.Conclusion } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetDetailsURL() string { + if c == nil || c.DetailsURL == nil { return "" } - return *c.HTMLURL + return *c.DetailsURL } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetName() string { - if c == nil || c.Name == nil { +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetExternalID() string { + if c == nil || c.ExternalID == nil { return "" } - return *c.Name + return *c.ExternalID } -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetPath() string { - if c == nil || c.Path == nil { +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetHeadSHA() string { + if c == nil || c.HeadSHA == nil { return "" } - return *c.Path + return *c.HeadSHA } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (c *CodeResult) GetSHA() string { - if c == nil || c.SHA == nil { +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { return "" } - return *c.SHA + return *c.HTMLURL } -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CodeSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetID() int64 { + if c == nil || c.ID == nil { + return 0 } - return *c.IncompleteResults + return *c.ID } -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CodeSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetName() string { + if c == nil || c.Name == nil { + return "" } - return *c.Total + return *c.Name } -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetCommitURL() string { - if c == nil || c.CommitURL == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetNodeID() string { + if c == nil || c.NodeID == nil { return "" } - return *c.CommitURL + return *c.NodeID } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetName() string { - if c == nil || c.Name == nil { - return "" +// GetOutput returns the Output field. +func (c *CheckRun) GetOutput() *CheckRunOutput { + if c == nil { + return nil } - return *c.Name + return c.Output } -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetRepositoryURL() string { - if c == nil || c.RepositoryURL == nil { - return "" +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetStartedAt() Timestamp { + if c == nil || c.StartedAt == nil { + return Timestamp{} } - return *c.RepositoryURL + return *c.StartedAt } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetSHA() string { - if c == nil || c.SHA == nil { +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetStatus() string { + if c == nil || c.Status == nil { return "" } - return *c.SHA + return *c.Status } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetState() string { - if c == nil || c.State == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetURL() string { + if c == nil || c.URL == nil { return "" } - return *c.State + return *c.URL } -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetTotalCount() int { - if c == nil || c.TotalCount == nil { - return 0 +// GetAnnotationLevel returns the AnnotationLevel field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetAnnotationLevel() string { + if c == nil || c.AnnotationLevel == nil { + return "" } - return *c.TotalCount + return *c.AnnotationLevel } -// GetCommentCount returns the CommentCount field if it's non-nil, zero value otherwise. 
-func (c *Commit) GetCommentCount() int { - if c == nil || c.CommentCount == nil { - return 0 +// GetBlobHRef returns the BlobHRef field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetBlobHRef() string { + if c == nil || c.BlobHRef == nil { + return "" } - return *c.CommentCount + return *c.BlobHRef } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Commit) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" +// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetEndLine() int { + if c == nil || c.EndLine == nil { + return 0 } - return *c.HTMLURL + return *c.EndLine } // GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *Commit) GetMessage() string { +func (c *CheckRunAnnotation) GetMessage() string { if c == nil || c.Message == nil { return "" } return *c.Message } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *Commit) GetSHA() string { - if c == nil || c.SHA == nil { +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetPath() string { + if c == nil || c.Path == nil { return "" } - return *c.SHA + return *c.Path } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Commit) GetURL() string { - if c == nil || c.URL == nil { +// GetRawDetails returns the RawDetails field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetRawDetails() string { + if c == nil || c.RawDetails == nil { return "" } - return *c.URL + return *c.RawDetails } -// GetDate returns the Date field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetDate() time.Time { - if c == nil || c.Date == nil { - return time.Time{} +// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetStartLine() int { + if c == nil || c.StartLine == nil { + return 0 } - return *c.Date + return *c.StartLine } -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetEmail() string { - if c == nil || c.Email == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetTitle() string { + if c == nil || c.Title == nil { return "" } - return *c.Email + return *c.Title } -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetLogin() string { - if c == nil || c.Login == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CheckRunEvent) GetAction() string { + if c == nil || c.Action == nil { return "" } - return *c.Login + return *c.Action } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetName() string { - if c == nil || c.Name == nil { - return "" +// GetCheckRun returns the CheckRun field. +func (c *CheckRunEvent) GetCheckRun() *CheckRun { + if c == nil { + return nil } - return *c.Name + return c.CheckRun } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CommitCommentEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" +// GetInstallation returns the Installation field. 
+func (c *CheckRunEvent) GetInstallation() *Installation { + if c == nil { + return nil } - return *c.Action + return c.Installation } -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 +// GetOrg returns the Org field. +func (c *CheckRunEvent) GetOrg() *Organization { + if c == nil { + return nil } - return *c.Additions + return c.Org } -// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetBlobURL() string { - if c == nil || c.BlobURL == nil { - return "" +// GetRepo returns the Repo field. +func (c *CheckRunEvent) GetRepo() *Repository { + if c == nil { + return nil } - return *c.BlobURL + return c.Repo } -// GetChanges returns the Changes field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetChanges() int { - if c == nil || c.Changes == nil { - return 0 +// GetRequestedAction returns the RequestedAction field. +func (c *CheckRunEvent) GetRequestedAction() *RequestedAction { + if c == nil { + return nil } - return *c.Changes + return c.RequestedAction } -// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetContentsURL() string { - if c == nil || c.ContentsURL == nil { - return "" +// GetSender returns the Sender field. +func (c *CheckRunEvent) GetSender() *User { + if c == nil { + return nil } - return *c.ContentsURL + return c.Sender } -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 +// GetAlt returns the Alt field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetAlt() string { + if c == nil || c.Alt == nil { + return "" } - return *c.Deletions + return *c.Alt } -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetFilename() string { - if c == nil || c.Filename == nil { +// GetCaption returns the Caption field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetCaption() string { + if c == nil || c.Caption == nil { return "" } - return *c.Filename + return *c.Caption } -// GetPatch returns the Patch field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetPatch() string { - if c == nil || c.Patch == nil { +// GetImageURL returns the ImageURL field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetImageURL() string { + if c == nil || c.ImageURL == nil { return "" } - return *c.Patch + return *c.ImageURL } -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetRawURL() string { - if c == nil || c.RawURL == nil { +// GetAnnotationsCount returns the AnnotationsCount field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetAnnotationsCount() int { + if c == nil || c.AnnotationsCount == nil { + return 0 + } + return *c.AnnotationsCount +} + +// GetAnnotationsURL returns the AnnotationsURL field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetAnnotationsURL() string { + if c == nil || c.AnnotationsURL == nil { return "" } - return *c.RawURL + return *c.AnnotationsURL } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetSHA() string { - if c == nil || c.SHA == nil { +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. 
+func (c *CheckRunOutput) GetSummary() string { + if c == nil || c.Summary == nil { return "" } - return *c.SHA + return *c.Summary } -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetStatus() string { - if c == nil || c.Status == nil { +// GetText returns the Text field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetText() string { + if c == nil || c.Text == nil { return "" } - return *c.Status + return *c.Text } -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetCommentsURL() string { - if c == nil || c.CommentsURL == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetTitle() string { + if c == nil || c.Title == nil { return "" } - return *c.CommentsURL + return *c.Title } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { +// GetAfterSHA returns the AfterSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetAfterSHA() string { + if c == nil || c.AfterSHA == nil { return "" } - return *c.HTMLURL + return *c.AfterSHA } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetSHA() string { - if c == nil || c.SHA == nil { +// GetApp returns the App field. +func (c *CheckSuite) GetApp() *App { + if c == nil { + return nil + } + return c.App +} + +// GetBeforeSHA returns the BeforeSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetBeforeSHA() string { + if c == nil || c.BeforeSHA == nil { return "" } - return *c.SHA + return *c.BeforeSHA } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetURL() string { - if c == nil || c.URL == nil { +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetConclusion() string { + if c == nil || c.Conclusion == nil { return "" } - return *c.URL + return *c.Conclusion } -// GetAheadBy returns the AheadBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetAheadBy() int { - if c == nil || c.AheadBy == nil { - return 0 +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetHeadBranch() string { + if c == nil || c.HeadBranch == nil { + return "" } - return *c.AheadBy + return *c.HeadBranch } -// GetBehindBy returns the BehindBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetBehindBy() int { - if c == nil || c.BehindBy == nil { - return 0 +// GetHeadCommit returns the HeadCommit field. +func (c *CheckSuite) GetHeadCommit() *Commit { + if c == nil { + return nil } - return *c.BehindBy + return c.HeadCommit } -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetDiffURL() string { - if c == nil || c.DiffURL == nil { +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetHeadSHA() string { + if c == nil || c.HeadSHA == nil { return "" } - return *c.DiffURL + return *c.HeadSHA } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (c *CheckSuite) GetID() int64 { + if c == nil || c.ID == nil { + return 0 } - return *c.HTMLURL + return *c.ID } -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPatchURL() string { - if c == nil || c.PatchURL == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetNodeID() string { + if c == nil || c.NodeID == nil { return "" } - return *c.PatchURL + return *c.NodeID } -// GetPermalinkURL returns the PermalinkURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPermalinkURL() string { - if c == nil || c.PermalinkURL == nil { - return "" +// GetRepository returns the Repository field. +func (c *CheckSuite) GetRepository() *Repository { + if c == nil { + return nil } - return *c.PermalinkURL + return c.Repository } // GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetStatus() string { +func (c *CheckSuite) GetStatus() string { if c == nil || c.Status == nil { return "" } return *c.Status } -// GetTotalCommits returns the TotalCommits field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetTotalCommits() int { - if c == nil || c.TotalCommits == nil { - return 0 - } - return *c.TotalCommits -} - // GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetURL() string { +func (c *CheckSuite) GetURL() string { if c == nil || c.URL == nil { return "" } return *c.URL } -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false - } - return *c.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CheckSuiteEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" } - return *c.Total + return *c.Action } -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 +// GetCheckSuite returns the CheckSuite field. +func (c *CheckSuiteEvent) GetCheckSuite() *CheckSuite { + if c == nil { + return nil } - return *c.Additions + return c.CheckSuite } -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 +// GetInstallation returns the Installation field. +func (c *CheckSuiteEvent) GetInstallation() *Installation { + if c == nil { + return nil } - return *c.Deletions + return c.Installation } -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 +// GetOrg returns the Org field. +func (c *CheckSuiteEvent) GetOrg() *Organization { + if c == nil { + return nil } - return *c.Total + return c.Org } -// GetHealthPercentage returns the HealthPercentage field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetHealthPercentage() int { - if c == nil || c.HealthPercentage == nil { - return 0 +// GetRepo returns the Repo field. 
+func (c *CheckSuiteEvent) GetRepo() *Repository { + if c == nil { + return nil } - return *c.HealthPercentage + return c.Repo } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetUpdatedAt() time.Time { - if c == nil || c.UpdatedAt == nil { - return time.Time{} +// GetSender returns the Sender field. +func (c *CheckSuiteEvent) GetSender() *User { + if c == nil { + return nil } - return *c.UpdatedAt + return c.Sender } -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetAvatarURL() string { - if c == nil || c.AvatarURL == nil { - return "" +// GetPreferenceList returns the PreferenceList field. +func (c *CheckSuitePreferenceOptions) GetPreferenceList() *PreferenceList { + if c == nil { + return nil } - return *c.AvatarURL + return c.PreferenceList } -// GetContributions returns the Contributions field if it's non-nil, zero value otherwise. -func (c *Contributor) GetContributions() int { - if c == nil || c.Contributions == nil { - return 0 +// GetPreferences returns the Preferences field. +func (c *CheckSuitePreferenceResults) GetPreferences() *PreferenceList { + if c == nil { + return nil } - return *c.Contributions + return c.Preferences } -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetEventsURL() string { - if c == nil || c.EventsURL == nil { - return "" +// GetRepository returns the Repository field. +func (c *CheckSuitePreferenceResults) GetRepository() *Repository { + if c == nil { + return nil } - return *c.EventsURL + return c.Repository } -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetFollowersURL() string { - if c == nil || c.FollowersURL == nil { +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetBody() string { + if c == nil || c.Body == nil { return "" } - return *c.FollowersURL + return *c.Body } -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetFollowingURL() string { - if c == nil || c.FollowingURL == nil { +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetKey() string { + if c == nil || c.Key == nil { return "" } - return *c.FollowingURL + return *c.Key } -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGistsURL() string { - if c == nil || c.GistsURL == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetName() string { + if c == nil || c.Name == nil { return "" } - return *c.GistsURL + return *c.Name } -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGravatarID() string { - if c == nil || c.GravatarID == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetURL() string { + if c == nil || c.URL == nil { return "" } - return *c.GravatarID + return *c.URL } // GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetHTMLURL() string { +func (c *CodeResult) GetHTMLURL() string { if c == nil || c.HTMLURL == nil { return "" } return *c.HTMLURL } -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (c *Contributor) GetID() int { - if c == nil || c.ID == nil { - return 0 +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetName() string { + if c == nil || c.Name == nil { + return "" } - return *c.ID + return *c.Name } -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *Contributor) GetLogin() string { - if c == nil || c.Login == nil { +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetPath() string { + if c == nil || c.Path == nil { return "" } - return *c.Login + return *c.Path } -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetOrganizationsURL() string { - if c == nil || c.OrganizationsURL == nil { - return "" +// GetRepository returns the Repository field. +func (c *CodeResult) GetRepository() *Repository { + if c == nil { + return nil } - return *c.OrganizationsURL + return c.Repository } -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReceivedEventsURL() string { - if c == nil || c.ReceivedEventsURL == nil { +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetSHA() string { + if c == nil || c.SHA == nil { return "" } - return *c.ReceivedEventsURL + return *c.SHA } -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReposURL() string { - if c == nil || c.ReposURL == nil { - return "" +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (c *CodeSearchResult) GetIncompleteResults() bool { + if c == nil || c.IncompleteResults == nil { + return false } - return *c.ReposURL + return *c.IncompleteResults } -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSiteAdmin() bool { - if c == nil || c.SiteAdmin == nil { - return false +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CodeSearchResult) GetTotal() int { + if c == nil || c.Total == nil { + return 0 } - return *c.SiteAdmin + return *c.Total } -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetStarredURL() string { - if c == nil || c.StarredURL == nil { +// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetCommitURL() string { + if c == nil || c.CommitURL == nil { return "" } - return *c.StarredURL + return *c.CommitURL } -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSubscriptionsURL() string { - if c == nil || c.SubscriptionsURL == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetName() string { + if c == nil || c.Name == nil { return "" } - return *c.SubscriptionsURL + return *c.Name } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *Contributor) GetType() string { - if c == nil || c.Type == nil { +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetRepositoryURL() string { + if c == nil || c.RepositoryURL == nil { return "" } - return *c.Type + return *c.RepositoryURL } -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (c *Contributor) GetURL() string { - if c == nil || c.URL == nil { +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetSHA() string { + if c == nil || c.SHA == nil { return "" } - return *c.URL + return *c.SHA } -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *ContributorStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 +// GetState returns the State field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetState() string { + if c == nil || c.State == nil { + return "" } - return *c.Total + return *c.State } -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *createCommit) GetMessage() string { - if c == nil || c.Message == nil { - return "" +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetTotalCount() int { + if c == nil || c.TotalCount == nil { + return 0 } - return *c.Message + return *c.TotalCount } -// GetTree returns the Tree field if it's non-nil, zero value otherwise. -func (c *createCommit) GetTree() string { - if c == nil || c.Tree == nil { - return "" +// GetTotalCommitComments returns the TotalCommitComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalCommitComments() int { + if c == nil || c.TotalCommitComments == nil { + return 0 } - return *c.Tree + return *c.TotalCommitComments } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetDescription() string { - if c == nil || c.Description == nil { - return "" +// GetTotalGistComments returns the TotalGistComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalGistComments() int { + if c == nil || c.TotalGistComments == nil { + return 0 } - return *c.Description + return *c.TotalGistComments } -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetMasterBranch() string { - if c == nil || c.MasterBranch == nil { - return "" +// GetTotalIssueComments returns the TotalIssueComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalIssueComments() int { + if c == nil || c.TotalIssueComments == nil { + return 0 } - return *c.MasterBranch + return *c.TotalIssueComments } -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetPusherType() string { - if c == nil || c.PusherType == nil { - return "" +// GetTotalPullRequestComments returns the TotalPullRequestComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalPullRequestComments() int { + if c == nil || c.TotalPullRequestComments == nil { + return 0 } - return *c.PusherType + return *c.TotalPullRequestComments } -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRef() string { - if c == nil || c.Ref == nil { - return "" +// GetAuthor returns the Author field. +func (c *Commit) GetAuthor() *CommitAuthor { + if c == nil { + return nil } - return *c.Ref + return c.Author } -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRefType() string { - if c == nil || c.RefType == nil { - return "" +// GetCommentCount returns the CommentCount field if it's non-nil, zero value otherwise. 
+func (c *Commit) GetCommentCount() int { + if c == nil || c.CommentCount == nil { + return 0 } - return *c.RefType + return *c.CommentCount } -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *createRefRequest) GetRef() string { - if c == nil || c.Ref == nil { - return "" +// GetCommitter returns the Committer field. +func (c *Commit) GetCommitter() *CommitAuthor { + if c == nil { + return nil } - return *c.Ref + return c.Committer } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *createRefRequest) GetSHA() string { - if c == nil || c.SHA == nil { +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *Commit) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { return "" } - return *c.SHA + return *c.HTMLURL } // GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *createTagRequest) GetMessage() string { +func (c *Commit) GetMessage() string { if c == nil || c.Message == nil { return "" } return *c.Message } -// GetObject returns the Object field if it's non-nil, zero value otherwise. -func (c *createTagRequest) GetObject() string { - if c == nil || c.Object == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *Commit) GetNodeID() string { + if c == nil || c.NodeID == nil { return "" } - return *c.Object + return *c.NodeID } -// GetTag returns the Tag field if it's non-nil, zero value otherwise. -func (c *createTagRequest) GetTag() string { - if c == nil || c.Tag == nil { +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *Commit) GetSHA() string { + if c == nil || c.SHA == nil { return "" } - return *c.Tag + return *c.SHA } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *createTagRequest) GetType() string { - if c == nil || c.Type == nil { - return "" +// GetStats returns the Stats field. +func (c *Commit) GetStats() *CommitStats { + if c == nil { + return nil } - return *c.Type + return c.Stats } -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetPusherType() string { - if d == nil || d.PusherType == nil { - return "" +// GetTree returns the Tree field. +func (c *Commit) GetTree() *Tree { + if c == nil { + return nil } - return *d.PusherType + return c.Tree } -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRef() string { - if d == nil || d.Ref == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *Commit) GetURL() string { + if c == nil || c.URL == nil { return "" } - return *d.Ref + return *c.URL } -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRefType() string { - if d == nil || d.RefType == nil { - return "" +// GetVerification returns the Verification field. +func (c *Commit) GetVerification() *SignatureVerification { + if c == nil { + return nil } - return *d.RefType + return c.Verification } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} +// GetDate returns the Date field if it's non-nil, zero value otherwise. 
+func (c *CommitAuthor) GetDate() time.Time { + if c == nil || c.Date == nil { + return time.Time{} } - return *d.CreatedAt + return *c.Date } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *Deployment) GetDescription() string { - if d == nil || d.Description == nil { +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetEmail() string { + if c == nil || c.Email == nil { return "" } - return *d.Description + return *c.Email } -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *Deployment) GetEnvironment() string { - if d == nil || d.Environment == nil { +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetLogin() string { + if c == nil || c.Login == nil { return "" } - return *d.Environment + return *c.Login } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *Deployment) GetID() int { - if d == nil || d.ID == nil { - return 0 +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetName() string { + if c == nil || c.Name == nil { + return "" } - return *d.ID + return *c.Name } -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRef() string { - if d == nil || d.Ref == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CommitCommentEvent) GetAction() string { + if c == nil || c.Action == nil { return "" } - return *d.Ref + return *c.Action } -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" +// GetComment returns the Comment field. +func (c *CommitCommentEvent) GetComment() *RepositoryComment { + if c == nil { + return nil } - return *d.RepositoryURL + return c.Comment } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (d *Deployment) GetSHA() string { - if d == nil || d.SHA == nil { - return "" +// GetInstallation returns the Installation field. +func (c *CommitCommentEvent) GetInstallation() *Installation { + if c == nil { + return nil } - return *d.SHA + return c.Installation } -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetStatusesURL() string { - if d == nil || d.StatusesURL == nil { - return "" +// GetRepo returns the Repo field. +func (c *CommitCommentEvent) GetRepo() *Repository { + if c == nil { + return nil } - return *d.StatusesURL + return c.Repo } -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *Deployment) GetTask() string { - if d == nil || d.Task == nil { - return "" +// GetSender returns the Sender field. +func (c *CommitCommentEvent) GetSender() *User { + if c == nil { + return nil } - return *d.Task + return c.Sender } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetAdditions() int { + if c == nil || c.Additions == nil { + return 0 } - return *d.UpdatedAt + return *c.Additions } -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (d *Deployment) GetURL() string { - if d == nil || d.URL == nil { +// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetBlobURL() string { + if c == nil || c.BlobURL == nil { return "" } - return *d.URL + return *c.BlobURL } -// GetAutoMerge returns the AutoMerge field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetAutoMerge() bool { - if d == nil || d.AutoMerge == nil { - return false +// GetChanges returns the Changes field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetChanges() int { + if c == nil || c.Changes == nil { + return 0 } - return *d.AutoMerge + return *c.Changes } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetDescription() string { - if d == nil || d.Description == nil { +// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetContentsURL() string { + if c == nil || c.ContentsURL == nil { return "" } - return *d.Description + return *c.ContentsURL } -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetDeletions() int { + if c == nil || c.Deletions == nil { + return 0 + } + return *c.Deletions +} + +// GetFilename returns the Filename field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetFilename() string { + if c == nil || c.Filename == nil { + return "" + } + return *c.Filename +} + +// GetPatch returns the Patch field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetPatch() string { + if c == nil || c.Patch == nil { + return "" + } + return *c.Patch +} + +// GetPreviousFilename returns the PreviousFilename field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetPreviousFilename() string { + if c == nil || c.PreviousFilename == nil { + return "" + } + return *c.PreviousFilename +} + +// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetRawURL() string { + if c == nil || c.RawURL == nil { + return "" + } + return *c.RawURL +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetAuthor returns the Author field. +func (c *CommitResult) GetAuthor() *User { + if c == nil { + return nil + } + return c.Author +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetCommentsURL() string { + if c == nil || c.CommentsURL == nil { + return "" + } + return *c.CommentsURL +} + +// GetCommit returns the Commit field. +func (c *CommitResult) GetCommit() *Commit { + if c == nil { + return nil + } + return c.Commit +} + +// GetCommitter returns the Committer field. +func (c *CommitResult) GetCommitter() *User { + if c == nil { + return nil + } + return c.Committer +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetRepository returns the Repository field. 
+func (c *CommitResult) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetScore returns the Score field. +func (c *CommitResult) GetScore() *float64 { + if c == nil { + return nil + } + return c.Score +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAheadBy returns the AheadBy field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetAheadBy() int { + if c == nil || c.AheadBy == nil { + return 0 + } + return *c.AheadBy +} + +// GetBaseCommit returns the BaseCommit field. +func (c *CommitsComparison) GetBaseCommit() *RepositoryCommit { + if c == nil { + return nil + } + return c.BaseCommit +} + +// GetBehindBy returns the BehindBy field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetBehindBy() int { + if c == nil || c.BehindBy == nil { + return 0 + } + return *c.BehindBy +} + +// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetDiffURL() string { + if c == nil || c.DiffURL == nil { + return "" + } + return *c.DiffURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetMergeBaseCommit returns the MergeBaseCommit field. +func (c *CommitsComparison) GetMergeBaseCommit() *RepositoryCommit { + if c == nil { + return nil + } + return c.MergeBaseCommit +} + +// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetPatchURL() string { + if c == nil || c.PatchURL == nil { + return "" + } + return *c.PatchURL +} + +// GetPermalinkURL returns the PermalinkURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetPermalinkURL() string { + if c == nil || c.PermalinkURL == nil { + return "" + } + return *c.PermalinkURL +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetTotalCommits returns the TotalCommits field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetTotalCommits() int { + if c == nil || c.TotalCommits == nil { + return 0 + } + return *c.TotalCommits +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (c *CommitsSearchResult) GetIncompleteResults() bool { + if c == nil || c.IncompleteResults == nil { + return false + } + return *c.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CommitsSearchResult) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. 
+func (c *CommitStats) GetAdditions() int { + if c == nil || c.Additions == nil { + return 0 + } + return *c.Additions +} + +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (c *CommitStats) GetDeletions() int { + if c == nil || c.Deletions == nil { + return 0 + } + return *c.Deletions +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CommitStats) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetCodeOfConduct returns the CodeOfConduct field. +func (c *CommunityHealthFiles) GetCodeOfConduct() *Metric { + if c == nil { + return nil + } + return c.CodeOfConduct +} + +// GetContributing returns the Contributing field. +func (c *CommunityHealthFiles) GetContributing() *Metric { + if c == nil { + return nil + } + return c.Contributing +} + +// GetIssueTemplate returns the IssueTemplate field. +func (c *CommunityHealthFiles) GetIssueTemplate() *Metric { + if c == nil { + return nil + } + return c.IssueTemplate +} + +// GetLicense returns the License field. +func (c *CommunityHealthFiles) GetLicense() *Metric { + if c == nil { + return nil + } + return c.License +} + +// GetPullRequestTemplate returns the PullRequestTemplate field. +func (c *CommunityHealthFiles) GetPullRequestTemplate() *Metric { + if c == nil { + return nil + } + return c.PullRequestTemplate +} + +// GetReadme returns the Readme field. +func (c *CommunityHealthFiles) GetReadme() *Metric { + if c == nil { + return nil + } + return c.Readme +} + +// GetFiles returns the Files field. +func (c *CommunityHealthMetrics) GetFiles() *CommunityHealthFiles { + if c == nil { + return nil + } + return c.Files +} + +// GetHealthPercentage returns the HealthPercentage field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetHealthPercentage() int { + if c == nil || c.HealthPercentage == nil { + return 0 + } + return *c.HealthPercentage +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetUpdatedAt() time.Time { + if c == nil || c.UpdatedAt == nil { + return time.Time{} + } + return *c.UpdatedAt +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetAvatarURL() string { + if c == nil || c.AvatarURL == nil { + return "" + } + return *c.AvatarURL +} + +// GetContributions returns the Contributions field if it's non-nil, zero value otherwise. +func (c *Contributor) GetContributions() int { + if c == nil || c.Contributions == nil { + return 0 + } + return *c.Contributions +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetEventsURL() string { + if c == nil || c.EventsURL == nil { + return "" + } + return *c.EventsURL +} + +// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetFollowersURL() string { + if c == nil || c.FollowersURL == nil { + return "" + } + return *c.FollowersURL +} + +// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetFollowingURL() string { + if c == nil || c.FollowingURL == nil { + return "" + } + return *c.FollowingURL +} + +// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. 
+func (c *Contributor) GetGistsURL() string { + if c == nil || c.GistsURL == nil { + return "" + } + return *c.GistsURL +} + +// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. +func (c *Contributor) GetGravatarID() string { + if c == nil || c.GravatarID == nil { + return "" + } + return *c.GravatarID +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *Contributor) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (c *Contributor) GetLogin() string { + if c == nil || c.Login == nil { + return "" + } + return *c.Login +} + +// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetOrganizationsURL() string { + if c == nil || c.OrganizationsURL == nil { + return "" + } + return *c.OrganizationsURL +} + +// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetReceivedEventsURL() string { + if c == nil || c.ReceivedEventsURL == nil { + return "" + } + return *c.ReceivedEventsURL +} + +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetReposURL() string { + if c == nil || c.ReposURL == nil { + return "" + } + return *c.ReposURL +} + +// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. +func (c *Contributor) GetSiteAdmin() bool { + if c == nil || c.SiteAdmin == nil { + return false + } + return *c.SiteAdmin +} + +// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetStarredURL() string { + if c == nil || c.StarredURL == nil { + return "" + } + return *c.StarredURL +} + +// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetSubscriptionsURL() string { + if c == nil || c.SubscriptionsURL == nil { + return "" + } + return *c.SubscriptionsURL +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (c *Contributor) GetType() string { + if c == nil || c.Type == nil { + return "" + } + return *c.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAuthor returns the Author field. +func (c *ContributorStats) GetAuthor() *Contributor { + if c == nil { + return nil + } + return c.Author +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *ContributorStats) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetCompletedAt() Timestamp { + if c == nil || c.CompletedAt == nil { + return Timestamp{} + } + return *c.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. 
+func (c *CreateCheckRunOptions) GetConclusion() string { + if c == nil || c.Conclusion == nil { + return "" + } + return *c.Conclusion +} + +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetDetailsURL() string { + if c == nil || c.DetailsURL == nil { + return "" + } + return *c.DetailsURL +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetExternalID() string { + if c == nil || c.ExternalID == nil { + return "" + } + return *c.ExternalID +} + +// GetOutput returns the Output field. +func (c *CreateCheckRunOptions) GetOutput() *CheckRunOutput { + if c == nil { + return nil + } + return c.Output +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetStartedAt() Timestamp { + if c == nil || c.StartedAt == nil { + return Timestamp{} + } + return *c.StartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (c *CreateCheckSuiteOptions) GetHeadBranch() string { + if c == nil || c.HeadBranch == nil { + return "" + } + return *c.HeadBranch +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetDescription() string { + if c == nil || c.Description == nil { + return "" + } + return *c.Description +} + +// GetInstallation returns the Installation field. +func (c *CreateEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetMasterBranch() string { + if c == nil || c.MasterBranch == nil { + return "" + } + return *c.MasterBranch +} + +// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetPusherType() string { + if c == nil || c.PusherType == nil { + return "" + } + return *c.PusherType +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetRef() string { + if c == nil || c.Ref == nil { + return "" + } + return *c.Ref +} + +// GetRefType returns the RefType field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetRefType() string { + if c == nil || c.RefType == nil { + return "" + } + return *c.RefType +} + +// GetRepo returns the Repo field. +func (c *CreateEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *CreateEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (c *CreateOrgInvitationOptions) GetEmail() string { + if c == nil || c.Email == nil { + return "" + } + return *c.Email +} + +// GetInviteeID returns the InviteeID field if it's non-nil, zero value otherwise. +func (c *CreateOrgInvitationOptions) GetInviteeID() int64 { + if c == nil || c.InviteeID == nil { + return 0 + } + return *c.InviteeID +} + +// GetRole returns the Role field if it's non-nil, zero value otherwise. 
+func (c *CreateOrgInvitationOptions) GetRole() string { + if c == nil || c.Role == nil { + return "" + } + return *c.Role +} + +// GetInstallation returns the Installation field. +func (d *DeleteEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetPusherType() string { + if d == nil || d.PusherType == nil { + return "" + } + return *d.PusherType +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRefType returns the RefType field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetRefType() string { + if d == nil || d.RefType == nil { + return "" + } + return *d.RefType +} + +// GetRepo returns the Repo field. +func (d *DeleteEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeleteEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *Deployment) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetCreator returns the Creator field. +func (d *Deployment) GetCreator() *User { + if d == nil { + return nil + } + return d.Creator +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *Deployment) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *Deployment) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *Deployment) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *Deployment) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *Deployment) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetRepositoryURL() string { + if d == nil || d.RepositoryURL == nil { + return "" + } + return *d.RepositoryURL +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (d *Deployment) GetSHA() string { + if d == nil || d.SHA == nil { + return "" + } + return *d.SHA +} + +// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetStatusesURL() string { + if d == nil || d.StatusesURL == nil { + return "" + } + return *d.StatusesURL +} + +// GetTask returns the Task field if it's non-nil, zero value otherwise. +func (d *Deployment) GetTask() string { + if d == nil || d.Task == nil { + return "" + } + return *d.Task +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (d *Deployment) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetDeployment returns the Deployment field. +func (d *DeploymentEvent) GetDeployment() *Deployment { + if d == nil { + return nil + } + return d.Deployment +} + +// GetInstallation returns the Installation field. +func (d *DeploymentEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetRepo returns the Repo field. +func (d *DeploymentEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeploymentEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAutoMerge returns the AutoMerge field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetAutoMerge() bool { + if d == nil || d.AutoMerge == nil { + return false + } + return *d.AutoMerge +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. func (d *DeploymentRequest) GetEnvironment() string { if d == nil || d.Environment == nil { return "" } - return *d.Environment + return *d.Environment +} + +// GetPayload returns the Payload field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetPayload() string { + if d == nil || d.Payload == nil { + return "" + } + return *d.Payload +} + +// GetProductionEnvironment returns the ProductionEnvironment field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetProductionEnvironment() bool { + if d == nil || d.ProductionEnvironment == nil { + return false + } + return *d.ProductionEnvironment +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRequiredContexts returns the RequiredContexts field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetRequiredContexts() []string { + if d == nil || d.RequiredContexts == nil { + return nil + } + return *d.RequiredContexts +} + +// GetTask returns the Task field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetTask() string { + if d == nil || d.Task == nil { + return "" + } + return *d.Task +} + +// GetTransientEnvironment returns the TransientEnvironment field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetTransientEnvironment() bool { + if d == nil || d.TransientEnvironment == nil { + return false + } + return *d.TransientEnvironment +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetCreator returns the Creator field. +func (d *DeploymentStatus) GetCreator() *User { + if d == nil { + return nil + } + return d.Creator +} + +// GetDeploymentURL returns the DeploymentURL field if it's non-nil, zero value otherwise. 
+func (d *DeploymentStatus) GetDeploymentURL() string { + if d == nil || d.DeploymentURL == nil { + return "" + } + return *d.DeploymentURL +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetRepositoryURL() string { + if d == nil || d.RepositoryURL == nil { + return "" + } + return *d.RepositoryURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetTargetURL() string { + if d == nil || d.TargetURL == nil { + return "" + } + return *d.TargetURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetDeployment returns the Deployment field. +func (d *DeploymentStatusEvent) GetDeployment() *Deployment { + if d == nil { + return nil + } + return d.Deployment +} + +// GetDeploymentStatus returns the DeploymentStatus field. +func (d *DeploymentStatusEvent) GetDeploymentStatus() *DeploymentStatus { + if d == nil { + return nil + } + return d.DeploymentStatus +} + +// GetInstallation returns the Installation field. +func (d *DeploymentStatusEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetRepo returns the Repo field. +func (d *DeploymentStatusEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeploymentStatusEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAutoInactive returns the AutoInactive field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetAutoInactive() bool { + if d == nil || d.AutoInactive == nil { + return false + } + return *d.AutoInactive +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetEnvironmentURL() string { + if d == nil || d.EnvironmentURL == nil { + return "" + } + return *d.EnvironmentURL +} + +// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. 
+func (d *DeploymentStatusRequest) GetLogURL() string { + if d == nil || d.LogURL == nil { + return "" + } + return *d.LogURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetAuthor returns the Author field. +func (d *DiscussionComment) GetAuthor() *User { + if d == nil { + return nil + } + return d.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBody() string { + if d == nil || d.Body == nil { + return "" + } + return *d.Body +} + +// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBodyHTML() string { + if d == nil || d.BodyHTML == nil { + return "" + } + return *d.BodyHTML +} + +// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBodyVersion() string { + if d == nil || d.BodyVersion == nil { + return "" + } + return *d.BodyVersion +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetDiscussionURL returns the DiscussionURL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetDiscussionURL() string { + if d == nil || d.DiscussionURL == nil { + return "" + } + return *d.DiscussionURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetHTMLURL() string { + if d == nil || d.HTMLURL == nil { + return "" + } + return *d.HTMLURL +} + +// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetLastEditedAt() Timestamp { + if d == nil || d.LastEditedAt == nil { + return Timestamp{} + } + return *d.LastEditedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetNumber() int { + if d == nil || d.Number == nil { + return 0 + } + return *d.Number +} + +// GetReactions returns the Reactions field. +func (d *DiscussionComment) GetReactions() *Reactions { + if d == nil { + return nil + } + return d.Reactions +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetTeams returns the Teams field if it's non-nil, zero value otherwise. +func (d *DismissalRestrictionsRequest) GetTeams() []string { + if d == nil || d.Teams == nil { + return nil + } + return *d.Teams +} + +// GetUsers returns the Users field if it's non-nil, zero value otherwise. +func (d *DismissalRestrictionsRequest) GetUsers() []string { + if d == nil || d.Users == nil { + return nil + } + return *d.Users +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. 
+func (d *DraftReviewComment) GetBody() string { + if d == nil || d.Body == nil { + return "" + } + return *d.Body +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetPath() string { + if d == nil || d.Path == nil { + return "" + } + return *d.Path +} + +// GetPosition returns the Position field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetPosition() int { + if d == nil || d.Position == nil { + return 0 + } + return *d.Position +} + +// GetActor returns the Actor field. +func (e *Event) GetActor() *User { + if e == nil { + return nil + } + return e.Actor +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (e *Event) GetCreatedAt() time.Time { + if e == nil || e.CreatedAt == nil { + return time.Time{} + } + return *e.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *Event) GetID() string { + if e == nil || e.ID == nil { + return "" + } + return *e.ID +} + +// GetOrg returns the Org field. +func (e *Event) GetOrg() *Organization { + if e == nil { + return nil + } + return e.Org +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (e *Event) GetPublic() bool { + if e == nil || e.Public == nil { + return false + } + return *e.Public +} + +// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. +func (e *Event) GetRawPayload() json.RawMessage { + if e == nil || e.RawPayload == nil { + return json.RawMessage{} + } + return *e.RawPayload +} + +// GetRepo returns the Repo field. +func (e *Event) GetRepo() *Repository { + if e == nil { + return nil + } + return e.Repo +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (e *Event) GetType() string { + if e == nil || e.Type == nil { + return "" + } + return *e.Type +} + +// GetHRef returns the HRef field if it's non-nil, zero value otherwise. +func (f *FeedLink) GetHRef() string { + if f == nil || f.HRef == nil { + return "" + } + return *f.HRef +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (f *FeedLink) GetType() string { + if f == nil || f.Type == nil { + return "" + } + return *f.Type +} + +// GetCurrentUserActorURL returns the CurrentUserActorURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserActorURL() string { + if f == nil || f.CurrentUserActorURL == nil { + return "" + } + return *f.CurrentUserActorURL +} + +// GetCurrentUserOrganizationURL returns the CurrentUserOrganizationURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserOrganizationURL() string { + if f == nil || f.CurrentUserOrganizationURL == nil { + return "" + } + return *f.CurrentUserOrganizationURL +} + +// GetCurrentUserPublicURL returns the CurrentUserPublicURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserPublicURL() string { + if f == nil || f.CurrentUserPublicURL == nil { + return "" + } + return *f.CurrentUserPublicURL +} + +// GetCurrentUserURL returns the CurrentUserURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserURL() string { + if f == nil || f.CurrentUserURL == nil { + return "" + } + return *f.CurrentUserURL +} + +// GetTimelineURL returns the TimelineURL field if it's non-nil, zero value otherwise. 
+func (f *Feeds) GetTimelineURL() string { + if f == nil || f.TimelineURL == nil { + return "" + } + return *f.TimelineURL +} + +// GetUserURL returns the UserURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetUserURL() string { + if f == nil || f.UserURL == nil { + return "" + } + return *f.UserURL +} + +// GetForkee returns the Forkee field. +func (f *ForkEvent) GetForkee() *Repository { + if f == nil { + return nil + } + return f.Forkee +} + +// GetInstallation returns the Installation field. +func (f *ForkEvent) GetInstallation() *Installation { + if f == nil { + return nil + } + return f.Installation +} + +// GetRepo returns the Repo field. +func (f *ForkEvent) GetRepo() *Repository { + if f == nil { + return nil + } + return f.Repo +} + +// GetSender returns the Sender field. +func (f *ForkEvent) GetSender() *User { + if f == nil { + return nil + } + return f.Sender +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (g *Gist) GetComments() int { + if g == nil || g.Comments == nil { + return 0 + } + return *g.Comments +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *Gist) GetCreatedAt() time.Time { + if g == nil || g.CreatedAt == nil { + return time.Time{} + } + return *g.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (g *Gist) GetDescription() string { + if g == nil || g.Description == nil { + return "" + } + return *g.Description +} + +// GetGitPullURL returns the GitPullURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetGitPullURL() string { + if g == nil || g.GitPullURL == nil { + return "" + } + return *g.GitPullURL +} + +// GetGitPushURL returns the GitPushURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetGitPushURL() string { + if g == nil || g.GitPushURL == nil { + return "" + } + return *g.GitPushURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetHTMLURL() string { + if g == nil || g.HTMLURL == nil { + return "" + } + return *g.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *Gist) GetID() string { + if g == nil || g.ID == nil { + return "" + } + return *g.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *Gist) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetOwner returns the Owner field. +func (g *Gist) GetOwner() *User { + if g == nil { + return nil + } + return g.Owner +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (g *Gist) GetPublic() bool { + if g == nil || g.Public == nil { + return false + } + return *g.Public +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (g *Gist) GetUpdatedAt() time.Time { + if g == nil || g.UpdatedAt == nil { + return time.Time{} + } + return *g.UpdatedAt +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (g *GistComment) GetBody() string { + if g == nil || g.Body == nil { + return "" + } + return *g.Body +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GistComment) GetCreatedAt() time.Time { + if g == nil || g.CreatedAt == nil { + return time.Time{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (g *GistComment) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistComment) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistComment) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetChangeStatus returns the ChangeStatus field. +func (g *GistCommit) GetChangeStatus() *CommitStats { + if g == nil { + return nil + } + return g.ChangeStatus +} + +// GetCommittedAt returns the CommittedAt field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetCommittedAt() Timestamp { + if g == nil || g.CommittedAt == nil { + return Timestamp{} + } + return *g.CommittedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistCommit) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetVersion returns the Version field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetVersion() string { + if g == nil || g.Version == nil { + return "" + } + return *g.Version +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. +func (g *GistFile) GetContent() string { + if g == nil || g.Content == nil { + return "" + } + return *g.Content +} + +// GetFilename returns the Filename field if it's non-nil, zero value otherwise. +func (g *GistFile) GetFilename() string { + if g == nil || g.Filename == nil { + return "" + } + return *g.Filename +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (g *GistFile) GetLanguage() string { + if g == nil || g.Language == nil { + return "" + } + return *g.Language +} + +// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. +func (g *GistFile) GetRawURL() string { + if g == nil || g.RawURL == nil { + return "" + } + return *g.RawURL +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (g *GistFile) GetSize() int { + if g == nil || g.Size == nil { + return 0 + } + return *g.Size +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (g *GistFile) GetType() string { + if g == nil || g.Type == nil { + return "" + } + return *g.Type +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GistFork) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *GistFork) GetID() string { + if g == nil || g.ID == nil { + return "" + } + return *g.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *GistFork) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (g *GistFork) GetUpdatedAt() Timestamp { + if g == nil || g.UpdatedAt == nil { + return Timestamp{} + } + return *g.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistFork) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistFork) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetPrivateGists() int { + if g == nil || g.PrivateGists == nil { + return 0 + } + return *g.PrivateGists +} + +// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetPublicGists() int { + if g == nil || g.PublicGists == nil { + return 0 + } + return *g.PublicGists +} + +// GetTotalGists returns the TotalGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetTotalGists() int { + if g == nil || g.TotalGists == nil { + return 0 + } + return *g.TotalGists +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (g *GitHubAppAuthorizationEvent) GetAction() string { + if g == nil || g.Action == nil { + return "" + } + return *g.Action +} + +// GetSender returns the Sender field. +func (g *GitHubAppAuthorizationEvent) GetSender() *User { + if g == nil { + return nil + } + return g.Sender +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (g *Gitignore) GetName() string { + if g == nil || g.Name == nil { + return "" + } + return *g.Name +} + +// GetSource returns the Source field if it's non-nil, zero value otherwise. +func (g *Gitignore) GetSource() string { + if g == nil || g.Source == nil { + return "" + } + return *g.Source +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (g *GitObject) GetSHA() string { + if g == nil || g.SHA == nil { + return "" + } + return *g.SHA +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (g *GitObject) GetType() string { + if g == nil || g.Type == nil { + return "" + } + return *g.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GitObject) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetInstallation returns the Installation field. +func (g *GollumEvent) GetInstallation() *Installation { + if g == nil { + return nil + } + return g.Installation +} + +// GetRepo returns the Repo field. +func (g *GollumEvent) GetRepo() *Repository { + if g == nil { + return nil + } + return g.Repo +} + +// GetSender returns the Sender field. +func (g *GollumEvent) GetSender() *User { + if g == nil { + return nil + } + return g.Sender +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (g *GPGEmail) GetEmail() string { + if g == nil || g.Email == nil { + return "" + } + return *g.Email +} + +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (g *GPGEmail) GetVerified() bool { + if g == nil || g.Verified == nil { + return false + } + return *g.Verified +} + +// GetCanCertify returns the CanCertify field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanCertify() bool { + if g == nil || g.CanCertify == nil { + return false + } + return *g.CanCertify +} + +// GetCanEncryptComms returns the CanEncryptComms field if it's non-nil, zero value otherwise. 
+func (g *GPGKey) GetCanEncryptComms() bool { + if g == nil || g.CanEncryptComms == nil { + return false + } + return *g.CanEncryptComms +} + +// GetCanEncryptStorage returns the CanEncryptStorage field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanEncryptStorage() bool { + if g == nil || g.CanEncryptStorage == nil { + return false + } + return *g.CanEncryptStorage +} + +// GetCanSign returns the CanSign field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanSign() bool { + if g == nil || g.CanSign == nil { + return false + } + return *g.CanSign +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCreatedAt() time.Time { + if g == nil || g.CreatedAt == nil { + return time.Time{} + } + return *g.CreatedAt +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetExpiresAt() time.Time { + if g == nil || g.ExpiresAt == nil { + return time.Time{} + } + return *g.ExpiresAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetKeyID() string { + if g == nil || g.KeyID == nil { + return "" + } + return *g.KeyID +} + +// GetPrimaryKeyID returns the PrimaryKeyID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetPrimaryKeyID() int64 { + if g == nil || g.PrimaryKeyID == nil { + return 0 + } + return *g.PrimaryKeyID +} + +// GetPublicKey returns the PublicKey field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetPublicKey() string { + if g == nil || g.PublicKey == nil { + return "" + } + return *g.PublicKey +} + +// GetApp returns the App field. +func (g *Grant) GetApp() *AuthorizationApp { + if g == nil { + return nil + } + return g.App +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *Grant) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *Grant) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (g *Grant) GetUpdatedAt() Timestamp { + if g == nil || g.UpdatedAt == nil { + return Timestamp{} + } + return *g.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *Grant) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetActive returns the Active field if it's non-nil, zero value otherwise. +func (h *Hook) GetActive() bool { + if h == nil || h.Active == nil { + return false + } + return *h.Active +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (h *Hook) GetCreatedAt() time.Time { + if h == nil || h.CreatedAt == nil { + return time.Time{} + } + return *h.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (h *Hook) GetID() int64 { + if h == nil || h.ID == nil { + return 0 + } + return *h.ID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (h *Hook) GetUpdatedAt() time.Time { + if h == nil || h.UpdatedAt == nil { + return time.Time{} + } + return *h.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (h *Hook) GetURL() string { + if h == nil || h.URL == nil { + return "" + } + return *h.URL +} + +// GetActiveHooks returns the ActiveHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetActiveHooks() int { + if h == nil || h.ActiveHooks == nil { + return 0 + } + return *h.ActiveHooks +} + +// GetInactiveHooks returns the InactiveHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetInactiveHooks() int { + if h == nil || h.InactiveHooks == nil { + return 0 + } + return *h.InactiveHooks +} + +// GetTotalHooks returns the TotalHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetTotalHooks() int { + if h == nil || h.TotalHooks == nil { + return 0 + } + return *h.TotalHooks +} + +// GetAuthorsCount returns the AuthorsCount field if it's non-nil, zero value otherwise. +func (i *Import) GetAuthorsCount() int { + if i == nil || i.AuthorsCount == nil { + return 0 + } + return *i.AuthorsCount +} + +// GetAuthorsURL returns the AuthorsURL field if it's non-nil, zero value otherwise. +func (i *Import) GetAuthorsURL() string { + if i == nil || i.AuthorsURL == nil { + return "" + } + return *i.AuthorsURL +} + +// GetCommitCount returns the CommitCount field if it's non-nil, zero value otherwise. +func (i *Import) GetCommitCount() int { + if i == nil || i.CommitCount == nil { + return 0 + } + return *i.CommitCount +} + +// GetFailedStep returns the FailedStep field if it's non-nil, zero value otherwise. +func (i *Import) GetFailedStep() string { + if i == nil || i.FailedStep == nil { + return "" + } + return *i.FailedStep +} + +// GetHasLargeFiles returns the HasLargeFiles field if it's non-nil, zero value otherwise. +func (i *Import) GetHasLargeFiles() bool { + if i == nil || i.HasLargeFiles == nil { + return false + } + return *i.HasLargeFiles +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Import) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetHumanName returns the HumanName field if it's non-nil, zero value otherwise. +func (i *Import) GetHumanName() string { + if i == nil || i.HumanName == nil { + return "" + } + return *i.HumanName +} + +// GetLargeFilesCount returns the LargeFilesCount field if it's non-nil, zero value otherwise. +func (i *Import) GetLargeFilesCount() int { + if i == nil || i.LargeFilesCount == nil { + return 0 + } + return *i.LargeFilesCount +} + +// GetLargeFilesSize returns the LargeFilesSize field if it's non-nil, zero value otherwise. +func (i *Import) GetLargeFilesSize() int { + if i == nil || i.LargeFilesSize == nil { + return 0 + } + return *i.LargeFilesSize +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (i *Import) GetMessage() string { + if i == nil || i.Message == nil { + return "" + } + return *i.Message +} + +// GetPercent returns the Percent field if it's non-nil, zero value otherwise. +func (i *Import) GetPercent() int { + if i == nil || i.Percent == nil { + return 0 + } + return *i.Percent +} + +// GetPushPercent returns the PushPercent field if it's non-nil, zero value otherwise. 
+func (i *Import) GetPushPercent() int { + if i == nil || i.PushPercent == nil { + return 0 + } + return *i.PushPercent +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (i *Import) GetRepositoryURL() string { + if i == nil || i.RepositoryURL == nil { + return "" + } + return *i.RepositoryURL +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (i *Import) GetStatus() string { + if i == nil || i.Status == nil { + return "" + } + return *i.Status +} + +// GetStatusText returns the StatusText field if it's non-nil, zero value otherwise. +func (i *Import) GetStatusText() string { + if i == nil || i.StatusText == nil { + return "" + } + return *i.StatusText +} + +// GetTFVCProject returns the TFVCProject field if it's non-nil, zero value otherwise. +func (i *Import) GetTFVCProject() string { + if i == nil || i.TFVCProject == nil { + return "" + } + return *i.TFVCProject +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *Import) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUseLFS returns the UseLFS field if it's non-nil, zero value otherwise. +func (i *Import) GetUseLFS() string { + if i == nil || i.UseLFS == nil { + return "" + } + return *i.UseLFS +} + +// GetVCS returns the VCS field if it's non-nil, zero value otherwise. +func (i *Import) GetVCS() string { + if i == nil || i.VCS == nil { + return "" + } + return *i.VCS +} + +// GetVCSPassword returns the VCSPassword field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSPassword() string { + if i == nil || i.VCSPassword == nil { + return "" + } + return *i.VCSPassword +} + +// GetVCSURL returns the VCSURL field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSURL() string { + if i == nil || i.VCSURL == nil { + return "" + } + return *i.VCSURL +} + +// GetVCSUsername returns the VCSUsername field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSUsername() string { + if i == nil || i.VCSUsername == nil { + return "" + } + return *i.VCSUsername +} + +// GetAccessTokensURL returns the AccessTokensURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetAccessTokensURL() string { + if i == nil || i.AccessTokensURL == nil { + return "" + } + return *i.AccessTokensURL +} + +// GetAccount returns the Account field. +func (i *Installation) GetAccount() *User { + if i == nil { + return nil + } + return i.Account +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (i *Installation) GetAppID() int64 { + if i == nil || i.AppID == nil { + return 0 + } + return *i.AppID +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *Installation) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Installation) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetPermissions returns the Permissions field. 
+func (i *Installation) GetPermissions() *InstallationPermissions { + if i == nil { + return nil + } + return i.Permissions +} + +// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetRepositoriesURL() string { + if i == nil || i.RepositoriesURL == nil { + return "" + } + return *i.RepositoriesURL +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. +func (i *Installation) GetRepositorySelection() string { + if i == nil || i.RepositorySelection == nil { + return "" + } + return *i.RepositorySelection +} + +// GetSingleFileName returns the SingleFileName field if it's non-nil, zero value otherwise. +func (i *Installation) GetSingleFileName() string { + if i == nil || i.SingleFileName == nil { + return "" + } + return *i.SingleFileName +} + +// GetTargetID returns the TargetID field if it's non-nil, zero value otherwise. +func (i *Installation) GetTargetID() int64 { + if i == nil || i.TargetID == nil { + return 0 + } + return *i.TargetID +} + +// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. +func (i *Installation) GetTargetType() string { + if i == nil || i.TargetType == nil { + return "" + } + return *i.TargetType +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *Installation) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetInstallation returns the Installation field. +func (i *InstallationEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetSender returns the Sender field. +func (i *InstallationEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetContents returns the Contents field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetContents() string { + if i == nil || i.Contents == nil { + return "" + } + return *i.Contents +} + +// GetIssues returns the Issues field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetIssues() string { + if i == nil || i.Issues == nil { + return "" + } + return *i.Issues +} + +// GetMetadata returns the Metadata field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetMetadata() string { + if i == nil || i.Metadata == nil { + return "" + } + return *i.Metadata +} + +// GetSingleFile returns the SingleFile field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetSingleFile() string { + if i == nil || i.SingleFile == nil { + return "" + } + return *i.SingleFile +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationRepositoriesEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetInstallation returns the Installation field. +func (i *InstallationRepositoriesEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. 
+func (i *InstallationRepositoriesEvent) GetRepositorySelection() string { + if i == nil || i.RepositorySelection == nil { + return "" + } + return *i.RepositorySelection +} + +// GetSender returns the Sender field. +func (i *InstallationRepositoriesEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetExpiresAt() time.Time { + if i == nil || i.ExpiresAt == nil { + return time.Time{} + } + return *i.ExpiresAt +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetToken() string { + if i == nil || i.Token == nil { + return "" + } + return *i.Token +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetExpiresAt() Timestamp { + if i == nil || i.ExpiresAt == nil { + return Timestamp{} + } + return *i.ExpiresAt +} + +// GetLimit returns the Limit field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetLimit() string { + if i == nil || i.Limit == nil { + return "" + } + return *i.Limit +} + +// GetOrigin returns the Origin field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetOrigin() string { + if i == nil || i.Origin == nil { + return "" + } + return *i.Origin +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *Invitation) GetCreatedAt() time.Time { + if i == nil || i.CreatedAt == nil { + return time.Time{} + } + return *i.CreatedAt +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (i *Invitation) GetEmail() string { + if i == nil || i.Email == nil { + return "" + } + return *i.Email +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Invitation) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetInvitationTeamURL returns the InvitationTeamURL field if it's non-nil, zero value otherwise. +func (i *Invitation) GetInvitationTeamURL() string { + if i == nil || i.InvitationTeamURL == nil { + return "" + } + return *i.InvitationTeamURL +} + +// GetInviter returns the Inviter field. +func (i *Invitation) GetInviter() *User { + if i == nil { + return nil + } + return i.Inviter +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (i *Invitation) GetLogin() string { + if i == nil || i.Login == nil { + return "" + } + return *i.Login +} + +// GetRole returns the Role field if it's non-nil, zero value otherwise. +func (i *Invitation) GetRole() string { + if i == nil || i.Role == nil { + return "" + } + return *i.Role +} + +// GetTeamCount returns the TeamCount field if it's non-nil, zero value otherwise. +func (i *Invitation) GetTeamCount() int { + if i == nil || i.TeamCount == nil { + return 0 + } + return *i.TeamCount +} + +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (i *Issue) GetActiveLockReason() string { + if i == nil || i.ActiveLockReason == nil { + return "" + } + return *i.ActiveLockReason +} + +// GetAssignee returns the Assignee field. +func (i *Issue) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. 
+func (i *Issue) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (i *Issue) GetClosedAt() time.Time { + if i == nil || i.ClosedAt == nil { + return time.Time{} + } + return *i.ClosedAt +} + +// GetClosedBy returns the ClosedBy field. +func (i *Issue) GetClosedBy() *User { + if i == nil { + return nil + } + return i.ClosedBy +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (i *Issue) GetComments() int { + if i == nil || i.Comments == nil { + return 0 + } + return *i.Comments +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetCommentsURL() string { + if i == nil || i.CommentsURL == nil { + return "" + } + return *i.CommentsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *Issue) GetCreatedAt() time.Time { + if i == nil || i.CreatedAt == nil { + return time.Time{} + } + return *i.CreatedAt +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetEventsURL() string { + if i == nil || i.EventsURL == nil { + return "" + } + return *i.EventsURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Issue) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetLabelsURL() string { + if i == nil || i.LabelsURL == nil { + return "" + } + return *i.LabelsURL +} + +// GetLocked returns the Locked field if it's non-nil, zero value otherwise. +func (i *Issue) GetLocked() bool { + if i == nil || i.Locked == nil { + return false + } + return *i.Locked +} + +// GetMilestone returns the Milestone field. +func (i *Issue) GetMilestone() *Milestone { + if i == nil { + return nil + } + return i.Milestone +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *Issue) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (i *Issue) GetNumber() int { + if i == nil || i.Number == nil { + return 0 + } + return *i.Number +} + +// GetPullRequestLinks returns the PullRequestLinks field. +func (i *Issue) GetPullRequestLinks() *PullRequestLinks { + if i == nil { + return nil + } + return i.PullRequestLinks +} + +// GetReactions returns the Reactions field. +func (i *Issue) GetReactions() *Reactions { + if i == nil { + return nil + } + return i.Reactions +} + +// GetRepository returns the Repository field. +func (i *Issue) GetRepository() *Repository { + if i == nil { + return nil + } + return i.Repository +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetRepositoryURL() string { + if i == nil || i.RepositoryURL == nil { + return "" + } + return *i.RepositoryURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. 
+func (i *Issue) GetState() string { + if i == nil || i.State == nil { + return "" + } + return *i.State +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (i *Issue) GetTitle() string { + if i == nil || i.Title == nil { + return "" + } + return *i.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *Issue) GetUpdatedAt() time.Time { + if i == nil || i.UpdatedAt == nil { + return time.Time{} + } + return *i.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *Issue) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUser returns the User field. +func (i *Issue) GetUser() *User { + if i == nil { + return nil + } + return i.User +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetAuthorAssociation() string { + if i == nil || i.AuthorAssociation == nil { + return "" + } + return *i.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetCreatedAt() time.Time { + if i == nil || i.CreatedAt == nil { + return time.Time{} + } + return *i.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetIssueURL() string { + if i == nil || i.IssueURL == nil { + return "" + } + return *i.IssueURL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetReactions returns the Reactions field. +func (i *IssueComment) GetReactions() *Reactions { + if i == nil { + return nil + } + return i.Reactions +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetUpdatedAt() time.Time { + if i == nil || i.UpdatedAt == nil { + return time.Time{} + } + return *i.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUser returns the User field. +func (i *IssueComment) GetUser() *User { + if i == nil { + return nil + } + return i.User +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *IssueCommentEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetChanges returns the Changes field. +func (i *IssueCommentEvent) GetChanges() *EditChange { + if i == nil { + return nil + } + return i.Changes +} + +// GetComment returns the Comment field. +func (i *IssueCommentEvent) GetComment() *IssueComment { + if i == nil { + return nil + } + return i.Comment +} + +// GetInstallation returns the Installation field. 
+func (i *IssueCommentEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetIssue returns the Issue field. +func (i *IssueCommentEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetRepo returns the Repo field. +func (i *IssueCommentEvent) GetRepo() *Repository { + if i == nil { + return nil + } + return i.Repo +} + +// GetSender returns the Sender field. +func (i *IssueCommentEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetActor returns the Actor field. +func (i *IssueEvent) GetActor() *User { + if i == nil { + return nil + } + return i.Actor +} + +// GetAssignee returns the Assignee field. +func (i *IssueEvent) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetAssigner returns the Assigner field. +func (i *IssueEvent) GetAssigner() *User { + if i == nil { + return nil + } + return i.Assigner +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetCommitID() string { + if i == nil || i.CommitID == nil { + return "" + } + return *i.CommitID +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetCreatedAt() time.Time { + if i == nil || i.CreatedAt == nil { + return time.Time{} + } + return *i.CreatedAt +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetEvent() string { + if i == nil || i.Event == nil { + return "" + } + return *i.Event +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetIssue returns the Issue field. +func (i *IssueEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetLabel returns the Label field. +func (i *IssueEvent) GetLabel() *Label { + if i == nil { + return nil + } + return i.Label +} + +// GetLockReason returns the LockReason field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetLockReason() string { + if i == nil || i.LockReason == nil { + return "" + } + return *i.LockReason +} + +// GetMilestone returns the Milestone field. +func (i *IssueEvent) GetMilestone() *Milestone { + if i == nil { + return nil + } + return i.Milestone +} + +// GetProjectCard returns the ProjectCard field. +func (i *IssueEvent) GetProjectCard() *ProjectCard { + if i == nil { + return nil + } + return i.ProjectCard +} + +// GetRename returns the Rename field. +func (i *IssueEvent) GetRename() *Rename { + if i == nil { + return nil + } + return i.Rename +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetAssignee() string { + if i == nil || i.Assignee == nil { + return "" + } + return *i.Assignee +} + +// GetAssignees returns the Assignees field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetAssignees() []string { + if i == nil || i.Assignees == nil { + return nil + } + return *i.Assignees +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. 
+func (i *IssueRequest) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetLabels returns the Labels field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetLabels() []string { + if i == nil || i.Labels == nil { + return nil + } + return *i.Labels +} + +// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetMilestone() int { + if i == nil || i.Milestone == nil { + return 0 + } + return *i.Milestone +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetState() string { + if i == nil || i.State == nil { + return "" + } + return *i.State +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetTitle() string { + if i == nil || i.Title == nil { + return "" + } + return *i.Title +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *IssuesEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetAssignee returns the Assignee field. +func (i *IssuesEvent) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetChanges returns the Changes field. +func (i *IssuesEvent) GetChanges() *EditChange { + if i == nil { + return nil + } + return i.Changes +} + +// GetInstallation returns the Installation field. +func (i *IssuesEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetIssue returns the Issue field. +func (i *IssuesEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetLabel returns the Label field. +func (i *IssuesEvent) GetLabel() *Label { + if i == nil { + return nil + } + return i.Label +} + +// GetRepo returns the Repo field. +func (i *IssuesEvent) GetRepo() *Repository { + if i == nil { + return nil + } + return i.Repo +} + +// GetSender returns the Sender field. +func (i *IssuesEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (i *IssuesSearchResult) GetIncompleteResults() bool { + if i == nil || i.IncompleteResults == nil { + return false + } + return *i.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (i *IssuesSearchResult) GetTotal() int { + if i == nil || i.Total == nil { + return 0 + } + return *i.Total +} + +// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetClosedIssues() int { + if i == nil || i.ClosedIssues == nil { + return 0 + } + return *i.ClosedIssues +} + +// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetOpenIssues() int { + if i == nil || i.OpenIssues == nil { + return 0 + } + return *i.OpenIssues +} + +// GetTotalIssues returns the TotalIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetTotalIssues() int { + if i == nil || i.TotalIssues == nil { + return 0 + } + return *i.TotalIssues +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (k *Key) GetID() int64 { + if k == nil || k.ID == nil { + return 0 + } + return *k.ID +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. 
+func (k *Key) GetKey() string { + if k == nil || k.Key == nil { + return "" + } + return *k.Key +} + +// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. +func (k *Key) GetReadOnly() bool { + if k == nil || k.ReadOnly == nil { + return false + } + return *k.ReadOnly +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (k *Key) GetTitle() string { + if k == nil || k.Title == nil { + return "" + } + return *k.Title } -// GetPayload returns the Payload field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetPayload() string { - if d == nil || d.Payload == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (k *Key) GetURL() string { + if k == nil || k.URL == nil { return "" } - return *d.Payload + return *k.URL } -// GetProductionEnvironment returns the ProductionEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetProductionEnvironment() bool { - if d == nil || d.ProductionEnvironment == nil { +// GetColor returns the Color field if it's non-nil, zero value otherwise. +func (l *Label) GetColor() string { + if l == nil || l.Color == nil { + return "" + } + return *l.Color +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (l *Label) GetDefault() bool { + if l == nil || l.Default == nil { return false } - return *d.ProductionEnvironment + return *l.Default } -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRef() string { - if d == nil || d.Ref == nil { +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (l *Label) GetDescription() string { + if l == nil || l.Description == nil { return "" } - return *d.Ref + return *l.Description } -// GetRequiredContexts returns the RequiredContexts field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRequiredContexts() []string { - if d == nil || d.RequiredContexts == nil { - return nil +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (l *Label) GetID() int64 { + if l == nil || l.ID == nil { + return 0 } - return *d.RequiredContexts + return *l.ID } -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTask() string { - if d == nil || d.Task == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *Label) GetName() string { + if l == nil || l.Name == nil { return "" } - return *d.Task + return *l.Name } -// GetTransientEnvironment returns the TransientEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTransientEnvironment() bool { - if d == nil || d.TransientEnvironment == nil { - return false +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (l *Label) GetNodeID() string { + if l == nil || l.NodeID == nil { + return "" } - return *d.TransientEnvironment + return *l.NodeID } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *Label) GetURL() string { + if l == nil || l.URL == nil { + return "" } - return *d.CreatedAt + return *l.URL } -// GetDeploymentURL returns the DeploymentURL field if it's non-nil, zero value otherwise. 
-func (d *DeploymentStatus) GetDeploymentURL() string { - if d == nil || d.DeploymentURL == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (l *LabelEvent) GetAction() string { + if l == nil || l.Action == nil { return "" } - return *d.DeploymentURL + return *l.Action +} + +// GetChanges returns the Changes field. +func (l *LabelEvent) GetChanges() *EditChange { + if l == nil { + return nil + } + return l.Changes +} + +// GetInstallation returns the Installation field. +func (l *LabelEvent) GetInstallation() *Installation { + if l == nil { + return nil + } + return l.Installation +} + +// GetLabel returns the Label field. +func (l *LabelEvent) GetLabel() *Label { + if l == nil { + return nil + } + return l.Label +} + +// GetOrg returns the Org field. +func (l *LabelEvent) GetOrg() *Organization { + if l == nil { + return nil + } + return l.Org +} + +// GetRepo returns the Repo field. +func (l *LabelEvent) GetRepo() *Repository { + if l == nil { + return nil + } + return l.Repo +} + +// GetColor returns the Color field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetColor() string { + if l == nil || l.Color == nil { + return "" + } + return *l.Color +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetDefault() bool { + if l == nil || l.Default == nil { + return false + } + return *l.Default } // GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetDescription() string { - if d == nil || d.Description == nil { +func (l *LabelResult) GetDescription() string { + if l == nil || l.Description == nil { return "" } - return *d.Description + return *l.Description } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetID() int { - if d == nil || d.ID == nil { +func (l *LabelResult) GetID() int64 { + if l == nil || l.ID == nil { return 0 } - return *d.ID + return *l.ID } -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetName() string { + if l == nil || l.Name == nil { return "" } - return *d.RepositoryURL + return *l.Name } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetState() string { - if d == nil || d.State == nil { +// GetScore returns the Score field. +func (l *LabelResult) GetScore() *float64 { + if l == nil { + return nil + } + return l.Score +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetURL() string { + if l == nil || l.URL == nil { return "" } - return *d.State + return *l.URL } -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetTargetURL() string { - if d == nil || d.TargetURL == nil { +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (l *LabelsSearchResult) GetIncompleteResults() bool { + if l == nil || l.IncompleteResults == nil { + return false + } + return *l.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
+func (l *LabelsSearchResult) GetTotal() int { + if l == nil || l.Total == nil { + return 0 + } + return *l.Total +} + +// GetOID returns the OID field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetOID() string { + if l == nil || l.OID == nil { return "" } - return *d.TargetURL + return *l.OID } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetPath() string { + if l == nil || l.Path == nil { + return "" } - return *d.UpdatedAt + return *l.Path } -// GetAutoInactive returns the AutoInactive field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetAutoInactive() bool { - if d == nil || d.AutoInactive == nil { - return false +// GetRefName returns the RefName field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetRefName() string { + if l == nil || l.RefName == nil { + return "" } - return *d.AutoInactive + return *l.RefName +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetSize() int { + if l == nil || l.Size == nil { + return 0 + } + return *l.Size +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (l *License) GetBody() string { + if l == nil || l.Body == nil { + return "" + } + return *l.Body +} + +// GetConditions returns the Conditions field if it's non-nil, zero value otherwise. +func (l *License) GetConditions() []string { + if l == nil || l.Conditions == nil { + return nil + } + return *l.Conditions } // GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetDescription() string { - if d == nil || d.Description == nil { +func (l *License) GetDescription() string { + if l == nil || l.Description == nil { + return "" + } + return *l.Description +} + +// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. +func (l *License) GetFeatured() bool { + if l == nil || l.Featured == nil { + return false + } + return *l.Featured +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (l *License) GetHTMLURL() string { + if l == nil || l.HTMLURL == nil { + return "" + } + return *l.HTMLURL +} + +// GetImplementation returns the Implementation field if it's non-nil, zero value otherwise. +func (l *License) GetImplementation() string { + if l == nil || l.Implementation == nil { + return "" + } + return *l.Implementation +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (l *License) GetKey() string { + if l == nil || l.Key == nil { + return "" + } + return *l.Key +} + +// GetLimitations returns the Limitations field if it's non-nil, zero value otherwise. +func (l *License) GetLimitations() []string { + if l == nil || l.Limitations == nil { + return nil + } + return *l.Limitations +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *License) GetName() string { + if l == nil || l.Name == nil { return "" } - return *d.Description + return *l.Name +} + +// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. 
+func (l *License) GetPermissions() []string { + if l == nil || l.Permissions == nil { + return nil + } + return *l.Permissions } -// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetEnvironmentURL() string { - if d == nil || d.EnvironmentURL == nil { +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (l *License) GetSPDXID() string { + if l == nil || l.SPDXID == nil { return "" } - return *d.EnvironmentURL + return *l.SPDXID } -// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetLogURL() string { - if d == nil || d.LogURL == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *License) GetURL() string { + if l == nil || l.URL == nil { return "" } - return *d.LogURL + return *l.URL } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetState() string { - if d == nil || d.State == nil { +// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetCheckName() string { + if l == nil || l.CheckName == nil { return "" } - return *d.State + return *l.CheckName } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetBody() string { - if d == nil || d.Body == nil { +// GetFilter returns the Filter field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetFilter() string { + if l == nil || l.Filter == nil { return "" } - return *d.Body + return *l.Filter } -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPath() string { - if d == nil || d.Path == nil { +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetStatus() string { + if l == nil || l.Status == nil { return "" } - return *d.Path + return *l.Status } -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPosition() int { - if d == nil || d.Position == nil { +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsResults) GetTotal() int { + if l == nil || l.Total == nil { return 0 } - return *d.Position + return *l.Total } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Event) GetCreatedAt() time.Time { - if e == nil || e.CreatedAt == nil { - return time.Time{} +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteOptions) GetAppID() int { + if l == nil || l.AppID == nil { + return 0 } - return *e.CreatedAt + return *l.AppID } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Event) GetID() string { - if e == nil || e.ID == nil { +// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteOptions) GetCheckName() string { + if l == nil || l.CheckName == nil { return "" } - return *e.ID -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (e *Event) GetPublic() bool { - if e == nil || e.Public == nil { - return false - } - return *e.Public + return *l.CheckName } -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. 
-func (e *Event) GetRawPayload() json.RawMessage { - if e == nil || e.RawPayload == nil { - return json.RawMessage{} +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteResults) GetTotal() int { + if l == nil || l.Total == nil { + return 0 } - return *e.RawPayload + return *l.Total } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (e *Event) GetType() string { - if e == nil || e.Type == nil { +// GetAffiliation returns the Affiliation field if it's non-nil, zero value otherwise. +func (l *ListCollaboratorOptions) GetAffiliation() string { + if l == nil || l.Affiliation == nil { return "" } - return *e.Type + return *l.Affiliation } -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (f *FeedLink) GetHRef() string { - if f == nil || f.HRef == nil { - return "" +// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetEffectiveDate() Timestamp { + if m == nil || m.EffectiveDate == nil { + return Timestamp{} } - return *f.HRef + return *m.EffectiveDate } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (f *FeedLink) GetType() string { - if f == nil || f.Type == nil { - return "" +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetID() int64 { + if m == nil || m.ID == nil { + return 0 } - return *f.Type + return *m.ID } -// GetCurrentUserActorURL returns the CurrentUserActorURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserActorURL() string { - if f == nil || f.CurrentUserActorURL == nil { - return "" +// GetPlan returns the Plan field. +func (m *MarketplacePendingChange) GetPlan() *MarketplacePlan { + if m == nil { + return nil } - return *f.CurrentUserActorURL + return m.Plan } -// GetCurrentUserOrganizationURL returns the CurrentUserOrganizationURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserOrganizationURL() string { - if f == nil || f.CurrentUserOrganizationURL == nil { - return "" +// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetUnitCount() int { + if m == nil || m.UnitCount == nil { + return 0 } - return *f.CurrentUserOrganizationURL + return *m.UnitCount } -// GetCurrentUserPublicURL returns the CurrentUserPublicURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserPublicURL() string { - if f == nil || f.CurrentUserPublicURL == nil { +// GetAccountsURL returns the AccountsURL field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetAccountsURL() string { + if m == nil || m.AccountsURL == nil { return "" } - return *f.CurrentUserPublicURL + return *m.AccountsURL } -// GetCurrentUserURL returns the CurrentUserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserURL() string { - if f == nil || f.CurrentUserURL == nil { - return "" +// GetBullets returns the Bullets field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetBullets() []string { + if m == nil || m.Bullets == nil { + return nil } - return *f.CurrentUserURL + return *m.Bullets } -// GetTimelineURL returns the TimelineURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetTimelineURL() string { - if f == nil || f.TimelineURL == nil { +// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
+func (m *MarketplacePlan) GetDescription() string { + if m == nil || m.Description == nil { return "" } - return *f.TimelineURL + return *m.Description } -// GetUserURL returns the UserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetUserURL() string { - if f == nil || f.UserURL == nil { - return "" +// GetHasFreeTrial returns the HasFreeTrial field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetHasFreeTrial() bool { + if m == nil || m.HasFreeTrial == nil { + return false } - return *f.UserURL + return *m.HasFreeTrial } -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (g *Gist) GetComments() int { - if g == nil || g.Comments == nil { +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetID() int64 { + if m == nil || m.ID == nil { return 0 } - return *g.Comments + return *m.ID } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Gist) GetCreatedAt() time.Time { - if g == nil || g.CreatedAt == nil { - return time.Time{} +// GetMonthlyPriceInCents returns the MonthlyPriceInCents field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetMonthlyPriceInCents() int { + if m == nil || m.MonthlyPriceInCents == nil { + return 0 } - return *g.CreatedAt + return *m.MonthlyPriceInCents } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (g *Gist) GetDescription() string { - if g == nil || g.Description == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetName() string { + if m == nil || m.Name == nil { return "" } - return *g.Description + return *m.Name } -// GetGitPullURL returns the GitPullURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetGitPullURL() string { - if g == nil || g.GitPullURL == nil { +// GetPriceModel returns the PriceModel field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetPriceModel() string { + if m == nil || m.PriceModel == nil { return "" } - return *g.GitPullURL + return *m.PriceModel } -// GetGitPushURL returns the GitPushURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetGitPushURL() string { - if g == nil || g.GitPushURL == nil { +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetState() string { + if m == nil || m.State == nil { return "" } - return *g.GitPushURL + return *m.State } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetHTMLURL() string { - if g == nil || g.HTMLURL == nil { +// GetUnitName returns the UnitName field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetUnitName() string { + if m == nil || m.UnitName == nil { return "" } - return *g.HTMLURL + return *m.UnitName } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Gist) GetID() string { - if g == nil || g.ID == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *g.ID -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (g *Gist) GetPublic() bool { - if g == nil || g.Public == nil { - return false - } - return *g.Public + return *m.URL } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (g *Gist) GetUpdatedAt() time.Time { - if g == nil || g.UpdatedAt == nil { - return time.Time{} +// GetYearlyPriceInCents returns the YearlyPriceInCents field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetYearlyPriceInCents() int { + if m == nil || m.YearlyPriceInCents == nil { + return 0 } - return *g.UpdatedAt + return *m.YearlyPriceInCents } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (g *GistComment) GetBody() string { - if g == nil || g.Body == nil { +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetEmail() string { + if m == nil || m.Email == nil { return "" } - return *g.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistComment) GetCreatedAt() time.Time { - if g == nil || g.CreatedAt == nil { - return time.Time{} - } - return *g.CreatedAt + return *m.Email } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistComment) GetID() int { - if g == nil || g.ID == nil { +func (m *MarketplacePlanAccount) GetID() int64 { + if m == nil || m.ID == nil { return 0 } - return *g.ID + return *m.ID } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistComment) GetURL() string { - if g == nil || g.URL == nil { +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetLogin() string { + if m == nil || m.Login == nil { return "" } - return *g.URL + return *m.Login } -// GetCommittedAt returns the CommittedAt field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetCommittedAt() Timestamp { - if g == nil || g.CommittedAt == nil { - return Timestamp{} +// GetMarketplacePendingChange returns the MarketplacePendingChange field. +func (m *MarketplacePlanAccount) GetMarketplacePendingChange() *MarketplacePendingChange { + if m == nil { + return nil } - return *g.CommittedAt + return m.MarketplacePendingChange } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetURL() string { - if g == nil || g.URL == nil { - return "" +// GetMarketplacePurchase returns the MarketplacePurchase field. +func (m *MarketplacePlanAccount) GetMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil } - return *g.URL + return m.MarketplacePurchase } -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetVersion() string { - if g == nil || g.Version == nil { +// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetOrganizationBillingEmail() string { + if m == nil || m.OrganizationBillingEmail == nil { return "" } - return *g.Version + return *m.OrganizationBillingEmail } -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (g *GistFile) GetContent() string { - if g == nil || g.Content == nil { +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetType() string { + if m == nil || m.Type == nil { return "" } - return *g.Content + return *m.Type } -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (g *GistFile) GetFilename() string { - if g == nil || g.Filename == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. 
+func (m *MarketplacePlanAccount) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *g.Filename + return *m.URL } -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (g *GistFile) GetLanguage() string { - if g == nil || g.Language == nil { - return "" +// GetAccount returns the Account field. +func (m *MarketplacePurchase) GetAccount() *MarketplacePlanAccount { + if m == nil { + return nil } - return *g.Language + return m.Account } -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (g *GistFile) GetRawURL() string { - if g == nil || g.RawURL == nil { +// GetBillingCycle returns the BillingCycle field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetBillingCycle() string { + if m == nil || m.BillingCycle == nil { return "" } - return *g.RawURL + return *m.BillingCycle } -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (g *GistFile) GetSize() int { - if g == nil || g.Size == nil { - return 0 +// GetFreeTrialEndsOn returns the FreeTrialEndsOn field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetFreeTrialEndsOn() Timestamp { + if m == nil || m.FreeTrialEndsOn == nil { + return Timestamp{} } - return *g.Size + return *m.FreeTrialEndsOn } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GistFile) GetType() string { - if g == nil || g.Type == nil { - return "" +// GetNextBillingDate returns the NextBillingDate field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetNextBillingDate() Timestamp { + if m == nil || m.NextBillingDate == nil { + return Timestamp{} } - return *g.Type + return *m.NextBillingDate } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} +// GetOnFreeTrial returns the OnFreeTrial field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetOnFreeTrial() bool { + if m == nil || m.OnFreeTrial == nil { + return false } - return *g.CreatedAt + return *m.OnFreeTrial } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistFork) GetID() string { - if g == nil || g.ID == nil { - return "" +// GetPlan returns the Plan field. +func (m *MarketplacePurchase) GetPlan() *MarketplacePlan { + if m == nil { + return nil } - return *g.ID + return m.Plan } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} +// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetUnitCount() int { + if m == nil || m.UnitCount == nil { + return 0 } - return *g.UpdatedAt + return *m.UnitCount } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistFork) GetURL() string { - if g == nil || g.URL == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseEvent) GetAction() string { + if m == nil || m.Action == nil { return "" } - return *g.URL + return *m.Action } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (g *Gitignore) GetName() string { - if g == nil || g.Name == nil { - return "" +// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. 
+func (m *MarketplacePurchaseEvent) GetEffectiveDate() Timestamp { + if m == nil || m.EffectiveDate == nil { + return Timestamp{} } - return *g.Name + return *m.EffectiveDate } -// GetSource returns the Source field if it's non-nil, zero value otherwise. -func (g *Gitignore) GetSource() string { - if g == nil || g.Source == nil { - return "" +// GetInstallation returns the Installation field. +func (m *MarketplacePurchaseEvent) GetInstallation() *Installation { + if m == nil { + return nil } - return *g.Source + return m.Installation } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (g *GitObject) GetSHA() string { - if g == nil || g.SHA == nil { - return "" +// GetMarketplacePurchase returns the MarketplacePurchase field. +func (m *MarketplacePurchaseEvent) GetMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil } - return *g.SHA + return m.MarketplacePurchase } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GitObject) GetType() string { - if g == nil || g.Type == nil { - return "" +// GetPreviousMarketplacePurchase returns the PreviousMarketplacePurchase field. +func (m *MarketplacePurchaseEvent) GetPreviousMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil } - return *g.Type + return m.PreviousMarketplacePurchase } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GitObject) GetURL() string { - if g == nil || g.URL == nil { +// GetSender returns the Sender field. +func (m *MarketplacePurchaseEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetText returns the Text field if it's non-nil, zero value otherwise. +func (m *Match) GetText() string { + if m == nil || m.Text == nil { return "" } - return *g.URL + return *m.Text } -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetEmail() string { - if g == nil || g.Email == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MemberEvent) GetAction() string { + if m == nil || m.Action == nil { return "" } - return *g.Email + return *m.Action } -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetVerified() bool { - if g == nil || g.Verified == nil { - return false +// GetInstallation returns the Installation field. +func (m *MemberEvent) GetInstallation() *Installation { + if m == nil { + return nil } - return *g.Verified + return m.Installation } -// GetCanCertify returns the CanCertify field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanCertify() bool { - if g == nil || g.CanCertify == nil { - return false +// GetMember returns the Member field. +func (m *MemberEvent) GetMember() *User { + if m == nil { + return nil } - return *g.CanCertify + return m.Member } -// GetCanEncryptComms returns the CanEncryptComms field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptComms() bool { - if g == nil || g.CanEncryptComms == nil { - return false +// GetRepo returns the Repo field. +func (m *MemberEvent) GetRepo() *Repository { + if m == nil { + return nil } - return *g.CanEncryptComms + return m.Repo } -// GetCanEncryptStorage returns the CanEncryptStorage field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptStorage() bool { - if g == nil || g.CanEncryptStorage == nil { - return false +// GetSender returns the Sender field. 
+func (m *MemberEvent) GetSender() *User { + if m == nil { + return nil } - return *g.CanEncryptStorage + return m.Sender } -// GetCanSign returns the CanSign field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanSign() bool { - if g == nil || g.CanSign == nil { - return false +// GetOrganization returns the Organization field. +func (m *Membership) GetOrganization() *Organization { + if m == nil { + return nil } - return *g.CanSign + return m.Organization } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCreatedAt() time.Time { - if g == nil || g.CreatedAt == nil { - return time.Time{} +// GetOrganizationURL returns the OrganizationURL field if it's non-nil, zero value otherwise. +func (m *Membership) GetOrganizationURL() string { + if m == nil || m.OrganizationURL == nil { + return "" } - return *g.CreatedAt + return *m.OrganizationURL } -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetExpiresAt() time.Time { - if g == nil || g.ExpiresAt == nil { - return time.Time{} +// GetRole returns the Role field if it's non-nil, zero value otherwise. +func (m *Membership) GetRole() string { + if m == nil || m.Role == nil { + return "" } - return *g.ExpiresAt + return *m.Role } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetID() int { - if g == nil || g.ID == nil { - return 0 +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Membership) GetState() string { + if m == nil || m.State == nil { + return "" } - return *g.ID + return *m.State } -// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetKeyID() string { - if g == nil || g.KeyID == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Membership) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *g.KeyID + return *m.URL } -// GetPrimaryKeyID returns the PrimaryKeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPrimaryKeyID() int { - if g == nil || g.PrimaryKeyID == nil { - return 0 +// GetUser returns the User field. +func (m *Membership) GetUser() *User { + if m == nil { + return nil } - return *g.PrimaryKeyID + return m.User } -// GetPublicKey returns the PublicKey field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPublicKey() string { - if g == nil || g.PublicKey == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MembershipEvent) GetAction() string { + if m == nil || m.Action == nil { return "" } - return *g.PublicKey + return *m.Action } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Grant) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} +// GetInstallation returns the Installation field. +func (m *MembershipEvent) GetInstallation() *Installation { + if m == nil { + return nil } - return *g.CreatedAt + return m.Installation } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Grant) GetID() int { - if g == nil || g.ID == nil { - return 0 +// GetMember returns the Member field. +func (m *MembershipEvent) GetMember() *User { + if m == nil { + return nil } - return *g.ID + return m.Member } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (g *Grant) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} +// GetOrg returns the Org field. +func (m *MembershipEvent) GetOrg() *Organization { + if m == nil { + return nil } - return *g.UpdatedAt + return m.Org } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *Grant) GetURL() string { - if g == nil || g.URL == nil { +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (m *MembershipEvent) GetScope() string { + if m == nil || m.Scope == nil { return "" } - return *g.URL + return *m.Scope } -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (h *Hook) GetActive() bool { - if h == nil || h.Active == nil { - return false +// GetSender returns the Sender field. +func (m *MembershipEvent) GetSender() *User { + if m == nil { + return nil } - return *h.Active + return m.Sender } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (h *Hook) GetCreatedAt() time.Time { - if h == nil || h.CreatedAt == nil { - return time.Time{} +// GetTeam returns the Team field. +func (m *MembershipEvent) GetTeam() *Team { + if m == nil { + return nil } - return *h.CreatedAt + return m.Team } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *Hook) GetID() int { - if h == nil || h.ID == nil { - return 0 +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (m *Metric) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { + return "" } - return *h.ID + return *m.HTMLURL } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (h *Hook) GetName() string { - if h == nil || h.Name == nil { +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (m *Metric) GetKey() string { + if m == nil || m.Key == nil { return "" } - return *h.Name + return *m.Key } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (h *Hook) GetUpdatedAt() time.Time { - if h == nil || h.UpdatedAt == nil { - return time.Time{} +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (m *Metric) GetName() string { + if m == nil || m.Name == nil { + return "" } - return *h.UpdatedAt + return *m.Name } // GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *Hook) GetURL() string { - if h == nil || h.URL == nil { +func (m *Metric) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *h.URL -} - -// GetAuthorsCount returns the AuthorsCount field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsCount() int { - if i == nil || i.AuthorsCount == nil { - return 0 - } - return *i.AuthorsCount + return *m.URL } -// GetAuthorsURL returns the AuthorsURL field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsURL() string { - if i == nil || i.AuthorsURL == nil { +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (m *Migration) GetCreatedAt() string { + if m == nil || m.CreatedAt == nil { return "" } - return *i.AuthorsURL + return *m.CreatedAt } -// GetCommitCount returns the CommitCount field if it's non-nil, zero value otherwise. -func (i *Import) GetCommitCount() int { - if i == nil || i.CommitCount == nil { - return 0 +// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. 
+func (m *Migration) GetExcludeAttachments() bool { + if m == nil || m.ExcludeAttachments == nil { + return false } - return *i.CommitCount + return *m.ExcludeAttachments } -// GetFailedStep returns the FailedStep field if it's non-nil, zero value otherwise. -func (i *Import) GetFailedStep() string { - if i == nil || i.FailedStep == nil { +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (m *Migration) GetGUID() string { + if m == nil || m.GUID == nil { return "" } - return *i.FailedStep + return *m.GUID } -// GetHasLargeFiles returns the HasLargeFiles field if it's non-nil, zero value otherwise. -func (i *Import) GetHasLargeFiles() bool { - if i == nil || i.HasLargeFiles == nil { - return false +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *Migration) GetID() int64 { + if m == nil || m.ID == nil { + return 0 } - return *i.HasLargeFiles + return *m.ID } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Import) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" +// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. +func (m *Migration) GetLockRepositories() bool { + if m == nil || m.LockRepositories == nil { + return false } - return *i.HTMLURL + return *m.LockRepositories } -// GetHumanName returns the HumanName field if it's non-nil, zero value otherwise. -func (i *Import) GetHumanName() string { - if i == nil || i.HumanName == nil { +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Migration) GetState() string { + if m == nil || m.State == nil { return "" } - return *i.HumanName -} - -// GetLargeFilesCount returns the LargeFilesCount field if it's non-nil, zero value otherwise. -func (i *Import) GetLargeFilesCount() int { - if i == nil || i.LargeFilesCount == nil { - return 0 - } - return *i.LargeFilesCount + return *m.State } -// GetLargeFilesSize returns the LargeFilesSize field if it's non-nil, zero value otherwise. -func (i *Import) GetLargeFilesSize() int { - if i == nil || i.LargeFilesSize == nil { - return 0 +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (m *Migration) GetUpdatedAt() string { + if m == nil || m.UpdatedAt == nil { + return "" } - return *i.LargeFilesSize + return *m.UpdatedAt } -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (i *Import) GetMessage() string { - if i == nil || i.Message == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Migration) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *i.Message + return *m.URL } -// GetPercent returns the Percent field if it's non-nil, zero value otherwise. -func (i *Import) GetPercent() int { - if i == nil || i.Percent == nil { - return 0 +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetClosedAt() time.Time { + if m == nil || m.ClosedAt == nil { + return time.Time{} } - return *i.Percent + return *m.ClosedAt } -// GetPushPercent returns the PushPercent field if it's non-nil, zero value otherwise. -func (i *Import) GetPushPercent() int { - if i == nil || i.PushPercent == nil { +// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. 
+func (m *Milestone) GetClosedIssues() int { + if m == nil || m.ClosedIssues == nil { return 0 } - return *i.PushPercent + return *m.ClosedIssues } -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *Import) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetCreatedAt() time.Time { + if m == nil || m.CreatedAt == nil { + return time.Time{} } - return *i.RepositoryURL + return *m.CreatedAt } -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (i *Import) GetStatus() string { - if i == nil || i.Status == nil { - return "" +// GetCreator returns the Creator field. +func (m *Milestone) GetCreator() *User { + if m == nil { + return nil } - return *i.Status + return m.Creator } -// GetStatusText returns the StatusText field if it's non-nil, zero value otherwise. -func (i *Import) GetStatusText() string { - if i == nil || i.StatusText == nil { +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (m *Milestone) GetDescription() string { + if m == nil || m.Description == nil { return "" } - return *i.StatusText + return *m.Description } -// GetTFVCProject returns the TFVCProject field if it's non-nil, zero value otherwise. -func (i *Import) GetTFVCProject() string { - if i == nil || i.TFVCProject == nil { - return "" +// GetDueOn returns the DueOn field if it's non-nil, zero value otherwise. +func (m *Milestone) GetDueOn() time.Time { + if m == nil || m.DueOn == nil { + return time.Time{} } - return *i.TFVCProject + return *m.DueOn } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Import) GetURL() string { - if i == nil || i.URL == nil { +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (m *Milestone) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { return "" } - return *i.URL + return *m.HTMLURL } -// GetUseLFS returns the UseLFS field if it's non-nil, zero value otherwise. -func (i *Import) GetUseLFS() string { - if i == nil || i.UseLFS == nil { - return "" +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *Milestone) GetID() int64 { + if m == nil || m.ID == nil { + return 0 } - return *i.UseLFS + return *m.ID } -// GetVCS returns the VCS field if it's non-nil, zero value otherwise. -func (i *Import) GetVCS() string { - if i == nil || i.VCS == nil { +// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. +func (m *Milestone) GetLabelsURL() string { + if m == nil || m.LabelsURL == nil { return "" } - return *i.VCS + return *m.LabelsURL } -// GetVCSPassword returns the VCSPassword field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSPassword() string { - if i == nil || i.VCSPassword == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (m *Milestone) GetNodeID() string { + if m == nil || m.NodeID == nil { return "" } - return *i.VCSPassword + return *m.NodeID } -// GetVCSURL returns the VCSURL field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSURL() string { - if i == nil || i.VCSURL == nil { - return "" +// GetNumber returns the Number field if it's non-nil, zero value otherwise. 
+func (m *Milestone) GetNumber() int { + if m == nil || m.Number == nil { + return 0 } - return *i.VCSURL + return *m.Number } -// GetVCSUsername returns the VCSUsername field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSUsername() string { - if i == nil || i.VCSUsername == nil { - return "" +// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. +func (m *Milestone) GetOpenIssues() int { + if m == nil || m.OpenIssues == nil { + return 0 } - return *i.VCSUsername + return *m.OpenIssues } -// GetAccessTokensURL returns the AccessTokensURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetAccessTokensURL() string { - if i == nil || i.AccessTokensURL == nil { +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Milestone) GetState() string { + if m == nil || m.State == nil { return "" } - return *i.AccessTokensURL + return *m.State } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (m *Milestone) GetTitle() string { + if m == nil || m.Title == nil { return "" } - return *i.HTMLURL + return *m.Title } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Installation) GetID() int { - if i == nil || i.ID == nil { - return 0 +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetUpdatedAt() time.Time { + if m == nil || m.UpdatedAt == nil { + return time.Time{} } - return *i.ID + return *m.UpdatedAt } -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetRepositoriesURL() string { - if i == nil || i.RepositoriesURL == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Milestone) GetURL() string { + if m == nil || m.URL == nil { return "" } - return *i.RepositoriesURL + return *m.URL } // GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationEvent) GetAction() string { - if i == nil || i.Action == nil { +func (m *MilestoneEvent) GetAction() string { + if m == nil || m.Action == nil { return "" } - return *i.Action + return *m.Action } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationRepositoriesEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" +// GetChanges returns the Changes field. +func (m *MilestoneEvent) GetChanges() *EditChange { + if m == nil { + return nil } - return *i.Action + return m.Changes } -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (i *InstallationRepositoriesEvent) GetRepositorySelection() string { - if i == nil || i.RepositorySelection == nil { - return "" +// GetInstallation returns the Installation field. +func (m *MilestoneEvent) GetInstallation() *Installation { + if m == nil { + return nil } - return *i.RepositorySelection + return m.Installation } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Invitation) GetCreatedAt() time.Time { - if i == nil || i.CreatedAt == nil { - return time.Time{} +// GetMilestone returns the Milestone field. 
+func (m *MilestoneEvent) GetMilestone() *Milestone { + if m == nil { + return nil } - return *i.CreatedAt + return m.Milestone } -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (i *Invitation) GetEmail() string { - if i == nil || i.Email == nil { - return "" +// GetOrg returns the Org field. +func (m *MilestoneEvent) GetOrg() *Organization { + if m == nil { + return nil } - return *i.Email + return m.Org } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Invitation) GetID() int { - if i == nil || i.ID == nil { - return 0 +// GetRepo returns the Repo field. +func (m *MilestoneEvent) GetRepo() *Repository { + if m == nil { + return nil } - return *i.ID + return m.Repo } -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (i *Invitation) GetLogin() string { - if i == nil || i.Login == nil { - return "" +// GetSender returns the Sender field. +func (m *MilestoneEvent) GetSender() *User { + if m == nil { + return nil } - return *i.Login + return m.Sender } -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (i *Invitation) GetRole() string { - if i == nil || i.Role == nil { - return "" +// GetClosedMilestones returns the ClosedMilestones field if it's non-nil, zero value otherwise. +func (m *MilestoneStats) GetClosedMilestones() int { + if m == nil || m.ClosedMilestones == nil { + return 0 } - return *i.Role + return *m.ClosedMilestones } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *Issue) GetBody() string { - if i == nil || i.Body == nil { - return "" +// GetOpenMilestones returns the OpenMilestones field if it's non-nil, zero value otherwise. +func (m *MilestoneStats) GetOpenMilestones() int { + if m == nil || m.OpenMilestones == nil { + return 0 } - return *i.Body + return *m.OpenMilestones } -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetClosedAt() time.Time { - if i == nil || i.ClosedAt == nil { - return time.Time{} +// GetTotalMilestones returns the TotalMilestones field if it's non-nil, zero value otherwise. +func (m *MilestoneStats) GetTotalMilestones() int { + if m == nil || m.TotalMilestones == nil { + return 0 } - return *i.ClosedAt + return *m.TotalMilestones } -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (i *Issue) GetComments() int { - if i == nil || i.Comments == nil { - return 0 +// GetBase returns the Base field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetBase() string { + if n == nil || n.Base == nil { + return "" } - return *i.Comments + return *n.Base } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetCreatedAt() time.Time { - if i == nil || i.CreatedAt == nil { - return time.Time{} +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetBody() string { + if n == nil || n.Body == nil { + return "" } - return *i.CreatedAt + return *n.Body } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { +// GetHead returns the Head field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetHead() string { + if n == nil || n.Head == nil { return "" } - return *i.HTMLURL + return *n.Head } -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (i *Issue) GetID() int { - if i == nil || i.ID == nil { +// GetIssue returns the Issue field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetIssue() int { + if n == nil || n.Issue == nil { return 0 } - return *i.ID + return *n.Issue } -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. -func (i *Issue) GetLocked() bool { - if i == nil || i.Locked == nil { +// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetMaintainerCanModify() bool { + if n == nil || n.MaintainerCanModify == nil { return false } - return *i.Locked + return *n.MaintainerCanModify } -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (i *Issue) GetNumber() int { - if i == nil || i.Number == nil { - return 0 +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetTitle() string { + if n == nil || n.Title == nil { + return "" } - return *i.Number + return *n.Title } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *Issue) GetState() string { - if i == nil || i.State == nil { +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetDescription() string { + if n == nil || n.Description == nil { return "" } - return *i.State + return *n.Description } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (i *Issue) GetTitle() string { - if i == nil || i.Title == nil { +// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetLDAPDN() string { + if n == nil || n.LDAPDN == nil { return "" } - return *i.Title + return *n.LDAPDN } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetUpdatedAt() time.Time { - if i == nil || i.UpdatedAt == nil { - return time.Time{} +// GetParentTeamID returns the ParentTeamID field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetParentTeamID() int64 { + if n == nil || n.ParentTeamID == nil { + return 0 } - return *i.UpdatedAt + return *n.ParentTeamID } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Issue) GetURL() string { - if i == nil || i.URL == nil { +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetPermission() string { + if n == nil || n.Permission == nil { return "" } - return *i.URL + return *n.Permission } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetBody() string { - if i == nil || i.Body == nil { +// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetPrivacy() string { + if n == nil || n.Privacy == nil { return "" } - return *i.Body + return *n.Privacy } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetCreatedAt() time.Time { - if i == nil || i.CreatedAt == nil { +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (n *Notification) GetID() string { + if n == nil || n.ID == nil { + return "" + } + return *n.ID +} + +// GetLastReadAt returns the LastReadAt field if it's non-nil, zero value otherwise. 
+func (n *Notification) GetLastReadAt() time.Time { + if n == nil || n.LastReadAt == nil { return time.Time{} } - return *i.CreatedAt + return *n.LastReadAt } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (n *Notification) GetReason() string { + if n == nil || n.Reason == nil { return "" } - return *i.HTMLURL + return *n.Reason } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetID() int { - if i == nil || i.ID == nil { - return 0 +// GetRepository returns the Repository field. +func (n *Notification) GetRepository() *Repository { + if n == nil { + return nil } - return *i.ID + return n.Repository } -// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetIssueURL() string { - if i == nil || i.IssueURL == nil { - return "" +// GetSubject returns the Subject field. +func (n *Notification) GetSubject() *NotificationSubject { + if n == nil { + return nil } - return *i.IssueURL + return n.Subject +} + +// GetUnread returns the Unread field if it's non-nil, zero value otherwise. +func (n *Notification) GetUnread() bool { + if n == nil || n.Unread == nil { + return false + } + return *n.Unread } // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetUpdatedAt() time.Time { - if i == nil || i.UpdatedAt == nil { +func (n *Notification) GetUpdatedAt() time.Time { + if n == nil || n.UpdatedAt == nil { return time.Time{} } - return *i.UpdatedAt + return *n.UpdatedAt } // GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetURL() string { - if i == nil || i.URL == nil { +func (n *Notification) GetURL() string { + if n == nil || n.URL == nil { return "" } - return *i.URL + return *n.URL } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssueCommentEvent) GetAction() string { - if i == nil || i.Action == nil { +// GetLatestCommentURL returns the LatestCommentURL field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetLatestCommentURL() string { + if n == nil || n.LatestCommentURL == nil { return "" } - return *i.Action + return *n.LatestCommentURL } -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCommitID() string { - if i == nil || i.CommitID == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetTitle() string { + if n == nil || n.Title == nil { return "" } - return *i.CommitID + return *n.Title } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCreatedAt() time.Time { - if i == nil || i.CreatedAt == nil { - return time.Time{} +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetType() string { + if n == nil || n.Type == nil { + return "" } - return *i.CreatedAt + return *n.Type } -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetEvent() string { - if i == nil || i.Event == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. 
+func (n *NotificationSubject) GetURL() string { + if n == nil || n.URL == nil { return "" } - return *i.Event + return *n.URL } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetID() int { - if i == nil || i.ID == nil { - return 0 +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetAvatarURL() string { + if o == nil || o.AvatarURL == nil { + return "" } - return *i.ID + return *o.AvatarURL } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetURL() string { - if i == nil || i.URL == nil { +// GetBillingEmail returns the BillingEmail field if it's non-nil, zero value otherwise. +func (o *Organization) GetBillingEmail() string { + if o == nil || o.BillingEmail == nil { return "" } - return *i.URL + return *o.BillingEmail } -// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetAssignee() string { - if i == nil || i.Assignee == nil { +// GetBlog returns the Blog field if it's non-nil, zero value otherwise. +func (o *Organization) GetBlog() string { + if o == nil || o.Blog == nil { return "" } - return *i.Assignee + return *o.Blog } -// GetAssignees returns the Assignees field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetAssignees() []string { - if i == nil || i.Assignees == nil { - return nil +// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. +func (o *Organization) GetCollaborators() int { + if o == nil || o.Collaborators == nil { + return 0 } - return *i.Assignees + return *o.Collaborators } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetBody() string { - if i == nil || i.Body == nil { +// GetCompany returns the Company field if it's non-nil, zero value otherwise. +func (o *Organization) GetCompany() string { + if o == nil || o.Company == nil { return "" } - return *i.Body + return *o.Company } -// GetLabels returns the Labels field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetLabels() []string { - if i == nil || i.Labels == nil { - return nil +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (o *Organization) GetCreatedAt() time.Time { + if o == nil || o.CreatedAt == nil { + return time.Time{} } - return *i.Labels + return *o.CreatedAt } -// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetMilestone() int { - if i == nil || i.Milestone == nil { - return 0 +// GetDefaultRepoPermission returns the DefaultRepoPermission field if it's non-nil, zero value otherwise. +func (o *Organization) GetDefaultRepoPermission() string { + if o == nil || o.DefaultRepoPermission == nil { + return "" } - return *i.Milestone + return *o.DefaultRepoPermission } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetState() string { - if i == nil || i.State == nil { +// GetDefaultRepoSettings returns the DefaultRepoSettings field if it's non-nil, zero value otherwise. +func (o *Organization) GetDefaultRepoSettings() string { + if o == nil || o.DefaultRepoSettings == nil { return "" } - return *i.State + return *o.DefaultRepoSettings } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (i *IssueRequest) GetTitle() string { - if i == nil || i.Title == nil { +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (o *Organization) GetDescription() string { + if o == nil || o.Description == nil { return "" } - return *i.Title + return *o.Description } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssuesEvent) GetAction() string { - if i == nil || i.Action == nil { +// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. +func (o *Organization) GetDiskUsage() int { + if o == nil || o.DiskUsage == nil { + return 0 + } + return *o.DiskUsage +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (o *Organization) GetEmail() string { + if o == nil || o.Email == nil { return "" } - return *i.Action + return *o.Email } -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (i *IssuesSearchResult) GetIncompleteResults() bool { - if i == nil || i.IncompleteResults == nil { - return false +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetEventsURL() string { + if o == nil || o.EventsURL == nil { + return "" } - return *i.IncompleteResults + return *o.EventsURL } -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (i *IssuesSearchResult) GetTotal() int { - if i == nil || i.Total == nil { +// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. +func (o *Organization) GetFollowers() int { + if o == nil || o.Followers == nil { return 0 } - return *i.Total + return *o.Followers } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (k *Key) GetID() int { - if k == nil || k.ID == nil { +// GetFollowing returns the Following field if it's non-nil, zero value otherwise. +func (o *Organization) GetFollowing() int { + if o == nil || o.Following == nil { return 0 } - return *k.ID + return *o.Following +} + +// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetHooksURL() string { + if o == nil || o.HooksURL == nil { + return "" + } + return *o.HooksURL } -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (k *Key) GetKey() string { - if k == nil || k.Key == nil { +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetHTMLURL() string { + if o == nil || o.HTMLURL == nil { return "" } - return *k.Key + return *o.HTMLURL } -// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. -func (k *Key) GetReadOnly() bool { - if k == nil || k.ReadOnly == nil { - return false +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (o *Organization) GetID() int64 { + if o == nil || o.ID == nil { + return 0 } - return *k.ReadOnly + return *o.ID } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (k *Key) GetTitle() string { - if k == nil || k.Title == nil { +// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetIssuesURL() string { + if o == nil || o.IssuesURL == nil { return "" } - return *k.Title + return *o.IssuesURL } -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (k *Key) GetURL() string { - if k == nil || k.URL == nil { +// GetLocation returns the Location field if it's non-nil, zero value otherwise. +func (o *Organization) GetLocation() string { + if o == nil || o.Location == nil { return "" } - return *k.URL + return *o.Location } -// GetColor returns the Color field if it's non-nil, zero value otherwise. -func (l *Label) GetColor() string { - if l == nil || l.Color == nil { +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (o *Organization) GetLogin() string { + if o == nil || o.Login == nil { return "" } - return *l.Color + return *o.Login } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (l *Label) GetID() int { - if l == nil || l.ID == nil { - return 0 +// GetMembersCanCreateRepos returns the MembersCanCreateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreateRepos() bool { + if o == nil || o.MembersCanCreateRepos == nil { + return false } - return *l.ID + return *o.MembersCanCreateRepos } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *Label) GetName() string { - if l == nil || l.Name == nil { +// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersURL() string { + if o == nil || o.MembersURL == nil { return "" } - return *l.Name + return *o.MembersURL } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *Label) GetURL() string { - if l == nil || l.URL == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (o *Organization) GetName() string { + if o == nil || o.Name == nil { return "" } - return *l.URL + return *o.Name } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (l *LabelEvent) GetAction() string { - if l == nil || l.Action == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (o *Organization) GetNodeID() string { + if o == nil || o.NodeID == nil { return "" } - return *l.Action + return *o.NodeID } -// GetOID returns the OID field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetOID() string { - if l == nil || l.OID == nil { - return "" +// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetOwnedPrivateRepos() int { + if o == nil || o.OwnedPrivateRepos == nil { + return 0 } - return *l.OID + return *o.OwnedPrivateRepos } -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetPath() string { - if l == nil || l.Path == nil { - return "" +// GetPlan returns the Plan field. +func (o *Organization) GetPlan() *Plan { + if o == nil { + return nil } - return *l.Path + return o.Plan } -// GetRefName returns the RefName field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetRefName() string { - if l == nil || l.RefName == nil { - return "" +// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. +func (o *Organization) GetPrivateGists() int { + if o == nil || o.PrivateGists == nil { + return 0 } - return *l.RefName + return *o.PrivateGists } -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetSize() int { - if l == nil || l.Size == nil { +// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. 
+func (o *Organization) GetPublicGists() int { + if o == nil || o.PublicGists == nil { return 0 } - return *l.Size + return *o.PublicGists } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (l *License) GetBody() string { - if l == nil || l.Body == nil { +// GetPublicMembersURL returns the PublicMembersURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetPublicMembersURL() string { + if o == nil || o.PublicMembersURL == nil { return "" } - return *l.Body + return *o.PublicMembersURL } -// GetConditions returns the Conditions field if it's non-nil, zero value otherwise. -func (l *License) GetConditions() []string { - if l == nil || l.Conditions == nil { - return nil +// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetPublicRepos() int { + if o == nil || o.PublicRepos == nil { + return 0 } - return *l.Conditions + return *o.PublicRepos } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *License) GetDescription() string { - if l == nil || l.Description == nil { +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetReposURL() string { + if o == nil || o.ReposURL == nil { return "" } - return *l.Description + return *o.ReposURL } -// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. -func (l *License) GetFeatured() bool { - if l == nil || l.Featured == nil { +// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetTotalPrivateRepos() int { + if o == nil || o.TotalPrivateRepos == nil { + return 0 + } + return *o.TotalPrivateRepos +} + +// GetTwoFactorRequirementEnabled returns the TwoFactorRequirementEnabled field if it's non-nil, zero value otherwise. +func (o *Organization) GetTwoFactorRequirementEnabled() bool { + if o == nil || o.TwoFactorRequirementEnabled == nil { return false } - return *l.Featured + return *o.TwoFactorRequirementEnabled } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (l *License) GetHTMLURL() string { - if l == nil || l.HTMLURL == nil { +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (o *Organization) GetType() string { + if o == nil || o.Type == nil { return "" } - return *l.HTMLURL + return *o.Type } -// GetImplementation returns the Implementation field if it's non-nil, zero value otherwise. -func (l *License) GetImplementation() string { - if l == nil || l.Implementation == nil { - return "" +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (o *Organization) GetUpdatedAt() time.Time { + if o == nil || o.UpdatedAt == nil { + return time.Time{} } - return *l.Implementation + return *o.UpdatedAt } -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (l *License) GetKey() string { - if l == nil || l.Key == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (o *Organization) GetURL() string { + if o == nil || o.URL == nil { return "" } - return *l.Key + return *o.URL } -// GetLimitations returns the Limitations field if it's non-nil, zero value otherwise. -func (l *License) GetLimitations() []string { - if l == nil || l.Limitations == nil { - return nil +// GetAction returns the Action field if it's non-nil, zero value otherwise. 
+func (o *OrganizationEvent) GetAction() string { + if o == nil || o.Action == nil { + return "" } - return *l.Limitations + return *o.Action } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *License) GetName() string { - if l == nil || l.Name == nil { - return "" +// GetInstallation returns the Installation field. +func (o *OrganizationEvent) GetInstallation() *Installation { + if o == nil { + return nil } - return *l.Name + return o.Installation } -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (l *License) GetPermissions() []string { - if l == nil || l.Permissions == nil { +// GetInvitation returns the Invitation field. +func (o *OrganizationEvent) GetInvitation() *Invitation { + if o == nil { return nil } - return *l.Permissions + return o.Invitation } -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (l *License) GetSPDXID() string { - if l == nil || l.SPDXID == nil { - return "" +// GetMembership returns the Membership field. +func (o *OrganizationEvent) GetMembership() *Membership { + if o == nil { + return nil } - return *l.SPDXID + return o.Membership } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *License) GetURL() string { - if l == nil || l.URL == nil { - return "" +// GetOrganization returns the Organization field. +func (o *OrganizationEvent) GetOrganization() *Organization { + if o == nil { + return nil } - return *l.URL + return o.Organization } -// GetContext returns the Context field if it's non-nil, zero value otherwise. -func (m *markdownRequest) GetContext() string { - if m == nil || m.Context == nil { - return "" +// GetSender returns the Sender field. +func (o *OrganizationEvent) GetSender() *User { + if o == nil { + return nil } - return *m.Context + return o.Sender } -// GetMode returns the Mode field if it's non-nil, zero value otherwise. -func (m *markdownRequest) GetMode() string { - if m == nil || m.Mode == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (o *OrgBlockEvent) GetAction() string { + if o == nil || o.Action == nil { return "" } - return *m.Mode + return *o.Action } -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *markdownRequest) GetText() string { - if m == nil || m.Text == nil { - return "" +// GetBlockedUser returns the BlockedUser field. +func (o *OrgBlockEvent) GetBlockedUser() *User { + if o == nil { + return nil } - return *m.Text + return o.BlockedUser } -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *Match) GetText() string { - if m == nil || m.Text == nil { - return "" +// GetInstallation returns the Installation field. +func (o *OrgBlockEvent) GetInstallation() *Installation { + if o == nil { + return nil } - return *m.Text + return o.Installation } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MemberEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" +// GetOrganization returns the Organization field. +func (o *OrgBlockEvent) GetOrganization() *Organization { + if o == nil { + return nil } - return *m.Action + return o.Organization } -// GetOrganizationURL returns the OrganizationURL field if it's non-nil, zero value otherwise. -func (m *Membership) GetOrganizationURL() string { - if m == nil || m.OrganizationURL == nil { - return "" +// GetSender returns the Sender field. 
+func (o *OrgBlockEvent) GetSender() *User { + if o == nil { + return nil } - return *m.OrganizationURL + return o.Sender } -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (m *Membership) GetRole() string { - if m == nil || m.Role == nil { - return "" +// GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetDisabledOrgs() int { + if o == nil || o.DisabledOrgs == nil { + return 0 } - return *m.Role + return *o.DisabledOrgs } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Membership) GetState() string { - if m == nil || m.State == nil { - return "" +// GetTotalOrgs returns the TotalOrgs field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalOrgs() int { + if o == nil || o.TotalOrgs == nil { + return 0 } - return *m.State + return *o.TotalOrgs } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Membership) GetURL() string { - if m == nil || m.URL == nil { - return "" +// GetTotalTeamMembers returns the TotalTeamMembers field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalTeamMembers() int { + if o == nil || o.TotalTeamMembers == nil { + return 0 } - return *m.URL + return *o.TotalTeamMembers } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" +// GetTotalTeams returns the TotalTeams field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalTeams() int { + if o == nil || o.TotalTeams == nil { + return 0 } - return *m.Action + return *o.TotalTeams } -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetScope() string { - if m == nil || m.Scope == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *Page) GetAction() string { + if p == nil || p.Action == nil { return "" } - return *m.Scope + return *p.Action } // GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Metric) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { +func (p *Page) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { return "" } - return *m.HTMLURL + return *p.HTMLURL } -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (m *Metric) GetKey() string { - if m == nil || m.Key == nil { +// GetPageName returns the PageName field if it's non-nil, zero value otherwise. +func (p *Page) GetPageName() string { + if p == nil || p.PageName == nil { return "" } - return *m.Key + return *p.PageName } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (m *Metric) GetName() string { - if m == nil || m.Name == nil { +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (p *Page) GetSHA() string { + if p == nil || p.SHA == nil { return "" } - return *m.Name + return *p.SHA } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Metric) GetURL() string { - if m == nil || m.URL == nil { +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (p *Page) GetSummary() string { + if p == nil || p.Summary == nil { return "" } - return *m.URL + return *p.Summary } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (m *Migration) GetCreatedAt() string { - if m == nil || m.CreatedAt == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (p *Page) GetTitle() string { + if p == nil || p.Title == nil { return "" } - return *m.CreatedAt -} - -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. -func (m *Migration) GetExcludeAttachments() bool { - if m == nil || m.ExcludeAttachments == nil { - return false - } - return *m.ExcludeAttachments + return *p.Title } -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (m *Migration) GetGUID() string { - if m == nil || m.GUID == nil { - return "" +// GetBuild returns the Build field. +func (p *PageBuildEvent) GetBuild() *PagesBuild { + if p == nil { + return nil } - return *m.GUID + return p.Build } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Migration) GetID() int { - if m == nil || m.ID == nil { +func (p *PageBuildEvent) GetID() int64 { + if p == nil || p.ID == nil { return 0 } - return *m.ID + return *p.ID } -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (m *Migration) GetLockRepositories() bool { - if m == nil || m.LockRepositories == nil { - return false +// GetInstallation returns the Installation field. +func (p *PageBuildEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *m.LockRepositories + return p.Installation } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Migration) GetState() string { - if m == nil || m.State == nil { - return "" +// GetRepo returns the Repo field. +func (p *PageBuildEvent) GetRepo() *Repository { + if p == nil { + return nil } - return *m.State + return p.Repo } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *Migration) GetUpdatedAt() string { - if m == nil || m.UpdatedAt == nil { - return "" +// GetSender returns the Sender field. +func (p *PageBuildEvent) GetSender() *User { + if p == nil { + return nil } - return *m.UpdatedAt + return p.Sender } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Migration) GetURL() string { - if m == nil || m.URL == nil { +// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. +func (p *Pages) GetCNAME() string { + if p == nil || p.CNAME == nil { return "" } - return *m.URL + return *p.CNAME } -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedAt() time.Time { - if m == nil || m.ClosedAt == nil { - return time.Time{} +// GetCustom404 returns the Custom404 field if it's non-nil, zero value otherwise. +func (p *Pages) GetCustom404() bool { + if p == nil || p.Custom404 == nil { + return false } - return *m.ClosedAt + return *p.Custom404 } -// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedIssues() int { - if m == nil || m.ClosedIssues == nil { - return 0 +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Pages) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" } - return *m.ClosedIssues + return *p.HTMLURL } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (m *Milestone) GetCreatedAt() time.Time { - if m == nil || m.CreatedAt == nil { - return time.Time{} +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (p *Pages) GetStatus() string { + if p == nil || p.Status == nil { + return "" } - return *m.CreatedAt + return *p.Status } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDescription() string { - if m == nil || m.Description == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *Pages) GetURL() string { + if p == nil || p.URL == nil { return "" } - return *m.Description + return *p.URL } -// GetDueOn returns the DueOn field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDueOn() time.Time { - if m == nil || m.DueOn == nil { - return time.Time{} +// GetCommit returns the Commit field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetCommit() string { + if p == nil || p.Commit == nil { + return "" } - return *m.DueOn + return *p.Commit } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} } - return *m.HTMLURL + return *p.CreatedAt } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Milestone) GetID() int { - if m == nil || m.ID == nil { +// GetDuration returns the Duration field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetDuration() int { + if p == nil || p.Duration == nil { return 0 } - return *m.ID + return *p.Duration } -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetLabelsURL() string { - if m == nil || m.LabelsURL == nil { - return "" +// GetError returns the Error field. +func (p *PagesBuild) GetError() *PagesError { + if p == nil { + return nil } - return *m.LabelsURL + return p.Error } -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (m *Milestone) GetNumber() int { - if m == nil || m.Number == nil { - return 0 +// GetPusher returns the Pusher field. +func (p *PagesBuild) GetPusher() *User { + if p == nil { + return nil } - return *m.Number + return p.Pusher } -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetOpenIssues() int { - if m == nil || m.OpenIssues == nil { - return 0 +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetStatus() string { + if p == nil || p.Status == nil { + return "" } - return *m.OpenIssues + return *p.Status } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Milestone) GetState() string { - if m == nil || m.State == nil { +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetURL() string { + if p == nil || p.URL == nil { return "" } - return *m.State + return *p.URL } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (m *Milestone) GetTitle() string { - if m == nil || m.Title == nil { +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PagesError) GetMessage() string { + if p == nil || p.Message == nil { return "" } - return *m.Title + return *p.Message } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetUpdatedAt() time.Time { - if m == nil || m.UpdatedAt == nil { - return time.Time{} +// GetTotalPages returns the TotalPages field if it's non-nil, zero value otherwise. +func (p *PageStats) GetTotalPages() int { + if p == nil || p.TotalPages == nil { + return 0 } - return *m.UpdatedAt + return *p.TotalPages } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetURL() string { - if m == nil || m.URL == nil { - return "" +// GetHook returns the Hook field. +func (p *PingEvent) GetHook() *Hook { + if p == nil { + return nil } - return *m.URL + return p.Hook } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MilestoneEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" +// GetHookID returns the HookID field if it's non-nil, zero value otherwise. +func (p *PingEvent) GetHookID() int64 { + if p == nil || p.HookID == nil { + return 0 } - return *m.Action + return *p.HookID } -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBase() string { - if n == nil || n.Base == nil { - return "" +// GetInstallation returns the Installation field. +func (p *PingEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *n.Base + return p.Installation } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBody() string { - if n == nil || n.Body == nil { +// GetZen returns the Zen field if it's non-nil, zero value otherwise. +func (p *PingEvent) GetZen() string { + if p == nil || p.Zen == nil { return "" } - return *n.Body + return *p.Zen } -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetHead() string { - if n == nil || n.Head == nil { +// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. +func (p *Plan) GetCollaborators() int { + if p == nil || p.Collaborators == nil { + return 0 + } + return *p.Collaborators +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *Plan) GetName() string { + if p == nil || p.Name == nil { return "" } - return *n.Head + return *p.Name } -// GetIssue returns the Issue field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetIssue() int { - if n == nil || n.Issue == nil { +// GetPrivateRepos returns the PrivateRepos field if it's non-nil, zero value otherwise. +func (p *Plan) GetPrivateRepos() int { + if p == nil || p.PrivateRepos == nil { return 0 } - return *n.Issue + return *p.PrivateRepos } -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetMaintainerCanModify() bool { - if n == nil || n.MaintainerCanModify == nil { - return false +// GetSpace returns the Space field if it's non-nil, zero value otherwise. +func (p *Plan) GetSpace() int { + if p == nil || p.Space == nil { + return 0 } - return *n.MaintainerCanModify + return *p.Space } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (n *NewPullRequest) GetTitle() string { - if n == nil || n.Title == nil { +// GetConfigURL returns the ConfigURL field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetConfigURL() string { + if p == nil || p.ConfigURL == nil { return "" } - return *n.Title + return *p.ConfigURL } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (n *Notification) GetID() string { - if n == nil || n.ID == nil { +// GetEnforcement returns the Enforcement field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetEnforcement() string { + if p == nil || p.Enforcement == nil { return "" } - return *n.ID + return *p.Enforcement } -// GetLastReadAt returns the LastReadAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetLastReadAt() time.Time { - if n == nil || n.LastReadAt == nil { - return time.Time{} +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetID() int64 { + if p == nil || p.ID == nil { + return 0 } - return *n.LastReadAt + return *p.ID } -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (n *Notification) GetReason() string { - if n == nil || n.Reason == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetName() string { + if p == nil || p.Name == nil { return "" } - return *n.Reason + return *p.Name } -// GetUnread returns the Unread field if it's non-nil, zero value otherwise. -func (n *Notification) GetUnread() bool { - if n == nil || n.Unread == nil { - return false +// GetHRef returns the HRef field if it's non-nil, zero value otherwise. +func (p *PRLink) GetHRef() string { + if p == nil || p.HRef == nil { + return "" } - return *n.Unread + return *p.HRef } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetUpdatedAt() time.Time { - if n == nil || n.UpdatedAt == nil { - return time.Time{} +// GetComments returns the Comments field. +func (p *PRLinks) GetComments() *PRLink { + if p == nil { + return nil } - return *n.UpdatedAt + return p.Comments } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *Notification) GetURL() string { - if n == nil || n.URL == nil { - return "" +// GetCommits returns the Commits field. +func (p *PRLinks) GetCommits() *PRLink { + if p == nil { + return nil } - return *n.URL + return p.Commits } -// GetLatestCommentURL returns the LatestCommentURL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetLatestCommentURL() string { - if n == nil || n.LatestCommentURL == nil { - return "" +// GetHTML returns the HTML field. +func (p *PRLinks) GetHTML() *PRLink { + if p == nil { + return nil } - return *n.LatestCommentURL + return p.HTML } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetTitle() string { - if n == nil || n.Title == nil { - return "" +// GetIssue returns the Issue field. +func (p *PRLinks) GetIssue() *PRLink { + if p == nil { + return nil } - return *n.Title + return p.Issue } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetType() string { - if n == nil || n.Type == nil { - return "" +// GetReviewComment returns the ReviewComment field. 
+func (p *PRLinks) GetReviewComment() *PRLink { + if p == nil { + return nil } - return *n.Type + return p.ReviewComment } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetURL() string { - if n == nil || n.URL == nil { - return "" +// GetReviewComments returns the ReviewComments field. +func (p *PRLinks) GetReviewComments() *PRLink { + if p == nil { + return nil } - return *n.URL + return p.ReviewComments } -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetAvatarURL() string { - if o == nil || o.AvatarURL == nil { - return "" +// GetSelf returns the Self field. +func (p *PRLinks) GetSelf() *PRLink { + if p == nil { + return nil } - return *o.AvatarURL + return p.Self } -// GetBillingEmail returns the BillingEmail field if it's non-nil, zero value otherwise. -func (o *Organization) GetBillingEmail() string { - if o == nil || o.BillingEmail == nil { - return "" +// GetStatuses returns the Statuses field. +func (p *PRLinks) GetStatuses() *PRLink { + if p == nil { + return nil } - return *o.BillingEmail + return p.Statuses } -// GetBlog returns the Blog field if it's non-nil, zero value otherwise. -func (o *Organization) GetBlog() string { - if o == nil || o.Blog == nil { +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *Project) GetBody() string { + if p == nil || p.Body == nil { return "" } - return *o.Blog + return *p.Body } -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (o *Organization) GetCollaborators() int { - if o == nil || o.Collaborators == nil { - return 0 +// GetColumnsURL returns the ColumnsURL field if it's non-nil, zero value otherwise. +func (p *Project) GetColumnsURL() string { + if p == nil || p.ColumnsURL == nil { + return "" } - return *o.Collaborators + return *p.ColumnsURL } -// GetCompany returns the Company field if it's non-nil, zero value otherwise. -func (o *Organization) GetCompany() string { - if o == nil || o.Company == nil { - return "" +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *Project) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} } - return *o.Company + return *p.CreatedAt } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *Organization) GetCreatedAt() time.Time { - if o == nil || o.CreatedAt == nil { - return time.Time{} +// GetCreator returns the Creator field. +func (p *Project) GetCreator() *User { + if p == nil { + return nil } - return *o.CreatedAt + return p.Creator } -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (o *Organization) GetDescription() string { - if o == nil || o.Description == nil { +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Project) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { return "" } - return *o.Description + return *p.HTMLURL } -// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. -func (o *Organization) GetDiskUsage() int { - if o == nil || o.DiskUsage == nil { +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *Project) GetID() int64 { + if p == nil || p.ID == nil { return 0 } - return *o.DiskUsage + return *p.ID } -// GetEmail returns the Email field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetEmail() string { - if o == nil || o.Email == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *Project) GetName() string { + if p == nil || p.Name == nil { return "" } - return *o.Email + return *p.Name } -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetEventsURL() string { - if o == nil || o.EventsURL == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *Project) GetNodeID() string { + if p == nil || p.NodeID == nil { return "" } - return *o.EventsURL -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (o *Organization) GetFollowers() int { - if o == nil || o.Followers == nil { - return 0 - } - return *o.Followers + return *p.NodeID } -// GetFollowing returns the Following field if it's non-nil, zero value otherwise. -func (o *Organization) GetFollowing() int { - if o == nil || o.Following == nil { +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *Project) GetNumber() int { + if p == nil || p.Number == nil { return 0 } - return *o.Following + return *p.Number } -// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHooksURL() string { - if o == nil || o.HooksURL == nil { +// GetOwnerURL returns the OwnerURL field if it's non-nil, zero value otherwise. +func (p *Project) GetOwnerURL() string { + if p == nil || p.OwnerURL == nil { return "" } - return *o.HooksURL + return *p.OwnerURL } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHTMLURL() string { - if o == nil || o.HTMLURL == nil { +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *Project) GetState() string { + if p == nil || p.State == nil { return "" } - return *o.HTMLURL + return *p.State } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *Organization) GetID() int { - if o == nil || o.ID == nil { - return 0 +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *Project) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} } - return *o.ID + return *p.UpdatedAt } -// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetIssuesURL() string { - if o == nil || o.IssuesURL == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *Project) GetURL() string { + if p == nil || p.URL == nil { return "" } - return *o.IssuesURL + return *p.URL } -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (o *Organization) GetLocation() string { - if o == nil || o.Location == nil { - return "" +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetArchived() bool { + if p == nil || p.Archived == nil { + return false } - return *o.Location + return *p.Archived } -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (o *Organization) GetLogin() string { - if o == nil || o.Login == nil { - return "" +// GetColumnID returns the ColumnID field if it's non-nil, zero value otherwise. 
+func (p *ProjectCard) GetColumnID() int64 { + if p == nil || p.ColumnID == nil { + return 0 } - return *o.Login + return *p.ColumnID } -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersURL() string { - if o == nil || o.MembersURL == nil { +// GetColumnName returns the ColumnName field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetColumnName() string { + if p == nil || p.ColumnName == nil { return "" } - return *o.MembersURL + return *p.ColumnName } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *Organization) GetName() string { - if o == nil || o.Name == nil { +// GetColumnURL returns the ColumnURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetColumnURL() string { + if p == nil || p.ColumnURL == nil { return "" } - return *o.Name -} - -// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetOwnedPrivateRepos() int { - if o == nil || o.OwnedPrivateRepos == nil { - return 0 - } - return *o.OwnedPrivateRepos + return *p.ColumnURL } -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPrivateGists() int { - if o == nil || o.PrivateGists == nil { - return 0 +// GetContentURL returns the ContentURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetContentURL() string { + if p == nil || p.ContentURL == nil { + return "" } - return *o.PrivateGists + return *p.ContentURL } -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicGists() int { - if o == nil || o.PublicGists == nil { - return 0 +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} } - return *o.PublicGists + return *p.CreatedAt } -// GetPublicMembersURL returns the PublicMembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicMembersURL() string { - if o == nil || o.PublicMembersURL == nil { - return "" +// GetCreator returns the Creator field. +func (p *ProjectCard) GetCreator() *User { + if p == nil { + return nil } - return *o.PublicMembersURL + return p.Creator } -// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicRepos() int { - if o == nil || o.PublicRepos == nil { +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetID() int64 { + if p == nil || p.ID == nil { return 0 } - return *o.PublicRepos + return *p.ID } -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetReposURL() string { - if o == nil || o.ReposURL == nil { +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetNodeID() string { + if p == nil || p.NodeID == nil { return "" } - return *o.ReposURL + return *p.NodeID } -// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetTotalPrivateRepos() int { - if o == nil || o.TotalPrivateRepos == nil { - return 0 +// GetNote returns the Note field if it's non-nil, zero value otherwise. 
+func (p *ProjectCard) GetNote() string { + if p == nil || p.Note == nil { + return "" } - return *o.TotalPrivateRepos + return *p.Note } -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (o *Organization) GetType() string { - if o == nil || o.Type == nil { +// GetPreviousColumnName returns the PreviousColumnName field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetPreviousColumnName() string { + if p == nil || p.PreviousColumnName == nil { return "" } - return *o.Type + return *p.PreviousColumnName } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *Organization) GetUpdatedAt() time.Time { - if o == nil || o.UpdatedAt == nil { - return time.Time{} +// GetProjectID returns the ProjectID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetProjectID() int64 { + if p == nil || p.ProjectID == nil { + return 0 } - return *o.UpdatedAt + return *p.ProjectID } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (o *Organization) GetURL() string { - if o == nil || o.URL == nil { +// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetProjectURL() string { + if p == nil || p.ProjectURL == nil { return "" } - return *o.URL + return *p.ProjectURL } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrganizationEvent) GetAction() string { - if o == nil || o.Action == nil { - return "" +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} } - return *o.Action + return *p.UpdatedAt } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrgBlockEvent) GetAction() string { - if o == nil || o.Action == nil { +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetURL() string { + if p == nil || p.URL == nil { return "" } - return *o.Action + return *p.URL } // GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *Page) GetAction() string { +func (p *ProjectCardEvent) GetAction() string { if p == nil || p.Action == nil { return "" } return *p.Action } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Page) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetPageName returns the PageName field if it's non-nil, zero value otherwise. -func (p *Page) GetPageName() string { - if p == nil || p.PageName == nil { - return "" +// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. +func (p *ProjectCardEvent) GetAfterID() int64 { + if p == nil || p.AfterID == nil { + return 0 } - return *p.PageName + return *p.AfterID } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *Page) GetSHA() string { - if p == nil || p.SHA == nil { - return "" +// GetChanges returns the Changes field. +func (p *ProjectCardEvent) GetChanges() *ProjectCardChange { + if p == nil { + return nil } - return *p.SHA + return p.Changes } -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (p *Page) GetSummary() string { - if p == nil || p.Summary == nil { - return "" +// GetInstallation returns the Installation field. 
+func (p *ProjectCardEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *p.Summary + return p.Installation } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *Page) GetTitle() string { - if p == nil || p.Title == nil { - return "" +// GetOrg returns the Org field. +func (p *ProjectCardEvent) GetOrg() *Organization { + if p == nil { + return nil } - return *p.Title + return p.Org } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PageBuildEvent) GetID() int { - if p == nil || p.ID == nil { - return 0 +// GetProjectCard returns the ProjectCard field. +func (p *ProjectCardEvent) GetProjectCard() *ProjectCard { + if p == nil { + return nil } - return *p.ID + return p.ProjectCard } -// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. -func (p *Pages) GetCNAME() string { - if p == nil || p.CNAME == nil { - return "" +// GetRepo returns the Repo field. +func (p *ProjectCardEvent) GetRepo() *Repository { + if p == nil { + return nil } - return *p.CNAME + return p.Repo } -// GetCustom404 returns the Custom404 field if it's non-nil, zero value otherwise. -func (p *Pages) GetCustom404() bool { - if p == nil || p.Custom404 == nil { - return false +// GetSender returns the Sender field. +func (p *ProjectCardEvent) GetSender() *User { + if p == nil { + return nil } - return *p.Custom404 + return p.Sender } -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Pages) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { +// GetArchivedState returns the ArchivedState field if it's non-nil, zero value otherwise. +func (p *ProjectCardListOptions) GetArchivedState() string { + if p == nil || p.ArchivedState == nil { return "" } - return *p.HTMLURL + return *p.ArchivedState } -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *Pages) GetStatus() string { - if p == nil || p.Status == nil { - return "" +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (p *ProjectCardOptions) GetArchived() bool { + if p == nil || p.Archived == nil { + return false } - return *p.Status + return *p.Archived } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Pages) GetURL() string { - if p == nil || p.URL == nil { +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (p *ProjectCollaboratorOptions) GetPermission() string { + if p == nil || p.Permission == nil { return "" } - return *p.URL + return *p.Permission } -// GetCommit returns the Commit field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCommit() string { - if p == nil || p.Commit == nil { +// GetCardsURL returns the CardsURL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetCardsURL() string { + if p == nil || p.CardsURL == nil { return "" } - return *p.Commit + return *p.CardsURL } // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCreatedAt() Timestamp { +func (p *ProjectColumn) GetCreatedAt() Timestamp { if p == nil || p.CreatedAt == nil { return Timestamp{} } return *p.CreatedAt } -// GetDuration returns the Duration field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetDuration() int { - if p == nil || p.Duration == nil { +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (p *ProjectColumn) GetID() int64 { + if p == nil || p.ID == nil { return 0 } - return *p.Duration + return *p.ID } -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetStatus() string { - if p == nil || p.Status == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetName() string { + if p == nil || p.Name == nil { return "" } - return *p.Status + return *p.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetProjectURL() string { + if p == nil || p.ProjectURL == nil { + return "" + } + return *p.ProjectURL } // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetUpdatedAt() Timestamp { +func (p *ProjectColumn) GetUpdatedAt() Timestamp { if p == nil || p.UpdatedAt == nil { return Timestamp{} } @@ -3389,275 +6821,275 @@ func (p *PagesBuild) GetUpdatedAt() Timestamp { } // GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetURL() string { +func (p *ProjectColumn) GetURL() string { if p == nil || p.URL == nil { return "" } return *p.URL } -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PagesError) GetMessage() string { - if p == nil || p.Message == nil { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectColumnEvent) GetAction() string { + if p == nil || p.Action == nil { return "" } - return *p.Message + return *p.Action } -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetHookID() int { - if p == nil || p.HookID == nil { +// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. +func (p *ProjectColumnEvent) GetAfterID() int64 { + if p == nil || p.AfterID == nil { return 0 } - return *p.HookID + return *p.AfterID } -// GetZen returns the Zen field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetZen() string { - if p == nil || p.Zen == nil { - return "" +// GetChanges returns the Changes field. +func (p *ProjectColumnEvent) GetChanges() *ProjectColumnChange { + if p == nil { + return nil } - return *p.Zen + return p.Changes } -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (p *Plan) GetCollaborators() int { - if p == nil || p.Collaborators == nil { - return 0 +// GetInstallation returns the Installation field. +func (p *ProjectColumnEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *p.Collaborators + return p.Installation } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Plan) GetName() string { - if p == nil || p.Name == nil { - return "" +// GetOrg returns the Org field. +func (p *ProjectColumnEvent) GetOrg() *Organization { + if p == nil { + return nil } - return *p.Name + return p.Org } -// GetPrivateRepos returns the PrivateRepos field if it's non-nil, zero value otherwise. -func (p *Plan) GetPrivateRepos() int { - if p == nil || p.PrivateRepos == nil { - return 0 +// GetProjectColumn returns the ProjectColumn field. 
+func (p *ProjectColumnEvent) GetProjectColumn() *ProjectColumn { + if p == nil { + return nil } - return *p.PrivateRepos + return p.ProjectColumn } -// GetSpace returns the Space field if it's non-nil, zero value otherwise. -func (p *Plan) GetSpace() int { - if p == nil || p.Space == nil { - return 0 +// GetRepo returns the Repo field. +func (p *ProjectColumnEvent) GetRepo() *Repository { + if p == nil { + return nil } - return *p.Space + return p.Repo } -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *Project) GetBody() string { - if p == nil || p.Body == nil { - return "" +// GetSender returns the Sender field. +func (p *ProjectColumnEvent) GetSender() *User { + if p == nil { + return nil } - return *p.Body + return p.Sender } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" } - return *p.CreatedAt + return *p.Action } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *Project) GetID() int { - if p == nil || p.ID == nil { - return 0 +// GetChanges returns the Changes field. +func (p *ProjectEvent) GetChanges() *ProjectChange { + if p == nil { + return nil } - return *p.ID + return p.Changes } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Project) GetName() string { - if p == nil || p.Name == nil { - return "" +// GetInstallation returns the Installation field. +func (p *ProjectEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *p.Name + return p.Installation } -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *Project) GetNumber() int { - if p == nil || p.Number == nil { - return 0 +// GetOrg returns the Org field. +func (p *ProjectEvent) GetOrg() *Organization { + if p == nil { + return nil } - return *p.Number + return p.Org } -// GetOwnerURL returns the OwnerURL field if it's non-nil, zero value otherwise. -func (p *Project) GetOwnerURL() string { - if p == nil || p.OwnerURL == nil { - return "" +// GetProject returns the Project field. +func (p *ProjectEvent) GetProject() *Project { + if p == nil { + return nil } - return *p.OwnerURL + return p.Project } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} +// GetRepo returns the Repo field. +func (p *ProjectEvent) GetRepo() *Repository { + if p == nil { + return nil } - return *p.UpdatedAt + return p.Repo } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Project) GetURL() string { - if p == nil || p.URL == nil { - return "" +// GetSender returns the Sender field. +func (p *ProjectEvent) GetSender() *User { + if p == nil { + return nil } - return *p.URL + return p.Sender } -// GetColumnID returns the ColumnID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnID() int { - if p == nil || p.ColumnID == nil { - return 0 +// GetBody returns the Body field if it's non-nil, zero value otherwise. 
+func (p *ProjectOptions) GetBody() string { + if p == nil || p.Body == nil { + return "" } - return *p.ColumnID + return *p.Body } -// GetColumnURL returns the ColumnURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnURL() string { - if p == nil || p.ColumnURL == nil { +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetName() string { + if p == nil || p.Name == nil { return "" } - return *p.ColumnURL + return *p.Name } -// GetContentURL returns the ContentURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetContentURL() string { - if p == nil || p.ContentURL == nil { +// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetOrganizationPermission() string { + if p == nil || p.OrganizationPermission == nil { return "" } - return *p.ContentURL + return *p.OrganizationPermission } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetPublic() bool { + if p == nil || p.Public == nil { + return false } - return *p.CreatedAt + return *p.Public } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetID() int { - if p == nil || p.ID == nil { - return 0 +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetState() string { + if p == nil || p.State == nil { + return "" } - return *p.ID + return *p.State } -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetNote() string { - if p == nil || p.Note == nil { +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (p *ProjectPermissionLevel) GetPermission() string { + if p == nil || p.Permission == nil { return "" } - return *p.Note + return *p.Permission } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} +// GetUser returns the User field. +func (p *ProjectPermissionLevel) GetUser() *User { + if p == nil { + return nil } - return *p.UpdatedAt + return p.User } -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetURL() string { - if p == nil || p.URL == nil { - return "" +// GetEnforceAdmins returns the EnforceAdmins field. +func (p *Protection) GetEnforceAdmins() *AdminEnforcement { + if p == nil { + return nil } - return *p.URL + return p.EnforceAdmins } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" +// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. +func (p *Protection) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcement { + if p == nil { + return nil } - return *p.Action + return p.RequiredPullRequestReviews } -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAfterID() int { - if p == nil || p.AfterID == nil { - return 0 +// GetRequiredStatusChecks returns the RequiredStatusChecks field. 
+func (p *Protection) GetRequiredStatusChecks() *RequiredStatusChecks { + if p == nil { + return nil } - return *p.AfterID + return p.RequiredStatusChecks } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} +// GetRestrictions returns the Restrictions field. +func (p *Protection) GetRestrictions() *BranchRestrictions { + if p == nil { + return nil } - return *p.CreatedAt + return p.Restrictions } -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetID() int { - if p == nil || p.ID == nil { - return 0 +// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. +func (p *ProtectionRequest) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcementRequest { + if p == nil { + return nil } - return *p.ID + return p.RequiredPullRequestReviews } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetName() string { - if p == nil || p.Name == nil { - return "" +// GetRequiredStatusChecks returns the RequiredStatusChecks field. +func (p *ProtectionRequest) GetRequiredStatusChecks() *RequiredStatusChecks { + if p == nil { + return nil } - return *p.Name + return p.RequiredStatusChecks } -// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetProjectURL() string { - if p == nil || p.ProjectURL == nil { - return "" +// GetRestrictions returns the Restrictions field. +func (p *ProtectionRequest) GetRestrictions() *BranchRestrictionsRequest { + if p == nil { + return nil } - return *p.ProjectURL + return p.Restrictions } -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} +// GetInstallation returns the Installation field. +func (p *PublicEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *p.UpdatedAt + return p.Installation } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" +// GetRepo returns the Repo field. +func (p *PublicEvent) GetRepo() *Repository { + if p == nil { + return nil } - return *p.Action + return p.Repo } -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAfterID() int { - if p == nil || p.AfterID == nil { - return 0 +// GetSender returns the Sender field. +func (p *PublicEvent) GetSender() *User { + if p == nil { + return nil } - return *p.AfterID + return p.Sender } -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectEvent) GetAction() string { - if p == nil || p.Action == nil { +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetActiveLockReason() string { + if p == nil || p.ActiveLockReason == nil { return "" } - return *p.Action + return *p.ActiveLockReason } // GetAdditions returns the Additions field if it's non-nil, zero value otherwise. @@ -3668,6 +7100,30 @@ func (p *PullRequest) GetAdditions() int { return *p.Additions } +// GetAssignee returns the Assignee field. 
+func (p *PullRequest) GetAssignee() *User { + if p == nil { + return nil + } + return p.Assignee +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetAuthorAssociation() string { + if p == nil || p.AuthorAssociation == nil { + return "" + } + return *p.AuthorAssociation +} + +// GetBase returns the Base field. +func (p *PullRequest) GetBase() *PullRequestBranch { + if p == nil { + return nil + } + return p.Base +} + // GetBody returns the Body field if it's non-nil, zero value otherwise. func (p *PullRequest) GetBody() string { if p == nil || p.Body == nil { @@ -3700,6 +7156,14 @@ func (p *PullRequest) GetComments() int { return *p.Comments } +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCommentsURL() string { + if p == nil || p.CommentsURL == nil { + return "" + } + return *p.CommentsURL +} + // GetCommits returns the Commits field if it's non-nil, zero value otherwise. func (p *PullRequest) GetCommits() int { if p == nil || p.Commits == nil { @@ -3708,6 +7172,14 @@ func (p *PullRequest) GetCommits() int { return *p.Commits } +// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCommitsURL() string { + if p == nil || p.CommitsURL == nil { + return "" + } + return *p.CommitsURL +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (p *PullRequest) GetCreatedAt() time.Time { if p == nil || p.CreatedAt == nil { @@ -3732,6 +7204,22 @@ func (p *PullRequest) GetDiffURL() string { return *p.DiffURL } +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetDraft() bool { + if p == nil || p.Draft == nil { + return false + } + return *p.Draft +} + +// GetHead returns the Head field. +func (p *PullRequest) GetHead() *PullRequestBranch { + if p == nil { + return nil + } + return p.Head +} + // GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. func (p *PullRequest) GetHTMLURL() string { if p == nil || p.HTMLURL == nil { @@ -3741,7 +7229,7 @@ func (p *PullRequest) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetID() int { +func (p *PullRequest) GetID() int64 { if p == nil || p.ID == nil { return 0 } @@ -3756,6 +7244,14 @@ func (p *PullRequest) GetIssueURL() string { return *p.IssueURL } +// GetLinks returns the Links field. +func (p *PullRequest) GetLinks() *PRLinks { + if p == nil { + return nil + } + return p.Links +} + // GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. func (p *PullRequest) GetMaintainerCanModify() bool { if p == nil || p.MaintainerCanModify == nil { @@ -3772,6 +7268,14 @@ func (p *PullRequest) GetMergeable() bool { return *p.Mergeable } +// GetMergeableState returns the MergeableState field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMergeableState() string { + if p == nil || p.MergeableState == nil { + return "" + } + return *p.MergeableState +} + // GetMergeCommitSHA returns the MergeCommitSHA field if it's non-nil, zero value otherwise. func (p *PullRequest) GetMergeCommitSHA() string { if p == nil || p.MergeCommitSHA == nil { @@ -3796,6 +7300,30 @@ func (p *PullRequest) GetMergedAt() time.Time { return *p.MergedAt } +// GetMergedBy returns the MergedBy field. 
+func (p *PullRequest) GetMergedBy() *User { + if p == nil { + return nil + } + return p.MergedBy +} + +// GetMilestone returns the Milestone field. +func (p *PullRequest) GetMilestone() *Milestone { + if p == nil { + return nil + } + return p.Milestone +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + // GetNumber returns the Number field if it's non-nil, zero value otherwise. func (p *PullRequest) GetNumber() int { if p == nil || p.Number == nil { @@ -3812,6 +7340,14 @@ func (p *PullRequest) GetPatchURL() string { return *p.PatchURL } +// GetReviewComments returns the ReviewComments field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetReviewComments() int { + if p == nil || p.ReviewComments == nil { + return 0 + } + return *p.ReviewComments +} + // GetReviewCommentsURL returns the ReviewCommentsURL field if it's non-nil, zero value otherwise. func (p *PullRequest) GetReviewCommentsURL() string { if p == nil || p.ReviewCommentsURL == nil { @@ -3868,6 +7404,14 @@ func (p *PullRequest) GetURL() string { return *p.URL } +// GetUser returns the User field. +func (p *PullRequest) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + // GetLabel returns the Label field if it's non-nil, zero value otherwise. func (p *PullRequestBranch) GetLabel() string { if p == nil || p.Label == nil { @@ -3884,6 +7428,14 @@ func (p *PullRequestBranch) GetRef() string { return *p.Ref } +// GetRepo returns the Repo field. +func (p *PullRequestBranch) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + // GetSHA returns the SHA field if it's non-nil, zero value otherwise. func (p *PullRequestBranch) GetSHA() string { if p == nil || p.SHA == nil { @@ -3892,6 +7444,22 @@ func (p *PullRequestBranch) GetSHA() string { return *p.SHA } +// GetUser returns the User field. +func (p *PullRequestBranch) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetAuthorAssociation() string { + if p == nil || p.AuthorAssociation == nil { + return "" + } + return *p.AuthorAssociation +} + // GetBody returns the Body field if it's non-nil, zero value otherwise. func (p *PullRequestComment) GetBody() string { if p == nil || p.Body == nil { @@ -3933,7 +7501,7 @@ func (p *PullRequestComment) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetID() int { +func (p *PullRequestComment) GetID() int64 { if p == nil || p.ID == nil { return 0 } @@ -3941,7 +7509,7 @@ func (p *PullRequestComment) GetID() int { } // GetInReplyTo returns the InReplyTo field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetInReplyTo() int { +func (p *PullRequestComment) GetInReplyTo() int64 { if p == nil || p.InReplyTo == nil { return 0 } @@ -3980,6 +7548,14 @@ func (p *PullRequestComment) GetPosition() int { return *p.Position } +// GetPullRequestReviewID returns the PullRequestReviewID field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetPullRequestReviewID() int64 { + if p == nil || p.PullRequestReviewID == nil { + return 0 + } + return *p.PullRequestReviewID +} + // GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. 
func (p *PullRequestComment) GetPullRequestURL() string { if p == nil || p.PullRequestURL == nil { @@ -3988,6 +7564,14 @@ func (p *PullRequestComment) GetPullRequestURL() string { return *p.PullRequestURL } +// GetReactions returns the Reactions field. +func (p *PullRequestComment) GetReactions() *Reactions { + if p == nil { + return nil + } + return p.Reactions +} + // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. func (p *PullRequestComment) GetUpdatedAt() time.Time { if p == nil || p.UpdatedAt == nil { @@ -4004,6 +7588,14 @@ func (p *PullRequestComment) GetURL() string { return *p.URL } +// GetUser returns the User field. +func (p *PullRequestComment) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (p *PullRequestEvent) GetAction() string { if p == nil || p.Action == nil { @@ -4012,6 +7604,38 @@ func (p *PullRequestEvent) GetAction() string { return *p.Action } +// GetAssignee returns the Assignee field. +func (p *PullRequestEvent) GetAssignee() *User { + if p == nil { + return nil + } + return p.Assignee +} + +// GetChanges returns the Changes field. +func (p *PullRequestEvent) GetChanges() *EditChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *PullRequestEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetLabel returns the Label field. +func (p *PullRequestEvent) GetLabel() *Label { + if p == nil { + return nil + } + return p.Label +} + // GetNumber returns the Number field if it's non-nil, zero value otherwise. func (p *PullRequestEvent) GetNumber() int { if p == nil || p.Number == nil { @@ -4020,6 +7644,46 @@ func (p *PullRequestEvent) GetNumber() int { return *p.Number } +// GetOrganization returns the Organization field. +func (p *PullRequestEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetRequestedReviewer returns the RequestedReviewer field. +func (p *PullRequestEvent) GetRequestedReviewer() *User { + if p == nil { + return nil + } + return p.RequestedReviewer +} + +// GetSender returns the Sender field. +func (p *PullRequestEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + // GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. func (p *PullRequestLinks) GetDiffURL() string { if p == nil || p.DiffURL == nil { @@ -4101,7 +7765,7 @@ func (p *PullRequestReview) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetID() int { +func (p *PullRequestReview) GetID() int64 { if p == nil || p.ID == nil { return 0 } @@ -4132,12 +7796,68 @@ func (p *PullRequestReview) GetSubmittedAt() time.Time { return *p.SubmittedAt } +// GetUser returns the User field. +func (p *PullRequestReview) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. 
func (p *PullRequestReviewCommentEvent) GetAction() string { if p == nil || p.Action == nil { return "" } - return *p.Action + return *p.Action +} + +// GetChanges returns the Changes field. +func (p *PullRequestReviewCommentEvent) GetChanges() *EditChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetComment returns the Comment field. +func (p *PullRequestReviewCommentEvent) GetComment() *PullRequestComment { + if p == nil { + return nil + } + return p.Comment +} + +// GetInstallation returns the Installation field. +func (p *PullRequestReviewCommentEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestReviewCommentEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestReviewCommentEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PullRequestReviewCommentEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender } // GetMessage returns the Message field if it's non-nil, zero value otherwise. @@ -4156,6 +7876,54 @@ func (p *PullRequestReviewEvent) GetAction() string { return *p.Action } +// GetInstallation returns the Installation field. +func (p *PullRequestReviewEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrganization returns the Organization field. +func (p *PullRequestReviewEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestReviewEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestReviewEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetReview returns the Review field. +func (p *PullRequestReviewEvent) GetReview() *PullRequestReview { + if p == nil { + return nil + } + return p.Review +} + +// GetSender returns the Sender field. +func (p *PullRequestReviewEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + // GetBody returns the Body field if it's non-nil, zero value otherwise. func (p *PullRequestReviewRequest) GetBody() string { if p == nil || p.Body == nil { @@ -4164,6 +7932,14 @@ func (p *PullRequestReviewRequest) GetBody() string { return *p.Body } +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewRequest) GetCommitID() string { + if p == nil || p.CommitID == nil { + return "" + } + return *p.CommitID +} + // GetEvent returns the Event field if it's non-nil, zero value otherwise. func (p *PullRequestReviewRequest) GetEvent() string { if p == nil || p.Event == nil { @@ -4172,6 +7948,22 @@ func (p *PullRequestReviewRequest) GetEvent() string { return *p.Event } +// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. +func (p *PullRequestReviewsEnforcementRequest) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { + if p == nil { + return nil + } + return p.DismissalRestrictionsRequest +} + +// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. 
+func (p *PullRequestReviewsEnforcementUpdate) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { + if p == nil { + return nil + } + return p.DismissalRestrictionsRequest +} + // GetDismissStaleReviews returns the DismissStaleReviews field if it's non-nil, zero value otherwise. func (p *PullRequestReviewsEnforcementUpdate) GetDismissStaleReviews() bool { if p == nil || p.DismissStaleReviews == nil { @@ -4180,44 +7972,36 @@ func (p *PullRequestReviewsEnforcementUpdate) GetDismissStaleReviews() bool { return *p.DismissStaleReviews } -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (p *pullRequestUpdate) GetBase() string { - if p == nil || p.Base == nil { - return "" - } - return *p.Base -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *pullRequestUpdate) GetBody() string { - if p == nil || p.Body == nil { - return "" +// GetMergablePulls returns the MergablePulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetMergablePulls() int { + if p == nil || p.MergablePulls == nil { + return 0 } - return *p.Body + return *p.MergablePulls } -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (p *pullRequestUpdate) GetMaintainerCanModify() bool { - if p == nil || p.MaintainerCanModify == nil { - return false +// GetMergedPulls returns the MergedPulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetMergedPulls() int { + if p == nil || p.MergedPulls == nil { + return 0 } - return *p.MaintainerCanModify + return *p.MergedPulls } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *pullRequestUpdate) GetState() string { - if p == nil || p.State == nil { - return "" +// GetTotalPulls returns the TotalPulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetTotalPulls() int { + if p == nil || p.TotalPulls == nil { + return 0 } - return *p.State + return *p.TotalPulls } -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *pullRequestUpdate) GetTitle() string { - if p == nil || p.Title == nil { - return "" +// GetUnmergablePulls returns the UnmergablePulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetUnmergablePulls() int { + if p == nil || p.UnmergablePulls == nil { + return 0 } - return *p.Title + return *p.UnmergablePulls } // GetCommits returns the Commits field if it's non-nil, zero value otherwise. @@ -4316,8 +8100,32 @@ func (p *PushEvent) GetHead() string { return *p.Head } +// GetHeadCommit returns the HeadCommit field. +func (p *PushEvent) GetHeadCommit() *PushEventCommit { + if p == nil { + return nil + } + return p.HeadCommit +} + +// GetInstallation returns the Installation field. +func (p *PushEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetPusher returns the Pusher field. +func (p *PushEvent) GetPusher() *User { + if p == nil { + return nil + } + return p.Pusher +} + // GetPushID returns the PushID field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetPushID() int { +func (p *PushEvent) GetPushID() int64 { if p == nil || p.PushID == nil { return 0 } @@ -4332,6 +8140,22 @@ func (p *PushEvent) GetRef() string { return *p.Ref } +// GetRepo returns the Repo field. +func (p *PushEvent) GetRepo() *PushEventRepository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. 
+func (p *PushEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + // GetSize returns the Size field if it's non-nil, zero value otherwise. func (p *PushEvent) GetSize() int { if p == nil || p.Size == nil { @@ -4340,6 +8164,22 @@ func (p *PushEvent) GetSize() int { return *p.Size } +// GetAuthor returns the Author field. +func (p *PushEventCommit) GetAuthor() *CommitAuthor { + if p == nil { + return nil + } + return p.Author +} + +// GetCommitter returns the Committer field. +func (p *PushEventCommit) GetCommitter() *CommitAuthor { + if p == nil { + return nil + } + return p.Committer +} + // GetDistinct returns the Distinct field if it's non-nil, zero value otherwise. func (p *PushEventCommit) GetDistinct() bool { if p == nil || p.Distinct == nil { @@ -4533,7 +8373,7 @@ func (p *PushEventRepository) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetID() int { +func (p *PushEventRepository) GetID() int64 { if p == nil || p.ID == nil { return 0 } @@ -4564,6 +8404,14 @@ func (p *PushEventRepository) GetName() string { return *p.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + // GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. func (p *PushEventRepository) GetOpenIssuesCount() int { if p == nil || p.OpenIssuesCount == nil { @@ -4580,6 +8428,14 @@ func (p *PushEventRepository) GetOrganization() string { return *p.Organization } +// GetOwner returns the Owner field. +func (p *PushEventRepository) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + // GetPrivate returns the Private field if it's non-nil, zero value otherwise. func (p *PushEventRepository) GetPrivate() bool { if p == nil || p.Private == nil { @@ -4660,6 +8516,22 @@ func (p *PushEventRepository) GetWatchersCount() int { return *p.WatchersCount } +// GetCore returns the Core field. +func (r *RateLimits) GetCore() *Rate { + if r == nil { + return nil + } + return r.Core +} + +// GetSearch returns the Search field. +func (r *RateLimits) GetSearch() *Rate { + if r == nil { + return nil + } + return r.Search +} + // GetContent returns the Content field if it's non-nil, zero value otherwise. func (r *Reaction) GetContent() string { if r == nil || r.Content == nil { @@ -4669,13 +8541,29 @@ func (r *Reaction) GetContent() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Reaction) GetID() int { +func (r *Reaction) GetID() int64 { if r == nil || r.ID == nil { return 0 } return *r.ID } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Reaction) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetUser returns the User field. +func (r *Reaction) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + // GetConfused returns the Confused field if it's non-nil, zero value otherwise. func (r *Reactions) GetConfused() int { if r == nil || r.Confused == nil { @@ -4740,6 +8628,22 @@ func (r *Reactions) GetURL() string { return *r.URL } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Reference) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetObject returns the Object field. 
+func (r *Reference) GetObject() *GitObject { + if r == nil { + return nil + } + return r.Object +} + // GetRef returns the Ref field if it's non-nil, zero value otherwise. func (r *Reference) GetRef() string { if r == nil || r.Ref == nil { @@ -4789,7 +8693,7 @@ func (r *ReleaseAsset) GetDownloadCount() int { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetID() int { +func (r *ReleaseAsset) GetID() int64 { if r == nil || r.ID == nil { return 0 } @@ -4812,6 +8716,14 @@ func (r *ReleaseAsset) GetName() string { return *r.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + // GetSize returns the Size field if it's non-nil, zero value otherwise. func (r *ReleaseAsset) GetSize() int { if r == nil || r.Size == nil { @@ -4836,6 +8748,14 @@ func (r *ReleaseAsset) GetUpdatedAt() Timestamp { return *r.UpdatedAt } +// GetUploader returns the Uploader field. +func (r *ReleaseAsset) GetUploader() *User { + if r == nil { + return nil + } + return r.Uploader +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (r *ReleaseAsset) GetURL() string { if r == nil || r.URL == nil { @@ -4852,6 +8772,38 @@ func (r *ReleaseEvent) GetAction() string { return *r.Action } +// GetInstallation returns the Installation field. +func (r *ReleaseEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetRelease returns the Release field. +func (r *ReleaseEvent) GetRelease() *RepositoryRelease { + if r == nil { + return nil + } + return r.Release +} + +// GetRepo returns the Repo field. +func (r *ReleaseEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *ReleaseEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + // GetFrom returns the From field if it's non-nil, zero value otherwise. func (r *Rename) GetFrom() string { if r == nil || r.From == nil { @@ -4908,6 +8860,14 @@ func (r *Repository) GetAllowSquashMerge() bool { return *r.AllowSquashMerge } +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (r *Repository) GetArchived() bool { + if r == nil || r.Archived == nil { + return false + } + return *r.Archived +} + // GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. func (r *Repository) GetArchiveURL() string { if r == nil || r.ArchiveURL == nil { @@ -4956,6 +8916,14 @@ func (r *Repository) GetCloneURL() string { return *r.CloneURL } +// GetCodeOfConduct returns the CodeOfConduct field. +func (r *Repository) GetCodeOfConduct() *CodeOfConduct { + if r == nil { + return nil + } + return r.CodeOfConduct +} + // GetCollaboratorsURL returns the CollaboratorsURL field if it's non-nil, zero value otherwise. func (r *Repository) GetCollaboratorsURL() string { if r == nil || r.CollaboratorsURL == nil { @@ -5148,6 +9116,14 @@ func (r *Repository) GetHasPages() bool { return *r.HasPages } +// GetHasProjects returns the HasProjects field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasProjects() bool { + if r == nil || r.HasProjects == nil { + return false + } + return *r.HasProjects +} + // GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. 
func (r *Repository) GetHasWiki() bool { if r == nil || r.HasWiki == nil { @@ -5181,7 +9157,7 @@ func (r *Repository) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Repository) GetID() int { +func (r *Repository) GetID() int64 { if r == nil || r.ID == nil { return 0 } @@ -5244,6 +9220,14 @@ func (r *Repository) GetLanguagesURL() string { return *r.LanguagesURL } +// GetLicense returns the License field. +func (r *Repository) GetLicense() *License { + if r == nil { + return nil + } + return r.License +} + // GetLicenseTemplate returns the LicenseTemplate field if it's non-nil, zero value otherwise. func (r *Repository) GetLicenseTemplate() string { if r == nil || r.LicenseTemplate == nil { @@ -5300,6 +9284,14 @@ func (r *Repository) GetNetworkCount() int { return *r.NetworkCount } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Repository) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + // GetNotificationsURL returns the NotificationsURL field if it's non-nil, zero value otherwise. func (r *Repository) GetNotificationsURL() string { if r == nil || r.NotificationsURL == nil { @@ -5316,6 +9308,30 @@ func (r *Repository) GetOpenIssuesCount() int { return *r.OpenIssuesCount } +// GetOrganization returns the Organization field. +func (r *Repository) GetOrganization() *Organization { + if r == nil { + return nil + } + return r.Organization +} + +// GetOwner returns the Owner field. +func (r *Repository) GetOwner() *User { + if r == nil { + return nil + } + return r.Owner +} + +// GetParent returns the Parent field. +func (r *Repository) GetParent() *Repository { + if r == nil { + return nil + } + return r.Parent +} + // GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. func (r *Repository) GetPermissions() map[string]bool { if r == nil || r.Permissions == nil { @@ -5364,6 +9380,14 @@ func (r *Repository) GetSize() int { return *r.Size } +// GetSource returns the Source field. +func (r *Repository) GetSource() *Repository { + if r == nil { + return nil + } + return r.Source +} + // GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. func (r *Repository) GetSSHURL() string { if r == nil || r.SSHURL == nil { @@ -5437,7 +9461,7 @@ func (r *Repository) GetTagsURL() string { } // GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. -func (r *Repository) GetTeamID() int { +func (r *Repository) GetTeamID() int64 { if r == nil || r.TeamID == nil { return 0 } @@ -5517,7 +9541,7 @@ func (r *RepositoryComment) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetID() int { +func (r *RepositoryComment) GetID() int64 { if r == nil || r.ID == nil { return 0 } @@ -5540,6 +9564,14 @@ func (r *RepositoryComment) GetPosition() int { return *r.Position } +// GetReactions returns the Reactions field. +func (r *RepositoryComment) GetReactions() *Reactions { + if r == nil { + return nil + } + return r.Reactions +} + // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. func (r *RepositoryComment) GetUpdatedAt() time.Time { if r == nil || r.UpdatedAt == nil { @@ -5556,6 +9588,22 @@ func (r *RepositoryComment) GetURL() string { return *r.URL } +// GetUser returns the User field. 
+func (r *RepositoryComment) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +// GetAuthor returns the Author field. +func (r *RepositoryCommit) GetAuthor() *User { + if r == nil { + return nil + } + return r.Author +} + // GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. func (r *RepositoryCommit) GetCommentsURL() string { if r == nil || r.CommentsURL == nil { @@ -5564,6 +9612,22 @@ func (r *RepositoryCommit) GetCommentsURL() string { return *r.CommentsURL } +// GetCommit returns the Commit field. +func (r *RepositoryCommit) GetCommit() *Commit { + if r == nil { + return nil + } + return r.Commit +} + +// GetCommitter returns the Committer field. +func (r *RepositoryCommit) GetCommitter() *User { + if r == nil { + return nil + } + return r.Committer +} + // GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. func (r *RepositoryCommit) GetHTMLURL() string { if r == nil || r.HTMLURL == nil { @@ -5580,6 +9644,14 @@ func (r *RepositoryCommit) GetSHA() string { return *r.SHA } +// GetStats returns the Stats field. +func (r *RepositoryCommit) GetStats() *CommitStats { + if r == nil { + return nil + } + return r.Stats +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (r *RepositoryCommit) GetURL() string { if r == nil || r.URL == nil { @@ -5652,6 +9724,14 @@ func (r *RepositoryContent) GetSize() int { return *r.Size } +// GetTarget returns the Target field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetTarget() string { + if r == nil || r.Target == nil { + return "" + } + return *r.Target +} + // GetType returns the Type field if it's non-nil, zero value otherwise. func (r *RepositoryContent) GetType() string { if r == nil || r.Type == nil { @@ -5668,6 +9748,14 @@ func (r *RepositoryContent) GetURL() string { return *r.URL } +// GetAuthor returns the Author field. +func (r *RepositoryContentFileOptions) GetAuthor() *CommitAuthor { + if r == nil { + return nil + } + return r.Author +} + // GetBranch returns the Branch field if it's non-nil, zero value otherwise. func (r *RepositoryContentFileOptions) GetBranch() string { if r == nil || r.Branch == nil { @@ -5676,6 +9764,14 @@ func (r *RepositoryContentFileOptions) GetBranch() string { return *r.Branch } +// GetCommitter returns the Committer field. +func (r *RepositoryContentFileOptions) GetCommitter() *CommitAuthor { + if r == nil { + return nil + } + return r.Committer +} + // GetMessage returns the Message field if it's non-nil, zero value otherwise. func (r *RepositoryContentFileOptions) GetMessage() string { if r == nil || r.Message == nil { @@ -5692,6 +9788,14 @@ func (r *RepositoryContentFileOptions) GetSHA() string { return *r.SHA } +// GetContent returns the Content field. +func (r *RepositoryContentResponse) GetContent() *RepositoryContent { + if r == nil { + return nil + } + return r.Content +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (r *RepositoryEvent) GetAction() string { if r == nil || r.Action == nil { @@ -5700,6 +9804,38 @@ func (r *RepositoryEvent) GetAction() string { return *r.Action } +// GetInstallation returns the Installation field. +func (r *RepositoryEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrg returns the Org field. +func (r *RepositoryEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRepo returns the Repo field. 
+func (r *RepositoryEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *RepositoryEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (r *RepositoryInvitation) GetCreatedAt() Timestamp { if r == nil || r.CreatedAt == nil { @@ -5717,13 +9853,29 @@ func (r *RepositoryInvitation) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetID() int { +func (r *RepositoryInvitation) GetID() int64 { if r == nil || r.ID == nil { return 0 } return *r.ID } +// GetInvitee returns the Invitee field. +func (r *RepositoryInvitation) GetInvitee() *User { + if r == nil { + return nil + } + return r.Invitee +} + +// GetInviter returns the Inviter field. +func (r *RepositoryInvitation) GetInviter() *User { + if r == nil { + return nil + } + return r.Inviter +} + // GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. func (r *RepositoryInvitation) GetPermissions() string { if r == nil || r.Permissions == nil { @@ -5732,6 +9884,14 @@ func (r *RepositoryInvitation) GetPermissions() string { return *r.Permissions } +// GetRepo returns the Repo field. +func (r *RepositoryInvitation) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (r *RepositoryInvitation) GetURL() string { if r == nil || r.URL == nil { @@ -5777,7 +9937,15 @@ func (r *RepositoryLicense) GetHTMLURL() string { if r == nil || r.HTMLURL == nil { return "" } - return *r.HTMLURL + return *r.HTMLURL +} + +// GetLicense returns the License field. +func (r *RepositoryLicense) GetLicense() *License { + if r == nil { + return nil + } + return r.License } // GetName returns the Name field if it's non-nil, zero value otherwise. @@ -5860,6 +10028,14 @@ func (r *RepositoryPermissionLevel) GetPermission() string { return *r.Permission } +// GetUser returns the User field. +func (r *RepositoryPermissionLevel) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + // GetAssetsURL returns the AssetsURL field if it's non-nil, zero value otherwise. func (r *RepositoryRelease) GetAssetsURL() string { if r == nil || r.AssetsURL == nil { @@ -5868,6 +10044,14 @@ func (r *RepositoryRelease) GetAssetsURL() string { return *r.AssetsURL } +// GetAuthor returns the Author field. +func (r *RepositoryRelease) GetAuthor() *User { + if r == nil { + return nil + } + return r.Author +} + // GetBody returns the Body field if it's non-nil, zero value otherwise. func (r *RepositoryRelease) GetBody() string { if r == nil || r.Body == nil { @@ -5901,7 +10085,7 @@ func (r *RepositoryRelease) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetID() int { +func (r *RepositoryRelease) GetID() int64 { if r == nil || r.ID == nil { return 0 } @@ -5916,6 +10100,14 @@ func (r *RepositoryRelease) GetName() string { return *r.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + // GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. 
func (r *RepositoryRelease) GetPrerelease() bool { if r == nil || r.Prerelease == nil { @@ -5980,6 +10172,14 @@ func (r *RepositoryRelease) GetZipballURL() string { return *r.ZipballURL } +// GetCommit returns the Commit field. +func (r *RepositoryTag) GetCommit() *Commit { + if r == nil { + return nil + } + return r.Commit +} + // GetName returns the Name field if it's non-nil, zero value otherwise. func (r *RepositoryTag) GetName() string { if r == nil || r.Name == nil { @@ -6004,6 +10204,62 @@ func (r *RepositoryTag) GetZipballURL() string { return *r.ZipballURL } +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlertEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetForkRepos returns the ForkRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetForkRepos() int { + if r == nil || r.ForkRepos == nil { + return 0 + } + return *r.ForkRepos +} + +// GetOrgRepos returns the OrgRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetOrgRepos() int { + if r == nil || r.OrgRepos == nil { + return 0 + } + return *r.OrgRepos +} + +// GetRootRepos returns the RootRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetRootRepos() int { + if r == nil || r.RootRepos == nil { + return 0 + } + return *r.RootRepos +} + +// GetTotalPushes returns the TotalPushes field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalPushes() int { + if r == nil || r.TotalPushes == nil { + return 0 + } + return *r.TotalPushes +} + +// GetTotalRepos returns the TotalRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalRepos() int { + if r == nil || r.TotalRepos == nil { + return 0 + } + return *r.TotalRepos +} + +// GetTotalWikis returns the TotalWikis field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalWikis() int { + if r == nil || r.TotalWikis == nil { + return 0 + } + return *r.TotalWikis +} + // GetContext returns the Context field if it's non-nil, zero value otherwise. func (r *RepoStatus) GetContext() string { if r == nil || r.Context == nil { @@ -6020,6 +10276,14 @@ func (r *RepoStatus) GetCreatedAt() time.Time { return *r.CreatedAt } +// GetCreator returns the Creator field. +func (r *RepoStatus) GetCreator() *User { + if r == nil { + return nil + } + return r.Creator +} + // GetDescription returns the Description field if it's non-nil, zero value otherwise. func (r *RepoStatus) GetDescription() string { if r == nil || r.Description == nil { @@ -6029,7 +10293,7 @@ func (r *RepoStatus) GetDescription() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetID() int { +func (r *RepoStatus) GetID() int64 { if r == nil || r.ID == nil { return 0 } @@ -6068,6 +10332,14 @@ func (r *RepoStatus) GetURL() string { return *r.URL } +// GetStrict returns the Strict field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecksRequest) GetStrict() bool { + if r == nil || r.Strict == nil { + return false + } + return *r.Strict +} + // GetName returns the Name field if it's non-nil, zero value otherwise. func (s *ServiceHook) GetName() string { if s == nil || s.Name == nil { @@ -6076,6 +10348,22 @@ func (s *ServiceHook) GetName() string { return *s.Name } +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. 
+func (s *SignaturesProtectedBranch) GetEnabled() bool { + if s == nil || s.Enabled == nil { + return false + } + return *s.Enabled +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SignaturesProtectedBranch) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + // GetPayload returns the Payload field if it's non-nil, zero value otherwise. func (s *SignatureVerification) GetPayload() string { if s == nil || s.Payload == nil { @@ -6108,14 +10396,38 @@ func (s *SignatureVerification) GetVerified() bool { return *s.Verified } +// GetActor returns the Actor field. +func (s *Source) GetActor() *User { + if s == nil { + return nil + } + return s.Actor +} + // GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *Source) GetID() int { +func (s *Source) GetID() int64 { if s == nil || s.ID == nil { return 0 } return *s.ID } +// GetIssue returns the Issue field. +func (s *Source) GetIssue() *Issue { + if s == nil { + return nil + } + return s.Issue +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (s *Source) GetType() string { + if s == nil || s.Type == nil { + return "" + } + return *s.Type +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (s *Source) GetURL() string { if s == nil || s.URL == nil { @@ -6133,7 +10445,7 @@ func (s *SourceImportAuthor) GetEmail() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetID() int { +func (s *SourceImportAuthor) GetID() int64 { if s == nil || s.ID == nil { return 0 } @@ -6188,6 +10500,22 @@ func (s *Stargazer) GetStarredAt() Timestamp { return *s.StarredAt } +// GetUser returns the User field. +func (s *Stargazer) GetUser() *User { + if s == nil { + return nil + } + return s.User +} + +// GetRepository returns the Repository field. +func (s *StarredRepository) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + // GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. func (s *StarredRepository) GetStarredAt() Timestamp { if s == nil || s.StarredAt == nil { @@ -6196,20 +10524,12 @@ func (s *StarredRepository) GetStarredAt() Timestamp { return *s.StarredAt } -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. -func (s *startMigration) GetExcludeAttachments() bool { - if s == nil || s.ExcludeAttachments == nil { - return false - } - return *s.ExcludeAttachments -} - -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (s *startMigration) GetLockRepositories() bool { - if s == nil || s.LockRepositories == nil { - return false +// GetCommit returns the Commit field. +func (s *StatusEvent) GetCommit() *RepositoryCommit { + if s == nil { + return nil } - return *s.LockRepositories + return s.Commit } // GetContext returns the Context field if it's non-nil, zero value otherwise. @@ -6237,13 +10557,21 @@ func (s *StatusEvent) GetDescription() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetID() int { +func (s *StatusEvent) GetID() int64 { if s == nil || s.ID == nil { return 0 } return *s.ID } +// GetInstallation returns the Installation field. +func (s *StatusEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + // GetName returns the Name field if it's non-nil, zero value otherwise. 
func (s *StatusEvent) GetName() string { if s == nil || s.Name == nil { @@ -6252,6 +10580,22 @@ func (s *StatusEvent) GetName() string { return *s.Name } +// GetRepo returns the Repo field. +func (s *StatusEvent) GetRepo() *Repository { + if s == nil { + return nil + } + return s.Repo +} + +// GetSender returns the Sender field. +func (s *StatusEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + // GetSHA returns the SHA field if it's non-nil, zero value otherwise. func (s *StatusEvent) GetSHA() string { if s == nil || s.SHA == nil { @@ -6348,6 +10692,22 @@ func (t *Tag) GetMessage() string { return *t.Message } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (t *Tag) GetNodeID() string { + if t == nil || t.NodeID == nil { + return "" + } + return *t.NodeID +} + +// GetObject returns the Object field. +func (t *Tag) GetObject() *GitObject { + if t == nil { + return nil + } + return t.Object +} + // GetSHA returns the SHA field if it's non-nil, zero value otherwise. func (t *Tag) GetSHA() string { if t == nil || t.SHA == nil { @@ -6364,6 +10724,14 @@ func (t *Tag) GetTag() string { return *t.Tag } +// GetTagger returns the Tagger field. +func (t *Tag) GetTagger() *CommitAuthor { + if t == nil { + return nil + } + return t.Tagger +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (t *Tag) GetURL() string { if t == nil || t.URL == nil { @@ -6372,6 +10740,14 @@ func (t *Tag) GetURL() string { return *t.URL } +// GetVerification returns the Verification field. +func (t *Tag) GetVerification() *SignatureVerification { + if t == nil { + return nil + } + return t.Verification +} + // GetDescription returns the Description field if it's non-nil, zero value otherwise. func (t *Team) GetDescription() string { if t == nil || t.Description == nil { @@ -6381,7 +10757,7 @@ func (t *Team) GetDescription() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Team) GetID() int { +func (t *Team) GetID() int64 { if t == nil || t.ID == nil { return 0 } @@ -6412,56 +10788,256 @@ func (t *Team) GetMembersURL() string { return *t.MembersURL } -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *Team) GetName() string { - if t == nil || t.Name == nil { - return "" +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *Team) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetOrganization returns the Organization field. +func (t *Team) GetOrganization() *Organization { + if t == nil { + return nil + } + return t.Organization +} + +// GetParent returns the Parent field. +func (t *Team) GetParent() *Team { + if t == nil { + return nil + } + return t.Parent +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (t *Team) GetPermission() string { + if t == nil || t.Permission == nil { + return "" + } + return *t.Permission +} + +// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. +func (t *Team) GetPrivacy() string { + if t == nil || t.Privacy == nil { + return "" + } + return *t.Privacy +} + +// GetReposCount returns the ReposCount field if it's non-nil, zero value otherwise. +func (t *Team) GetReposCount() int { + if t == nil || t.ReposCount == nil { + return 0 + } + return *t.ReposCount +} + +// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. 
+func (t *Team) GetRepositoriesURL() string { + if t == nil || t.RepositoriesURL == nil { + return "" + } + return *t.RepositoriesURL +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (t *Team) GetSlug() string { + if t == nil || t.Slug == nil { + return "" + } + return *t.Slug +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *Team) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetInstallation returns the Installation field. +func (t *TeamAddEvent) GetInstallation() *Installation { + if t == nil { + return nil + } + return t.Installation +} + +// GetOrg returns the Org field. +func (t *TeamAddEvent) GetOrg() *Organization { + if t == nil { + return nil + } + return t.Org +} + +// GetRepo returns the Repo field. +func (t *TeamAddEvent) GetRepo() *Repository { + if t == nil { + return nil + } + return t.Repo +} + +// GetSender returns the Sender field. +func (t *TeamAddEvent) GetSender() *User { + if t == nil { + return nil + } + return t.Sender +} + +// GetTeam returns the Team field. +func (t *TeamAddEvent) GetTeam() *Team { + if t == nil { + return nil + } + return t.Team +} + +// GetAuthor returns the Author field. +func (t *TeamDiscussion) GetAuthor() *User { + if t == nil { + return nil + } + return t.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBody() string { + if t == nil || t.Body == nil { + return "" + } + return *t.Body +} + +// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBodyHTML() string { + if t == nil || t.BodyHTML == nil { + return "" + } + return *t.BodyHTML +} + +// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBodyVersion() string { + if t == nil || t.BodyVersion == nil { + return "" + } + return *t.BodyVersion +} + +// GetCommentsCount returns the CommentsCount field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCommentsCount() int { + if t == nil || t.CommentsCount == nil { + return 0 + } + return *t.CommentsCount +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCommentsURL() string { + if t == nil || t.CommentsURL == nil { + return "" + } + return *t.CommentsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCreatedAt() Timestamp { + if t == nil || t.CreatedAt == nil { + return Timestamp{} + } + return *t.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetHTMLURL() string { + if t == nil || t.HTMLURL == nil { + return "" + } + return *t.HTMLURL +} + +// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetLastEditedAt() Timestamp { + if t == nil || t.LastEditedAt == nil { + return Timestamp{} + } + return *t.LastEditedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetNodeID() string { + if t == nil || t.NodeID == nil { + return "" + } + return *t.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. 
+func (t *TeamDiscussion) GetNumber() int { + if t == nil || t.Number == nil { + return 0 } - return *t.Name + return *t.Number } -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *Team) GetPermission() string { - if t == nil || t.Permission == nil { - return "" +// GetPinned returns the Pinned field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetPinned() bool { + if t == nil || t.Pinned == nil { + return false } - return *t.Permission + return *t.Pinned } -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (t *Team) GetPrivacy() string { - if t == nil || t.Privacy == nil { - return "" +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetPrivate() bool { + if t == nil || t.Private == nil { + return false } - return *t.Privacy + return *t.Private } -// GetReposCount returns the ReposCount field if it's non-nil, zero value otherwise. -func (t *Team) GetReposCount() int { - if t == nil || t.ReposCount == nil { - return 0 +// GetReactions returns the Reactions field. +func (t *TeamDiscussion) GetReactions() *Reactions { + if t == nil { + return nil } - return *t.ReposCount + return t.Reactions } -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (t *Team) GetRepositoriesURL() string { - if t == nil || t.RepositoriesURL == nil { +// GetTeamURL returns the TeamURL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetTeamURL() string { + if t == nil || t.TeamURL == nil { return "" } - return *t.RepositoriesURL + return *t.TeamURL } -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (t *Team) GetSlug() string { - if t == nil || t.Slug == nil { +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetTitle() string { + if t == nil || t.Title == nil { return "" } - return *t.Slug + return *t.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetUpdatedAt() Timestamp { + if t == nil || t.UpdatedAt == nil { + return Timestamp{} + } + return *t.UpdatedAt } // GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Team) GetURL() string { +func (t *TeamDiscussion) GetURL() string { if t == nil || t.URL == nil { return "" } @@ -6476,6 +11052,54 @@ func (t *TeamEvent) GetAction() string { return *t.Action } +// GetChanges returns the Changes field. +func (t *TeamEvent) GetChanges() *TeamChange { + if t == nil { + return nil + } + return t.Changes +} + +// GetInstallation returns the Installation field. +func (t *TeamEvent) GetInstallation() *Installation { + if t == nil { + return nil + } + return t.Installation +} + +// GetOrg returns the Org field. +func (t *TeamEvent) GetOrg() *Organization { + if t == nil { + return nil + } + return t.Org +} + +// GetRepo returns the Repo field. +func (t *TeamEvent) GetRepo() *Repository { + if t == nil { + return nil + } + return t.Repo +} + +// GetSender returns the Sender field. +func (t *TeamEvent) GetSender() *User { + if t == nil { + return nil + } + return t.Sender +} + +// GetTeam returns the Team field. +func (t *TeamEvent) GetTeam() *Team { + if t == nil { + return nil + } + return t.Team +} + // GetDescription returns the Description field if it's non-nil, zero value otherwise. 
func (t *TeamLDAPMapping) GetDescription() string { if t == nil || t.Description == nil { @@ -6485,7 +11109,7 @@ func (t *TeamLDAPMapping) GetDescription() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetID() int { +func (t *TeamLDAPMapping) GetID() int64 { if t == nil || t.ID == nil { return 0 } @@ -6556,6 +11180,14 @@ func (t *TeamLDAPMapping) GetURL() string { return *t.URL } +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (t *TeamProjectOptions) GetPermission() string { + if t == nil || t.Permission == nil { + return "" + } + return *t.Permission +} + // GetFragment returns the Fragment field if it's non-nil, zero value otherwise. func (t *TextMatch) GetFragment() string { if t == nil || t.Fragment == nil { @@ -6588,6 +11220,22 @@ func (t *TextMatch) GetProperty() string { return *t.Property } +// GetActor returns the Actor field. +func (t *Timeline) GetActor() *User { + if t == nil { + return nil + } + return t.Actor +} + +// GetAssignee returns the Assignee field. +func (t *Timeline) GetAssignee() *User { + if t == nil { + return nil + } + return t.Assignee +} + // GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. func (t *Timeline) GetCommitID() string { if t == nil || t.CommitID == nil { @@ -6621,13 +11269,53 @@ func (t *Timeline) GetEvent() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Timeline) GetID() int { +func (t *Timeline) GetID() int64 { if t == nil || t.ID == nil { return 0 } return *t.ID } +// GetLabel returns the Label field. +func (t *Timeline) GetLabel() *Label { + if t == nil { + return nil + } + return t.Label +} + +// GetMilestone returns the Milestone field. +func (t *Timeline) GetMilestone() *Milestone { + if t == nil { + return nil + } + return t.Milestone +} + +// GetProjectCard returns the ProjectCard field. +func (t *Timeline) GetProjectCard() *ProjectCard { + if t == nil { + return nil + } + return t.ProjectCard +} + +// GetRename returns the Rename field. +func (t *Timeline) GetRename() *Rename { + if t == nil { + return nil + } + return t.Rename +} + +// GetSource returns the Source field. +func (t *Timeline) GetSource() *Source { + if t == nil { + return nil + } + return t.Source +} + // GetURL returns the URL field if it's non-nil, zero value otherwise. func (t *Timeline) GetURL() string { if t == nil || t.URL == nil { @@ -6756,6 +11444,14 @@ func (t *Tree) GetSHA() string { return *t.SHA } +// GetTruncated returns the Truncated field if it's non-nil, zero value otherwise. +func (t *Tree) GetTruncated() bool { + if t == nil || t.Truncated == nil { + return false + } + return *t.Truncated +} + // GetContent returns the Content field if it's non-nil, zero value otherwise. func (t *TreeEntry) GetContent() string { if t == nil || t.Content == nil { @@ -6812,20 +11508,68 @@ func (t *TreeEntry) GetURL() string { return *t.URL } -// GetForce returns the Force field if it's non-nil, zero value otherwise. -func (u *updateRefRequest) GetForce() bool { - if u == nil || u.Force == nil { - return false +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetCompletedAt() Timestamp { + if u == nil || u.CompletedAt == nil { + return Timestamp{} } - return *u.Force + return *u.CompletedAt } -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (u *updateRefRequest) GetSHA() string { - if u == nil || u.SHA == nil { +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetConclusion() string { + if u == nil || u.Conclusion == nil { + return "" + } + return *u.Conclusion +} + +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetDetailsURL() string { + if u == nil || u.DetailsURL == nil { + return "" + } + return *u.DetailsURL +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetExternalID() string { + if u == nil || u.ExternalID == nil { + return "" + } + return *u.ExternalID +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetHeadBranch() string { + if u == nil || u.HeadBranch == nil { + return "" + } + return *u.HeadBranch +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetHeadSHA() string { + if u == nil || u.HeadSHA == nil { + return "" + } + return *u.HeadSHA +} + +// GetOutput returns the Output field. +func (u *UpdateCheckRunOptions) GetOutput() *CheckRunOutput { + if u == nil { + return nil + } + return u.Output +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetStatus() string { + if u == nil || u.Status == nil { return "" } - return *u.SHA + return *u.Status } // GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. @@ -6965,7 +11709,7 @@ func (u *User) GetHTMLURL() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *User) GetID() int { +func (u *User) GetID() int64 { if u == nil || u.ID == nil { return 0 } @@ -6996,6 +11740,14 @@ func (u *User) GetName() string { return *u.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (u *User) GetNodeID() string { + if u == nil || u.NodeID == nil { + return "" + } + return *u.NodeID +} + // GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. func (u *User) GetOrganizationsURL() string { if u == nil || u.OrganizationsURL == nil { @@ -7020,6 +11772,14 @@ func (u *User) GetPermissions() map[string]bool { return *u.Permissions } +// GetPlan returns the Plan field. +func (u *User) GetPlan() *Plan { + if u == nil { + return nil + } + return u.Plan +} + // GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. func (u *User) GetPrivateGists() int { if u == nil || u.PrivateGists == nil { @@ -7100,6 +11860,14 @@ func (u *User) GetTotalPrivateRepos() int { return *u.TotalPrivateRepos } +// GetTwoFactorAuthentication returns the TwoFactorAuthentication field if it's non-nil, zero value otherwise. +func (u *User) GetTwoFactorAuthentication() bool { + if u == nil || u.TwoFactorAuthentication == nil { + return false + } + return *u.TwoFactorAuthentication +} + // GetType returns the Type field if it's non-nil, zero value otherwise. func (u *User) GetType() string { if u == nil || u.Type == nil { @@ -7124,6 +11892,22 @@ func (u *User) GetURL() string { return *u.URL } +// GetMessage returns the Message field if it's non-nil, zero value otherwise. 
+func (u *UserContext) GetMessage() string { + if u == nil || u.Message == nil { + return "" + } + return *u.Message +} + +// GetOcticon returns the Octicon field if it's non-nil, zero value otherwise. +func (u *UserContext) GetOcticon() string { + if u == nil || u.Octicon == nil { + return "" + } + return *u.Octicon +} + // GetEmail returns the Email field if it's non-nil, zero value otherwise. func (u *UserEmail) GetEmail() string { if u == nil || u.Email == nil { @@ -7197,7 +11981,7 @@ func (u *UserLDAPMapping) GetGravatarID() string { } // GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetID() int { +func (u *UserLDAPMapping) GetID() int64 { if u == nil || u.ID == nil { return 0 } @@ -7284,6 +12068,70 @@ func (u *UserLDAPMapping) GetURL() string { return *u.URL } +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetCreatedAt() string { + if u == nil || u.CreatedAt == nil { + return "" + } + return *u.CreatedAt +} + +// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetExcludeAttachments() bool { + if u == nil || u.ExcludeAttachments == nil { + return false + } + return *u.ExcludeAttachments +} + +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetGUID() string { + if u == nil || u.GUID == nil { + return "" + } + return *u.GUID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetID() int64 { + if u == nil || u.ID == nil { + return 0 + } + return *u.ID +} + +// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetLockRepositories() bool { + if u == nil || u.LockRepositories == nil { + return false + } + return *u.LockRepositories +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetState() string { + if u == nil || u.State == nil { + return "" + } + return *u.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetUpdatedAt() string { + if u == nil || u.UpdatedAt == nil { + return "" + } + return *u.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetURL() string { + if u == nil || u.URL == nil { + return "" + } + return *u.URL +} + // GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. func (u *UsersSearchResult) GetIncompleteResults() bool { if u == nil || u.IncompleteResults == nil { @@ -7300,6 +12148,38 @@ func (u *UsersSearchResult) GetTotal() int { return *u.Total } +// GetAdminUsers returns the AdminUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetAdminUsers() int { + if u == nil || u.AdminUsers == nil { + return 0 + } + return *u.AdminUsers +} + +// GetSuspendedUsers returns the SuspendedUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetSuspendedUsers() int { + if u == nil || u.SuspendedUsers == nil { + return 0 + } + return *u.SuspendedUsers +} + +// GetTotalUsers returns the TotalUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetTotalUsers() int { + if u == nil || u.TotalUsers == nil { + return 0 + } + return *u.TotalUsers +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. 
+func (u *UserSuspendOptions) GetReason() string { + if u == nil || u.Reason == nil { + return "" + } + return *u.Reason +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (w *WatchEvent) GetAction() string { if w == nil || w.Action == nil { @@ -7308,6 +12188,30 @@ func (w *WatchEvent) GetAction() string { return *w.Action } +// GetInstallation returns the Installation field. +func (w *WatchEvent) GetInstallation() *Installation { + if w == nil { + return nil + } + return w.Installation +} + +// GetRepo returns the Repo field. +func (w *WatchEvent) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WatchEvent) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + // GetEmail returns the Email field if it's non-nil, zero value otherwise. func (w *WebHookAuthor) GetEmail() string { if w == nil || w.Email == nil { @@ -7332,6 +12236,22 @@ func (w *WebHookAuthor) GetUsername() string { return *w.Username } +// GetAuthor returns the Author field. +func (w *WebHookCommit) GetAuthor() *WebHookAuthor { + if w == nil { + return nil + } + return w.Author +} + +// GetCommitter returns the Committer field. +func (w *WebHookCommit) GetCommitter() *WebHookAuthor { + if w == nil { + return nil + } + return w.Committer +} + // GetDistinct returns the Distinct field if it's non-nil, zero value otherwise. func (w *WebHookCommit) GetDistinct() bool { if w == nil || w.Distinct == nil { @@ -7412,6 +12332,22 @@ func (w *WebHookPayload) GetForced() bool { return *w.Forced } +// GetHeadCommit returns the HeadCommit field. +func (w *WebHookPayload) GetHeadCommit() *WebHookCommit { + if w == nil { + return nil + } + return w.HeadCommit +} + +// GetPusher returns the Pusher field. +func (w *WebHookPayload) GetPusher() *User { + if w == nil { + return nil + } + return w.Pusher +} + // GetRef returns the Ref field if it's non-nil, zero value otherwise. func (w *WebHookPayload) GetRef() string { if w == nil || w.Ref == nil { @@ -7420,6 +12356,22 @@ func (w *WebHookPayload) GetRef() string { return *w.Ref } +// GetRepo returns the Repo field. +func (w *WebHookPayload) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WebHookPayload) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + // GetTotal returns the Total field if it's non-nil, zero value otherwise. 
func (w *WeeklyCommitActivity) GetTotal() int { if w == nil || w.Total == nil { diff --git a/vendor/github.com/google/go-github/github/github.go b/vendor/github.com/google/go-github/github/github.go index 5f066cf..4d86d76 100644 --- a/vendor/github.com/google/go-github/github/github.go +++ b/vendor/github.com/google/go-github/github/github.go @@ -27,10 +27,9 @@ import ( ) const ( - libraryVersion = "8" defaultBaseURL = "https://api.github.com/" uploadBaseURL = "https://uploads.github.com/" - userAgent = "go-github/" + libraryVersion + userAgent = "go-github" headerRateLimit = "X-RateLimit-Limit" headerRateRemaining = "X-RateLimit-Remaining" @@ -46,40 +45,27 @@ const ( // Media Type values to access preview APIs - // https://developer.github.com/changes/2015-03-09-licenses-api/ - mediaTypeLicensesPreview = "application/vnd.github.drax-preview+json" - // https://developer.github.com/changes/2014-12-09-new-attributes-for-stars-api/ mediaTypeStarringPreview = "application/vnd.github.v3.star+json" - // https://developer.github.com/changes/2015-11-11-protected-branches-api/ - mediaTypeProtectedBranchesPreview = "application/vnd.github.loki-preview+json" - // https://help.github.com/enterprise/2.4/admin/guides/migrations/exporting-the-github-com-organization-s-repositories/ mediaTypeMigrationsPreview = "application/vnd.github.wyandotte-preview+json" // https://developer.github.com/changes/2016-04-06-deployment-and-deployment-status-enhancements/ mediaTypeDeploymentStatusPreview = "application/vnd.github.ant-man-preview+json" + // https://developer.github.com/changes/2018-10-16-deployments-environments-states-and-auto-inactive-updates/ + mediaTypeExpandDeploymentStatusPreview = "application/vnd.github.flash-preview+json" + // https://developer.github.com/changes/2016-02-19-source-import-preview-api/ mediaTypeImportPreview = "application/vnd.github.barred-rock-preview" // https://developer.github.com/changes/2016-05-12-reactions-api-preview/ mediaTypeReactionsPreview = "application/vnd.github.squirrel-girl-preview" - // https://developer.github.com/changes/2016-04-01-squash-api-preview/ - // https://developer.github.com/changes/2016-09-26-pull-request-merge-api-update/ - mediaTypeSquashPreview = "application/vnd.github.polaris-preview+json" - - // https://developer.github.com/changes/2016-04-04-git-signing-api-preview/ - mediaTypeGitSigningPreview = "application/vnd.github.cryptographer-preview+json" - // https://developer.github.com/changes/2016-05-23-timeline-preview-api/ mediaTypeTimelinePreview = "application/vnd.github.mockingbird-preview+json" - // https://developer.github.com/changes/2016-06-14-repository-invitations/ - mediaTypeRepositoryInvitationsPreview = "application/vnd.github.swamp-thing-preview+json" - // https://developer.github.com/changes/2016-07-06-github-pages-preiew-api/ mediaTypePagesPreview = "application/vnd.github.mister-fantastic-preview+json" @@ -100,6 +86,51 @@ const ( // https://developer.github.com/changes/2017-05-23-coc-api/ mediaTypeCodesOfConductPreview = "application/vnd.github.scarlet-witch-preview+json" + + // https://developer.github.com/changes/2017-07-17-update-topics-on-repositories/ + mediaTypeTopicsPreview = "application/vnd.github.mercy-preview+json" + + // https://developer.github.com/changes/2017-08-30-preview-nested-teams/ + mediaTypeNestedTeamsPreview = "application/vnd.github.hellcat-preview+json" + + // https://developer.github.com/changes/2017-11-09-repository-transfer-api-preview/ + mediaTypeRepositoryTransferPreview = 
"application/vnd.github.nightshade-preview+json" + + // https://developer.github.com/changes/2018-01-25-organization-invitation-api-preview/ + mediaTypeOrganizationInvitationPreview = "application/vnd.github.dazzler-preview+json" + + // https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ + mediaTypeRequiredApprovingReviewsPreview = "application/vnd.github.luke-cage-preview+json" + + // https://developer.github.com/changes/2018-02-22-label-description-search-preview/ + mediaTypeLabelDescriptionSearchPreview = "application/vnd.github.symmetra-preview+json" + + // https://developer.github.com/changes/2018-02-07-team-discussions-api/ + mediaTypeTeamDiscussionsPreview = "application/vnd.github.echo-preview+json" + + // https://developer.github.com/changes/2018-03-21-hovercard-api-preview/ + mediaTypeHovercardPreview = "application/vnd.github.hagar-preview+json" + + // https://developer.github.com/changes/2018-01-10-lock-reason-api-preview/ + mediaTypeLockReasonPreview = "application/vnd.github.sailor-v-preview+json" + + // https://developer.github.com/changes/2018-05-07-new-checks-api-public-beta/ + mediaTypeCheckRunsPreview = "application/vnd.github.antiope-preview+json" + + // https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/ + mediaTypePreReceiveHooksPreview = "application/vnd.github.eye-scream-preview" + + // https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ + mediaTypeSignaturePreview = "application/vnd.github.zzzax-preview+json" + + // https://developer.github.com/changes/2018-09-05-project-card-events/ + mediaTypeProjectCardDetailsPreview = "application/vnd.github.starfox-preview+json" + + // https://developer.github.com/changes/2018-12-18-interactions-preview/ + mediaTypeInteractionRestrictionsPreview = "application/vnd.github.sombra-preview+json" + + // https://developer.github.com/changes/2019-02-14-draft-pull-requests/ + mediaTypeDraftPreview = "application/vnd.github.shadow-cat-preview+json" ) // A Client manages communication with the GitHub API. @@ -128,19 +159,23 @@ type Client struct { Admin *AdminService Apps *AppsService Authorizations *AuthorizationsService + Checks *ChecksService Gists *GistsService Git *GitService Gitignores *GitignoresService + Interactions *InteractionsService Issues *IssuesService + Licenses *LicensesService + Marketplace *MarketplaceService + Migrations *MigrationService Organizations *OrganizationsService Projects *ProjectsService PullRequests *PullRequestsService + Reactions *ReactionsService Repositories *RepositoriesService Search *SearchService + Teams *TeamsService Users *UsersService - Licenses *LicensesService - Migrations *MigrationService - Reactions *ReactionsService } type service struct { @@ -159,7 +194,9 @@ type ListOptions struct { // UploadOptions specifies the parameters to methods that support uploads. type UploadOptions struct { - Name string `url:"name,omitempty"` + Name string `url:"name,omitempty"` + Label string `url:"label,omitempty"` + MediaType string `url:"-"` } // RawType represents type of raw format of a request instead of JSON. 
@@ -217,11 +254,14 @@ func NewClient(httpClient *http.Client) *Client { c.Admin = (*AdminService)(&c.common) c.Apps = (*AppsService)(&c.common) c.Authorizations = (*AuthorizationsService)(&c.common) + c.Checks = (*ChecksService)(&c.common) c.Gists = (*GistsService)(&c.common) c.Git = (*GitService)(&c.common) c.Gitignores = (*GitignoresService)(&c.common) + c.Interactions = (*InteractionsService)(&c.common) c.Issues = (*IssuesService)(&c.common) c.Licenses = (*LicensesService)(&c.common) + c.Marketplace = &MarketplaceService{client: c} c.Migrations = (*MigrationService)(&c.common) c.Organizations = (*OrganizationsService)(&c.common) c.Projects = (*ProjectsService)(&c.common) @@ -229,27 +269,62 @@ func NewClient(httpClient *http.Client) *Client { c.Reactions = (*ReactionsService)(&c.common) c.Repositories = (*RepositoriesService)(&c.common) c.Search = (*SearchService)(&c.common) + c.Teams = (*TeamsService)(&c.common) c.Users = (*UsersService)(&c.common) return c } +// NewEnterpriseClient returns a new GitHub API client with provided +// base URL and upload URL (often the same URL). +// If either URL does not have a trailing slash, one is added automatically. +// If a nil httpClient is provided, http.DefaultClient will be used. +// +// Note that NewEnterpriseClient is a convenience helper only; +// its behavior is equivalent to using NewClient, followed by setting +// the BaseURL and UploadURL fields. +func NewEnterpriseClient(baseURL, uploadURL string, httpClient *http.Client) (*Client, error) { + baseEndpoint, err := url.Parse(baseURL) + if err != nil { + return nil, err + } + if !strings.HasSuffix(baseEndpoint.Path, "/") { + baseEndpoint.Path += "/" + } + + uploadEndpoint, err := url.Parse(uploadURL) + if err != nil { + return nil, err + } + if !strings.HasSuffix(uploadEndpoint.Path, "/") { + uploadEndpoint.Path += "/" + } + + c := NewClient(httpClient) + c.BaseURL = baseEndpoint + c.UploadURL = uploadEndpoint + return c, nil +} + // NewRequest creates an API request. A relative URL can be provided in urlStr, // in which case it is resolved relative to the BaseURL of the Client. // Relative URLs should always be specified without a preceding slash. If // specified, the value pointed to by body is JSON encoded and included as the // request body. func (c *Client) NewRequest(method, urlStr string, body interface{}) (*http.Request, error) { - rel, err := url.Parse(urlStr) + if !strings.HasSuffix(c.BaseURL.Path, "/") { + return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) + } + u, err := c.BaseURL.Parse(urlStr) if err != nil { return nil, err } - u := c.BaseURL.ResolveReference(rel) - var buf io.ReadWriter if body != nil { buf = new(bytes.Buffer) - err := json.NewEncoder(buf).Encode(body) + enc := json.NewEncoder(buf) + enc.SetEscapeHTML(false) + err := enc.Encode(body) if err != nil { return nil, err } @@ -274,12 +349,14 @@ func (c *Client) NewRequest(method, urlStr string, body interface{}) (*http.Requ // urlStr, in which case it is resolved relative to the UploadURL of the Client. // Relative URLs should always be specified without a preceding slash. 
func (c *Client) NewUploadRequest(urlStr string, reader io.Reader, size int64, mediaType string) (*http.Request, error) { - rel, err := url.Parse(urlStr) + if !strings.HasSuffix(c.UploadURL.Path, "/") { + return nil, fmt.Errorf("UploadURL must have a trailing slash, but %q does not", c.UploadURL) + } + u, err := c.UploadURL.Parse(urlStr) if err != nil { return nil, err } - u := c.UploadURL.ResolveReference(rel) req, err := http.NewRequest("POST", u.String(), reader) if err != nil { return nil, err @@ -311,7 +388,9 @@ type Response struct { FirstPage int LastPage int - Rate + // Explicitly specify the Rate type so Rate's String() receiver doesn't + // propagate to Response. + Rate Rate } // newResponse creates a new Response for the provided http.Response. @@ -394,7 +473,7 @@ func parseRate(r *http.Response) Rate { // The provided ctx must be non-nil. If it is canceled or times out, // ctx.Err() will be returned. func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) { - ctx, req = withContext(ctx, req) + req = withContext(ctx, req) rateLimitCategory := category(req.URL.Path) @@ -426,12 +505,7 @@ func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Res return nil, err } - - defer func() { - // Drain up to 512 bytes and close the body to let the Transport reuse the connection - io.CopyN(ioutil.Discard, resp.Body, 512) - resp.Body.Close() - }() + defer resp.Body.Close() response := newResponse(resp) @@ -441,8 +515,22 @@ func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Res err = CheckResponse(resp) if err != nil { - // even though there was an error, we still return the response - // in case the caller wants to inspect it further + // Special case for AcceptedErrors. If an AcceptedError + // has been encountered, the response's payload will be + // added to the AcceptedError and returned. + // + // Issue #1022 + aerr, ok := err.(*AcceptedError) + if ok { + b, readErr := ioutil.ReadAll(resp.Body) + if readErr != nil { + return response, readErr + } + + aerr.Raw = b + return response, aerr + } + return response, err } @@ -450,9 +538,12 @@ func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Res if w, ok := v.(io.Writer); ok { io.Copy(w, resp.Body) } else { - err = json.NewDecoder(resp.Body).Decode(v) - if err == io.EOF { - err = nil // ignore EOF errors caused by empty response body + decErr := json.NewDecoder(resp.Body).Decode(v) + if decErr == io.EOF { + decErr = nil // ignore EOF errors caused by empty response body + } + if decErr != nil { + err = decErr } } } @@ -542,14 +633,17 @@ func (r *RateLimitError) Error() string { // Technically, 202 Accepted is not a real error, it's just used to // indicate that results are not ready yet, but should be available soon. // The request can be repeated after some time. -type AcceptedError struct{} +type AcceptedError struct { + // Raw contains the response body. + Raw []byte +} func (*AcceptedError) Error() string { return "job scheduled on GitHub side; try again later" } // AbuseRateLimitError occurs when GitHub returns 403 Forbidden response with the -// "documentation_url" field value equal to "https://developer.github.com/v3#abuse-rate-limits". +// "documentation_url" field value equal to "https://developer.github.com/v3/#abuse-rate-limits". 
type AbuseRateLimitError struct { Response *http.Response // HTTP response that caused this error Message string `json:"message"` // error message @@ -641,7 +735,7 @@ func CheckResponse(r *http.Response) error { Response: errorResponse.Response, Message: errorResponse.Message, } - case r.StatusCode == http.StatusForbidden && errorResponse.DocumentationURL == "https://developer.github.com/v3#abuse-rate-limits": + case r.StatusCode == http.StatusForbidden && strings.HasSuffix(errorResponse.DocumentationURL, "/v3/#abuse-rate-limits"): abuseRateLimitError := &AbuseRateLimitError{ Response: errorResponse.Response, Message: errorResponse.Message, @@ -807,14 +901,21 @@ func (t *UnauthenticatedRateLimitedTransport) RoundTrip(req *http.Request) (*htt // To set extra querystring params, we must make a copy of the Request so // that we don't modify the Request we were given. This is required by the // specification of http.RoundTripper. - req = cloneRequest(req) - q := req.URL.Query() + // + // Since we are going to modify only req.URL here, we only need a deep copy + // of req.URL. + req2 := new(http.Request) + *req2 = *req + req2.URL = new(url.URL) + *req2.URL = *req.URL + + q := req2.URL.Query() q.Set("client_id", t.ClientID) q.Set("client_secret", t.ClientSecret) - req.URL.RawQuery = q.Encode() + req2.URL.RawQuery = q.Encode() // Make the HTTP request. - return t.transport().RoundTrip(req) + return t.transport().RoundTrip(req2) } // Client returns an *http.Client that makes requests which are subject to the @@ -846,12 +947,24 @@ type BasicAuthTransport struct { // RoundTrip implements the RoundTripper interface. func (t *BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req = cloneRequest(req) // per RoundTrip contract - req.SetBasicAuth(t.Username, t.Password) + // To set extra headers, we must make a copy of the Request so + // that we don't modify the Request we were given. This is required by the + // specification of http.RoundTripper. + // + // Since we are going to modify only req.Header here, we only need a deep copy + // of req.Header. + req2 := new(http.Request) + *req2 = *req + req2.Header = make(http.Header, len(req.Header)) + for k, s := range req.Header { + req2.Header[k] = append([]string(nil), s...) + } + + req2.SetBasicAuth(t.Username, t.Password) if t.OTP != "" { - req.Header.Set(headerOTP, t.OTP) + req2.Header.Set(headerOTP, t.OTP) } - return t.transport().RoundTrip(req) + return t.transport().RoundTrip(req2) } // Client returns an *http.Client that makes requests that are authenticated @@ -867,20 +980,6 @@ func (t *BasicAuthTransport) transport() http.RoundTripper { return http.DefaultTransport } -// cloneRequest returns a clone of the provided *http.Request. The clone is a -// shallow copy of the struct and its Header map. -func cloneRequest(r *http.Request) *http.Request { - // shallow copy of the struct - r2 := new(http.Request) - *r2 = *r - // deep copy of the Header - r2.Header = make(http.Header, len(r.Header)) - for k, s := range r.Header { - r2.Header[k] = append([]string(nil), s...) - } - return r2 -} - // formatRateReset formats d to look like "[rate reset in 2s]" or // "[rate reset in 87m02s]" for the positive durations. And like "[rate limit was reset 87m02s ago]" // for the negative cases. @@ -914,6 +1013,10 @@ func Bool(v bool) *bool { return &v } // to store v and returns a pointer to it. func Int(v int) *int { return &v } +// Int64 is a helper routine that allocates a new int64 value +// to store v and returns a pointer to it. 
+func Int64(v int64) *int64 { return &v } + // String is a helper routine that allocates a new string value // to store v and returns a pointer to it. func String(v string) *string { return &v } diff --git a/vendor/github.com/google/go-github/github/github_test.go b/vendor/github.com/google/go-github/github/github_test.go index c32af59..da29e1e 100644 --- a/vendor/github.com/google/go-github/github/github_test.go +++ b/vendor/github.com/google/go-github/github/github_test.go @@ -22,35 +22,45 @@ import ( "time" ) -var ( - // mux is the HTTP request multiplexer used with the test server. - mux *http.ServeMux - - // client is the GitHub client being tested. - client *Client - - // server is a test HTTP server used to provide mock API responses. - server *httptest.Server +const ( + // baseURLPath is a non-empty Client.BaseURL path to use during tests, + // to ensure relative URLs are used for all endpoints. See issue #752. + baseURLPath = "/api-v3" ) // setup sets up a test HTTP server along with a github.Client that is // configured to talk to that test server. Tests should register handlers on // mux which provide mock responses for the API method being tested. -func setup() { - // test server +func setup() (client *Client, mux *http.ServeMux, serverURL string, teardown func()) { + // mux is the HTTP request multiplexer used with the test server. mux = http.NewServeMux() - server = httptest.NewServer(mux) - // github client configured to use test server + // We want to ensure that tests catch mistakes where the endpoint URL is + // specified as absolute rather than relative. It only makes a difference + // when there's a non-empty base URL path. So, use that. See issue #752. + apiHandler := http.NewServeMux() + apiHandler.Handle(baseURLPath+"/", http.StripPrefix(baseURLPath, mux)) + apiHandler.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { + fmt.Fprintln(os.Stderr, "FAIL: Client.BaseURL path prefix is not preserved in the request URL:") + fmt.Fprintln(os.Stderr) + fmt.Fprintln(os.Stderr, "\t"+req.URL.String()) + fmt.Fprintln(os.Stderr) + fmt.Fprintln(os.Stderr, "\tDid you accidentally use an absolute endpoint URL rather than relative?") + fmt.Fprintln(os.Stderr, "\tSee https://github.com/google/go-github/issues/752 for information.") + http.Error(w, "Client.BaseURL path prefix is not preserved in the request URL.", http.StatusInternalServerError) + }) + + // server is a test HTTP server used to provide mock API responses. + server := httptest.NewServer(apiHandler) + + // client is the GitHub client being tested and is + // configured to use test server. client = NewClient(nil) - url, _ := url.Parse(server.URL) + url, _ := url.Parse(server.URL + baseURLPath + "/") client.BaseURL = url client.UploadURL = url -} -// teardown closes the test HTTP server. -func teardown() { - server.Close() + return client, mux, server.URL, server.Close } // openTestFile creates a new file with the given name and content for testing. 
@@ -164,6 +174,41 @@ func TestNewClient(t *testing.T) { } } +func TestNewEnterpriseClient(t *testing.T) { + baseURL := "https://custom-url/" + uploadURL := "https://custom-upload-url/" + c, err := NewEnterpriseClient(baseURL, uploadURL, nil) + if err != nil { + t.Fatalf("NewEnterpriseClient returned unexpected error: %v", err) + } + + if got, want := c.BaseURL.String(), baseURL; got != want { + t.Errorf("NewClient BaseURL is %v, want %v", got, want) + } + if got, want := c.UploadURL.String(), uploadURL; got != want { + t.Errorf("NewClient UploadURL is %v, want %v", got, want) + } +} + +func TestNewEnterpriseClient_addsTrailingSlashToURLs(t *testing.T) { + baseURL := "https://custom-url" + uploadURL := "https://custom-upload-url" + formattedBaseURL := baseURL + "/" + formattedUploadURL := uploadURL + "/" + + c, err := NewEnterpriseClient(baseURL, uploadURL, nil) + if err != nil { + t.Fatalf("NewEnterpriseClient returned unexpected error: %v", err) + } + + if got, want := c.BaseURL.String(), formattedBaseURL; got != want { + t.Errorf("NewClient BaseURL is %v, want %v", got, want) + } + if got, want := c.UploadURL.String(), formattedUploadURL; got != want { + t.Errorf("NewClient UploadURL is %v, want %v", got, want) + } +} + // Ensure that length of Client.rateLimits is the same as number of fields in RateLimits struct. func TestClient_rateLimits(t *testing.T) { if got, want := len(Client{}.rateLimits), reflect.TypeOf(RateLimits{}).NumField(); got != want { @@ -201,7 +246,7 @@ func TestNewRequest_invalidJSON(t *testing.T) { type T struct { A map[interface{}]interface{} } - _, err := c.NewRequest("GET", "/", &T{}) + _, err := c.NewRequest("GET", ".", &T{}) if err == nil { t.Error("Expected error to be returned.") @@ -222,7 +267,7 @@ func TestNewRequest_badURL(t *testing.T) { func TestNewRequest_emptyUserAgent(t *testing.T) { c := NewClient(nil) c.UserAgent = "" - req, err := c.NewRequest("GET", "/", nil) + req, err := c.NewRequest("GET", ".", nil) if err != nil { t.Fatalf("NewRequest returned unexpected error: %v", err) } @@ -239,7 +284,7 @@ func TestNewRequest_emptyUserAgent(t *testing.T) { // subtle errors. 
func TestNewRequest_emptyBody(t *testing.T) { c := NewClient(nil) - req, err := c.NewRequest("GET", "/", nil) + req, err := c.NewRequest("GET", ".", nil) if err != nil { t.Fatalf("NewRequest returned unexpected error: %v", err) } @@ -248,6 +293,52 @@ func TestNewRequest_emptyBody(t *testing.T) { } } +func TestNewRequest_errorForNoTrailingSlash(t *testing.T) { + tests := []struct { + rawurl string + wantError bool + }{ + {rawurl: "https://example.com/api/v3", wantError: true}, + {rawurl: "https://example.com/api/v3/", wantError: false}, + } + c := NewClient(nil) + for _, test := range tests { + u, err := url.Parse(test.rawurl) + if err != nil { + t.Fatalf("url.Parse returned unexpected error: %v.", err) + } + c.BaseURL = u + if _, err := c.NewRequest(http.MethodGet, "test", nil); test.wantError && err == nil { + t.Fatalf("Expected error to be returned.") + } else if !test.wantError && err != nil { + t.Fatalf("NewRequest returned unexpected error: %v.", err) + } + } +} + +func TestNewUploadRequest_errorForNoTrailingSlash(t *testing.T) { + tests := []struct { + rawurl string + wantError bool + }{ + {rawurl: "https://example.com/api/uploads", wantError: true}, + {rawurl: "https://example.com/api/uploads/", wantError: false}, + } + c := NewClient(nil) + for _, test := range tests { + u, err := url.Parse(test.rawurl) + if err != nil { + t.Fatalf("url.Parse returned unexpected error: %v.", err) + } + c.UploadURL = u + if _, err = c.NewUploadRequest("test", nil, 0, ""); test.wantError && err == nil { + t.Fatalf("Expected error to be returned.") + } else if !test.wantError && err != nil { + t.Fatalf("NewUploadRequest returned unexpected error: %v.", err) + } + } +} + func TestResponse_populatePageValues(t *testing.T) { r := http.Response{ Header: http.Header{ @@ -314,7 +405,7 @@ func TestResponse_populatePageValues_invalid(t *testing.T) { } func TestDo(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() type foo struct { @@ -322,13 +413,11 @@ func TestDo(t *testing.T) { } mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - if m := "GET"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "GET") fmt.Fprint(w, `{"A":"a"}`) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) body := new(foo) client.Do(context.Background(), req, body) @@ -339,18 +428,21 @@ func TestDo(t *testing.T) { } func TestDo_httpError(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { http.Error(w, "Bad Request", 400) }) - req, _ := client.NewRequest("GET", "/", nil) - _, err := client.Do(context.Background(), req, nil) + req, _ := client.NewRequest("GET", ".", nil) + resp, err := client.Do(context.Background(), req, nil) if err == nil { - t.Error("Expected HTTP 400 error.") + t.Fatal("Expected HTTP 400 error, got no error.") + } + if resp.StatusCode != 400 { + t.Errorf("Expected HTTP 400 error, got %d status code.", resp.StatusCode) } } @@ -358,14 +450,14 @@ func TestDo_httpError(t *testing.T) { // function. A redirect loop is pretty unlikely to occur within the GitHub // API, but does allow us to exercise the right code path. 
func TestDo_redirectLoop(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - http.Redirect(w, r, "/", http.StatusFound) + http.Redirect(w, r, baseURLPath, http.StatusFound) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) _, err := client.Do(context.Background(), req, nil) if err == nil { @@ -385,7 +477,7 @@ func TestDo_sanitizeURL(t *testing.T) { } unauthedClient := NewClient(tp.Client()) unauthedClient.BaseURL = &url.URL{Scheme: "http", Host: "127.0.0.1:0", Path: "/"} // Use port 0 on purpose to trigger a dial TCP error, expect to get "dial tcp 127.0.0.1:0: connect: can't assign requested address". - req, err := unauthedClient.NewRequest("GET", "/", nil) + req, err := unauthedClient.NewRequest("GET", ".", nil) if err != nil { t.Fatalf("NewRequest returned unexpected error: %v", err) } @@ -399,7 +491,7 @@ func TestDo_sanitizeURL(t *testing.T) { } func TestDo_rateLimit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -408,7 +500,7 @@ func TestDo_rateLimit(t *testing.T) { w.Header().Set(headerRateReset, "1372700873") }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) resp, err := client.Do(context.Background(), req, nil) if err != nil { t.Errorf("Do returned unexpected error: %v", err) @@ -419,7 +511,7 @@ func TestDo_rateLimit(t *testing.T) { if got, want := resp.Rate.Remaining, 59; got != want { t.Errorf("Client rate remaining = %v, want %v", got, want) } - reset := time.Date(2013, 7, 1, 17, 47, 53, 0, time.UTC) + reset := time.Date(2013, time.July, 1, 17, 47, 53, 0, time.UTC) if resp.Rate.Reset.UTC() != reset { t.Errorf("Client rate reset = %v, want %v", resp.Rate.Reset, reset) } @@ -427,7 +519,7 @@ func TestDo_rateLimit(t *testing.T) { // ensure rate limit is still parsed, even for error responses func TestDo_rateLimit_errorResponse(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -437,7 +529,7 @@ func TestDo_rateLimit_errorResponse(t *testing.T) { http.Error(w, "Bad Request", 400) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) resp, err := client.Do(context.Background(), req, nil) if err == nil { t.Error("Expected error to be returned.") @@ -451,7 +543,7 @@ func TestDo_rateLimit_errorResponse(t *testing.T) { if got, want := resp.Rate.Remaining, 59; got != want { t.Errorf("Client rate remaining = %v, want %v", got, want) } - reset := time.Date(2013, 7, 1, 17, 47, 53, 0, time.UTC) + reset := time.Date(2013, time.July, 1, 17, 47, 53, 0, time.UTC) if resp.Rate.Reset.UTC() != reset { t.Errorf("Client rate reset = %v, want %v", resp.Rate.Reset, reset) } @@ -459,7 +551,7 @@ func TestDo_rateLimit_errorResponse(t *testing.T) { // Ensure *RateLimitError is returned when API rate limit is exceeded. 
func TestDo_rateLimit_rateLimitError(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -474,7 +566,7 @@ func TestDo_rateLimit_rateLimitError(t *testing.T) { }`) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) _, err := client.Do(context.Background(), req, nil) if err == nil { @@ -490,7 +582,7 @@ func TestDo_rateLimit_rateLimitError(t *testing.T) { if got, want := rateLimitErr.Rate.Remaining, 0; got != want { t.Errorf("rateLimitErr rate remaining = %v, want %v", got, want) } - reset := time.Date(2013, 7, 1, 17, 47, 53, 0, time.UTC) + reset := time.Date(2013, time.July, 1, 17, 47, 53, 0, time.UTC) if rateLimitErr.Rate.Reset.UTC() != reset { t.Errorf("rateLimitErr rate reset = %v, want %v", rateLimitErr.Rate.Reset.UTC(), reset) } @@ -498,7 +590,7 @@ func TestDo_rateLimit_rateLimitError(t *testing.T) { // Ensure a network call is not made when it's known that API rate limit is still exceeded. func TestDo_rateLimit_noNetworkCall(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() reset := time.Now().UTC().Add(time.Minute).Round(time.Second) // Rate reset is a minute from now, with 1 second precision. @@ -521,11 +613,11 @@ func TestDo_rateLimit_noNetworkCall(t *testing.T) { }) // First request is made, and it makes the client aware of rate reset time being in the future. - req, _ := client.NewRequest("GET", "/first", nil) + req, _ := client.NewRequest("GET", "first", nil) client.Do(context.Background(), req, nil) // Second request should not cause a network call to be made, since client can predict a rate limit error. - req, _ = client.NewRequest("GET", "/second", nil) + req, _ = client.NewRequest("GET", "second", nil) _, err := client.Do(context.Background(), req, nil) if madeNetworkCall { @@ -553,7 +645,39 @@ func TestDo_rateLimit_noNetworkCall(t *testing.T) { // Ensure *AbuseRateLimitError is returned when the response indicates that // the client has triggered an abuse detection mechanism. func TestDo_rateLimit_abuseRateLimitError(t *testing.T) { - setup() + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.WriteHeader(http.StatusForbidden) + // When the abuse rate limit error is of the "temporarily blocked from content creation" type, + // there is no "Retry-After" header. + fmt.Fprintln(w, `{ + "message": "You have triggered an abuse detection mechanism and have been temporarily blocked from content creation. Please retry your request again later.", + "documentation_url": "https://developer.github.com/v3/#abuse-rate-limits" +}`) + }) + + req, _ := client.NewRequest("GET", ".", nil) + _, err := client.Do(context.Background(), req, nil) + + if err == nil { + t.Error("Expected error to be returned.") + } + abuseRateLimitErr, ok := err.(*AbuseRateLimitError) + if !ok { + t.Fatalf("Expected a *AbuseRateLimitError error; got %#v.", err) + } + if got, want := abuseRateLimitErr.RetryAfter, (*time.Duration)(nil); got != want { + t.Errorf("abuseRateLimitErr RetryAfter = %v, want %v", got, want) + } +} + +// Ensure *AbuseRateLimitError is returned when the response indicates that +// the client has triggered an abuse detection mechanism on GitHub Enterprise. 
+func TestDo_rateLimit_abuseRateLimitErrorEnterprise(t *testing.T) { + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -561,13 +685,15 @@ func TestDo_rateLimit_abuseRateLimitError(t *testing.T) { w.WriteHeader(http.StatusForbidden) // When the abuse rate limit error is of the "temporarily blocked from content creation" type, // there is no "Retry-After" header. + // This response returns a documentation url like the one returned for GitHub Enterprise, this + // url changes between versions but follows roughly the same format. fmt.Fprintln(w, `{ "message": "You have triggered an abuse detection mechanism and have been temporarily blocked from content creation. Please retry your request again later.", - "documentation_url": "https://developer.github.com/v3#abuse-rate-limits" + "documentation_url": "https://developer.github.com/enterprise/2.12/v3/#abuse-rate-limits" }`) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) _, err := client.Do(context.Background(), req, nil) if err == nil { @@ -584,7 +710,7 @@ func TestDo_rateLimit_abuseRateLimitError(t *testing.T) { // Ensure *AbuseRateLimitError.RetryAfter is parsed correctly. func TestDo_rateLimit_abuseRateLimitError_retryAfter(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -593,11 +719,11 @@ func TestDo_rateLimit_abuseRateLimitError_retryAfter(t *testing.T) { w.WriteHeader(http.StatusForbidden) fmt.Fprintln(w, `{ "message": "You have triggered an abuse detection mechanism ...", - "documentation_url": "https://developer.github.com/v3#abuse-rate-limits" + "documentation_url": "https://developer.github.com/v3/#abuse-rate-limits" }`) }) - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) _, err := client.Do(context.Background(), req, nil) if err == nil { @@ -616,7 +742,7 @@ func TestDo_rateLimit_abuseRateLimitError_retryAfter(t *testing.T) { } func TestDo_noContent(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -625,7 +751,7 @@ func TestDo_noContent(t *testing.T) { var body json.RawMessage - req, _ := client.NewRequest("GET", "/", nil) + req, _ := client.NewRequest("GET", ".", nil) _, err := client.Do(context.Background(), req, &body) if err != nil { t.Fatalf("Do returned unexpected error: %v", err) @@ -674,7 +800,7 @@ func TestCheckResponse(t *testing.T) { CreatedAt *Timestamp `json:"created_at,omitempty"` }{ Reason: "dmca", - CreatedAt: &Timestamp{time.Date(2016, 3, 17, 15, 39, 46, 0, time.UTC)}, + CreatedAt: &Timestamp{time.Date(2016, time.March, 17, 15, 39, 46, 0, time.UTC)}, }, } if !reflect.DeepEqual(err, want) { @@ -755,13 +881,11 @@ func TestError_Error(t *testing.T) { } func TestRateLimits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/rate_limit", func(w http.ResponseWriter, r *http.Request) { - if m := "GET"; m != r.Method { - t.Errorf("Request method = %v, want %v", r.Method, m) - } + testMethod(t, r, "GET") fmt.Fprint(w, `{"resources":{ "core": {"limit":2,"remaining":1,"reset":1372700873}, "search": {"limit":3,"remaining":2,"reset":1372700874} @@ -777,12 +901,12 @@ func TestRateLimits(t *testing.T) { Core: &Rate{ Limit: 2, Remaining: 1, - Reset: Timestamp{time.Date(2013, 7, 1, 17, 47, 53, 0, time.UTC).Local()}, + Reset: 
Timestamp{time.Date(2013, time.July, 1, 17, 47, 53, 0, time.UTC).Local()}, }, Search: &Rate{ Limit: 3, Remaining: 2, - Reset: Timestamp{time.Date(2013, 7, 1, 17, 47, 54, 0, time.UTC).Local()}, + Reset: Timestamp{time.Date(2013, time.July, 1, 17, 47, 54, 0, time.UTC).Local()}, }, } if !reflect.DeepEqual(rate, want) { @@ -798,7 +922,7 @@ func TestRateLimits(t *testing.T) { } func TestUnauthenticatedRateLimitedTransport(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { @@ -818,7 +942,7 @@ func TestUnauthenticatedRateLimitedTransport(t *testing.T) { } unauthedClient := NewClient(tp.Client()) unauthedClient.BaseURL = client.BaseURL - req, _ := unauthedClient.NewRequest("GET", "/", nil) + req, _ := unauthedClient.NewRequest("GET", ".", nil) unauthedClient.Do(context.Background(), req, nil) } @@ -864,7 +988,7 @@ func TestUnauthenticatedRateLimitedTransport_transport(t *testing.T) { } func TestBasicAuthTransport(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() username, password, otp := "u", "p", "123456" @@ -892,7 +1016,7 @@ func TestBasicAuthTransport(t *testing.T) { } basicAuthClient := NewClient(tp.Client()) basicAuthClient.BaseURL = client.BaseURL - req, _ := basicAuthClient.NewRequest("GET", "/", nil) + req, _ := basicAuthClient.NewRequest("GET", ".", nil) basicAuthClient.Do(context.Background(), req, nil) } @@ -948,3 +1072,12 @@ func TestFormatRateReset(t *testing.T) { t.Errorf("Format is wrong. got: %v, want: %v", got, want) } } + +func TestNestedStructAccessorNoPanic(t *testing.T) { + issue := &Issue{User: nil} + got := issue.GetUser().GetPlan().GetName() + want := "" + if got != want { + t.Errorf("Issues.Get.GetUser().GetPlan().GetName() returned %+v, want %+v", got, want) + } +} diff --git a/vendor/github.com/google/go-github/github/gitignore_test.go b/vendor/github.com/google/go-github/github/gitignore_test.go index e5a6264..cdb8223 100644 --- a/vendor/github.com/google/go-github/github/gitignore_test.go +++ b/vendor/github.com/google/go-github/github/gitignore_test.go @@ -14,7 +14,7 @@ import ( ) func TestGitignoresService_List(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gitignore/templates", func(w http.ResponseWriter, r *http.Request) { @@ -34,7 +34,7 @@ func TestGitignoresService_List(t *testing.T) { } func TestGitignoresService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/gitignore/templates/name", func(w http.ResponseWriter, r *http.Request) { @@ -54,6 +54,9 @@ func TestGitignoresService_Get(t *testing.T) { } func TestGitignoresService_Get_invalidTemplate(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Gitignores.Get(context.Background(), "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/interactions.go b/vendor/github.com/google/go-github/github/interactions.go new file mode 100644 index 0000000..b996549 --- /dev/null +++ b/vendor/github.com/google/go-github/github/interactions.go @@ -0,0 +1,28 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// InteractionsService handles communication with the repository and organization related +// methods of the GitHub API. 
+// +// GitHub API docs: https://developer.github.com/v3/interactions/ +type InteractionsService service + +// InteractionRestriction represents the interaction restrictions for repository and organization. +type InteractionRestriction struct { + // Specifies the group of GitHub users who can + // comment, open issues, or create pull requests for the given repository. + // Possible values are: "existing_users", "contributors_only" and "collaborators_only". + Limit *string `json:"limit,omitempty"` + + // Origin specifies the type of the resource to interact with. + // Possible values are: "repository" and "organization". + Origin *string `json:"origin,omitempty"` + + // ExpiresAt specifies the time after which the interaction restrictions expire. + // The default expiry time is 24 hours from the time restriction is created. + ExpiresAt *Timestamp `json:"expires_at,omitempty"` +} diff --git a/vendor/github.com/google/go-github/github/interactions_orgs.go b/vendor/github.com/google/go-github/github/interactions_orgs.go new file mode 100644 index 0000000..af25f65 --- /dev/null +++ b/vendor/github.com/google/go-github/github/interactions_orgs.go @@ -0,0 +1,80 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetRestrictionsForOrg fetches the interaction restrictions for an organization. +// +// GitHub API docs: https://developer.github.com/v3/interactions/orgs/#get-interaction-restrictions-for-an-organization +func (s *InteractionsService) GetRestrictionsForOrg(ctx context.Context, organization string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + organizationInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, organizationInteractions) + if err != nil { + return nil, resp, err + } + + return organizationInteractions, resp, nil +} + +// UpdateRestrictionsForOrg adds or updates the interaction restrictions for an organization. +// +// limit specifies the group of GitHub users who can comment, open issues, or create pull requests +// in public repositories for the given organization. +// Possible values are: "existing_users", "contributors_only", "collaborators_only". +// +// GitHub API docs: https://developer.github.com/v3/interactions/orgs/#add-or-update-interaction-restrictions-for-an-organization +func (s *InteractionsService) UpdateRestrictionsForOrg(ctx context.Context, organization, limit string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + + interaction := &InteractionRestriction{Limit: String(limit)} + + req, err := s.client.NewRequest("PUT", u, interaction) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
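GetRestrictionsForOrg and UpdateRestrictionsForOrg above, together with RemoveRestrictionsFromOrg just below, are exposed through the new InteractionsService, which go-github wires up as client.Interactions (that wiring happens in github.go, outside this excerpt). A rough usage sketch with a placeholder organization name and an unauthenticated client for brevity:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // a real call needs an authenticated client

	// Restrict public-repo interactions in the organization to existing users.
	restriction, _, err := client.Interactions.UpdateRestrictionsForOrg(ctx, "my-org", "existing_users")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("limit:", restriction.GetLimit())

	// Lift the restriction again.
	if _, err := client.Interactions.RemoveRestrictionsFromOrg(ctx, "my-org"); err != nil {
		log.Fatal(err)
	}
}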
+ req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + organizationInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, organizationInteractions) + if err != nil { + return nil, resp, err + } + + return organizationInteractions, resp, nil +} + +// RemoveRestrictionsFromOrg removes the interaction restrictions for an organization. +// +// GitHub API docs: https://developer.github.com/v3/interactions/orgs/#remove-interaction-restrictions-for-an-organization +func (s *InteractionsService) RemoveRestrictionsFromOrg(ctx context.Context, organization string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/interactions_orgs_test.go b/vendor/github.com/google/go-github/github/interactions_orgs_test.go new file mode 100644 index 0000000..88b7ff6 --- /dev/null +++ b/vendor/github.com/google/go-github/github/interactions_orgs_test.go @@ -0,0 +1,80 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestInteractionsService_GetRestrictionsForOrgs(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/orgs/o/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + fmt.Fprint(w, `{"origin":"organization"}`) + }) + + organizationInteractions, _, err := client.Interactions.GetRestrictionsForOrg(context.Background(), "o") + if err != nil { + t.Errorf("Interactions.GetRestrictionsForOrg returned error: %v", err) + } + + want := &InteractionRestriction{Origin: String("organization")} + if !reflect.DeepEqual(organizationInteractions, want) { + t.Errorf("Interactions.GetRestrictionsForOrg returned %+v, want %+v", organizationInteractions, want) + } +} + +func TestInteractionsService_UpdateRestrictionsForOrg(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &InteractionRestriction{Limit: String("existing_users")} + + mux.HandleFunc("/orgs/o/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + v := new(InteractionRestriction) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + fmt.Fprint(w, `{"origin":"organization"}`) + }) + + organizationInteractions, _, err := client.Interactions.UpdateRestrictionsForOrg(context.Background(), "o", input.GetLimit()) + if err != nil { + t.Errorf("Interactions.UpdateRestrictionsForOrg returned error: %v", err) + } + + want := &InteractionRestriction{Origin: String("organization")} + if !reflect.DeepEqual(organizationInteractions, want) { + t.Errorf("Interactions.UpdateRestrictionsForOrg returned %+v, want %+v", organizationInteractions, want) + } +} + +func TestInteractionsService_RemoveRestrictionsFromOrg(t *testing.T) { + client, mux, _, teardown := setup() + defer 
teardown() + + mux.HandleFunc("/orgs/o/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + }) + + _, err := client.Interactions.RemoveRestrictionsFromOrg(context.Background(), "o") + if err != nil { + t.Errorf("Interactions.RemoveRestrictionsFromOrg returned error: %v", err) + } +} diff --git a/vendor/github.com/google/go-github/github/interactions_repos.go b/vendor/github.com/google/go-github/github/interactions_repos.go new file mode 100644 index 0000000..5823482 --- /dev/null +++ b/vendor/github.com/google/go-github/github/interactions_repos.go @@ -0,0 +1,80 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetRestrictionsForRepo fetches the interaction restrictions for a repository. +// +// GitHub API docs: https://developer.github.com/v3/interactions/repos/#get-interaction-restrictions-for-a-repository +func (s *InteractionsService) GetRestrictionsForRepo(ctx context.Context, owner, repo string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + repositoryInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, repositoryInteractions) + if err != nil { + return nil, resp, err + } + + return repositoryInteractions, resp, nil +} + +// UpdateRestrictionsForRepo adds or updates the interaction restrictions for a repository. +// +// limit specifies the group of GitHub users who can comment, open issues, or create pull requests +// for the given repository. +// Possible values are: "existing_users", "contributors_only", "collaborators_only". +// +// GitHub API docs: https://developer.github.com/v3/interactions/repos/#add-or-update-interaction-restrictions-for-a-repository +func (s *InteractionsService) UpdateRestrictionsForRepo(ctx context.Context, owner, repo, limit string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + + interaction := &InteractionRestriction{Limit: String(limit)} + + req, err := s.client.NewRequest("PUT", u, interaction) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + repositoryInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, repositoryInteractions) + if err != nil { + return nil, resp, err + } + + return repositoryInteractions, resp, nil +} + +// RemoveRestrictionsFromRepo removes the interaction restrictions for a repository. +// +// GitHub API docs: https://developer.github.com/v3/interactions/repos/#remove-interaction-restrictions-for-a-repository +func (s *InteractionsService) RemoveRestrictionsFromRepo(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
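The repository-level calls mirror the organization-level ones. A short sketch of setting a restriction and reading it back, again with placeholder owner/repo values and an unauthenticated client:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // swap in an authenticated client in real use

	// Only collaborators may comment, open issues, or create pull requests.
	if _, _, err := client.Interactions.UpdateRestrictionsForRepo(ctx, "owner", "repo", "collaborators_only"); err != nil {
		log.Fatal(err)
	}

	// Read the restriction back; per the field docs, it expires 24 hours after creation by default.
	current, _, err := client.Interactions.GetRestrictionsForRepo(ctx, "owner", "repo")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("limit:", current.GetLimit())
}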
+ req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/interactions_repos_test.go b/vendor/github.com/google/go-github/github/interactions_repos_test.go new file mode 100644 index 0000000..80857ae --- /dev/null +++ b/vendor/github.com/google/go-github/github/interactions_repos_test.go @@ -0,0 +1,80 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestInteractionsService_GetRestrictionsForRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + fmt.Fprint(w, `{"origin":"repository"}`) + }) + + repoInteractions, _, err := client.Interactions.GetRestrictionsForRepo(context.Background(), "o", "r") + if err != nil { + t.Errorf("Interactions.GetRestrictionsForRepo returned error: %v", err) + } + + want := &InteractionRestriction{Origin: String("repository")} + if !reflect.DeepEqual(repoInteractions, want) { + t.Errorf("Interactions.GetRestrictionsForRepo returned %+v, want %+v", repoInteractions, want) + } +} + +func TestInteractionsService_UpdateRestrictionsForRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &InteractionRestriction{Limit: String("existing_users")} + + mux.HandleFunc("/repos/o/r/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + v := new(InteractionRestriction) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + fmt.Fprint(w, `{"origin":"repository"}`) + }) + + repoInteractions, _, err := client.Interactions.UpdateRestrictionsForRepo(context.Background(), "o", "r", input.GetLimit()) + if err != nil { + t.Errorf("Interactions.UpdateRestrictionsForRepo returned error: %v", err) + } + + want := &InteractionRestriction{Origin: String("repository")} + if !reflect.DeepEqual(repoInteractions, want) { + t.Errorf("Interactions.UpdateRestrictionsForRepo returned %+v, want %+v", repoInteractions, want) + } +} + +func TestInteractionsService_RemoveRestrictionsFromRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/interaction-limits", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeInteractionRestrictionsPreview) + }) + + _, err := client.Interactions.RemoveRestrictionsFromRepo(context.Background(), "o", "r") + if err != nil { + t.Errorf("Interactions.RemoveRestrictionsFromRepo returned error: %v", err) + } +} diff --git a/vendor/github.com/google/go-github/github/issues.go b/vendor/github.com/google/go-github/github/issues.go index b437d50..1e0991c 100644 --- a/vendor/github.com/google/go-github/github/issues.go +++ b/vendor/github.com/google/go-github/github/issues.go @@ -8,6 +8,7 @@ package github import ( "context" "fmt" + "strings" "time" ) @@ -23,8 +24,9 @@ type IssuesService service // but not every issue is a pull request. 
Some endpoints, events, and webhooks // may also return pull requests via this struct. If PullRequestLinks is nil, // this is an issue, and if PullRequestLinks is not nil, this is a pull request. +// The IsPullRequest helper method can be used to check that. type Issue struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Number *int `json:"number,omitempty"` State *string `json:"state,omitempty"` Locked *bool `json:"locked,omitempty"` @@ -40,21 +42,37 @@ type Issue struct { ClosedBy *User `json:"closed_by,omitempty"` URL *string `json:"url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + LabelsURL *string `json:"labels_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` Milestone *Milestone `json:"milestone,omitempty"` PullRequestLinks *PullRequestLinks `json:"pull_request,omitempty"` Repository *Repository `json:"repository,omitempty"` Reactions *Reactions `json:"reactions,omitempty"` Assignees []*User `json:"assignees,omitempty"` + NodeID *string `json:"node_id,omitempty"` // TextMatches is only populated from search results that request text matches // See: search.go and https://developer.github.com/v3/search/#text-match-metadata TextMatches []TextMatch `json:"text_matches,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the issue. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". + ActiveLockReason *string `json:"active_lock_reason,omitempty"` } func (i Issue) String() string { return Stringify(i) } +// IsPullRequest reports whether the issue is also a pull request. It uses the +// method recommended by GitHub's API documentation, which is to check whether +// PullRequestLinks is non-nil. +func (i Issue) IsPullRequest() bool { + return i.PullRequestLinks != nil +} + // IssueRequest represents a request to create/edit an issue. // It is separate from Issue above because otherwise Labels // and Assignee fail to serialize to the correct JSON. @@ -97,7 +115,7 @@ type IssueListOptions struct { } // PullRequestLinks object is added to the Issue object when it's an issue included -// in the IssueCommentEvent webhook payload, if the webhooks is fired by a comment on a PR +// in the IssueCommentEvent webhook payload, if the webhook is fired by a comment on a PR. type PullRequestLinks struct { URL *string `json:"url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` @@ -141,8 +159,9 @@ func (s *IssuesService) listIssues(ctx context.Context, u string, opt *IssueList return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var issues []*Issue resp, err := s.client.Do(ctx, req, &issues) @@ -208,8 +227,9 @@ func (s *IssuesService) ListByRepo(ctx context.Context, owner string, repo strin return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) + // TODO: remove custom Accept headers when APIs fully launch. 
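The new IsPullRequest helper shown above makes the usual "skip pull requests when listing issues" filter explicit, since the issues endpoints also return PRs as issues. A sketch with placeholder owner/repo values:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	issues, _, err := client.Issues.ListByRepo(ctx, "owner", "repo", nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, issue := range issues {
		if issue.IsPullRequest() {
			continue // the issues endpoint also returns pull requests
		}
		fmt.Printf("#%d %s\n", issue.GetNumber(), issue.GetTitle())
	}
}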
+ acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeIntegrationPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var issues []*Issue resp, err := s.client.Do(ctx, req, &issues) @@ -230,8 +250,9 @@ func (s *IssuesService) Get(ctx context.Context, owner string, repo string, numb return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) issue := new(Issue) resp, err := s.client.Do(ctx, req, issue) @@ -252,6 +273,9 @@ func (s *IssuesService) Create(ctx context.Context, owner string, repo string, i return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + i := new(Issue) resp, err := s.client.Do(ctx, req, i) if err != nil { @@ -271,6 +295,9 @@ func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, num return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + i := new(Issue) resp, err := s.client.Do(ctx, req, i) if err != nil { @@ -280,16 +307,29 @@ func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, num return i, resp, nil } +// LockIssueOptions specifies the optional parameters to the +// IssuesService.Lock method. +type LockIssueOptions struct { + // LockReason specifies the reason to lock this issue. + // Providing a lock reason can help make it clearer to contributors why an issue + // was locked. Possible values are: "off-topic", "too heated", "resolved", and "spam". + LockReason string `json:"lock_reason,omitempty"` +} + // Lock an issue's conversation. 
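Lock now accepts a *LockIssueOptions (defined just above); when a reason is supplied, the request opts into the lock-reason preview media type, as the updated method body below shows. A usage sketch with placeholder coordinates:

package main

import (
	"context"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	// Lock issue #42 and record why; valid reasons are
	// "off-topic", "too heated", "resolved", and "spam".
	opt := &github.LockIssueOptions{LockReason: "resolved"}
	if _, err := client.Issues.Lock(ctx, "owner", "repo", 42, opt); err != nil {
		log.Fatal(err)
	}
}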
// // GitHub API docs: https://developer.github.com/v3/issues/#lock-an-issue -func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int) (*Response, error) { +func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int, opt *LockIssueOptions) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, nil) + req, err := s.client.NewRequest("PUT", u, opt) if err != nil { return nil, err } + if opt != nil { + req.Header.Set("Accept", mediaTypeLockReasonPreview) + } + return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/issues_assignees_test.go b/vendor/github.com/google/go-github/github/issues_assignees_test.go index d88a788..bb2bf4f 100644 --- a/vendor/github.com/google/go-github/github/issues_assignees_test.go +++ b/vendor/github.com/google/go-github/github/issues_assignees_test.go @@ -15,7 +15,7 @@ import ( ) func TestIssuesService_ListAssignees(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/assignees", func(w http.ResponseWriter, r *http.Request) { @@ -30,19 +30,22 @@ func TestIssuesService_ListAssignees(t *testing.T) { t.Errorf("Issues.ListAssignees returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(assignees, want) { t.Errorf("Issues.ListAssignees returned %+v, want %+v", assignees, want) } } func TestIssuesService_ListAssignees_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListAssignees(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestIssuesService_IsAssignee_true(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/assignees/u", func(w http.ResponseWriter, r *http.Request) { @@ -59,7 +62,7 @@ func TestIssuesService_IsAssignee_true(t *testing.T) { } func TestIssuesService_IsAssignee_false(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/assignees/u", func(w http.ResponseWriter, r *http.Request) { @@ -77,7 +80,7 @@ func TestIssuesService_IsAssignee_false(t *testing.T) { } func TestIssuesService_IsAssignee_error(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/assignees/u", func(w http.ResponseWriter, r *http.Request) { @@ -95,12 +98,15 @@ func TestIssuesService_IsAssignee_error(t *testing.T) { } func TestIssuesService_IsAssignee_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.IsAssignee(context.Background(), "%", "r", "u") testURLParseError(t, err) } func TestIssuesService_AddAssignees(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/assignees", func(w http.ResponseWriter, r *http.Request) { @@ -129,7 +135,7 @@ func TestIssuesService_AddAssignees(t *testing.T) { } func TestIssuesService_RemoveAssignees(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/assignees", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/issues_comments.go b/vendor/github.com/google/go-github/github/issues_comments.go index fd72657..ab68afe 100644 --- a/vendor/github.com/google/go-github/github/issues_comments.go 
+++ b/vendor/github.com/google/go-github/github/issues_comments.go @@ -13,15 +13,19 @@ import ( // IssueComment represents a comment left on an issue. type IssueComment struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` Body *string `json:"body,omitempty"` User *User `json:"user,omitempty"` Reactions *Reactions `json:"reactions,omitempty"` CreatedAt *time.Time `json:"created_at,omitempty"` UpdatedAt *time.Time `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - IssueURL *string `json:"issue_url,omitempty"` + // AuthorAssociation is the comment author's relationship to the issue's repository. + // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". + AuthorAssociation *string `json:"author_association,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + IssueURL *string `json:"issue_url,omitempty"` } func (i IssueComment) String() string { @@ -79,8 +83,8 @@ func (s *IssuesService) ListComments(ctx context.Context, owner string, repo str // GetComment fetches the specified issue comment. // // GitHub API docs: https://developer.github.com/v3/issues/comments/#get-a-single-comment -func (s *IssuesService) GetComment(ctx context.Context, owner string, repo string, id int) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, id) +func (s *IssuesService) GetComment(ctx context.Context, owner string, repo string, commentID int64) (*IssueComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -118,10 +122,11 @@ func (s *IssuesService) CreateComment(ctx context.Context, owner string, repo st } // EditComment updates an issue comment. +// A non-nil comment.Body must be provided. Other comment fields should be left nil. // // GitHub API docs: https://developer.github.com/v3/issues/comments/#edit-a-comment -func (s *IssuesService) EditComment(ctx context.Context, owner string, repo string, id int, comment *IssueComment) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, id) +func (s *IssuesService) EditComment(ctx context.Context, owner string, repo string, commentID int64, comment *IssueComment) (*IssueComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("PATCH", u, comment) if err != nil { return nil, nil, err @@ -138,8 +143,8 @@ func (s *IssuesService) EditComment(ctx context.Context, owner string, repo stri // DeleteComment deletes an issue comment. 
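Comment identifiers are now int64 across GetComment and EditComment above and DeleteComment below, matching the widened ID fields. A sketch of editing a comment; the ID is a placeholder and, per the doc comment above, only Body is set:

package main

import (
	"context"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	var commentID int64 = 123456789 // placeholder comment ID
	updated := &github.IssueComment{Body: github.String("edited body")}
	if _, _, err := client.Issues.EditComment(ctx, "owner", "repo", commentID, updated); err != nil {
		log.Fatal(err)
	}
}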
// // GitHub API docs: https://developer.github.com/v3/issues/comments/#delete-a-comment -func (s *IssuesService) DeleteComment(ctx context.Context, owner string, repo string, id int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, id) +func (s *IssuesService) DeleteComment(ctx context.Context, owner string, repo string, commentID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err diff --git a/vendor/github.com/google/go-github/github/issues_comments_test.go b/vendor/github.com/google/go-github/github/issues_comments_test.go index 78c4863..6911a79 100644 --- a/vendor/github.com/google/go-github/github/issues_comments_test.go +++ b/vendor/github.com/google/go-github/github/issues_comments_test.go @@ -16,7 +16,7 @@ import ( ) func TestIssuesService_ListComments_allIssues(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/comments", func(w http.ResponseWriter, r *http.Request) { @@ -42,14 +42,14 @@ func TestIssuesService_ListComments_allIssues(t *testing.T) { t.Errorf("Issues.ListComments returned error: %v", err) } - want := []*IssueComment{{ID: Int(1)}} + want := []*IssueComment{{ID: Int64(1)}} if !reflect.DeepEqual(comments, want) { t.Errorf("Issues.ListComments returned %+v, want %+v", comments, want) } } func TestIssuesService_ListComments_specificIssue(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/comments", func(w http.ResponseWriter, r *http.Request) { @@ -63,19 +63,22 @@ func TestIssuesService_ListComments_specificIssue(t *testing.T) { t.Errorf("Issues.ListComments returned error: %v", err) } - want := []*IssueComment{{ID: Int(1)}} + want := []*IssueComment{{ID: Int64(1)}} if !reflect.DeepEqual(comments, want) { t.Errorf("Issues.ListComments returned %+v, want %+v", comments, want) } } func TestIssuesService_ListComments_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListComments(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestIssuesService_GetComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -89,19 +92,22 @@ func TestIssuesService_GetComment(t *testing.T) { t.Errorf("Issues.GetComment returned error: %v", err) } - want := &IssueComment{ID: Int(1)} + want := &IssueComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Issues.GetComment returned %+v, want %+v", comment, want) } } func TestIssuesService_GetComment_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.GetComment(context.Background(), "%", "r", 1) testURLParseError(t, err) } func TestIssuesService_CreateComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &IssueComment{Body: String("b")} @@ -123,19 +129,22 @@ func TestIssuesService_CreateComment(t *testing.T) { t.Errorf("Issues.CreateComment returned error: %v", err) } - want := &IssueComment{ID: Int(1)} + want := &IssueComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Issues.CreateComment returned %+v, want %+v", comment, want) } } func TestIssuesService_CreateComment_invalidOrg(t *testing.T) { + 
client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.CreateComment(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestIssuesService_EditComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &IssueComment{Body: String("b")} @@ -157,19 +166,22 @@ func TestIssuesService_EditComment(t *testing.T) { t.Errorf("Issues.EditComment returned error: %v", err) } - want := &IssueComment{ID: Int(1)} + want := &IssueComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Issues.EditComment returned %+v, want %+v", comment, want) } } func TestIssuesService_EditComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.EditComment(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestIssuesService_DeleteComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -183,6 +195,9 @@ func TestIssuesService_DeleteComment(t *testing.T) { } func TestIssuesService_DeleteComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Issues.DeleteComment(context.Background(), "%", "r", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/issues_events.go b/vendor/github.com/google/go-github/github/issues_events.go index 93e5d66..6a43f10 100644 --- a/vendor/github.com/google/go-github/github/issues_events.go +++ b/vendor/github.com/google/go-github/github/issues_events.go @@ -8,12 +8,13 @@ package github import ( "context" "fmt" + "strings" "time" ) // IssueEvent represents an event that occurred around an Issue or Pull Request. type IssueEvent struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` // The User that generated this event. @@ -34,9 +35,13 @@ type IssueEvent struct { // The Actor committed to master a commit mentioning the issue in its commit message. // CommitID holds the SHA1 of the commit. // - // reopened, locked, unlocked + // reopened, unlocked // The Actor did that to the issue. // + // locked + // The Actor locked the issue. + // LockReason holds the reason of locking the issue (if provided while locking). + // // renamed // The Actor changed the issue title from Rename.From to Rename.To. // @@ -64,12 +69,14 @@ type IssueEvent struct { Issue *Issue `json:"issue,omitempty"` // Only present on certain events; see above. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - Label *Label `json:"label,omitempty"` - Rename *Rename `json:"rename,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Assigner *User `json:"assigner,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + Label *Label `json:"label,omitempty"` + Rename *Rename `json:"rename,omitempty"` + LockReason *string `json:"lock_reason,omitempty"` + ProjectCard *ProjectCard `json:"project_card,omitempty"` } // ListIssueEvents lists events for the specified issue. 
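IssueEvent gains LockReason and ProjectCard above, and ListIssueEvents (continuing below) now sends the matching preview Accept headers. A sketch that prints lock reasons from an issue's event stream; it assumes ListIssueEvents keeps its usual (owner, repo, number, *ListOptions) signature and that generated Get* accessors exist for the new fields:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	events, _, err := client.Issues.ListIssueEvents(ctx, "owner", "repo", 42, nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range events {
		if e.GetEvent() == "locked" {
			fmt.Println("locked:", e.GetLockReason()) // empty if no reason was given
		}
	}
}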
@@ -87,6 +94,9 @@ func (s *IssuesService) ListIssueEvents(ctx context.Context, owner, repo string, return nil, nil, err } + acceptHeaders := []string{mediaTypeLockReasonPreview, mediaTypeProjectCardDetailsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + var events []*IssueEvent resp, err := s.client.Do(ctx, req, &events) if err != nil { @@ -123,7 +133,7 @@ func (s *IssuesService) ListRepositoryEvents(ctx context.Context, owner, repo st // GetEvent returns the specified issue event. // // GitHub API docs: https://developer.github.com/v3/issues/events/#get-a-single-event -func (s *IssuesService) GetEvent(ctx context.Context, owner, repo string, id int) (*IssueEvent, *Response, error) { +func (s *IssuesService) GetEvent(ctx context.Context, owner, repo string, id int64) (*IssueEvent, *Response, error) { u := fmt.Sprintf("repos/%v/%v/issues/events/%v", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) diff --git a/vendor/github.com/google/go-github/github/issues_events_test.go b/vendor/github.com/google/go-github/github/issues_events_test.go index d85b622..17d6ac3 100644 --- a/vendor/github.com/google/go-github/github/issues_events_test.go +++ b/vendor/github.com/google/go-github/github/issues_events_test.go @@ -10,15 +10,18 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" ) func TestIssuesService_ListIssueEvents(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeLockReasonPreview, mediaTypeProjectCardDetailsPreview} mux.HandleFunc("/repos/o/r/issues/1/events", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "page": "1", "per_page": "2", @@ -32,14 +35,14 @@ func TestIssuesService_ListIssueEvents(t *testing.T) { t.Errorf("Issues.ListIssueEvents returned error: %v", err) } - want := []*IssueEvent{{ID: Int(1)}} + want := []*IssueEvent{{ID: Int64(1)}} if !reflect.DeepEqual(events, want) { t.Errorf("Issues.ListIssueEvents returned %+v, want %+v", events, want) } } func TestIssuesService_ListRepositoryEvents(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/events", func(w http.ResponseWriter, r *http.Request) { @@ -57,14 +60,14 @@ func TestIssuesService_ListRepositoryEvents(t *testing.T) { t.Errorf("Issues.ListRepositoryEvents returned error: %v", err) } - want := []*IssueEvent{{ID: Int(1)}} + want := []*IssueEvent{{ID: Int64(1)}} if !reflect.DeepEqual(events, want) { t.Errorf("Issues.ListRepositoryEvents returned %+v, want %+v", events, want) } } func TestIssuesService_GetEvent(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/events/1", func(w http.ResponseWriter, r *http.Request) { @@ -77,7 +80,7 @@ func TestIssuesService_GetEvent(t *testing.T) { t.Errorf("Issues.GetEvent returned error: %v", err) } - want := &IssueEvent{ID: Int(1)} + want := &IssueEvent{ID: Int64(1)} if !reflect.DeepEqual(event, want) { t.Errorf("Issues.GetEvent returned %+v, want %+v", event, want) } diff --git a/vendor/github.com/google/go-github/github/issues_labels.go b/vendor/github.com/google/go-github/github/issues_labels.go index b0e34c4..adcbe06 100644 --- a/vendor/github.com/google/go-github/github/issues_labels.go +++ b/vendor/github.com/google/go-github/github/issues_labels.go @@ -12,10 +12,13 @@ import ( // Label represents a GitHub label on an Issue 
type Label struct { - ID *int `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Color *string `json:"color,omitempty"` + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Color *string `json:"color,omitempty"` + Description *string `json:"description,omitempty"` + Default *bool `json:"default,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (l Label) String() string { @@ -37,6 +40,9 @@ func (s *IssuesService) ListLabels(ctx context.Context, owner string, repo strin return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + var labels []*Label resp, err := s.client.Do(ctx, req, &labels) if err != nil { @@ -56,6 +62,9 @@ func (s *IssuesService) GetLabel(ctx context.Context, owner string, repo string, return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + label := new(Label) resp, err := s.client.Do(ctx, req, label) if err != nil { @@ -75,6 +84,9 @@ func (s *IssuesService) CreateLabel(ctx context.Context, owner string, repo stri return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + l := new(Label) resp, err := s.client.Do(ctx, req, l) if err != nil { @@ -94,6 +106,9 @@ func (s *IssuesService) EditLabel(ctx context.Context, owner string, repo string return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + l := new(Label) resp, err := s.client.Do(ctx, req, l) if err != nil { @@ -130,6 +145,9 @@ func (s *IssuesService) ListLabelsByIssue(ctx context.Context, owner string, rep return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + var labels []*Label resp, err := s.client.Do(ctx, req, &labels) if err != nil { @@ -149,6 +167,9 @@ func (s *IssuesService) AddLabelsToIssue(ctx context.Context, owner string, repo return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + var l []*Label resp, err := s.client.Do(ctx, req, &l) if err != nil { @@ -167,6 +188,10 @@ func (s *IssuesService) RemoveLabelForIssue(ctx context.Context, owner string, r if err != nil { return nil, err } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + return s.client.Do(ctx, req, nil) } @@ -180,6 +205,9 @@ func (s *IssuesService) ReplaceLabelsForIssue(ctx context.Context, owner string, return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + var l []*Label resp, err := s.client.Do(ctx, req, &l) if err != nil { @@ -198,6 +226,10 @@ func (s *IssuesService) RemoveLabelsForIssue(ctx context.Context, owner string, if err != nil { return nil, err } + + // TODO: remove custom Accept header when this API fully launches. 
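All of the label endpoints above now send the label-description preview Accept header, and Label gains Description, Default, and NodeID. A sketch of creating a label that carries a description; owner, repo, and the label values are placeholders, and GetDescription is assumed to be among the regenerated accessors:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	label := &github.Label{
		Name:        github.String("needs-triage"),
		Color:       github.String("d73a4a"),
		Description: github.String("Awaiting initial triage"),
	}
	created, _, err := client.Issues.CreateLabel(ctx, "owner", "repo", label)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(created.GetName(), created.GetDescription())
}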
+ req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + return s.client.Do(ctx, req, nil) } @@ -216,6 +248,9 @@ func (s *IssuesService) ListLabelsForMilestone(ctx context.Context, owner string return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + var labels []*Label resp, err := s.client.Do(ctx, req, &labels) if err != nil { diff --git a/vendor/github.com/google/go-github/github/issues_labels_test.go b/vendor/github.com/google/go-github/github/issues_labels_test.go index 5380427..6437d88 100644 --- a/vendor/github.com/google/go-github/github/issues_labels_test.go +++ b/vendor/github.com/google/go-github/github/issues_labels_test.go @@ -15,11 +15,12 @@ import ( ) func TestIssuesService_ListLabels(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/labels", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"name": "a"},{"name": "b"}]`) }) @@ -37,17 +38,21 @@ func TestIssuesService_ListLabels(t *testing.T) { } func TestIssuesService_ListLabels_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListLabels(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestIssuesService_GetLabel(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/labels/n", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - fmt.Fprint(w, `{"url":"u", "name": "n", "color": "c"}`) + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) + fmt.Fprint(w, `{"url":"u", "name": "n", "color": "c", "description": "d"}`) }) label, _, err := client.Issues.GetLabel(context.Background(), "o", "r", "n") @@ -55,19 +60,22 @@ func TestIssuesService_GetLabel(t *testing.T) { t.Errorf("Issues.GetLabel returned error: %v", err) } - want := &Label{URL: String("u"), Name: String("n"), Color: String("c")} + want := &Label{URL: String("u"), Name: String("n"), Color: String("c"), Description: String("d")} if !reflect.DeepEqual(label, want) { t.Errorf("Issues.GetLabel returned %+v, want %+v", label, want) } } func TestIssuesService_GetLabel_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.GetLabel(context.Background(), "%", "%", "%") testURLParseError(t, err) } func TestIssuesService_CreateLabel(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Label{Name: String("n")} @@ -77,6 +85,7 @@ func TestIssuesService_CreateLabel(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -96,12 +105,15 @@ func TestIssuesService_CreateLabel(t *testing.T) { } func TestIssuesService_CreateLabel_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.CreateLabel(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestIssuesService_EditLabel(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Label{Name: String("z")} @@ -111,6 +123,7 @@ func TestIssuesService_EditLabel(t *testing.T) { 
json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -130,12 +143,15 @@ func TestIssuesService_EditLabel(t *testing.T) { } func TestIssuesService_EditLabel_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.EditLabel(context.Background(), "%", "%", "%", nil) testURLParseError(t, err) } func TestIssuesService_DeleteLabel(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/labels/n", func(w http.ResponseWriter, r *http.Request) { @@ -149,16 +165,20 @@ func TestIssuesService_DeleteLabel(t *testing.T) { } func TestIssuesService_DeleteLabel_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Issues.DeleteLabel(context.Background(), "%", "%", "%") testURLParseError(t, err) } func TestIssuesService_ListLabelsByIssue(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/labels", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"name":"a","id":1},{"name":"b","id":2}]`) }) @@ -170,8 +190,8 @@ func TestIssuesService_ListLabelsByIssue(t *testing.T) { } want := []*Label{ - {Name: String("a"), ID: Int(1)}, - {Name: String("b"), ID: Int(2)}, + {Name: String("a"), ID: Int64(1)}, + {Name: String("b"), ID: Int64(2)}, } if !reflect.DeepEqual(labels, want) { t.Errorf("Issues.ListLabelsByIssue returned %+v, want %+v", labels, want) @@ -179,12 +199,15 @@ func TestIssuesService_ListLabelsByIssue(t *testing.T) { } func TestIssuesService_ListLabelsByIssue_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListLabelsByIssue(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } func TestIssuesService_AddLabelsToIssue(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []string{"a", "b"} @@ -194,6 +217,7 @@ func TestIssuesService_AddLabelsToIssue(t *testing.T) { json.NewDecoder(r.Body).Decode(&v) testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -213,15 +237,19 @@ func TestIssuesService_AddLabelsToIssue(t *testing.T) { } func TestIssuesService_AddLabelsToIssue_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.AddLabelsToIssue(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } func TestIssuesService_RemoveLabelForIssue(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/labels/l", func(w http.ResponseWriter, r *http.Request) { + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) testMethod(t, r, "DELETE") }) @@ -232,12 +260,15 @@ func TestIssuesService_RemoveLabelForIssue(t *testing.T) { } func TestIssuesService_RemoveLabelForIssue_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Issues.RemoveLabelForIssue(context.Background(), "%", "%", 1, "%") testURLParseError(t, err) } func TestIssuesService_ReplaceLabelsForIssue(t 
*testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []string{"a", "b"} @@ -247,6 +278,7 @@ func TestIssuesService_ReplaceLabelsForIssue(t *testing.T) { json.NewDecoder(r.Body).Decode(&v) testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -266,16 +298,20 @@ func TestIssuesService_ReplaceLabelsForIssue(t *testing.T) { } func TestIssuesService_ReplaceLabelsForIssue_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ReplaceLabelsForIssue(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } func TestIssuesService_RemoveLabelsForIssue(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/labels", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) }) _, err := client.Issues.RemoveLabelsForIssue(context.Background(), "o", "r", 1) @@ -285,16 +321,20 @@ func TestIssuesService_RemoveLabelsForIssue(t *testing.T) { } func TestIssuesService_RemoveLabelsForIssue_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Issues.RemoveLabelsForIssue(context.Background(), "%", "%", 1) testURLParseError(t, err) } func TestIssuesService_ListLabelsForMilestone(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/milestones/1/labels", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"name": "a"},{"name": "b"}]`) }) @@ -312,6 +352,9 @@ func TestIssuesService_ListLabelsForMilestone(t *testing.T) { } func TestIssuesService_ListLabelsForMilestone_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListLabelsForMilestone(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/issues_milestones.go b/vendor/github.com/google/go-github/github/issues_milestones.go index e6e882d..ffe9aae 100644 --- a/vendor/github.com/google/go-github/github/issues_milestones.go +++ b/vendor/github.com/google/go-github/github/issues_milestones.go @@ -16,7 +16,7 @@ type Milestone struct { URL *string `json:"url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` LabelsURL *string `json:"labels_url,omitempty"` - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Number *int `json:"number,omitempty"` State *string `json:"state,omitempty"` Title *string `json:"title,omitempty"` @@ -28,6 +28,7 @@ type Milestone struct { UpdatedAt *time.Time `json:"updated_at,omitempty"` ClosedAt *time.Time `json:"closed_at,omitempty"` DueOn *time.Time `json:"due_on,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (m Milestone) String() string { diff --git a/vendor/github.com/google/go-github/github/issues_milestones_test.go b/vendor/github.com/google/go-github/github/issues_milestones_test.go index befa1b4..019f00a 100644 --- a/vendor/github.com/google/go-github/github/issues_milestones_test.go +++ b/vendor/github.com/google/go-github/github/issues_milestones_test.go @@ -15,7 +15,7 @@ import ( ) func TestIssuesService_ListMilestones(t *testing.T) { - setup() + 
client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/milestones", func(w http.ResponseWriter, r *http.Request) { @@ -42,12 +42,15 @@ func TestIssuesService_ListMilestones(t *testing.T) { } func TestIssuesService_ListMilestones_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListMilestones(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestIssuesService_GetMilestone(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/milestones/1", func(w http.ResponseWriter, r *http.Request) { @@ -67,12 +70,15 @@ func TestIssuesService_GetMilestone(t *testing.T) { } func TestIssuesService_GetMilestone_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.GetMilestone(context.Background(), "%", "r", 1) testURLParseError(t, err) } func TestIssuesService_CreateMilestone(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Milestone{Title: String("t")} @@ -101,12 +107,15 @@ func TestIssuesService_CreateMilestone(t *testing.T) { } func TestIssuesService_CreateMilestone_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.CreateMilestone(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestIssuesService_EditMilestone(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Milestone{Title: String("t")} @@ -135,12 +144,15 @@ func TestIssuesService_EditMilestone(t *testing.T) { } func TestIssuesService_EditMilestone_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.EditMilestone(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestIssuesService_DeleteMilestone(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/milestones/1", func(w http.ResponseWriter, r *http.Request) { @@ -154,6 +166,9 @@ func TestIssuesService_DeleteMilestone(t *testing.T) { } func TestIssuesService_DeleteMilestone_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Issues.DeleteMilestone(context.Background(), "%", "r", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/issues_test.go b/vendor/github.com/google/go-github/github/issues_test.go index dadee75..7edcf09 100644 --- a/vendor/github.com/google/go-github/github/issues_test.go +++ b/vendor/github.com/google/go-github/github/issues_test.go @@ -11,17 +11,19 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" "time" ) func TestIssuesService_List_all(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} mux.HandleFunc("/issues", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeReactionsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "filter": "all", "state": "closed", @@ -52,12 +54,13 @@ func TestIssuesService_List_all(t *testing.T) { } func TestIssuesService_List_owned(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeReactionsPreview, 
mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} mux.HandleFunc("/user/issues", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeReactionsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `[{"number":1}]`) }) @@ -73,12 +76,13 @@ func TestIssuesService_List_owned(t *testing.T) { } func TestIssuesService_ListByOrg(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} mux.HandleFunc("/orgs/o/issues", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeReactionsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `[{"number":1}]`) }) @@ -94,17 +98,21 @@ func TestIssuesService_ListByOrg(t *testing.T) { } func TestIssuesService_ListByOrg_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListByOrg(context.Background(), "%", nil) testURLParseError(t, err) } func TestIssuesService_ListByRepo(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeIntegrationPreview} mux.HandleFunc("/repos/o/r/issues", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeReactionsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "milestone": "*", "state": "closed", @@ -136,17 +144,21 @@ func TestIssuesService_ListByRepo(t *testing.T) { } func TestIssuesService_ListByRepo_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.ListByRepo(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestIssuesService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} mux.HandleFunc("/repos/o/r/issues/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeReactionsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"number":1, "labels": [{"url": "u", "name": "n", "color": "c"}]}`) }) @@ -169,12 +181,15 @@ func TestIssuesService_Get(t *testing.T) { } func TestIssuesService_Get_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.Get(context.Background(), "%", "r", 1) testURLParseError(t, err) } func TestIssuesService_Create(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &IssueRequest{ @@ -189,6 +204,7 @@ func TestIssuesService_Create(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -208,12 +224,15 @@ func TestIssuesService_Create(t *testing.T) { } func TestIssuesService_Create_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.Create(context.Background(), "%", "r", nil) testURLParseError(t, err) 
} func TestIssuesService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &IssueRequest{Title: String("t")} @@ -223,6 +242,7 @@ func TestIssuesService_Edit(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -242,12 +262,15 @@ func TestIssuesService_Edit(t *testing.T) { } func TestIssuesService_Edit_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Issues.Edit(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestIssuesService_Lock(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/lock", func(w http.ResponseWriter, r *http.Request) { @@ -256,13 +279,30 @@ func TestIssuesService_Lock(t *testing.T) { w.WriteHeader(http.StatusNoContent) }) - if _, err := client.Issues.Lock(context.Background(), "o", "r", 1); err != nil { + if _, err := client.Issues.Lock(context.Background(), "o", "r", 1, nil); err != nil { + t.Errorf("Issues.Lock returned error: %v", err) + } +} + +func TestIssuesService_LockWithReason(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/issues/1/lock", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeLockReasonPreview) + w.WriteHeader(http.StatusNoContent) + }) + + opt := &LockIssueOptions{LockReason: "off-topic"} + + if _, err := client.Issues.Lock(context.Background(), "o", "r", 1, opt); err != nil { t.Errorf("Issues.Lock returned error: %v", err) } } func TestIssuesService_Unlock(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/lock", func(w http.ResponseWriter, r *http.Request) { @@ -275,3 +315,14 @@ func TestIssuesService_Unlock(t *testing.T) { t.Errorf("Issues.Unlock returned error: %v", err) } } + +func TestIsPullRequest(t *testing.T) { + i := new(Issue) + if i.IsPullRequest() == true { + t.Errorf("expected i.IsPullRequest (%v) to return false, got true", i) + } + i.PullRequestLinks = &PullRequestLinks{URL: String("http://example.com")} + if i.IsPullRequest() == false { + t.Errorf("expected i.IsPullRequest (%v) to return true, got false", i) + } +} diff --git a/vendor/github.com/google/go-github/github/issues_timeline.go b/vendor/github.com/google/go-github/github/issues_timeline.go index bc0b108..d0e4a3a 100644 --- a/vendor/github.com/google/go-github/github/issues_timeline.go +++ b/vendor/github.com/google/go-github/github/issues_timeline.go @@ -8,6 +8,7 @@ package github import ( "context" "fmt" + "strings" "time" ) @@ -16,7 +17,7 @@ import ( // It is similar to an IssueEvent but may contain more information. // GitHub API docs: https://developer.github.com/v3/issues/timeline/ type Timeline struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` CommitURL *string `json:"commit_url,omitempty"` @@ -115,14 +116,17 @@ type Timeline struct { Source *Source `json:"source,omitempty"` // An object containing rename details including 'from' and 'to' attributes. // Only provided for 'renamed' events. 
- Rename *Rename `json:"rename,omitempty"` + Rename *Rename `json:"rename,omitempty"` + ProjectCard *ProjectCard `json:"project_card,omitempty"` } // Source represents a reference's source. type Source struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` Actor *User `json:"actor,omitempty"` + Type *string `json:"type,omitempty"` + Issue *Issue `json:"issue,omitempty"` } // ListIssueTimeline lists events for the specified issue. @@ -141,7 +145,8 @@ func (s *IssuesService) ListIssueTimeline(ctx context.Context, owner, repo strin } // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeTimelinePreview) + acceptHeaders := []string{mediaTypeTimelinePreview, mediaTypeProjectCardDetailsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var events []*Timeline resp, err := s.client.Do(ctx, req, &events) diff --git a/vendor/github.com/google/go-github/github/issues_timeline_test.go b/vendor/github.com/google/go-github/github/issues_timeline_test.go index 2fd2210..4d0a41b 100644 --- a/vendor/github.com/google/go-github/github/issues_timeline_test.go +++ b/vendor/github.com/google/go-github/github/issues_timeline_test.go @@ -10,16 +10,18 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" ) func TestIssuesService_ListIssueTimeline(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeTimelinePreview, mediaTypeProjectCardDetailsPreview} mux.HandleFunc("/repos/o/r/issues/1/timeline", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeTimelinePreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "page": "1", "per_page": "2", @@ -33,7 +35,7 @@ func TestIssuesService_ListIssueTimeline(t *testing.T) { t.Errorf("Issues.ListIssueTimeline returned error: %v", err) } - want := []*Timeline{{ID: Int(1)}} + want := []*Timeline{{ID: Int64(1)}} if !reflect.DeepEqual(events, want) { t.Errorf("Issues.ListIssueTimeline = %+v, want %+v", events, want) } diff --git a/vendor/github.com/google/go-github/github/licenses.go b/vendor/github.com/google/go-github/github/licenses.go index e9cd177..1176d3a 100644 --- a/vendor/github.com/google/go-github/github/licenses.go +++ b/vendor/github.com/google/go-github/github/licenses.go @@ -67,9 +67,6 @@ func (s *LicensesService) List(ctx context.Context) ([]*License, *Response, erro return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeLicensesPreview) - var licenses []*License resp, err := s.client.Do(ctx, req, &licenses) if err != nil { @@ -90,9 +87,6 @@ func (s *LicensesService) Get(ctx context.Context, licenseName string) (*License return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeLicensesPreview) - license := new(License) resp, err := s.client.Do(ctx, req, license) if err != nil { diff --git a/vendor/github.com/google/go-github/github/licenses_test.go b/vendor/github.com/google/go-github/github/licenses_test.go index 5c06e65..a71f6a4 100644 --- a/vendor/github.com/google/go-github/github/licenses_test.go +++ b/vendor/github.com/google/go-github/github/licenses_test.go @@ -14,12 +14,11 @@ import ( ) func TestLicensesService_List(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() 
mux.HandleFunc("/licenses", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) fmt.Fprint(w, `[{"key":"mit","name":"MIT","spdx_id":"MIT","url":"https://api.github.com/licenses/mit","featured":true}]`) }) @@ -41,12 +40,11 @@ func TestLicensesService_List(t *testing.T) { } func TestLicensesService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/licenses/mit", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) fmt.Fprint(w, `{"key":"mit","name":"MIT"}`) }) @@ -62,6 +60,9 @@ func TestLicensesService_Get(t *testing.T) { } func TestLicensesService_Get_invalidTemplate(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Licenses.Get(context.Background(), "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/messages.go b/vendor/github.com/google/go-github/github/messages.go index c0f315a..e8c624a 100644 --- a/vendor/github.com/google/go-github/github/messages.go +++ b/vendor/github.com/google/go-github/github/messages.go @@ -20,6 +20,7 @@ import ( "hash" "io/ioutil" "net/http" + "net/url" "strings" ) @@ -40,39 +41,43 @@ const ( var ( // eventTypeMapping maps webhooks types to their corresponding go-github struct types. eventTypeMapping = map[string]string{ - "commit_comment": "CommitCommentEvent", - "create": "CreateEvent", - "delete": "DeleteEvent", - "deployment": "DeploymentEvent", - "deployment_status": "DeploymentStatusEvent", - "fork": "ForkEvent", - "gollum": "GollumEvent", - "installation": "InstallationEvent", - "installation_repositories": "InstallationRepositoriesEvent", - "issue_comment": "IssueCommentEvent", - "issues": "IssuesEvent", - "label": "LabelEvent", - "member": "MemberEvent", - "membership": "MembershipEvent", - "milestone": "MilestoneEvent", - "organization": "OrganizationEvent", - "org_block": "OrgBlockEvent", - "page_build": "PageBuildEvent", - "ping": "PingEvent", - "project": "ProjectEvent", - "project_card": "ProjectCardEvent", - "project_column": "ProjectColumnEvent", - "public": "PublicEvent", - "pull_request_review": "PullRequestReviewEvent", - "pull_request_review_comment": "PullRequestReviewCommentEvent", - "pull_request": "PullRequestEvent", - "push": "PushEvent", - "repository": "RepositoryEvent", - "release": "ReleaseEvent", - "status": "StatusEvent", - "team": "TeamEvent", - "team_add": "TeamAddEvent", - "watch": "WatchEvent", + "check_run": "CheckRunEvent", + "check_suite": "CheckSuiteEvent", + "commit_comment": "CommitCommentEvent", + "create": "CreateEvent", + "delete": "DeleteEvent", + "deployment": "DeploymentEvent", + "deployment_status": "DeploymentStatusEvent", + "fork": "ForkEvent", + "gollum": "GollumEvent", + "installation": "InstallationEvent", + "installation_repositories": "InstallationRepositoriesEvent", + "issue_comment": "IssueCommentEvent", + "issues": "IssuesEvent", + "label": "LabelEvent", + "marketplace_purchase": "MarketplacePurchaseEvent", + "member": "MemberEvent", + "membership": "MembershipEvent", + "milestone": "MilestoneEvent", + "organization": "OrganizationEvent", + "org_block": "OrgBlockEvent", + "page_build": "PageBuildEvent", + "ping": "PingEvent", + "project": "ProjectEvent", + "project_card": "ProjectCardEvent", + "project_column": "ProjectColumnEvent", + "public": "PublicEvent", + "pull_request_review": "PullRequestReviewEvent", + 
"pull_request_review_comment": "PullRequestReviewCommentEvent", + "pull_request": "PullRequestEvent", + "push": "PushEvent", + "repository": "RepositoryEvent", + "repository_vulnerability_alert": "RepositoryVulnerabilityAlertEvent", + "release": "ReleaseEvent", + "status": "StatusEvent", + "team": "TeamEvent", + "team_add": "TeamAddEvent", + "watch": "WatchEvent", } ) @@ -122,7 +127,11 @@ func messageMAC(signature string) ([]byte, func() hash.Hash, error) { // ValidatePayload validates an incoming GitHub Webhook event request // and returns the (JSON) payload. -// secretKey is the GitHub Webhook secret message. +// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". +// If the Content-Type is neither then an error is returned. +// secretToken is the GitHub Webhook secret token. +// If your webhook does not contain a secret token, you can pass nil or an empty slice. +// This is intended for local development purposes only and all webhooks should ideally set up a secret token. // // Example usage: // @@ -132,31 +141,66 @@ func messageMAC(signature string) ([]byte, func() hash.Hash, error) { // // Process payload... // } // -func ValidatePayload(r *http.Request, secretKey []byte) (payload []byte, err error) { - payload, err = ioutil.ReadAll(r.Body) - if err != nil { - return nil, err +func ValidatePayload(r *http.Request, secretToken []byte) (payload []byte, err error) { + var body []byte // Raw body that GitHub uses to calculate the signature. + + switch ct := r.Header.Get("Content-Type"); ct { + case "application/json": + var err error + if body, err = ioutil.ReadAll(r.Body); err != nil { + return nil, err + } + + // If the content type is application/json, + // the JSON payload is just the original body. + payload = body + + case "application/x-www-form-urlencoded": + // payloadFormParam is the name of the form parameter that the JSON payload + // will be in if a webhook has its content type set to application/x-www-form-urlencoded. + const payloadFormParam = "payload" + + var err error + if body, err = ioutil.ReadAll(r.Body); err != nil { + return nil, err + } + + // If the content type is application/x-www-form-urlencoded, + // the JSON payload will be under the "payload" form param. + form, err := url.ParseQuery(string(body)) + if err != nil { + return nil, err + } + payload = []byte(form.Get(payloadFormParam)) + + default: + return nil, fmt.Errorf("Webhook request has unsupported Content-Type %q", ct) } - sig := r.Header.Get(signatureHeader) - if err := validateSignature(sig, payload, secretKey); err != nil { - return nil, err + // Only validate the signature if a secret token exists. This is intended for + // local development only and all webhooks should ideally set up a secret token. + if len(secretToken) > 0 { + sig := r.Header.Get(signatureHeader) + if err := ValidateSignature(sig, body, secretToken); err != nil { + return nil, err + } } + return payload, nil } -// validateSignature validates the signature for the given payload. +// ValidateSignature validates the signature for the given payload. // signature is the GitHub hash signature delivered in the X-Hub-Signature header. // payload is the JSON payload sent by GitHub Webhooks. -// secretKey is the GitHub Webhook secret message. +// secretToken is the GitHub Webhook secret token. 
// // GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github -func validateSignature(signature string, payload, secretKey []byte) error { +func ValidateSignature(signature string, payload, secretToken []byte) error { messageMAC, hashFunc, err := messageMAC(signature) if err != nil { return err } - if !checkMAC(payload, messageMAC, secretKey, hashFunc) { + if !checkMAC(payload, messageMAC, secretToken, hashFunc) { return errors.New("payload signature check failed") } return nil diff --git a/vendor/github.com/google/go-github/github/messages_test.go b/vendor/github.com/google/go-github/github/messages_test.go index 5f348f2..bde9d76 100644 --- a/vendor/github.com/google/go-github/github/messages_test.go +++ b/vendor/github.com/google/go-github/github/messages_test.go @@ -9,7 +9,9 @@ import ( "bytes" "encoding/json" "net/http" + "net/url" "reflect" + "strings" "testing" ) @@ -68,6 +70,7 @@ func TestValidatePayload(t *testing.T) { if test.signature != "" { req.Header.Set(signatureHeader, test.signature) } + req.Header.Set("Content-Type", "application/json") got, err := ValidatePayload(req, secretKey) if err != nil { @@ -82,11 +85,111 @@ func TestValidatePayload(t *testing.T) { } } +func TestValidatePayload_FormGet(t *testing.T) { + payload := `{"yo":true}` + signature := "sha1=3374ef144403e8035423b23b02e2c9d7a4c50368" + secretKey := []byte("0123456789abcdef") + + form := url.Values{} + form.Add("payload", payload) + req, err := http.NewRequest("POST", "http://localhost/event", strings.NewReader(form.Encode())) + if err != nil { + t.Fatalf("NewRequest: %v", err) + } + req.PostForm = form + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set(signatureHeader, signature) + + got, err := ValidatePayload(req, secretKey) + if err != nil { + t.Errorf("ValidatePayload(%#v): err = %v, want nil", payload, err) + } + if string(got) != payload { + t.Errorf("ValidatePayload = %q, want %q", got, payload) + } + + // check that if payload is invalid we get error + req.Header.Set(signatureHeader, "invalid signature") + if _, err = ValidatePayload(req, []byte{0}); err == nil { + t.Error("ValidatePayload = nil, want err") + } +} + +func TestValidatePayload_FormPost(t *testing.T) { + payload := `{"yo":true}` + signature := "sha1=3374ef144403e8035423b23b02e2c9d7a4c50368" + secretKey := []byte("0123456789abcdef") + + form := url.Values{} + form.Set("payload", payload) + buf := bytes.NewBufferString(form.Encode()) + req, err := http.NewRequest("POST", "http://localhost/event", buf) + if err != nil { + t.Fatalf("NewRequest: %v", err) + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set(signatureHeader, signature) + + got, err := ValidatePayload(req, secretKey) + if err != nil { + t.Errorf("ValidatePayload(%#v): err = %v, want nil", payload, err) + } + if string(got) != payload { + t.Errorf("ValidatePayload = %q, want %q", got, payload) + } + + // check that if payload is invalid we get error + req.Header.Set(signatureHeader, "invalid signature") + if _, err = ValidatePayload(req, []byte{0}); err == nil { + t.Error("ValidatePayload = nil, want err") + } +} + +func TestValidatePayload_InvalidContentType(t *testing.T) { + req, err := http.NewRequest("POST", "http://localhost/event", nil) + if err != nil { + t.Fatalf("NewRequest: %v", err) + } + req.Header.Set("Content-Type", "invalid content type") + if _, err = ValidatePayload(req, nil); err == nil { + t.Error("ValidatePayload = nil, want err") + } +} + +func 
TestValidatePayload_NoSecretKey(t *testing.T) { + payload := `{"yo":true}` + + form := url.Values{} + form.Set("payload", payload) + buf := bytes.NewBufferString(form.Encode()) + req, err := http.NewRequest("POST", "http://localhost/event", buf) + if err != nil { + t.Fatalf("NewRequest: %v", err) + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + got, err := ValidatePayload(req, nil) + if err != nil { + t.Errorf("ValidatePayload(%#v): err = %v, want nil", payload, err) + } + if string(got) != payload { + t.Errorf("ValidatePayload = %q, want %q", got, payload) + } +} + func TestParseWebHook(t *testing.T) { tests := []struct { payload interface{} messageType string }{ + { + payload: &CheckRunEvent{}, + messageType: "check_run", + }, + { + payload: &CheckSuiteEvent{}, + messageType: "check_suite", + }, { payload: &CommitCommentEvent{}, messageType: "commit_comment", @@ -136,6 +239,10 @@ func TestParseWebHook(t *testing.T) { payload: &LabelEvent{}, messageType: "label", }, + { + payload: &MarketplacePurchaseEvent{}, + messageType: "marketplace_purchase", + }, { payload: &MemberEvent{}, messageType: "member", @@ -204,6 +311,10 @@ func TestParseWebHook(t *testing.T) { payload: &RepositoryEvent{}, messageType: "repository", }, + { + payload: &RepositoryVulnerabilityAlertEvent{}, + messageType: "repository_vulnerability_alert", + }, { payload: &StatusEvent{}, messageType: "status", diff --git a/vendor/github.com/google/go-github/github/migrations.go b/vendor/github.com/google/go-github/github/migrations.go index 6793269..90cc1fa 100644 --- a/vendor/github.com/google/go-github/github/migrations.go +++ b/vendor/github.com/google/go-github/github/migrations.go @@ -21,7 +21,7 @@ type MigrationService service // Migration represents a GitHub migration (archival). type Migration struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` GUID *string `json:"guid,omitempty"` // State is the current state of a migration. // Possible values are: @@ -128,7 +128,7 @@ func (s *MigrationService) ListMigrations(ctx context.Context, org string) ([]*M // id is the migration ID. // // GitHub API docs: https://developer.github.com/v3/migration/migrations/#get-the-status-of-a-migration -func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id int) (*Migration, *Response, error) { +func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id int64) (*Migration, *Response, error) { u := fmt.Sprintf("orgs/%v/migrations/%v", org, id) req, err := s.client.NewRequest("GET", u, nil) @@ -152,7 +152,7 @@ func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id i // id is the migration ID. // // GitHub API docs: https://developer.github.com/v3/migration/migrations/#download-a-migration-archive -func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, id int) (url string, err error) { +func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, id int64) (url string, err error) { u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) req, err := s.client.NewRequest("GET", u, nil) @@ -189,7 +189,7 @@ func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, // id is the migration ID. 
// // GitHub API docs: https://developer.github.com/v3/migration/migrations/#delete-a-migration-archive -func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id int) (*Response, error) { +func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id int64) (*Response, error) { u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) req, err := s.client.NewRequest("DELETE", u, nil) @@ -209,7 +209,7 @@ func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id i // is complete and you no longer need the source data. // // GitHub API docs: https://developer.github.com/v3/migration/migrations/#unlock-a-repository -func (s *MigrationService) UnlockRepo(ctx context.Context, org string, id int, repo string) (*Response, error) { +func (s *MigrationService) UnlockRepo(ctx context.Context, org string, id int64, repo string) (*Response, error) { u := fmt.Sprintf("orgs/%v/migrations/%v/repos/%v/lock", org, id, repo) req, err := s.client.NewRequest("DELETE", u, nil) diff --git a/vendor/github.com/google/go-github/github/migrations_source_import.go b/vendor/github.com/google/go-github/github/migrations_source_import.go index aa45a5a..fd45e78 100644 --- a/vendor/github.com/google/go-github/github/migrations_source_import.go +++ b/vendor/github.com/google/go-github/github/migrations_source_import.go @@ -117,7 +117,7 @@ func (i Import) String() string { // // GitHub API docs: https://developer.github.com/v3/migration/source_imports/#get-commit-authors type SourceImportAuthor struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` RemoteID *string `json:"remote_id,omitempty"` RemoteName *string `json:"remote_name,omitempty"` Email *string `json:"email,omitempty"` @@ -247,7 +247,7 @@ func (s *MigrationService) CommitAuthors(ctx context.Context, owner, repo string // commits to the repository. 
// // GitHub API docs: https://developer.github.com/v3/migration/source_imports/#map-a-commit-author -func (s *MigrationService) MapCommitAuthor(ctx context.Context, owner, repo string, id int, author *SourceImportAuthor) (*SourceImportAuthor, *Response, error) { +func (s *MigrationService) MapCommitAuthor(ctx context.Context, owner, repo string, id int64, author *SourceImportAuthor) (*SourceImportAuthor, *Response, error) { u := fmt.Sprintf("repos/%v/%v/import/authors/%v", owner, repo, id) req, err := s.client.NewRequest("PATCH", u, author) if err != nil { diff --git a/vendor/github.com/google/go-github/github/migrations_source_import_test.go b/vendor/github.com/google/go-github/github/migrations_source_import_test.go index 67b98fb..e9e9be0 100644 --- a/vendor/github.com/google/go-github/github/migrations_source_import_test.go +++ b/vendor/github.com/google/go-github/github/migrations_source_import_test.go @@ -15,7 +15,7 @@ import ( ) func TestMigrationService_StartImport(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Import{ @@ -50,7 +50,7 @@ func TestMigrationService_StartImport(t *testing.T) { } func TestMigrationService_ImportProgress(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/import", func(w http.ResponseWriter, r *http.Request) { @@ -70,7 +70,7 @@ func TestMigrationService_ImportProgress(t *testing.T) { } func TestMigrationService_UpdateImport(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Import{ @@ -105,7 +105,7 @@ func TestMigrationService_UpdateImport(t *testing.T) { } func TestMigrationService_CommitAuthors(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/import/authors", func(w http.ResponseWriter, r *http.Request) { @@ -119,8 +119,8 @@ func TestMigrationService_CommitAuthors(t *testing.T) { t.Errorf("CommitAuthors returned error: %v", err) } want := []*SourceImportAuthor{ - {ID: Int(1), Name: String("a")}, - {ID: Int(2), Name: String("b")}, + {ID: Int64(1), Name: String("a")}, + {ID: Int64(2), Name: String("b")}, } if !reflect.DeepEqual(got, want) { t.Errorf("CommitAuthors = %+v, want %+v", got, want) @@ -128,7 +128,7 @@ func TestMigrationService_CommitAuthors(t *testing.T) { } func TestMigrationService_MapCommitAuthor(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &SourceImportAuthor{Name: String("n"), Email: String("e")} @@ -150,14 +150,14 @@ func TestMigrationService_MapCommitAuthor(t *testing.T) { if err != nil { t.Errorf("MapCommitAuthor returned error: %v", err) } - want := &SourceImportAuthor{ID: Int(1)} + want := &SourceImportAuthor{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("MapCommitAuthor = %+v, want %+v", got, want) } } func TestMigrationService_SetLFSPreference(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Import{UseLFS: String("opt_in")} @@ -187,7 +187,7 @@ func TestMigrationService_SetLFSPreference(t *testing.T) { } func TestMigrationService_LargeFiles(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/import/large_files", func(w http.ResponseWriter, r *http.Request) { @@ -210,7 +210,7 @@ func TestMigrationService_LargeFiles(t *testing.T) { } func TestMigrationService_CancelImport(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/import", 
func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/migrations_test.go b/vendor/github.com/google/go-github/github/migrations_test.go index 7e0ac0f..687f89a 100644 --- a/vendor/github.com/google/go-github/github/migrations_test.go +++ b/vendor/github.com/google/go-github/github/migrations_test.go @@ -15,7 +15,7 @@ import ( ) func TestMigrationService_StartMigration(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations", func(w http.ResponseWriter, r *http.Request) { @@ -40,7 +40,7 @@ func TestMigrationService_StartMigration(t *testing.T) { } func TestMigrationService_ListMigrations(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations", func(w http.ResponseWriter, r *http.Request) { @@ -61,7 +61,7 @@ func TestMigrationService_ListMigrations(t *testing.T) { } func TestMigrationService_MigrationStatus(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations/1", func(w http.ResponseWriter, r *http.Request) { @@ -82,7 +82,7 @@ func TestMigrationService_MigrationStatus(t *testing.T) { } func TestMigrationService_MigrationArchiveURL(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations/1/archive", func(w http.ResponseWriter, r *http.Request) { @@ -108,7 +108,7 @@ func TestMigrationService_MigrationArchiveURL(t *testing.T) { } func TestMigrationService_DeleteMigration(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations/1/archive", func(w http.ResponseWriter, r *http.Request) { @@ -124,7 +124,7 @@ func TestMigrationService_DeleteMigration(t *testing.T) { } func TestMigrationService_UnlockRepo(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/migrations/1/repos/r/lock", func(w http.ResponseWriter, r *http.Request) { @@ -159,7 +159,7 @@ var migrationJSON = []byte(`{ }`) var wantMigration = &Migration{ - ID: Int(79), + ID: Int64(79), GUID: String("0b989ba4-242f-11e5-81e1-c7b6966d2516"), State: String("pending"), LockRepositories: Bool(true), @@ -169,7 +169,7 @@ var wantMigration = &Migration{ UpdatedAt: String("2015-07-06T15:33:38-07:00"), Repositories: []*Repository{ { - ID: Int(1296269), + ID: Int64(1296269), Name: String("Hello-World"), FullName: String("octocat/Hello-World"), Description: String("This your first repo!"), diff --git a/vendor/github.com/google/go-github/github/migrations_user.go b/vendor/github.com/google/go-github/github/migrations_user.go new file mode 100644 index 0000000..d45555f --- /dev/null +++ b/vendor/github.com/google/go-github/github/migrations_user.go @@ -0,0 +1,214 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "errors" + "fmt" + "net/http" +) + +// UserMigration represents a GitHub migration (archival). +type UserMigration struct { + ID *int64 `json:"id,omitempty"` + GUID *string `json:"guid,omitempty"` + // State is the current state of a migration. 
+ // Possible values are: + // "pending" which means the migration hasn't started yet, + // "exporting" which means the migration is in progress, + // "exported" which means the migration finished successfully, or + // "failed" which means the migration failed. + State *string `json:"state,omitempty"` + // LockRepositories indicates whether repositories are locked (to prevent + // manipulation) while migrating data. + LockRepositories *bool `json:"lock_repositories,omitempty"` + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` + URL *string `json:"url,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + UpdatedAt *string `json:"updated_at,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +func (m UserMigration) String() string { + return Stringify(m) +} + +// UserMigrationOptions specifies the optional parameters to Migration methods. +type UserMigrationOptions struct { + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories bool + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments bool +} + +// startUserMigration represents the body of a StartMigration request. +type startUserMigration struct { + // Repositories is a slice of repository names to migrate. + Repositories []string `json:"repositories,omitempty"` + + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories *bool `json:"lock_repositories,omitempty"` + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` +} + +// StartUserMigration starts the generation of a migration archive. +// repos is a slice of repository names to migrate. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#start-a-user-migration +func (s *MigrationService) StartUserMigration(ctx context.Context, repos []string, opt *UserMigrationOptions) (*UserMigration, *Response, error) { + u := "user/migrations" + + body := &startUserMigration{Repositories: repos} + if opt != nil { + body.LockRepositories = Bool(opt.LockRepositories) + body.ExcludeAttachments = Bool(opt.ExcludeAttachments) + } + + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// ListUserMigrations lists the most recent migrations. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#get-a-list-of-user-migrations +func (s *MigrationService) ListUserMigrations(ctx context.Context) ([]*UserMigration, *Response, error) { + u := "user/migrations" + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeMigrationsPreview) + + var m []*UserMigration + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// UserMigrationStatus gets the status of a specific migration archive. +// id is the migration ID. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#get-the-status-of-a-user-migration +func (s *MigrationService) UserMigrationStatus(ctx context.Context, id int64) (*UserMigration, *Response, error) { + u := fmt.Sprintf("user/migrations/%v", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// UserMigrationArchiveURL gets the URL for a specific migration archive. +// id is the migration ID. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#download-a-user-migration-archive +func (s *MigrationService) UserMigrationArchiveURL(ctx context.Context, id int64) (string, error) { + url := fmt.Sprintf("user/migrations/%v/archive", id) + + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return "", err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + + var loc string + originalRedirect := s.client.client.CheckRedirect + s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + loc = req.URL.String() + return http.ErrUseLastResponse + } + defer func() { + s.client.client.CheckRedirect = originalRedirect + }() + resp, err := s.client.Do(ctx, req, m) + if err == nil { + return "", errors.New("expected redirect, none provided") + } + loc = resp.Header.Get("Location") + return loc, nil +} + +// DeleteUserMigration will delete a previous migration archive. +// id is the migration ID. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#delete-a-user-migration-archive +func (s *MigrationService) DeleteUserMigration(ctx context.Context, id int64) (*Response, error) { + url := fmt.Sprintf("user/migrations/%v/archive", id) + + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} + +// UnlockUserRepo will unlock a repo that was locked for migration. +// id is migration ID. +// You should unlock each migrated repository and delete them when the migration +// is complete and you no longer need the source data. +// +// GitHub API docs: https://developer.github.com/v3/migrations/users/#unlock-a-user-repository +func (s *MigrationService) UnlockUserRepo(ctx context.Context, id int64, repo string) (*Response, error) { + url := fmt.Sprintf("user/migrations/%v/repos/%v/lock", id, repo) + + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/migrations_user_test.go b/vendor/github.com/google/go-github/github/migrations_user_test.go new file mode 100644 index 0000000..c271e02 --- /dev/null +++ b/vendor/github.com/google/go-github/github/migrations_user_test.go @@ -0,0 +1,197 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "reflect" + "strings" + "testing" +) + +func TestMigrationService_StartUserMigration(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + w.WriteHeader(http.StatusCreated) + w.Write(userMigrationJSON) + }) + + opt := &UserMigrationOptions{ + LockRepositories: true, + ExcludeAttachments: false, + } + + got, _, err := client.Migrations.StartUserMigration(context.Background(), []string{"r"}, opt) + if err != nil { + t.Errorf("StartUserMigration returned error: %v", err) + } + + want := wantUserMigration + if !reflect.DeepEqual(want, got) { + t.Errorf("StartUserMigration = %v, want = %v", got, want) + } +} + +func TestMigrationService_ListUserMigrations(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + w.WriteHeader(http.StatusOK) + w.Write([]byte(fmt.Sprintf("[%s]", userMigrationJSON))) + }) + + got, _, err := client.Migrations.ListUserMigrations(context.Background()) + if err != nil { + t.Errorf("ListUserMigrations returned error %v", err) + } + + want := []*UserMigration{wantUserMigration} + if !reflect.DeepEqual(want, got) { + t.Errorf("ListUserMigrations = %v, want = %v", got, want) + } +} + +func TestMigrationService_UserMigrationStatus(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + w.WriteHeader(http.StatusOK) + w.Write(userMigrationJSON) + }) + + got, _, err := client.Migrations.UserMigrationStatus(context.Background(), 1) + if err != nil { + t.Errorf("UserMigrationStatus returned error %v", err) + } + + want := wantUserMigration + if !reflect.DeepEqual(want, got) { + t.Errorf("UserMigrationStatus = %v, want = %v", got, want) + } +} + +func TestMigrationService_UserMigrationArchiveURL(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations/1/archive", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + http.Redirect(w, r, "/go-github", http.StatusFound) + }) + + mux.HandleFunc("/go-github", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + + w.WriteHeader(http.StatusOK) + }) + + got, err := client.Migrations.UserMigrationArchiveURL(context.Background(), 1) + if err != nil { + t.Errorf("UserMigrationArchiveURL returned error %v", err) + } + + want := "/go-github" + if !strings.HasSuffix(got, want) { + 
t.Errorf("UserMigrationArchiveURL = %v, want = %v", got, want) + } +} + +func TestMigrationService_DeleteUserMigration(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations/1/archive", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + w.WriteHeader(http.StatusNoContent) + }) + + got, err := client.Migrations.DeleteUserMigration(context.Background(), 1) + if err != nil { + t.Errorf("DeleteUserMigration returned error %v", err) + } + + if got.StatusCode != http.StatusNoContent { + t.Errorf("DeleteUserMigration returned status = %v, want = %v", got.StatusCode, http.StatusNoContent) + } +} + +func TestMigrationService_UnlockUserRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/migrations/1/repos/r/lock", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeMigrationsPreview) + + w.WriteHeader(http.StatusNoContent) + }) + + got, err := client.Migrations.UnlockUserRepo(context.Background(), 1, "r") + if err != nil { + t.Errorf("UnlockUserRepo returned error %v", err) + } + + if got.StatusCode != http.StatusNoContent { + t.Errorf("UnlockUserRepo returned status = %v, want = %v", got.StatusCode, http.StatusNoContent) + } +} + +var userMigrationJSON = []byte(`{ + "id": 79, + "guid": "0b989ba4-242f-11e5-81e1-c7b6966d2516", + "state": "pending", + "lock_repositories": true, + "exclude_attachments": false, + "url": "https://api.github.com/orgs/octo-org/migrations/79", + "created_at": "2015-07-06T15:33:38-07:00", + "updated_at": "2015-07-06T15:33:38-07:00", + "repositories": [ + { + "id": 1296269, + "name": "Hello-World", + "full_name": "octocat/Hello-World", + "description": "This your first repo!" + } + ] +}`) + +var wantUserMigration = &UserMigration{ + ID: Int64(79), + GUID: String("0b989ba4-242f-11e5-81e1-c7b6966d2516"), + State: String("pending"), + LockRepositories: Bool(true), + ExcludeAttachments: Bool(false), + URL: String("https://api.github.com/orgs/octo-org/migrations/79"), + CreatedAt: String("2015-07-06T15:33:38-07:00"), + UpdatedAt: String("2015-07-06T15:33:38-07:00"), + Repositories: []*Repository{ + { + ID: Int64(1296269), + Name: String("Hello-World"), + FullName: String("octocat/Hello-World"), + Description: String("This your first repo!"), + }, + }, +} diff --git a/vendor/github.com/google/go-github/github/misc.go b/vendor/github.com/google/go-github/github/misc.go index 5b8082d..e9b0ea2 100644 --- a/vendor/github.com/google/go-github/github/misc.go +++ b/vendor/github.com/google/go-github/github/misc.go @@ -158,6 +158,10 @@ type APIMeta struct { // An array of IP addresses in CIDR format specifying the addresses // which serve GitHub Pages websites. Pages []string `json:"pages,omitempty"` + + // An Array of IP addresses specifying the addresses that source imports + // will originate from on GitHub.com. + Importer []string `json:"importer,omitempty"` } // APIMeta returns information about GitHub.com, the service. 
Or, if you access diff --git a/vendor/github.com/google/go-github/github/misc_test.go b/vendor/github.com/google/go-github/github/misc_test.go index f2b2df4..b7144f9 100644 --- a/vendor/github.com/google/go-github/github/misc_test.go +++ b/vendor/github.com/google/go-github/github/misc_test.go @@ -15,7 +15,7 @@ import ( ) func TestMarkdown(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &markdownRequest{ @@ -48,7 +48,7 @@ func TestMarkdown(t *testing.T) { } func TestListEmojis(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/emojis", func(w http.ResponseWriter, r *http.Request) { @@ -68,7 +68,7 @@ func TestListEmojis(t *testing.T) { } func TestListCodesOfConduct(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/codes_of_conduct", func(w http.ResponseWriter, r *http.Request) { @@ -98,7 +98,7 @@ func TestListCodesOfConduct(t *testing.T) { } func TestGetCodeOfConduct(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/codes_of_conduct/k", func(w http.ResponseWriter, r *http.Request) { @@ -129,12 +129,12 @@ func TestGetCodeOfConduct(t *testing.T) { } func TestAPIMeta(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/meta", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - fmt.Fprint(w, `{"hooks":["h"], "git":["g"], "pages":["p"], "verifiable_password_authentication": true}`) + fmt.Fprint(w, `{"hooks":["h"], "git":["g"], "pages":["p"], "importer":["i"], "verifiable_password_authentication": true}`) }) meta, _, err := client.APIMeta(context.Background()) @@ -143,9 +143,11 @@ func TestAPIMeta(t *testing.T) { } want := &APIMeta{ - Hooks: []string{"h"}, - Git: []string{"g"}, - Pages: []string{"p"}, + Hooks: []string{"h"}, + Git: []string{"g"}, + Pages: []string{"p"}, + Importer: []string{"i"}, + VerifiablePasswordAuthentication: Bool(true), } if !reflect.DeepEqual(want, meta) { @@ -154,7 +156,7 @@ func TestAPIMeta(t *testing.T) { } func TestOctocat(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := "input" @@ -178,7 +180,7 @@ func TestOctocat(t *testing.T) { } func TestZen(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() output := "sample text" @@ -200,7 +202,7 @@ func TestZen(t *testing.T) { } func TestListServiceHooks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/hooks", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/orgs.go b/vendor/github.com/google/go-github/github/orgs.go index e6947c9..c70039b 100644 --- a/vendor/github.com/google/go-github/github/orgs.go +++ b/vendor/github.com/google/go-github/github/orgs.go @@ -19,30 +19,42 @@ type OrganizationsService service // Organization represents a GitHub organization account. 
type Organization struct { - Login *string `json:"login,omitempty"` - ID *int `json:"id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - Description *string `json:"description,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` - TotalPrivateRepos *int `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - BillingEmail *string `json:"billing_email,omitempty"` - Type *string `json:"type,omitempty"` - Plan *Plan `json:"plan,omitempty"` + Login *string `json:"login,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Name *string `json:"name,omitempty"` + Company *string `json:"company,omitempty"` + Blog *string `json:"blog,omitempty"` + Location *string `json:"location,omitempty"` + Email *string `json:"email,omitempty"` + Description *string `json:"description,omitempty"` + PublicRepos *int `json:"public_repos,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` + Followers *int `json:"followers,omitempty"` + Following *int `json:"following,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + TotalPrivateRepos *int `json:"total_private_repos,omitempty"` + OwnedPrivateRepos *int `json:"owned_private_repos,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + DiskUsage *int `json:"disk_usage,omitempty"` + Collaborators *int `json:"collaborators,omitempty"` + BillingEmail *string `json:"billing_email,omitempty"` + Type *string `json:"type,omitempty"` + Plan *Plan `json:"plan,omitempty"` + TwoFactorRequirementEnabled *bool `json:"two_factor_requirement_enabled,omitempty"` + + // DefaultRepoPermission can be one of: "read", "write", "admin", or "none". (Default: "read"). + // It is only used in OrganizationsService.Edit. + DefaultRepoPermission *string `json:"default_repository_permission,omitempty"` + // DefaultRepoSettings can be one of: "read", "write", "admin", or "none". (Default: "read"). + // It is only used in OrganizationsService.Get. + DefaultRepoSettings *string `json:"default_repository_settings,omitempty"` + + // MembersCanCreateRepos default value is true and is only used in Organizations.Edit. + MembersCanCreateRepos *bool `json:"members_can_create_repositories,omitempty"` // API URLs URL *string `json:"url,omitempty"` @@ -74,8 +86,11 @@ func (p Plan) String() string { // OrganizationsService.ListAll method. type OrganizationsListOptions struct { // Since filters Organizations by ID. - Since int `url:"since,omitempty"` + Since int64 `url:"since,omitempty"` + // Note: Pagination is powered exclusively by the Since parameter, + // ListOptions.Page has no effect. + // ListOptions.PerPage controls an undocumented GitHub API parameter. 
ListOptions } @@ -157,7 +172,7 @@ func (s *OrganizationsService) Get(ctx context.Context, org string) (*Organizati // GetByID fetches an organization. // // Note: GetByID uses the undocumented GitHub API endpoint /organizations/:id. -func (s *OrganizationsService) GetByID(ctx context.Context, id int) (*Organization, *Response, error) { +func (s *OrganizationsService) GetByID(ctx context.Context, id int64) (*Organization, *Response, error) { u := fmt.Sprintf("organizations/%d", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/orgs_hooks.go b/vendor/github.com/google/go-github/github/orgs_hooks.go index 4fc692e..5357eb8 100644 --- a/vendor/github.com/google/go-github/github/orgs_hooks.go +++ b/vendor/github.com/google/go-github/github/orgs_hooks.go @@ -37,7 +37,7 @@ func (s *OrganizationsService) ListHooks(ctx context.Context, org string, opt *L // GetHook returns a single specified Hook. // // GitHub API docs: https://developer.github.com/v3/orgs/hooks/#get-single-hook -func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int) (*Hook, *Response, error) { +func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int64) (*Hook, *Response, error) { u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -49,12 +49,23 @@ func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int) } // CreateHook creates a Hook for the specified org. -// Name and Config are required fields. +// Config is a required field. +// +// Note that only a subset of the hook fields are used and hook must +// not be nil. // // GitHub API docs: https://developer.github.com/v3/orgs/hooks/#create-a-hook func (s *OrganizationsService) CreateHook(ctx context.Context, org string, hook *Hook) (*Hook, *Response, error) { u := fmt.Sprintf("orgs/%v/hooks", org) - req, err := s.client.NewRequest("POST", u, hook) + + hookReq := &createHookRequest{ + Name: "web", + Events: hook.Events, + Active: hook.Active, + Config: hook.Config, + } + + req, err := s.client.NewRequest("POST", u, hookReq) if err != nil { return nil, nil, err } @@ -71,7 +82,7 @@ func (s *OrganizationsService) CreateHook(ctx context.Context, org string, hook // EditHook updates a specified Hook. // // GitHub API docs: https://developer.github.com/v3/orgs/hooks/#edit-a-hook -func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int, hook *Hook) (*Hook, *Response, error) { +func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int64, hook *Hook) (*Hook, *Response, error) { u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) req, err := s.client.NewRequest("PATCH", u, hook) if err != nil { @@ -85,7 +96,7 @@ func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int, // PingHook triggers a 'ping' event to be sent to the Hook. // // GitHub API docs: https://developer.github.com/v3/orgs/hooks/#ping-a-hook -func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int) (*Response, error) { +func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int64) (*Response, error) { u := fmt.Sprintf("orgs/%v/hooks/%d/pings", org, id) req, err := s.client.NewRequest("POST", u, nil) if err != nil { @@ -97,7 +108,7 @@ func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int) // DeleteHook deletes a specified Hook. 
// // GitHub API docs: https://developer.github.com/v3/orgs/hooks/#delete-a-hook -func (s *OrganizationsService) DeleteHook(ctx context.Context, org string, id int) (*Response, error) { +func (s *OrganizationsService) DeleteHook(ctx context.Context, org string, id int64) (*Response, error) { u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/orgs_hooks_test.go b/vendor/github.com/google/go-github/github/orgs_hooks_test.go index 69e75f9..5ed2e16 100644 --- a/vendor/github.com/google/go-github/github/orgs_hooks_test.go +++ b/vendor/github.com/google/go-github/github/orgs_hooks_test.go @@ -15,7 +15,7 @@ import ( ) func TestOrganizationsService_ListHooks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/hooks", func(w http.ResponseWriter, r *http.Request) { @@ -31,19 +31,52 @@ func TestOrganizationsService_ListHooks(t *testing.T) { t.Errorf("Organizations.ListHooks returned error: %v", err) } - want := []*Hook{{ID: Int(1)}, {ID: Int(2)}} + want := []*Hook{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(hooks, want) { t.Errorf("Organizations.ListHooks returned %+v, want %+v", hooks, want) } } func TestOrganizationsService_ListHooks_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.ListHooks(context.Background(), "%", nil) testURLParseError(t, err) } +func TestOrganizationsService_CreateHook(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &Hook{CreatedAt: &referenceTime} + + mux.HandleFunc("/orgs/o/hooks", func(w http.ResponseWriter, r *http.Request) { + v := new(createHookRequest) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + want := &createHookRequest{Name: "web"} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) + } + + fmt.Fprint(w, `{"id":1}`) + }) + + hook, _, err := client.Organizations.CreateHook(context.Background(), "o", input) + if err != nil { + t.Errorf("Organizations.CreateHook returned error: %v", err) + } + + want := &Hook{ID: Int64(1)} + if !reflect.DeepEqual(hook, want) { + t.Errorf("Organizations.CreateHook returned %+v, want %+v", hook, want) + } +} + func TestOrganizationsService_GetHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/hooks/1", func(w http.ResponseWriter, r *http.Request) { @@ -56,22 +89,25 @@ func TestOrganizationsService_GetHook(t *testing.T) { t.Errorf("Organizations.GetHook returned error: %v", err) } - want := &Hook{ID: Int(1)} + want := &Hook{ID: Int64(1)} if !reflect.DeepEqual(hook, want) { t.Errorf("Organizations.GetHook returned %+v, want %+v", hook, want) } } func TestOrganizationsService_GetHook_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.GetHook(context.Background(), "%", 1) testURLParseError(t, err) } func TestOrganizationsService_EditHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &Hook{Name: String("t")} + input := &Hook{} mux.HandleFunc("/orgs/o/hooks/1", func(w http.ResponseWriter, r *http.Request) { v := new(Hook) @@ -90,19 +126,22 @@ func TestOrganizationsService_EditHook(t *testing.T) { t.Errorf("Organizations.EditHook returned error: %v", err) } - want := &Hook{ID: Int(1)} + want := &Hook{ID: Int64(1)} if 
!reflect.DeepEqual(hook, want) { t.Errorf("Organizations.EditHook returned %+v, want %+v", hook, want) } } func TestOrganizationsService_EditHook_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.EditHook(context.Background(), "%", 1, nil) testURLParseError(t, err) } func TestOrganizationsService_PingHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/hooks/1/pings", func(w http.ResponseWriter, r *http.Request) { @@ -116,7 +155,7 @@ func TestOrganizationsService_PingHook(t *testing.T) { } func TestOrganizationsService_DeleteHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/hooks/1", func(w http.ResponseWriter, r *http.Request) { @@ -130,6 +169,9 @@ func TestOrganizationsService_DeleteHook(t *testing.T) { } func TestOrganizationsService_DeleteHook_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Organizations.DeleteHook(context.Background(), "%", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/orgs_members.go b/vendor/github.com/google/go-github/github/orgs_members.go index f1209c7..d184359 100644 --- a/vendor/github.com/google/go-github/github/orgs_members.go +++ b/vendor/github.com/google/go-github/github/orgs_members.go @@ -59,7 +59,7 @@ type ListMembersOptions struct { // Possible values are: // all - all members of the organization, regardless of role // admin - organization owners - // member - non-organization members + // member - non-owner organization members // // Default is "all". Role string `url:"role,omitempty"` @@ -278,7 +278,7 @@ func (s *OrganizationsService) RemoveOrgMembership(ctx context.Context, user, or // ListPendingOrgInvitations returns a list of pending invitations. // // GitHub API docs: https://developer.github.com/v3/orgs/members/#list-pending-organization-invitations -func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, org int, opt *ListOptions) ([]*Invitation, *Response, error) { +func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, org string, opt *ListOptions) ([]*Invitation, *Response, error) { u := fmt.Sprintf("orgs/%v/invitations", org) u, err := addOptions(u, opt) if err != nil { @@ -297,3 +297,74 @@ func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, or } return pendingInvitations, resp, nil } + +// CreateOrgInvitationOptions specifies the parameters to the OrganizationService.Invite +// method. +type CreateOrgInvitationOptions struct { + // GitHub user ID for the person you are inviting. Not required if you provide Email. + InviteeID *int64 `json:"invitee_id,omitempty"` + // Email address of the person you are inviting, which can be an existing GitHub user. + // Not required if you provide InviteeID + Email *string `json:"email,omitempty"` + // Specify role for new member. Can be one of: + // * admin - Organization owners with full administrative rights to the + // organization and complete access to all repositories and teams. + // * direct_member - Non-owner organization members with ability to see + // other members and join teams by invitation. + // * billing_manager - Non-owner organization members with ability to + // manage the billing settings of your organization. + // Default is "direct_member". 
+ Role *string `json:"role"` + TeamID []int64 `json:"team_ids"` +} + +// CreateOrgInvitation invites people to an organization by using their GitHub user ID or their email address. +// In order to create invitations in an organization, +// the authenticated user must be an organization owner. +// +// https://developer.github.com/v3/orgs/members/#create-organization-invitation +func (s *OrganizationsService) CreateOrgInvitation(ctx context.Context, org string, opt *CreateOrgInvitationOptions) (*Invitation, *Response, error) { + u := fmt.Sprintf("orgs/%v/invitations", org) + + req, err := s.client.NewRequest("POST", u, opt) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeOrganizationInvitationPreview) + + var invitation *Invitation + resp, err := s.client.Do(ctx, req, &invitation) + if err != nil { + return nil, resp, err + } + return invitation, resp, nil +} + +// ListOrgInvitationTeams lists all teams associated with an invitation. In order to see invitations in an organization, +// the authenticated user must be an organization owner. +// +// GitHub API docs: https://developer.github.com/v3/orgs/members/#list-organization-invitation-teams +func (s *OrganizationsService) ListOrgInvitationTeams(ctx context.Context, org, invitationID string, opt *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/invitations/%v/teams", org, invitationID) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeOrganizationInvitationPreview) + + var orgInvitationTeams []*Team + resp, err := s.client.Do(ctx, req, &orgInvitationTeams) + if err != nil { + return nil, resp, err + } + return orgInvitationTeams, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/orgs_members_test.go b/vendor/github.com/google/go-github/github/orgs_members_test.go index a17c518..e524d03 100644 --- a/vendor/github.com/google/go-github/github/orgs_members_test.go +++ b/vendor/github.com/google/go-github/github/orgs_members_test.go @@ -16,7 +16,7 @@ import ( ) func TestOrganizationsService_ListMembers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/members", func(w http.ResponseWriter, r *http.Request) { @@ -40,19 +40,22 @@ func TestOrganizationsService_ListMembers(t *testing.T) { t.Errorf("Organizations.ListMembers returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(members, want) { t.Errorf("Organizations.ListMembers returned %+v, want %+v", members, want) } } func TestOrganizationsService_ListMembers_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.ListMembers(context.Background(), "%", nil) testURLParseError(t, err) } func TestOrganizationsService_ListMembers_public(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/public_members", func(w http.ResponseWriter, r *http.Request) { @@ -66,14 +69,14 @@ func TestOrganizationsService_ListMembers_public(t *testing.T) { t.Errorf("Organizations.ListMembers returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(members, want) { 
t.Errorf("Organizations.ListMembers returned %+v, want %+v", members, want) } } func TestOrganizationsService_IsMember(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/members/u", func(w http.ResponseWriter, r *http.Request) { @@ -92,7 +95,7 @@ func TestOrganizationsService_IsMember(t *testing.T) { // ensure that a 404 response is interpreted as "false" and not an error func TestOrganizationsService_IsMember_notMember(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/members/u", func(w http.ResponseWriter, r *http.Request) { @@ -112,7 +115,7 @@ func TestOrganizationsService_IsMember_notMember(t *testing.T) { // ensure that a 400 response is interpreted as an actual error, and not simply // as "false" like the above case of a 404 func TestOrganizationsService_IsMember_error(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/members/u", func(w http.ResponseWriter, r *http.Request) { @@ -130,12 +133,15 @@ func TestOrganizationsService_IsMember_error(t *testing.T) { } func TestOrganizationsService_IsMember_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.IsMember(context.Background(), "%", "u") testURLParseError(t, err) } func TestOrganizationsService_IsPublicMember(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/public_members/u", func(w http.ResponseWriter, r *http.Request) { @@ -154,7 +160,7 @@ func TestOrganizationsService_IsPublicMember(t *testing.T) { // ensure that a 404 response is interpreted as "false" and not an error func TestOrganizationsService_IsPublicMember_notMember(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/public_members/u", func(w http.ResponseWriter, r *http.Request) { @@ -174,7 +180,7 @@ func TestOrganizationsService_IsPublicMember_notMember(t *testing.T) { // ensure that a 400 response is interpreted as an actual error, and not simply // as "false" like the above case of a 404 func TestOrganizationsService_IsPublicMember_error(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/public_members/u", func(w http.ResponseWriter, r *http.Request) { @@ -192,12 +198,15 @@ func TestOrganizationsService_IsPublicMember_error(t *testing.T) { } func TestOrganizationsService_IsPublicMember_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.IsPublicMember(context.Background(), "%", "u") testURLParseError(t, err) } func TestOrganizationsService_RemoveMember(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/members/u", func(w http.ResponseWriter, r *http.Request) { @@ -211,12 +220,15 @@ func TestOrganizationsService_RemoveMember(t *testing.T) { } func TestOrganizationsService_RemoveMember_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Organizations.RemoveMember(context.Background(), "%", "u") testURLParseError(t, err) } func TestOrganizationsService_ListOrgMemberships(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/memberships/orgs", func(w http.ResponseWriter, r *http.Request) { @@ -244,7 +256,7 @@ func TestOrganizationsService_ListOrgMemberships(t *testing.T) { 
} func TestOrganizationsService_GetOrgMembership_AuthenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/memberships/orgs/o", func(w http.ResponseWriter, r *http.Request) { @@ -264,7 +276,7 @@ func TestOrganizationsService_GetOrgMembership_AuthenticatedUser(t *testing.T) { } func TestOrganizationsService_GetOrgMembership_SpecifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/memberships/u", func(w http.ResponseWriter, r *http.Request) { @@ -284,7 +296,7 @@ func TestOrganizationsService_GetOrgMembership_SpecifiedUser(t *testing.T) { } func TestOrganizationsService_EditOrgMembership_AuthenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Membership{State: String("active")} @@ -313,7 +325,7 @@ func TestOrganizationsService_EditOrgMembership_AuthenticatedUser(t *testing.T) } func TestOrganizationsService_EditOrgMembership_SpecifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Membership{State: String("active")} @@ -342,7 +354,7 @@ func TestOrganizationsService_EditOrgMembership_SpecifiedUser(t *testing.T) { } func TestOrganizationsService_RemoveOrgMembership(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/memberships/u", func(w http.ResponseWriter, r *http.Request) { @@ -357,10 +369,10 @@ func TestOrganizationsService_RemoveOrgMembership(t *testing.T) { } func TestOrganizationsService_ListPendingOrgInvitations(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - mux.HandleFunc("/orgs/1/invitations", func(w http.ResponseWriter, r *http.Request) { + mux.HandleFunc("/orgs/o/invitations", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testFormValues(t, r, values{"page": "1"}) fmt.Fprint(w, `[ @@ -388,28 +400,30 @@ func TestOrganizationsService_ListPendingOrgInvitations(t *testing.T) { "received_events_url": "https://api.github.com/users/other_user/received_events/privacy", "type": "User", "site_admin": false - } + }, + "team_count": 2, + "invitation_team_url": "https://api.github.com/organizations/2/invitations/1/teams" } ]`) }) opt := &ListOptions{Page: 1} - invitations, _, err := client.Organizations.ListPendingOrgInvitations(context.Background(), 1, opt) + invitations, _, err := client.Organizations.ListPendingOrgInvitations(context.Background(), "o", opt) if err != nil { t.Errorf("Organizations.ListPendingOrgInvitations returned error: %v", err) } - createdAt := time.Date(2017, 01, 21, 0, 0, 0, 0, time.UTC) + createdAt := time.Date(2017, time.January, 21, 0, 0, 0, 0, time.UTC) want := []*Invitation{ { - ID: Int(1), + ID: Int64(1), Login: String("monalisa"), Email: String("octocat@github.com"), Role: String("direct_member"), CreatedAt: &createdAt, Inviter: &User{ Login: String("other_user"), - ID: Int(1), + ID: Int64(1), AvatarURL: String("https://github.com/images/error/other_user_happy.gif"), GravatarID: String(""), URL: String("https://api.github.com/users/other_user"), @@ -426,9 +440,96 @@ func TestOrganizationsService_ListPendingOrgInvitations(t *testing.T) { Type: String("User"), SiteAdmin: Bool(false), }, + TeamCount: Int(2), + InvitationTeamURL: String("https://api.github.com/organizations/2/invitations/1/teams"), }} if !reflect.DeepEqual(invitations, want) { t.Errorf("Organizations.ListPendingOrgInvitations returned %+v, want %+v", invitations, 
want) } } + +func TestOrganizationsService_CreateOrgInvitation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + input := &CreateOrgInvitationOptions{ + Email: String("octocat@github.com"), + Role: String("direct_member"), + TeamID: []int64{ + 12, + 26, + }, + } + + mux.HandleFunc("/orgs/o/invitations", func(w http.ResponseWriter, r *http.Request) { + v := new(CreateOrgInvitationOptions) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeOrganizationInvitationPreview) + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprintln(w, `{"email": "octocat@github.com"}`) + + }) + + invitations, _, err := client.Organizations.CreateOrgInvitation(context.Background(), "o", input) + if err != nil { + t.Errorf("Organizations.CreateOrgInvitation returned error: %v", err) + } + + want := &Invitation{Email: String("octocat@github.com")} + if !reflect.DeepEqual(invitations, want) { + t.Errorf("Organizations.CreateOrgInvitation returned %+v, want %+v", invitations, want) + } +} + +func TestOrganizationsService_ListOrgInvitationTeams(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/orgs/o/invitations/22/teams", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{"page": "1"}) + testHeader(t, r, "Accept", mediaTypeOrganizationInvitationPreview) + fmt.Fprint(w, `[ + { + "id": 1, + "url": "https://api.github.com/teams/1", + "name": "Justice League", + "slug": "justice-league", + "description": "A great team.", + "privacy": "closed", + "permission": "admin", + "members_url": "https://api.github.com/teams/1/members{/member}", + "repositories_url": "https://api.github.com/teams/1/repos" + } + ]`) + }) + + opt := &ListOptions{Page: 1} + invitations, _, err := client.Organizations.ListOrgInvitationTeams(context.Background(), "o", "22", opt) + if err != nil { + t.Errorf("Organizations.ListOrgInvitationTeams returned error: %v", err) + } + + want := []*Team{ + { + ID: Int64(1), + URL: String("https://api.github.com/teams/1"), + Name: String("Justice League"), + Slug: String("justice-league"), + Description: String("A great team."), + Privacy: String("closed"), + Permission: String("admin"), + MembersURL: String("https://api.github.com/teams/1/members{/member}"), + RepositoriesURL: String("https://api.github.com/teams/1/repos"), + }, + } + + if !reflect.DeepEqual(invitations, want) { + t.Errorf("Organizations.ListOrgInvitationTeams returned %+v, want %+v", invitations, want) + } +} diff --git a/vendor/github.com/google/go-github/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/github/orgs_outside_collaborators.go index e34f865..85ffd05 100644 --- a/vendor/github.com/google/go-github/github/orgs_outside_collaborators.go +++ b/vendor/github.com/google/go-github/github/orgs_outside_collaborators.go @@ -48,3 +48,34 @@ func (s *OrganizationsService) ListOutsideCollaborators(ctx context.Context, org return members, resp, nil } + +// RemoveOutsideCollaborator removes a user from the list of outside collaborators; +// consequently, removing them from all the organization's repositories.
+// +// GitHub API docs: https://developer.github.com/v3/orgs/outside_collaborators/#remove-outside-collaborator +func (s *OrganizationsService) RemoveOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ConvertMemberToOutsideCollaborator reduces the permission level of a member of the +// organization to that of an outside collaborator. Therefore, they will only +// have access to the repositories that their current team membership allows. +// Responses for converting a non-member or the last owner to an outside collaborator +// are listed in GitHub API docs. +// +// GitHub API docs: https://developer.github.com/v3/orgs/outside_collaborators/#convert-member-to-outside-collaborator +func (s *OrganizationsService) ConvertMemberToOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/orgs_outside_collaborators_test.go b/vendor/github.com/google/go-github/github/orgs_outside_collaborators_test.go index 5360061..8b72feb 100644 --- a/vendor/github.com/google/go-github/github/orgs_outside_collaborators_test.go +++ b/vendor/github.com/google/go-github/github/orgs_outside_collaborators_test.go @@ -14,7 +14,7 @@ import ( ) func TestOrganizationsService_ListOutsideCollaborators(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/outside_collaborators", func(w http.ResponseWriter, r *http.Request) { @@ -35,13 +35,100 @@ func TestOrganizationsService_ListOutsideCollaborators(t *testing.T) { t.Errorf("Organizations.ListOutsideCollaborators returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(members, want) { t.Errorf("Organizations.ListOutsideCollaborators returned %+v, want %+v", members, want) } } func TestOrganizationsService_ListOutsideCollaborators_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.ListOutsideCollaborators(context.Background(), "%", nil) testURLParseError(t, err) } + +func TestOrganizationsService_RemoveOutsideCollaborator(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + handler := func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + } + mux.HandleFunc("/orgs/o/outside_collaborators/u", handler) + + _, err := client.Organizations.RemoveOutsideCollaborator(context.Background(), "o", "u") + if err != nil { + t.Errorf("Organizations.RemoveOutsideCollaborator returned error: %v", err) + } +} + +func TestOrganizationsService_RemoveOutsideCollaborator_NonMember(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + handler := func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + w.WriteHeader(http.StatusNotFound) + } + mux.HandleFunc("/orgs/o/outside_collaborators/u", handler) + + _, err := client.Organizations.RemoveOutsideCollaborator(context.Background(), "o", "u") + if err, ok := err.(*ErrorResponse); !ok { + t.Errorf("Organizations.RemoveOutsideCollaborator did not return an error") + } else 
if err.Response.StatusCode != http.StatusNotFound { + t.Errorf("Organizations.RemoveOutsideCollaborator did not return 404 status code") + } +} + +func TestOrganizationsService_RemoveOutsideCollaborator_Member(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + handler := func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + w.WriteHeader(http.StatusUnprocessableEntity) + } + mux.HandleFunc("/orgs/o/outside_collaborators/u", handler) + + _, err := client.Organizations.RemoveOutsideCollaborator(context.Background(), "o", "u") + if err, ok := err.(*ErrorResponse); !ok { + t.Errorf("Organizations.RemoveOutsideCollaborator did not return an error") + } else if err.Response.StatusCode != http.StatusUnprocessableEntity { + t.Errorf("Organizations.RemoveOutsideCollaborator did not return 422 status code") + } +} + +func TestOrganizationsService_ConvertMemberToOutsideCollaborator(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + handler := func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + } + mux.HandleFunc("/orgs/o/outside_collaborators/u", handler) + + _, err := client.Organizations.ConvertMemberToOutsideCollaborator(context.Background(), "o", "u") + if err != nil { + t.Errorf("Organizations.ConvertMemberToOutsideCollaborator returned error: %v", err) + } +} + +func TestOrganizationsService_ConvertMemberToOutsideCollaborator_NonMemberOrLastOwner(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + handler := func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + w.WriteHeader(http.StatusForbidden) + } + mux.HandleFunc("/orgs/o/outside_collaborators/u", handler) + + _, err := client.Organizations.ConvertMemberToOutsideCollaborator(context.Background(), "o", "u") + if err, ok := err.(*ErrorResponse); !ok { + t.Errorf("Organizations.ConvertMemberToOutsideCollaborator did not return an error") + } else if err.Response.StatusCode != http.StatusForbidden { + t.Errorf("Organizations.ConvertMemberToOutsideCollaborator did not return 403 status code") + } +} diff --git a/vendor/github.com/google/go-github/github/orgs_projects_test.go b/vendor/github.com/google/go-github/github/orgs_projects_test.go index 533f691..efea5cb 100644 --- a/vendor/github.com/google/go-github/github/orgs_projects_test.go +++ b/vendor/github.com/google/go-github/github/orgs_projects_test.go @@ -15,7 +15,7 @@ import ( ) func TestOrganizationsService_ListProjects(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/projects", func(w http.ResponseWriter, r *http.Request) { @@ -31,17 +31,17 @@ func TestOrganizationsService_ListProjects(t *testing.T) { t.Errorf("Organizations.ListProjects returned error: %v", err) } - want := []*Project{{ID: Int(1)}} + want := []*Project{{ID: Int64(1)}} if !reflect.DeepEqual(projects, want) { t.Errorf("Organizations.ListProjects returned %+v, want %+v", projects, want) } } func TestOrganizationsService_CreateProject(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &ProjectOptions{Name: "Project Name", Body: "Project body."} + input := &ProjectOptions{Name: String("Project Name"), Body: String("Project body.")} mux.HandleFunc("/orgs/o/projects", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") @@ -61,7 +61,7 @@ func TestOrganizationsService_CreateProject(t *testing.T) { t.Errorf("Organizations.CreateProject returned error: %v", err) } - want := 
&Project{ID: Int(1)} + want := &Project{ID: Int64(1)} if !reflect.DeepEqual(project, want) { t.Errorf("Organizations.CreateProject returned %+v, want %+v", project, want) } diff --git a/vendor/github.com/google/go-github/github/orgs_teams.go b/vendor/github.com/google/go-github/github/orgs_teams.go deleted file mode 100644 index 70b090d..0000000 --- a/vendor/github.com/google/go-github/github/orgs_teams.go +++ /dev/null @@ -1,428 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// Team represents a team within a GitHub organization. Teams are used to -// manage access to an organization's repositories. -type Team struct { - ID *int `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - URL *string `json:"url,omitempty"` - Slug *string `json:"slug,omitempty"` - - // Permission is deprecated when creating or editing a team in an org - // using the new GitHub permission model. It no longer identifies the - // permission a team has on its repos, but only specifies the default - // permission a repo is initially added with. Avoid confusion by - // specifying a permission value when calling AddTeamRepo. - Permission *string `json:"permission,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - MembersCount *int `json:"members_count,omitempty"` - ReposCount *int `json:"repos_count,omitempty"` - Organization *Organization `json:"organization,omitempty"` - MembersURL *string `json:"members_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - - // LDAPDN is only available in GitHub Enterprise and when the team - // membership is synchronized with LDAP. - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (t Team) String() string { - return Stringify(t) -} - -// Invitation represents a team member's invitation status. -type Invitation struct { - ID *int `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` - // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. - Role *string `json:"role,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - Inviter *User `json:"inviter,omitempty"` -} - -func (i Invitation) String() string { - return Stringify(i) -} - -// ListTeams lists all of the teams for an organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-teams -func (s *OrganizationsService) ListTeams(ctx context.Context, org string, opt *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeam fetches a team by ID. 
-// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team -func (s *OrganizationsService) GetTeam(ctx context.Context, team int) (*Team, *Response, error) { - u := fmt.Sprintf("teams/%v", team) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// CreateTeam creates a new team within an organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#create-team -func (s *OrganizationsService) CreateTeam(ctx context.Context, org string, team *Team) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - req, err := s.client.NewRequest("POST", u, team) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// EditTeam edits a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#edit-team -func (s *OrganizationsService) EditTeam(ctx context.Context, id int, team *Team) (*Team, *Response, error) { - u := fmt.Sprintf("teams/%v", id) - req, err := s.client.NewRequest("PATCH", u, team) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// DeleteTeam deletes a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#delete-team -func (s *OrganizationsService) DeleteTeam(ctx context.Context, team int) (*Response, error) { - u := fmt.Sprintf("teams/%v", team) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// OrganizationListTeamMembersOptions specifies the optional parameters to the -// OrganizationsService.ListTeamMembers method. -type OrganizationListTeamMembersOptions struct { - // Role filters members returned by their role in the team. Possible - // values are "all", "member", "maintainer". Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListTeamMembers lists all of the users who are members of the specified -// team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-team-members -func (s *OrganizationsService) ListTeamMembers(ctx context.Context, team int, opt *OrganizationListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("teams/%v/members", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// IsTeamMember checks if a user is a member of the specified team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team-member -func (s *OrganizationsService) IsTeamMember(ctx context.Context, team int, user string) (bool, *Response, error) { - u := fmt.Sprintf("teams/%v/members/%v", team, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// ListTeamRepos lists the repositories that the specified team has access to. 
-// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-team-repos -func (s *OrganizationsService) ListTeamRepos(ctx context.Context, team int, opt *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("teams/%v/repos", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsTeamRepo checks if a team manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#check-if-a-team-manages-a-repository -func (s *OrganizationsService) IsTeamRepo(ctx context.Context, team int, owner string, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeOrgPermissionRepo) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// OrganizationAddTeamRepoOptions specifies the optional parameters to the -// OrganizationsService.AddTeamRepo method. -type OrganizationAddTeamRepoOptions struct { - // Permission specifies the permission to grant the team on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // - // If not specified, the team's permission attribute will be used. - Permission string `json:"permission,omitempty"` -} - -// AddTeamRepo adds a repository to be managed by the specified team. The -// specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#add-team-repo -func (s *OrganizationsService) AddTeamRepo(ctx context.Context, team int, owner string, repo string, opt *OrganizationAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("PUT", u, opt) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepo removes a repository from being managed by the specified -// team. Note that this does not delete the repository, it just removes it -// from the team. 
-// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#remove-team-repo -func (s *OrganizationsService) RemoveTeamRepo(ctx context.Context, team int, owner string, repo string) (*Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListUserTeams lists a user's teams -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-user-teams -func (s *OrganizationsService) ListUserTeams(ctx context.Context, opt *ListOptions) ([]*Team, *Response, error) { - u := "user/teams" - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeamMembership returns the membership status for a user in a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team-membership -func (s *OrganizationsService) GetTeamMembership(ctx context.Context, team int, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// OrganizationAddTeamMembershipOptions does stuff specifies the optional -// parameters to the OrganizationsService.AddTeamMembership method. -type OrganizationAddTeamMembershipOptions struct { - // Role specifies the role the user should have in the team. Possible - // values are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - // - // Default value is "member". - Role string `json:"role,omitempty"` -} - -// AddTeamMembership adds or invites a user to a team. -// -// In order to add a membership between a user and a team, the authenticated -// user must have 'admin' permissions to the team or be an owner of the -// organization that the team is associated with. -// -// If the user is already a part of the team's organization (meaning they're on -// at least one other team in the organization), this endpoint will add the -// user to the team. -// -// If the user is completely unaffiliated with the team's organization (meaning -// they're on none of the organization's teams), this endpoint will send an -// invitation to the user via email. This newly-created membership will be in -// the "pending" state until the user accepts the invitation, at which point -// the membership will transition to the "active" state and the user will be -// added as a member of the team. 
-// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#add-team-membership -func (s *OrganizationsService) AddTeamMembership(ctx context.Context, team int, user string, opt *OrganizationAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("PUT", u, opt) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// RemoveTeamMembership removes a user from a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#remove-team-membership -func (s *OrganizationsService) RemoveTeamMembership(ctx context.Context, team int, user string) (*Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingTeamInvitations get pending invitaion list in team. -// Warning: The API may change without advance notice during the preview period. -// Preview features are not supported for production use. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-pending-team-invitations -func (s *OrganizationsService) ListPendingTeamInvitations(ctx context.Context, team int, opt *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("teams/%v/invitations", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/github/orgs_teams_test.go b/vendor/github.com/google/go-github/github/orgs_teams_test.go deleted file mode 100644 index e9342fb..0000000 --- a/vendor/github.com/google/go-github/github/orgs_teams_test.go +++ /dev/null @@ -1,580 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "reflect" - "testing" - "time" -) - -func TestOrganizationsService_ListTeams(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/orgs/o/teams", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testFormValues(t, r, values{"page": "2"}) - fmt.Fprint(w, `[{"id":1}]`) - }) - - opt := &ListOptions{Page: 2} - teams, _, err := client.Organizations.ListTeams(context.Background(), "o", opt) - if err != nil { - t.Errorf("Organizations.ListTeams returned error: %v", err) - } - - want := []*Team{{ID: Int(1)}} - if !reflect.DeepEqual(teams, want) { - t.Errorf("Organizations.ListTeams returned %+v, want %+v", teams, want) - } -} - -func TestOrganizationsService_ListTeams_invalidOrg(t *testing.T) { - _, _, err := client.Organizations.ListTeams(context.Background(), "%", nil) - testURLParseError(t, err) -} - -func TestOrganizationsService_GetTeam(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - fmt.Fprint(w, `{"id":1, "name":"n", "description": "d", "url":"u", "slug": "s", "permission":"p", "ldap_dn":"cn=n,ou=groups,dc=example,dc=com"}`) - }) - - team, _, err := client.Organizations.GetTeam(context.Background(), 1) - if err != nil { - t.Errorf("Organizations.GetTeam returned error: %v", err) - } - - want := &Team{ID: Int(1), Name: String("n"), Description: String("d"), URL: String("u"), Slug: String("s"), Permission: String("p"), LDAPDN: String("cn=n,ou=groups,dc=example,dc=com")} - if !reflect.DeepEqual(team, want) { - t.Errorf("Organizations.GetTeam returned %+v, want %+v", team, want) - } -} - -func TestOrganizationsService_CreateTeam(t *testing.T) { - setup() - defer teardown() - - input := &Team{Name: String("n"), Privacy: String("closed")} - - mux.HandleFunc("/orgs/o/teams", func(w http.ResponseWriter, r *http.Request) { - v := new(Team) - json.NewDecoder(r.Body).Decode(v) - - testMethod(t, r, "POST") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) - } - - fmt.Fprint(w, `{"id":1}`) - }) - - team, _, err := client.Organizations.CreateTeam(context.Background(), "o", input) - if err != nil { - t.Errorf("Organizations.CreateTeam returned error: %v", err) - } - - want := &Team{ID: Int(1)} - if !reflect.DeepEqual(team, want) { - t.Errorf("Organizations.CreateTeam returned %+v, want %+v", team, want) - } -} - -func TestOrganizationsService_CreateTeam_invalidOrg(t *testing.T) { - _, _, err := client.Organizations.CreateTeam(context.Background(), "%", nil) - testURLParseError(t, err) -} - -func TestOrganizationsService_EditTeam(t *testing.T) { - setup() - defer teardown() - - input := &Team{Name: String("n"), Privacy: String("closed")} - - mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { - v := new(Team) - json.NewDecoder(r.Body).Decode(v) - - testMethod(t, r, "PATCH") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) - } - - fmt.Fprint(w, `{"id":1}`) - }) - - team, _, err := client.Organizations.EditTeam(context.Background(), 1, input) - if err != nil { - t.Errorf("Organizations.EditTeam returned error: %v", err) - } - - want := &Team{ID: Int(1)} - if !reflect.DeepEqual(team, want) { - t.Errorf("Organizations.EditTeam returned %+v, want %+v", team, want) - } -} - -func TestOrganizationsService_DeleteTeam(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1", 
func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "DELETE") - }) - - _, err := client.Organizations.DeleteTeam(context.Background(), 1) - if err != nil { - t.Errorf("Organizations.DeleteTeam returned error: %v", err) - } -} - -func TestOrganizationsService_ListTeamMembers(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/members", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testFormValues(t, r, values{"role": "member", "page": "2"}) - fmt.Fprint(w, `[{"id":1}]`) - }) - - opt := &OrganizationListTeamMembersOptions{Role: "member", ListOptions: ListOptions{Page: 2}} - members, _, err := client.Organizations.ListTeamMembers(context.Background(), 1, opt) - if err != nil { - t.Errorf("Organizations.ListTeamMembers returned error: %v", err) - } - - want := []*User{{ID: Int(1)}} - if !reflect.DeepEqual(members, want) { - t.Errorf("Organizations.ListTeamMembers returned %+v, want %+v", members, want) - } -} - -func TestOrganizationsService_IsTeamMember_true(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - }) - - member, _, err := client.Organizations.IsTeamMember(context.Background(), 1, "u") - if err != nil { - t.Errorf("Organizations.IsTeamMember returned error: %v", err) - } - if want := true; member != want { - t.Errorf("Organizations.IsTeamMember returned %+v, want %+v", member, want) - } -} - -// ensure that a 404 response is interpreted as "false" and not an error -func TestOrganizationsService_IsTeamMember_false(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - w.WriteHeader(http.StatusNotFound) - }) - - member, _, err := client.Organizations.IsTeamMember(context.Background(), 1, "u") - if err != nil { - t.Errorf("Organizations.IsTeamMember returned error: %+v", err) - } - if want := false; member != want { - t.Errorf("Organizations.IsTeamMember returned %+v, want %+v", member, want) - } -} - -// ensure that a 400 response is interpreted as an actual error, and not simply -// as "false" like the above case of a 404 -func TestOrganizationsService_IsTeamMember_error(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - http.Error(w, "BadRequest", http.StatusBadRequest) - }) - - member, _, err := client.Organizations.IsTeamMember(context.Background(), 1, "u") - if err == nil { - t.Errorf("Expected HTTP 400 response") - } - if want := false; member != want { - t.Errorf("Organizations.IsTeamMember returned %+v, want %+v", member, want) - } -} - -func TestOrganizationsService_IsTeamMember_invalidUser(t *testing.T) { - _, _, err := client.Organizations.IsTeamMember(context.Background(), 1, "%") - testURLParseError(t, err) -} - -func TestOrganizationsService_PublicizeMembership(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/orgs/o/public_members/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "PUT") - w.WriteHeader(http.StatusNoContent) - }) - - _, err := client.Organizations.PublicizeMembership(context.Background(), "o", "u") - if err != nil { - t.Errorf("Organizations.PublicizeMembership returned error: %v", err) - } -} - -func TestOrganizationsService_PublicizeMembership_invalidOrg(t *testing.T) { - _, err := 
client.Organizations.PublicizeMembership(context.Background(), "%", "u") - testURLParseError(t, err) -} - -func TestOrganizationsService_ConcealMembership(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/orgs/o/public_members/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "DELETE") - w.WriteHeader(http.StatusNoContent) - }) - - _, err := client.Organizations.ConcealMembership(context.Background(), "o", "u") - if err != nil { - t.Errorf("Organizations.ConcealMembership returned error: %v", err) - } -} - -func TestOrganizationsService_ConcealMembership_invalidOrg(t *testing.T) { - _, err := client.Organizations.ConcealMembership(context.Background(), "%", "u") - testURLParseError(t, err) -} - -func TestOrganizationsService_ListTeamRepos(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testFormValues(t, r, values{"page": "2"}) - fmt.Fprint(w, `[{"id":1}]`) - }) - - opt := &ListOptions{Page: 2} - members, _, err := client.Organizations.ListTeamRepos(context.Background(), 1, opt) - if err != nil { - t.Errorf("Organizations.ListTeamRepos returned error: %v", err) - } - - want := []*Repository{{ID: Int(1)}} - if !reflect.DeepEqual(members, want) { - t.Errorf("Organizations.ListTeamRepos returned %+v, want %+v", members, want) - } -} - -func TestOrganizationsService_IsTeamRepo_true(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeOrgPermissionRepo) - fmt.Fprint(w, `{"id":1}`) - }) - - repo, _, err := client.Organizations.IsTeamRepo(context.Background(), 1, "o", "r") - if err != nil { - t.Errorf("Organizations.IsTeamRepo returned error: %v", err) - } - - want := &Repository{ID: Int(1)} - if !reflect.DeepEqual(repo, want) { - t.Errorf("Organizations.IsTeamRepo returned %+v, want %+v", repo, want) - } -} - -func TestOrganizationsService_IsTeamRepo_false(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - w.WriteHeader(http.StatusNotFound) - }) - - repo, resp, err := client.Organizations.IsTeamRepo(context.Background(), 1, "o", "r") - if err == nil { - t.Errorf("Expected HTTP 404 response") - } - if got, want := resp.Response.StatusCode, http.StatusNotFound; got != want { - t.Errorf("Organizations.IsTeamRepo returned status %d, want %d", got, want) - } - if repo != nil { - t.Errorf("Organizations.IsTeamRepo returned %+v, want nil", repo) - } -} - -func TestOrganizationsService_IsTeamRepo_error(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - http.Error(w, "BadRequest", http.StatusBadRequest) - }) - - repo, resp, err := client.Organizations.IsTeamRepo(context.Background(), 1, "o", "r") - if err == nil { - t.Errorf("Expected HTTP 400 response") - } - if got, want := resp.Response.StatusCode, http.StatusBadRequest; got != want { - t.Errorf("Organizations.IsTeamRepo returned status %d, want %d", got, want) - } - if repo != nil { - t.Errorf("Organizations.IsTeamRepo returned %+v, want nil", repo) - } -} - -func TestOrganizationsService_IsTeamRepo_invalidOwner(t *testing.T) { - _, _, err := client.Organizations.IsTeamRepo(context.Background(), 1, "%", "r") - testURLParseError(t, err) -} - -func 
TestOrganizationsService_AddTeamRepo(t *testing.T) { - setup() - defer teardown() - - opt := &OrganizationAddTeamRepoOptions{Permission: "admin"} - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - v := new(OrganizationAddTeamRepoOptions) - json.NewDecoder(r.Body).Decode(v) - - testMethod(t, r, "PUT") - if !reflect.DeepEqual(v, opt) { - t.Errorf("Request body = %+v, want %+v", v, opt) - } - - w.WriteHeader(http.StatusNoContent) - }) - - _, err := client.Organizations.AddTeamRepo(context.Background(), 1, "o", "r", opt) - if err != nil { - t.Errorf("Organizations.AddTeamRepo returned error: %v", err) - } -} - -func TestOrganizationsService_AddTeamRepo_noAccess(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "PUT") - w.WriteHeader(http.StatusUnprocessableEntity) - }) - - _, err := client.Organizations.AddTeamRepo(context.Background(), 1, "o", "r", nil) - if err == nil { - t.Errorf("Expcted error to be returned") - } -} - -func TestOrganizationsService_AddTeamRepo_invalidOwner(t *testing.T) { - _, err := client.Organizations.AddTeamRepo(context.Background(), 1, "%", "r", nil) - testURLParseError(t, err) -} - -func TestOrganizationsService_RemoveTeamRepo(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "DELETE") - w.WriteHeader(http.StatusNoContent) - }) - - _, err := client.Organizations.RemoveTeamRepo(context.Background(), 1, "o", "r") - if err != nil { - t.Errorf("Organizations.RemoveTeamRepo returned error: %v", err) - } -} - -func TestOrganizationsService_RemoveTeamRepo_invalidOwner(t *testing.T) { - _, err := client.Organizations.RemoveTeamRepo(context.Background(), 1, "%", "r") - testURLParseError(t, err) -} - -func TestOrganizationsService_GetTeamMembership(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - fmt.Fprint(w, `{"url":"u", "state":"active"}`) - }) - - membership, _, err := client.Organizations.GetTeamMembership(context.Background(), 1, "u") - if err != nil { - t.Errorf("Organizations.GetTeamMembership returned error: %v", err) - } - - want := &Membership{URL: String("u"), State: String("active")} - if !reflect.DeepEqual(membership, want) { - t.Errorf("Organizations.GetTeamMembership returned %+v, want %+v", membership, want) - } -} - -func TestOrganizationsService_AddTeamMembership(t *testing.T) { - setup() - defer teardown() - - opt := &OrganizationAddTeamMembershipOptions{Role: "maintainer"} - - mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { - v := new(OrganizationAddTeamMembershipOptions) - json.NewDecoder(r.Body).Decode(v) - - testMethod(t, r, "PUT") - if !reflect.DeepEqual(v, opt) { - t.Errorf("Request body = %+v, want %+v", v, opt) - } - - fmt.Fprint(w, `{"url":"u", "state":"pending"}`) - }) - - membership, _, err := client.Organizations.AddTeamMembership(context.Background(), 1, "u", opt) - if err != nil { - t.Errorf("Organizations.AddTeamMembership returned error: %v", err) - } - - want := &Membership{URL: String("u"), State: String("pending")} - if !reflect.DeepEqual(membership, want) { - t.Errorf("Organizations.AddTeamMembership returned %+v, want %+v", membership, want) - } -} - -func TestOrganizationsService_RemoveTeamMembership(t *testing.T) { - setup() - defer 
teardown() - - mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "DELETE") - w.WriteHeader(http.StatusNoContent) - }) - - _, err := client.Organizations.RemoveTeamMembership(context.Background(), 1, "u") - if err != nil { - t.Errorf("Organizations.RemoveTeamMembership returned error: %v", err) - } -} - -func TestOrganizationsService_ListUserTeams(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/user/teams", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testFormValues(t, r, values{"page": "1"}) - fmt.Fprint(w, `[{"id":1}]`) - }) - - opt := &ListOptions{Page: 1} - teams, _, err := client.Organizations.ListUserTeams(context.Background(), opt) - if err != nil { - t.Errorf("Organizations.ListUserTeams returned error: %v", err) - } - - want := []*Team{{ID: Int(1)}} - if !reflect.DeepEqual(teams, want) { - t.Errorf("Organizations.ListUserTeams returned %+v, want %+v", teams, want) - } -} - -func TestOrganizationsService_ListPendingTeamInvitations(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/teams/1/invitations", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testFormValues(t, r, values{"page": "1"}) - fmt.Fprint(w, `[ - { - "id": 1, - "login": "monalisa", - "email": "octocat@github.com", - "role": "direct_member", - "created_at": "2017-01-21T00:00:00Z", - "inviter": { - "login": "other_user", - "id": 1, - "avatar_url": "https://github.com/images/error/other_user_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/other_user", - "html_url": "https://github.com/other_user", - "followers_url": "https://api.github.com/users/other_user/followers", - "following_url": "https://api.github.com/users/other_user/following/other_user", - "gists_url": "https://api.github.com/users/other_user/gists/gist_id", - "starred_url": "https://api.github.com/users/other_user/starred/owner/repo", - "subscriptions_url": "https://api.github.com/users/other_user/subscriptions", - "organizations_url": "https://api.github.com/users/other_user/orgs", - "repos_url": "https://api.github.com/users/other_user/repos", - "events_url": "https://api.github.com/users/other_user/events/privacy", - "received_events_url": "https://api.github.com/users/other_user/received_events/privacy", - "type": "User", - "site_admin": false - } - } - ]`) - }) - - opt := &ListOptions{Page: 1} - invitations, _, err := client.Organizations.ListPendingTeamInvitations(context.Background(), 1, opt) - if err != nil { - t.Errorf("Organizations.ListPendingTeamInvitations returned error: %v", err) - } - - createdAt := time.Date(2017, 01, 21, 0, 0, 0, 0, time.UTC) - want := []*Invitation{ - { - ID: Int(1), - Login: String("monalisa"), - Email: String("octocat@github.com"), - Role: String("direct_member"), - CreatedAt: &createdAt, - Inviter: &User{ - Login: String("other_user"), - ID: Int(1), - AvatarURL: String("https://github.com/images/error/other_user_happy.gif"), - GravatarID: String(""), - URL: String("https://api.github.com/users/other_user"), - HTMLURL: String("https://github.com/other_user"), - FollowersURL: String("https://api.github.com/users/other_user/followers"), - FollowingURL: String("https://api.github.com/users/other_user/following/other_user"), - GistsURL: String("https://api.github.com/users/other_user/gists/gist_id"), - StarredURL: String("https://api.github.com/users/other_user/starred/owner/repo"), - SubscriptionsURL: 
String("https://api.github.com/users/other_user/subscriptions"), - OrganizationsURL: String("https://api.github.com/users/other_user/orgs"), - ReposURL: String("https://api.github.com/users/other_user/repos"), - EventsURL: String("https://api.github.com/users/other_user/events/privacy"), - ReceivedEventsURL: String("https://api.github.com/users/other_user/received_events/privacy"), - Type: String("User"), - SiteAdmin: Bool(false), - }, - }} - - if !reflect.DeepEqual(invitations, want) { - t.Errorf("Organizations.ListPendingTeamInvitations returned %+v, want %+v", invitations, want) - } -} diff --git a/vendor/github.com/google/go-github/github/orgs_test.go b/vendor/github.com/google/go-github/github/orgs_test.go index 6e227e8..9fe484d 100644 --- a/vendor/github.com/google/go-github/github/orgs_test.go +++ b/vendor/github.com/google/go-github/github/orgs_test.go @@ -15,10 +15,10 @@ import ( ) func TestOrganizationsService_ListAll(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - since := 1342004 + since := int64(1342004) mux.HandleFunc("/organizations", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testFormValues(t, r, values{"since": "1342004"}) @@ -31,14 +31,14 @@ func TestOrganizationsService_ListAll(t *testing.T) { t.Errorf("Organizations.ListAll returned error: %v", err) } - want := []*Organization{{ID: Int(4314092)}} + want := []*Organization{{ID: Int64(4314092)}} if !reflect.DeepEqual(orgs, want) { t.Errorf("Organizations.ListAll returned %+v, want %+v", orgs, want) } } func TestOrganizationsService_List_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/orgs", func(w http.ResponseWriter, r *http.Request) { @@ -51,14 +51,14 @@ func TestOrganizationsService_List_authenticatedUser(t *testing.T) { t.Errorf("Organizations.List returned error: %v", err) } - want := []*Organization{{ID: Int(1)}, {ID: Int(2)}} + want := []*Organization{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(orgs, want) { t.Errorf("Organizations.List returned %+v, want %+v", orgs, want) } } func TestOrganizationsService_List_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/orgs", func(w http.ResponseWriter, r *http.Request) { @@ -73,19 +73,22 @@ func TestOrganizationsService_List_specifiedUser(t *testing.T) { t.Errorf("Organizations.List returned error: %v", err) } - want := []*Organization{{ID: Int(1)}, {ID: Int(2)}} + want := []*Organization{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(orgs, want) { t.Errorf("Organizations.List returned %+v, want %+v", orgs, want) } } func TestOrganizationsService_List_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.List(context.Background(), "%", nil) testURLParseError(t, err) } func TestOrganizationsService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o", func(w http.ResponseWriter, r *http.Request) { @@ -98,19 +101,22 @@ func TestOrganizationsService_Get(t *testing.T) { t.Errorf("Organizations.Get returned error: %v", err) } - want := &Organization{ID: Int(1), Login: String("l"), URL: String("u"), AvatarURL: String("a"), Location: String("l")} + want := &Organization{ID: Int64(1), Login: String("l"), URL: String("u"), AvatarURL: String("a"), Location: String("l")} if !reflect.DeepEqual(org, want) { t.Errorf("Organizations.Get returned 
%+v, want %+v", org, want) } } func TestOrganizationsService_Get_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.Get(context.Background(), "%") testURLParseError(t, err) } func TestOrganizationsService_GetByID(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/organizations/1", func(w http.ResponseWriter, r *http.Request) { @@ -123,14 +129,14 @@ func TestOrganizationsService_GetByID(t *testing.T) { t.Fatalf("Organizations.GetByID returned error: %v", err) } - want := &Organization{ID: Int(1), Login: String("l"), URL: String("u"), AvatarURL: String("a"), Location: String("l")} + want := &Organization{ID: Int64(1), Login: String("l"), URL: String("u"), AvatarURL: String("a"), Location: String("l")} if !reflect.DeepEqual(org, want) { t.Errorf("Organizations.GetByID returned %+v, want %+v", org, want) } } func TestOrganizationsService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Organization{Login: String("l")} @@ -152,13 +158,16 @@ func TestOrganizationsService_Edit(t *testing.T) { t.Errorf("Organizations.Edit returned error: %v", err) } - want := &Organization{ID: Int(1)} + want := &Organization{ID: Int64(1)} if !reflect.DeepEqual(org, want) { t.Errorf("Organizations.Edit returned %+v, want %+v", org, want) } } func TestOrganizationsService_Edit_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Organizations.Edit(context.Background(), "%", nil) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/orgs_users_blocking_test.go b/vendor/github.com/google/go-github/github/orgs_users_blocking_test.go index 53be507..5d38cc4 100644 --- a/vendor/github.com/google/go-github/github/orgs_users_blocking_test.go +++ b/vendor/github.com/google/go-github/github/orgs_users_blocking_test.go @@ -14,7 +14,7 @@ import ( ) func TestOrganizationsService_ListBlockedUsers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/blocks", func(w http.ResponseWriter, r *http.Request) { @@ -39,7 +39,7 @@ func TestOrganizationsService_ListBlockedUsers(t *testing.T) { } func TestOrganizationsService_IsBlocked(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/blocks/u", func(w http.ResponseWriter, r *http.Request) { @@ -58,7 +58,7 @@ func TestOrganizationsService_IsBlocked(t *testing.T) { } func TestOrganizationsService_BlockUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/blocks/u", func(w http.ResponseWriter, r *http.Request) { @@ -74,7 +74,7 @@ func TestOrganizationsService_BlockUser(t *testing.T) { } func TestOrganizationsService_UnblockUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/orgs/o/blocks/u", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/projects.go b/vendor/github.com/google/go-github/github/projects.go index 11c885f..c7a68f5 100644 --- a/vendor/github.com/google/go-github/github/projects.go +++ b/vendor/github.com/google/go-github/github/projects.go @@ -18,14 +18,18 @@ type ProjectsService service // Project represents a GitHub Project. 
type Project struct { - ID *int `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - OwnerURL *string `json:"owner_url,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ColumnsURL *string `json:"columns_url,omitempty"` + OwnerURL *string `json:"owner_url,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` // The User object that generated the project. Creator *User `json:"creator,omitempty"` @@ -38,14 +42,14 @@ func (p Project) String() string { // GetProject gets a GitHub Project for a repo. // // GitHub API docs: https://developer.github.com/v3/projects/#get-a-project -func (s *ProjectsService) GetProject(ctx context.Context, id int) (*Project, *Response, error) { +func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, *Response, error) { u := fmt.Sprintf("projects/%v", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} @@ -62,22 +66,37 @@ func (s *ProjectsService) GetProject(ctx context.Context, id int) (*Project, *Re // ProjectsService.UpdateProject methods. type ProjectOptions struct { // The name of the project. (Required for creation; optional for update.) - Name string `json:"name,omitempty"` + Name *string `json:"name,omitempty"` // The body of the project. (Optional.) - Body string `json:"body,omitempty"` + Body *string `json:"body,omitempty"` + + // The following field(s) are only applicable for update. + // They should be left with zero values for creation. + + // State of the project. Either "open" or "closed". (Optional.) + State *string `json:"state,omitempty"` + // The permission level that all members of the project's organization + // will have on this project. + // Setting the organization permission is only available + // for organization projects. (Optional.) + OrganizationPermission *string `json:"organization_permission,omitempty"` + // Sets visibility of the project within the organization. + // Setting visibility is only available + // for organization projects.(Optional.) + Public *bool `json:"public,omitempty"` } // UpdateProject updates a repository project. // // GitHub API docs: https://developer.github.com/v3/projects/#update-a-project -func (s *ProjectsService) UpdateProject(ctx context.Context, id int, opt *ProjectOptions) (*Project, *Response, error) { +func (s *ProjectsService) UpdateProject(ctx context.Context, id int64, opt *ProjectOptions) (*Project, *Response, error) { u := fmt.Sprintf("projects/%v", id) req, err := s.client.NewRequest("PATCH", u, opt) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. 
req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} @@ -92,7 +111,7 @@ func (s *ProjectsService) UpdateProject(ctx context.Context, id int, opt *Projec // DeleteProject deletes a GitHub Project from a repository. // // GitHub API docs: https://developer.github.com/v3/projects/#delete-a-project -func (s *ProjectsService) DeleteProject(ctx context.Context, id int) (*Response, error) { +func (s *ProjectsService) DeleteProject(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("projects/%v", id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { @@ -109,17 +128,20 @@ func (s *ProjectsService) DeleteProject(ctx context.Context, id int) (*Response, // // GitHub API docs: https://developer.github.com/v3/repos/projects/ type ProjectColumn struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` ProjectURL *string `json:"project_url,omitempty"` + CardsURL *string `json:"cards_url,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` } // ListProjectColumns lists the columns of a GitHub Project for a repo. // // GitHub API docs: https://developer.github.com/v3/projects/columns/#list-project-columns -func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int, opt *ListOptions) ([]*ProjectColumn, *Response, error) { +func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int64, opt *ListOptions) ([]*ProjectColumn, *Response, error) { u := fmt.Sprintf("projects/%v/columns", projectID) u, err := addOptions(u, opt) if err != nil { @@ -131,7 +153,7 @@ func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) columns := []*ProjectColumn{} @@ -146,14 +168,14 @@ func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int, // GetProjectColumn gets a column of a GitHub Project for a repo. // // GitHub API docs: https://developer.github.com/v3/projects/columns/#get-a-project-column -func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int) (*ProjectColumn, *Response, error) { +func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int64) (*ProjectColumn, *Response, error) { u := fmt.Sprintf("projects/columns/%v", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} @@ -176,14 +198,14 @@ type ProjectColumnOptions struct { // CreateProjectColumn creates a column for the specified (by number) project. 
// // GitHub API docs: https://developer.github.com/v3/projects/columns/#create-a-project-column -func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int, opt *ProjectColumnOptions) (*ProjectColumn, *Response, error) { +func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int64, opt *ProjectColumnOptions) (*ProjectColumn, *Response, error) { u := fmt.Sprintf("projects/%v/columns", projectID) req, err := s.client.NewRequest("POST", u, opt) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} @@ -198,14 +220,14 @@ func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int // UpdateProjectColumn updates a column of a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/columns/#update-a-project-column -func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int, opt *ProjectColumnOptions) (*ProjectColumn, *Response, error) { +func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int64, opt *ProjectColumnOptions) (*ProjectColumn, *Response, error) { u := fmt.Sprintf("projects/columns/%v", columnID) req, err := s.client.NewRequest("PATCH", u, opt) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} @@ -220,7 +242,7 @@ func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int, // DeleteProjectColumn deletes a column from a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/columns/#delete-a-project-column -func (s *ProjectsService) DeleteProjectColumn(ctx context.Context, columnID int) (*Response, error) { +func (s *ProjectsService) DeleteProjectColumn(ctx context.Context, columnID int64) (*Response, error) { u := fmt.Sprintf("projects/columns/%v", columnID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { @@ -244,7 +266,7 @@ type ProjectColumnMoveOptions struct { // MoveProjectColumn moves a column within a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/columns/#move-a-project-column -func (s *ProjectsService) MoveProjectColumn(ctx context.Context, columnID int, opt *ProjectColumnMoveOptions) (*Response, error) { +func (s *ProjectsService) MoveProjectColumn(ctx context.Context, columnID int64, opt *ProjectColumnMoveOptions) (*Response, error) { u := fmt.Sprintf("projects/columns/%v/moves", columnID) req, err := s.client.NewRequest("POST", u, opt) if err != nil { @@ -264,20 +286,38 @@ type ProjectCard struct { URL *string `json:"url,omitempty"` ColumnURL *string `json:"column_url,omitempty"` ContentURL *string `json:"content_url,omitempty"` - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Note *string `json:"note,omitempty"` Creator *User `json:"creator,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Archived *bool `json:"archived,omitempty"` // The following fields are only populated by Webhook events. 
- ColumnID *int `json:"column_id,omitempty"` + ColumnID *int64 `json:"column_id,omitempty"` + + // The following fields are only populated by Events API. + ProjectID *int64 `json:"project_id,omitempty"` + ProjectURL *string `json:"project_url,omitempty"` + ColumnName *string `json:"column_name,omitempty"` + PreviousColumnName *string `json:"previous_column_name,omitempty"` // Populated in "moved_columns_in_project" event deliveries. +} + +// ProjectCardListOptions specifies the optional parameters to the +// ProjectsService.ListProjectCards method. +type ProjectCardListOptions struct { + // ArchivedState is used to list all, archived, or not_archived project cards. + // Defaults to not_archived when you omit this parameter. + ArchivedState *string `url:"archived_state,omitempty"` + + ListOptions } // ListProjectCards lists the cards in a column of a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/cards/#list-project-cards -func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int, opt *ListOptions) ([]*ProjectCard, *Response, error) { +func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int64, opt *ProjectCardListOptions) ([]*ProjectCard, *Response, error) { u := fmt.Sprintf("projects/columns/%v/cards", columnID) u, err := addOptions(u, opt) if err != nil { @@ -289,7 +329,7 @@ func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int, op return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) cards := []*ProjectCard{} @@ -304,14 +344,14 @@ func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int, op // GetProjectCard gets a card in a column of a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/cards/#get-a-project-card -func (s *ProjectsService) GetProjectCard(ctx context.Context, columnID int) (*ProjectCard, *Response, error) { +func (s *ProjectsService) GetProjectCard(ctx context.Context, columnID int64) (*ProjectCard, *Response, error) { u := fmt.Sprintf("projects/columns/cards/%v", columnID) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} @@ -329,24 +369,27 @@ func (s *ProjectsService) GetProjectCard(ctx context.Context, columnID int) (*Pr type ProjectCardOptions struct { // The note of the card. Note and ContentID are mutually exclusive. Note string `json:"note,omitempty"` - // The ID (not Number) of the Issue or Pull Request to associate with this card. + // The ID (not Number) of the Issue to associate with this card. // Note and ContentID are mutually exclusive. - ContentID int `json:"content_id,omitempty"` - // The type of content to associate with this card. Possible values are: "Issue", "PullRequest". + ContentID int64 `json:"content_id,omitempty"` + // The type of content to associate with this card. Possible values are: "Issue" and "PullRequest". ContentType string `json:"content_type,omitempty"` + // Use true to archive a project card. + // Specify false if you need to restore a previously archived project card. + Archived *bool `json:"archived,omitempty"` } // CreateProjectCard creates a card in the specified column of a GitHub Project. 
// // GitHub API docs: https://developer.github.com/v3/projects/cards/#create-a-project-card -func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int, opt *ProjectCardOptions) (*ProjectCard, *Response, error) { +func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int64, opt *ProjectCardOptions) (*ProjectCard, *Response, error) { u := fmt.Sprintf("projects/columns/%v/cards", columnID) req, err := s.client.NewRequest("POST", u, opt) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} @@ -361,14 +404,14 @@ func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int, o // UpdateProjectCard updates a card of a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/cards/#update-a-project-card -func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int, opt *ProjectCardOptions) (*ProjectCard, *Response, error) { +func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int64, opt *ProjectCardOptions) (*ProjectCard, *Response, error) { u := fmt.Sprintf("projects/columns/cards/%v", cardID) req, err := s.client.NewRequest("PATCH", u, opt) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} @@ -383,7 +426,7 @@ func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int, opt // DeleteProjectCard deletes a card from a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/cards/#delete-a-project-card -func (s *ProjectsService) DeleteProjectCard(ctx context.Context, cardID int) (*Response, error) { +func (s *ProjectsService) DeleteProjectCard(ctx context.Context, cardID int64) (*Response, error) { u := fmt.Sprintf("projects/columns/cards/%v", cardID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { @@ -405,13 +448,13 @@ type ProjectCardMoveOptions struct { // ColumnID is the ID of a column in the same project. Note that ColumnID // is required when using Position "after:" when that card is in // another column; otherwise it is optional. - ColumnID int `json:"column_id,omitempty"` + ColumnID int64 `json:"column_id,omitempty"` } // MoveProjectCard moves a card within a GitHub Project. // // GitHub API docs: https://developer.github.com/v3/projects/cards/#move-a-project-card -func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int, opt *ProjectCardMoveOptions) (*Response, error) { +func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int64, opt *ProjectCardMoveOptions) (*Response, error) { u := fmt.Sprintf("projects/columns/cards/%v/moves", cardID) req, err := s.client.NewRequest("POST", u, opt) if err != nil { @@ -423,3 +466,129 @@ func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int, opt * return s.client.Do(ctx, req, nil) } + +// ProjectCollaboratorOptions specifies the optional parameters to the +// ProjectsService.AddProjectCollaborator method. +type ProjectCollaboratorOptions struct { + // Permission specifies the permission to grant to the collaborator. + // Possible values are: + // "read" - can read, but not write to or administer this project. 
+ // "write" - can read and write, but not administer this project. + // "admin" - can read, write and administer this project. + // + // Default value is "write" + Permission *string `json:"permission,omitempty"` +} + +// AddProjectCollaborator adds a collaborator to an organization project and sets +// their permission level. You must be an organization owner or a project admin to add a collaborator. +// +// GitHub API docs: https://developer.github.com/v3/projects/collaborators/#add-user-as-a-collaborator +func (s *ProjectsService) AddProjectCollaborator(ctx context.Context, id int64, username string, opt *ProjectCollaboratorOptions) (*Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// RemoveProjectCollaborator removes a collaborator from an organization project. +// You must be an organization owner or a project admin to remove a collaborator. +// +// GitHub API docs: https://developer.github.com/v3/projects/collaborators/#remove-user-as-a-collaborator +func (s *ProjectsService) RemoveProjectCollaborator(ctx context.Context, id int64, username string) (*Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ListCollaboratorOptions specifies the optional parameters to the +// ProjectsService.ListProjectCollaborators method. +type ListCollaboratorOptions struct { + // Affiliation specifies how collaborators should be filtered by their affiliation. + // Possible values are: + // "outside" - All outside collaborators of an organization-owned repository + // "direct" - All collaborators with permissions to an organization-owned repository, + // regardless of organization membership status + // "all" - All collaborators the authenticated user can see + // + // Default value is "all". + Affiliation *string `url:"affiliation,omitempty"` + + ListOptions +} + +// ListProjectCollaborators lists the collaborators for an organization project. For a project, +// the list of collaborators includes outside collaborators, organization members that are direct +// collaborators, organization members with access through team memberships, organization members +// with access through default organization permissions, and organization owners. You must be an +// organization owner or a project admin to list collaborators. +// +// GitHub API docs: https://developer.github.com/v3/projects/collaborators/#list-collaborators +func (s *ProjectsService) ListProjectCollaborators(ctx context.Context, id int64, opt *ListCollaboratorOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("projects/%v/collaborators", id) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeProjectsPreview) + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// ProjectPermissionLevel represents the permission level an organization +// member has for a given project. +type ProjectPermissionLevel struct { + // Possible values: "admin", "write", "read", "none" + Permission *string `json:"permission,omitempty"` + + User *User `json:"user,omitempty"` +} + +// ReviewProjectCollaboratorPermission returns the collaborator's permission level for an organization +// project. Possible values for the permission key: "admin", "write", "read", "none". +// You must be an organization owner or a project admin to review a user's permission level. +// +// GitHub API docs: https://developer.github.com/v3/projects/collaborators/#review-a-users-permission-level +func (s *ProjectsService) ReviewProjectCollaboratorPermission(ctx context.Context, id int64, username string) (*ProjectPermissionLevel, *Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v/permission", id, username) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + ppl := new(ProjectPermissionLevel) + resp, err := s.client.Do(ctx, req, ppl) + if err != nil { + return nil, resp, err + } + return ppl, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/projects_test.go b/vendor/github.com/google/go-github/github/projects_test.go index 1045ca9..666c0e6 100644 --- a/vendor/github.com/google/go-github/github/projects_test.go +++ b/vendor/github.com/google/go-github/github/projects_test.go @@ -11,14 +11,89 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" ) +func TestProject_marshall(t *testing.T) { + testJSONMarshal(t, &Project{}, "{}") + + u := &Project{ + ID: Int64(1), + URL: String("u"), + HTMLURL: String("h"), + ColumnsURL: String("c"), + OwnerURL: String("o"), + Name: String("n"), + Body: String("b"), + Number: Int(1), + State: String("s"), + CreatedAt: &Timestamp{referenceTime}, + UpdatedAt: &Timestamp{referenceTime}, + NodeID: String("n"), + Creator: &User{ + Login: String("l"), + ID: Int64(1), + AvatarURL: String("a"), + GravatarID: String("g"), + Name: String("n"), + Company: String("c"), + Blog: String("b"), + Location: String("l"), + Email: String("e"), + Hireable: Bool(true), + PublicRepos: Int(1), + Followers: Int(1), + Following: Int(1), + CreatedAt: &Timestamp{referenceTime}, + URL: String("u"), + }, + } + want := `{ + "id": 1, + "url": "u", + "html_url": "h", + "columns_url": "c", + "owner_url": "o", + "name": "n", + "body": "b", + "number": 1, + "state": "s", + "created_at": ` + referenceTimeStr + `, + "updated_at": ` + referenceTimeStr + `, + "node_id": "n", + "creator": { + "login": "l", + "id": 1, + "avatar_url": "a", + "gravatar_id": "g", + "name": "n", + "company": "c", + "blog": "b", + "location": "l", + "email": "e", + "hireable": true, + "public_repos": 1, + "followers": 1, + "following": 1, + "created_at": ` + referenceTimeStr + `, + "url": "u" + } + }` + testJSONMarshal(t, u, want) +} func TestProjectsService_UpdateProject(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &ProjectOptions{Name: "Project Name", Body: "Project body."} + input := &ProjectOptions{ + Name: String("Project Name"), + Body: String("Project body."), + State: 
String("open"), + Public: Bool(true), + + OrganizationPermission: String("read"), + } mux.HandleFunc("/projects/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PATCH") @@ -38,14 +113,14 @@ func TestProjectsService_UpdateProject(t *testing.T) { t.Errorf("Projects.UpdateProject returned error: %v", err) } - want := &Project{ID: Int(1)} + want := &Project{ID: Int64(1)} if !reflect.DeepEqual(project, want) { t.Errorf("Projects.UpdateProject returned %+v, want %+v", project, want) } } func TestProjectsService_GetProject(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/1", func(w http.ResponseWriter, r *http.Request) { @@ -59,14 +134,14 @@ func TestProjectsService_GetProject(t *testing.T) { t.Errorf("Projects.GetProject returned error: %v", err) } - want := &Project{ID: Int(1)} + want := &Project{ID: Int64(1)} if !reflect.DeepEqual(project, want) { t.Errorf("Projects.GetProject returned %+v, want %+v", project, want) } } func TestProjectsService_DeleteProject(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/1", func(w http.ResponseWriter, r *http.Request) { @@ -81,12 +156,13 @@ func TestProjectsService_DeleteProject(t *testing.T) { } func TestProjectsService_ListProjectColumns(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeProjectsPreview} mux.HandleFunc("/projects/1/columns", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProjectsPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"id":1}]`) }) @@ -97,14 +173,14 @@ func TestProjectsService_ListProjectColumns(t *testing.T) { t.Errorf("Projects.ListProjectColumns returned error: %v", err) } - want := []*ProjectColumn{{ID: Int(1)}} + want := []*ProjectColumn{{ID: Int64(1)}} if !reflect.DeepEqual(columns, want) { t.Errorf("Projects.ListProjectColumns returned %+v, want %+v", columns, want) } } func TestProjectsService_GetProjectColumn(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/columns/1", func(w http.ResponseWriter, r *http.Request) { @@ -118,14 +194,14 @@ func TestProjectsService_GetProjectColumn(t *testing.T) { t.Errorf("Projects.GetProjectColumn returned error: %v", err) } - want := &ProjectColumn{ID: Int(1)} + want := &ProjectColumn{ID: Int64(1)} if !reflect.DeepEqual(column, want) { t.Errorf("Projects.GetProjectColumn returned %+v, want %+v", column, want) } } func TestProjectsService_CreateProjectColumn(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectColumnOptions{Name: "Column Name"} @@ -148,14 +224,14 @@ func TestProjectsService_CreateProjectColumn(t *testing.T) { t.Errorf("Projects.CreateProjectColumn returned error: %v", err) } - want := &ProjectColumn{ID: Int(1)} + want := &ProjectColumn{ID: Int64(1)} if !reflect.DeepEqual(column, want) { t.Errorf("Projects.CreateProjectColumn returned %+v, want %+v", column, want) } } func TestProjectsService_UpdateProjectColumn(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectColumnOptions{Name: "Column Name"} @@ -178,14 +254,14 @@ func TestProjectsService_UpdateProjectColumn(t *testing.T) { t.Errorf("Projects.UpdateProjectColumn returned error: %v", err) } - want := &ProjectColumn{ID: 
Int(1)} + want := &ProjectColumn{ID: Int64(1)} if !reflect.DeepEqual(column, want) { t.Errorf("Projects.UpdateProjectColumn returned %+v, want %+v", column, want) } } func TestProjectsService_DeleteProjectColumn(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/columns/1", func(w http.ResponseWriter, r *http.Request) { @@ -200,7 +276,7 @@ func TestProjectsService_DeleteProjectColumn(t *testing.T) { } func TestProjectsService_MoveProjectColumn(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectColumnMoveOptions{Position: "after:12345"} @@ -223,30 +299,34 @@ func TestProjectsService_MoveProjectColumn(t *testing.T) { } func TestProjectsService_ListProjectCards(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/columns/1/cards", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testHeader(t, r, "Accept", mediaTypeProjectsPreview) - testFormValues(t, r, values{"page": "2"}) + testFormValues(t, r, values{ + "archived_state": "all", + "page": "2"}) fmt.Fprint(w, `[{"id":1}]`) }) - opt := &ListOptions{Page: 2} + opt := &ProjectCardListOptions{ + ArchivedState: String("all"), + ListOptions: ListOptions{Page: 2}} cards, _, err := client.Projects.ListProjectCards(context.Background(), 1, opt) if err != nil { t.Errorf("Projects.ListProjectCards returned error: %v", err) } - want := []*ProjectCard{{ID: Int(1)}} + want := []*ProjectCard{{ID: Int64(1)}} if !reflect.DeepEqual(cards, want) { t.Errorf("Projects.ListProjectCards returned %+v, want %+v", cards, want) } } func TestProjectsService_GetProjectCard(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/columns/cards/1", func(w http.ResponseWriter, r *http.Request) { @@ -260,14 +340,14 @@ func TestProjectsService_GetProjectCard(t *testing.T) { t.Errorf("Projects.GetProjectCard returned error: %v", err) } - want := &ProjectCard{ID: Int(1)} + want := &ProjectCard{ID: Int64(1)} if !reflect.DeepEqual(card, want) { t.Errorf("Projects.GetProjectCard returned %+v, want %+v", card, want) } } func TestProjectsService_CreateProjectCard(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectCardOptions{ @@ -293,14 +373,14 @@ func TestProjectsService_CreateProjectCard(t *testing.T) { t.Errorf("Projects.CreateProjectCard returned error: %v", err) } - want := &ProjectCard{ID: Int(1)} + want := &ProjectCard{ID: Int64(1)} if !reflect.DeepEqual(card, want) { t.Errorf("Projects.CreateProjectCard returned %+v, want %+v", card, want) } } func TestProjectsService_UpdateProjectCard(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectCardOptions{ @@ -318,7 +398,7 @@ func TestProjectsService_UpdateProjectCard(t *testing.T) { t.Errorf("Request body = %+v, want %+v", v, input) } - fmt.Fprint(w, `{"id":1}`) + fmt.Fprint(w, `{"id":1, "archived":false}`) }) card, _, err := client.Projects.UpdateProjectCard(context.Background(), 1, input) @@ -326,14 +406,14 @@ func TestProjectsService_UpdateProjectCard(t *testing.T) { t.Errorf("Projects.UpdateProjectCard returned error: %v", err) } - want := &ProjectCard{ID: Int(1)} + want := &ProjectCard{ID: Int64(1), Archived: Bool(false)} if !reflect.DeepEqual(card, want) { t.Errorf("Projects.UpdateProjectCard returned %+v, want %+v", card, want) } } func TestProjectsService_DeleteProjectCard(t *testing.T) { - setup() + 
client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/projects/columns/cards/1", func(w http.ResponseWriter, r *http.Request) { @@ -348,7 +428,7 @@ func TestProjectsService_DeleteProjectCard(t *testing.T) { } func TestProjectsService_MoveProjectCard(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProjectCardMoveOptions{Position: "after:12345"} @@ -369,3 +449,140 @@ func TestProjectsService_MoveProjectCard(t *testing.T) { t.Errorf("Projects.MoveProjectCard returned error: %v", err) } } + +func TestProjectsService_AddProjectCollaborator(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + opt := &ProjectCollaboratorOptions{ + Permission: String("admin"), + } + + mux.HandleFunc("/projects/1/collaborators/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeProjectsPreview) + + v := &ProjectCollaboratorOptions{} + json.NewDecoder(r.Body).Decode(v) + if !reflect.DeepEqual(v, opt) { + t.Errorf("Request body = %+v, want %+v", v, opt) + } + + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Projects.AddProjectCollaborator(context.Background(), 1, "u", opt) + if err != nil { + t.Errorf("Projects.AddProjectCollaborator returned error: %v", err) + } +} + +func TestProjectsService_AddCollaborator_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, err := client.Projects.AddProjectCollaborator(context.Background(), 1, "%", nil) + testURLParseError(t, err) +} + +func TestProjectsService_RemoveCollaborator(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/projects/1/collaborators/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeProjectsPreview) + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Projects.RemoveProjectCollaborator(context.Background(), 1, "u") + if err != nil { + t.Errorf("Projects.RemoveProjectCollaborator returned error: %v", err) + } +} + +func TestProjectsService_RemoveCollaborator_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, err := client.Projects.RemoveProjectCollaborator(context.Background(), 1, "%") + testURLParseError(t, err) +} + +func TestProjectsService_ListCollaborators(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/projects/1/collaborators", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeProjectsPreview) + testFormValues(t, r, values{"page": "2"}) + fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) + }) + + opt := &ListCollaboratorOptions{ + ListOptions: ListOptions{Page: 2}, + } + users, _, err := client.Projects.ListProjectCollaborators(context.Background(), 1, opt) + if err != nil { + t.Errorf("Projects.ListProjectCollaborators returned error: %v", err) + } + + want := []*User{{ID: Int64(1)}, {ID: Int64(2)}} + if !reflect.DeepEqual(users, want) { + t.Errorf("Projects.ListProjectCollaborators returned %+v, want %+v", users, want) + } +} + +func TestProjectsService_ListCollaborators_withAffiliation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/projects/1/collaborators", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeProjectsPreview) + testFormValues(t, r, values{"affiliation": "all", "page": "2"}) + 
fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) + }) + + opt := &ListCollaboratorOptions{ + ListOptions: ListOptions{Page: 2}, + Affiliation: String("all"), + } + users, _, err := client.Projects.ListProjectCollaborators(context.Background(), 1, opt) + if err != nil { + t.Errorf("Projects.ListProjectCollaborators returned error: %v", err) + } + + want := []*User{{ID: Int64(1)}, {ID: Int64(2)}} + if !reflect.DeepEqual(users, want) { + t.Errorf("Projects.ListProjectCollaborators returned %+v, want %+v", users, want) + } +} + +func TestProjectsService_GetPermissionLevel(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/projects/1/collaborators/u/permission", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeProjectsPreview) + fmt.Fprintf(w, `{"permission":"admin","user":{"login":"u"}}`) + }) + + ppl, _, err := client.Projects.ReviewProjectCollaboratorPermission(context.Background(), 1, "u") + if err != nil { + t.Errorf("Projects.ReviewProjectCollaboratorPermission returned error: %v", err) + } + + want := &ProjectPermissionLevel{ + Permission: String("admin"), + User: &User{ + Login: String("u"), + }, + } + + if !reflect.DeepEqual(ppl, want) { + t.Errorf("Projects.ReviewProjectCollaboratorPermission returned %+v, want %+v", ppl, want) + } +} diff --git a/vendor/github.com/google/go-github/github/pulls.go b/vendor/github.com/google/go-github/github/pulls.go index 51e4d95..067e362 100644 --- a/vendor/github.com/google/go-github/github/pulls.go +++ b/vendor/github.com/google/go-github/github/pulls.go @@ -9,6 +9,7 @@ import ( "bytes" "context" "fmt" + "strings" "time" ) @@ -20,7 +21,7 @@ type PullRequestsService service // PullRequest represents a GitHub pull request on a repository. 
type PullRequest struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Number *int `json:"number,omitempty"` State *string `json:"state,omitempty"` Title *string `json:"title,omitempty"` @@ -29,9 +30,12 @@ type PullRequest struct { UpdatedAt *time.Time `json:"updated_at,omitempty"` ClosedAt *time.Time `json:"closed_at,omitempty"` MergedAt *time.Time `json:"merged_at,omitempty"` + Labels []*Label `json:"labels,omitempty"` User *User `json:"user,omitempty"` + Draft *bool `json:"draft,omitempty"` Merged *bool `json:"merged,omitempty"` Mergeable *bool `json:"mergeable,omitempty"` + MergeableState *string `json:"mergeable_state,omitempty"` MergedBy *User `json:"merged_by,omitempty"` MergeCommitSHA *string `json:"merge_commit_sha,omitempty"` Comments *int `json:"comments,omitempty"` @@ -45,21 +49,53 @@ type PullRequest struct { StatusesURL *string `json:"statuses_url,omitempty"` DiffURL *string `json:"diff_url,omitempty"` PatchURL *string `json:"patch_url,omitempty"` + CommitsURL *string `json:"commits_url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` ReviewCommentsURL *string `json:"review_comments_url,omitempty"` ReviewCommentURL *string `json:"review_comment_url,omitempty"` + ReviewComments *int `json:"review_comments,omitempty"` Assignee *User `json:"assignee,omitempty"` Assignees []*User `json:"assignees,omitempty"` Milestone *Milestone `json:"milestone,omitempty"` MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` + AuthorAssociation *string `json:"author_association,omitempty"` + NodeID *string `json:"node_id,omitempty"` + RequestedReviewers []*User `json:"requested_reviewers,omitempty"` - Head *PullRequestBranch `json:"head,omitempty"` - Base *PullRequestBranch `json:"base,omitempty"` + // RequestedTeams is populated as part of the PullRequestEvent. + // See, https://developer.github.com/v3/activity/events/types/#pullrequestevent for an example. + RequestedTeams []*Team `json:"requested_teams,omitempty"` + + Links *PRLinks `json:"_links,omitempty"` + Head *PullRequestBranch `json:"head,omitempty"` + Base *PullRequestBranch `json:"base,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the pull request. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". + ActiveLockReason *string `json:"active_lock_reason,omitempty"` } func (p PullRequest) String() string { return Stringify(p) } +// PRLink represents a single link object from Github pull request _links. +type PRLink struct { + HRef *string `json:"href,omitempty"` +} + +// PRLinks represents the "_links" object in a Github pull request. +type PRLinks struct { + Self *PRLink `json:"self,omitempty"` + HTML *PRLink `json:"html,omitempty"` + Issue *PRLink `json:"issue,omitempty"` + Comments *PRLink `json:"comments,omitempty"` + ReviewComments *PRLink `json:"review_comments,omitempty"` + ReviewComment *PRLink `json:"review_comment,omitempty"` + Commits *PRLink `json:"commits,omitempty"` + Statuses *PRLink `json:"statuses,omitempty"` +} + // PullRequestBranch represents a base or head branch in a GitHub pull request. type PullRequestBranch struct { Label *string `json:"label,omitempty"` @@ -73,7 +109,7 @@ type PullRequestBranch struct { // PullRequestsService.List method. type PullRequestListOptions struct { // State filters pull requests based on their state. Possible values are: - // open, closed. Default is "open". + // open, closed, all. Default is "open". 
State string `url:"state,omitempty"` // Head filters pull requests by head user and branch name in the format of: @@ -110,6 +146,10 @@ func (s *PullRequestsService) List(ctx context.Context, owner string, repo strin return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview, mediaTypeDraftPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + var pulls []*PullRequest resp, err := s.client.Do(ctx, req, &pulls) if err != nil { @@ -129,6 +169,10 @@ func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview, mediaTypeDraftPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + pull := new(PullRequest) resp, err := s.client.Do(ctx, req, pull) if err != nil { @@ -138,7 +182,7 @@ func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string return pull, resp, nil } -// GetRaw gets raw (diff or patch) format of a pull request. +// GetRaw gets a single pull request in raw (diff or patch) format. func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo string, number int, opt RawOptions) (string, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) req, err := s.client.NewRequest("GET", u, nil) @@ -155,13 +199,13 @@ func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo str return "", nil, fmt.Errorf("unsupported raw type %d", opt.Type) } - ret := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, ret) + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) if err != nil { return "", resp, err } - return ret.String(), resp, nil + return buf.String(), resp, nil } // NewPullRequest represents a new pull request to be created. @@ -184,6 +228,9 @@ func (s *PullRequestsService) Create(ctx context.Context, owner string, repo str return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) + p := new(PullRequest) resp, err := s.client.Do(ctx, req, p) if err != nil { @@ -230,6 +277,10 @@ func (s *PullRequestsService) Edit(ctx context.Context, owner string, repo strin return nil, nil, err } + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + p := new(PullRequest) resp, err := s.client.Do(ctx, req, p) if err != nil { @@ -343,9 +394,6 @@ func (s *PullRequestsService) Merge(ctx context.Context, owner string, repo stri return nil, nil, err } - // TODO: This header will be unnecessary when the API is no longer in preview. - req.Header.Set("Accept", mediaTypeSquashPreview) - mergeResult := new(PullRequestMergeResult) resp, err := s.client.Do(ctx, req, mergeResult) if err != nil { diff --git a/vendor/github.com/google/go-github/github/pulls_comments.go b/vendor/github.com/google/go-github/github/pulls_comments.go index bc0bc2d..f306776 100644 --- a/vendor/github.com/google/go-github/github/pulls_comments.go +++ b/vendor/github.com/google/go-github/github/pulls_comments.go @@ -13,22 +13,26 @@ import ( // PullRequestComment represents a comment left on a pull request. 
type PullRequestComment struct { - ID *int `json:"id,omitempty"` - InReplyTo *int `json:"in_reply_to,omitempty"` - Body *string `json:"body,omitempty"` - Path *string `json:"path,omitempty"` - DiffHunk *string `json:"diff_hunk,omitempty"` - Position *int `json:"position,omitempty"` - OriginalPosition *int `json:"original_position,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - OriginalCommitID *string `json:"original_commit_id,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` + ID *int64 `json:"id,omitempty"` + InReplyTo *int64 `json:"in_reply_to_id,omitempty"` + Body *string `json:"body,omitempty"` + Path *string `json:"path,omitempty"` + DiffHunk *string `json:"diff_hunk,omitempty"` + PullRequestReviewID *int64 `json:"pull_request_review_id,omitempty"` + Position *int `json:"position,omitempty"` + OriginalPosition *int `json:"original_position,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + OriginalCommitID *string `json:"original_commit_id,omitempty"` + User *User `json:"user,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + // AuthorAssociation is the comment author's relationship to the pull request's repository. + // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". + AuthorAssociation *string `json:"author_association,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + PullRequestURL *string `json:"pull_request_url,omitempty"` } func (p PullRequestComment) String() string { @@ -87,8 +91,8 @@ func (s *PullRequestsService) ListComments(ctx context.Context, owner string, re // GetComment fetches the specified pull request comment. // // GitHub API docs: https://developer.github.com/v3/pulls/comments/#get-a-single-comment -func (s *PullRequestsService) GetComment(ctx context.Context, owner string, repo string, number int) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, number) +func (s *PullRequestsService) GetComment(ctx context.Context, owner string, repo string, commentID int64) (*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err @@ -125,11 +129,38 @@ func (s *PullRequestsService) CreateComment(ctx context.Context, owner string, r return c, resp, nil } +// CreateCommentInReplyTo creates a new comment as a reply to an existing pull request comment. 
+// +// GitHub API docs: https://developer.github.com/v3/pulls/comments/#alternative-input +func (s *PullRequestsService) CreateCommentInReplyTo(ctx context.Context, owner string, repo string, number int, body string, commentID int64) (*PullRequestComment, *Response, error) { + comment := &struct { + Body string `json:"body,omitempty"` + InReplyTo int64 `json:"in_reply_to,omitempty"` + }{ + Body: body, + InReplyTo: commentID, + } + u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(PullRequestComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + // EditComment updates a pull request comment. +// A non-nil comment.Body must be provided. Other comment fields should be left nil. // // GitHub API docs: https://developer.github.com/v3/pulls/comments/#edit-a-comment -func (s *PullRequestsService) EditComment(ctx context.Context, owner string, repo string, number int, comment *PullRequestComment) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, number) +func (s *PullRequestsService) EditComment(ctx context.Context, owner string, repo string, commentID int64, comment *PullRequestComment) (*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("PATCH", u, comment) if err != nil { return nil, nil, err @@ -147,8 +178,8 @@ func (s *PullRequestsService) EditComment(ctx context.Context, owner string, rep // DeleteComment deletes a pull request comment. // // GitHub API docs: https://developer.github.com/v3/pulls/comments/#delete-a-comment -func (s *PullRequestsService) DeleteComment(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, number) +func (s *PullRequestsService) DeleteComment(ctx context.Context, owner string, repo string, commentID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err diff --git a/vendor/github.com/google/go-github/github/pulls_comments_test.go b/vendor/github.com/google/go-github/github/pulls_comments_test.go index e35b04f..76b78cf 100644 --- a/vendor/github.com/google/go-github/github/pulls_comments_test.go +++ b/vendor/github.com/google/go-github/github/pulls_comments_test.go @@ -16,7 +16,7 @@ import ( ) func TestPullRequestsService_ListComments_allPulls(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/comments", func(w http.ResponseWriter, r *http.Request) { @@ -42,20 +42,20 @@ func TestPullRequestsService_ListComments_allPulls(t *testing.T) { t.Errorf("PullRequests.ListComments returned error: %v", err) } - want := []*PullRequestComment{{ID: Int(1)}} + want := []*PullRequestComment{{ID: Int64(1)}} if !reflect.DeepEqual(pulls, want) { t.Errorf("PullRequests.ListComments returned %+v, want %+v", pulls, want) } } func TestPullRequestsService_ListComments_specificPull(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/comments", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testHeader(t, r, "Accept", mediaTypeReactionsPreview) - fmt.Fprint(w, 
`[{"id":1}]`) + fmt.Fprint(w, `[{"id":1, "pull_request_review_id":42}]`) }) pulls, _, err := client.PullRequests.ListComments(context.Background(), "o", "r", 1, nil) @@ -63,19 +63,22 @@ func TestPullRequestsService_ListComments_specificPull(t *testing.T) { t.Errorf("PullRequests.ListComments returned error: %v", err) } - want := []*PullRequestComment{{ID: Int(1)}} + want := []*PullRequestComment{{ID: Int64(1), PullRequestReviewID: Int64(42)}} if !reflect.DeepEqual(pulls, want) { t.Errorf("PullRequests.ListComments returned %+v, want %+v", pulls, want) } } func TestPullRequestsService_ListComments_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.ListComments(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestPullRequestsService_GetComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -89,19 +92,22 @@ func TestPullRequestsService_GetComment(t *testing.T) { t.Errorf("PullRequests.GetComment returned error: %v", err) } - want := &PullRequestComment{ID: Int(1)} + want := &PullRequestComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("PullRequests.GetComment returned %+v, want %+v", comment, want) } } func TestPullRequestsService_GetComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.GetComment(context.Background(), "%", "r", 1) testURLParseError(t, err) } func TestPullRequestsService_CreateComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &PullRequestComment{Body: String("b")} @@ -123,19 +129,22 @@ func TestPullRequestsService_CreateComment(t *testing.T) { t.Errorf("PullRequests.CreateComment returned error: %v", err) } - want := &PullRequestComment{ID: Int(1)} + want := &PullRequestComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("PullRequests.CreateComment returned %+v, want %+v", comment, want) } } func TestPullRequestsService_CreateComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.CreateComment(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestPullRequestsService_EditComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &PullRequestComment{Body: String("b")} @@ -157,19 +166,22 @@ func TestPullRequestsService_EditComment(t *testing.T) { t.Errorf("PullRequests.EditComment returned error: %v", err) } - want := &PullRequestComment{ID: Int(1)} + want := &PullRequestComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("PullRequests.EditComment returned %+v, want %+v", comment, want) } } func TestPullRequestsService_EditComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.EditComment(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestPullRequestsService_DeleteComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -183,6 +195,9 @@ func TestPullRequestsService_DeleteComment(t *testing.T) { } func TestPullRequestsService_DeleteComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := 
client.PullRequests.DeleteComment(context.Background(), "%", "r", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/pulls_reviewers.go b/vendor/github.com/google/go-github/github/pulls_reviewers.go index bd94a65..a1d7853 100644 --- a/vendor/github.com/google/go-github/github/pulls_reviewers.go +++ b/vendor/github.com/google/go-github/github/pulls_reviewers.go @@ -10,17 +10,23 @@ import ( "fmt" ) -// RequestReviewers creates a review request for the provided GitHub users for the specified pull request. +// ReviewersRequest specifies users and teams for a pull request review request. +type ReviewersRequest struct { + Reviewers []string `json:"reviewers,omitempty"` + TeamReviewers []string `json:"team_reviewers,omitempty"` +} + +// Reviewers represents reviewers of a pull request. +type Reviewers struct { + Users []*User `json:"users,omitempty"` + Teams []*Team `json:"teams,omitempty"` +} + +// RequestReviewers creates a review request for the provided reviewers for the specified pull request. // // GitHub API docs: https://developer.github.com/v3/pulls/review_requests/#create-a-review-request -func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo string, number int, logins []string) (*PullRequest, *Response, error) { +func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*PullRequest, *Response, error) { u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - - reviewers := struct { - Reviewers []string `json:"reviewers,omitempty"` - }{ - Reviewers: logins, - } req, err := s.client.NewRequest("POST", u, &reviewers) if err != nil { return nil, nil, err @@ -35,10 +41,10 @@ func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo return r, resp, nil } -// ListReviewers lists users whose reviews have been requested on the specified pull request. +// ListReviewers lists reviewers whose reviews have been requested on the specified pull request. // // GitHub API docs: https://developer.github.com/v3/pulls/review_requests/#list-review-requests -func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo string, number int, opt *ListOptions) ([]*User, *Response, error) { +func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo string, number int, opt *ListOptions) (*Reviewers, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/requested_reviewers", owner, repo, number) u, err := addOptions(u, opt) if err != nil { @@ -50,30 +56,24 @@ func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo str return nil, nil, err } - var users []*User - resp, err := s.client.Do(ctx, req, &users) + reviewers := new(Reviewers) + resp, err := s.client.Do(ctx, req, reviewers) if err != nil { return nil, resp, err } - return users, resp, nil + return reviewers, resp, nil } -// RemoveReviewers removes the review request for the provided GitHub users for the specified pull request. +// RemoveReviewers removes the review request for the provided reviewers for the specified pull request. 
// // GitHub API docs: https://developer.github.com/v3/pulls/review_requests/#delete-a-review-request -func (s *PullRequestsService) RemoveReviewers(ctx context.Context, owner, repo string, number int, logins []string) (*Response, error) { +func (s *PullRequestsService) RemoveReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*Response, error) { u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - - reviewers := struct { - Reviewers []string `json:"reviewers,omitempty"` - }{ - Reviewers: logins, - } req, err := s.client.NewRequest("DELETE", u, &reviewers) if err != nil { return nil, err } - return s.client.Do(ctx, req, reviewers) + return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/pulls_reviewers_test.go b/vendor/github.com/google/go-github/github/pulls_reviewers_test.go index a60f2e7..7c550e1 100644 --- a/vendor/github.com/google/go-github/github/pulls_reviewers_test.go +++ b/vendor/github.com/google/go-github/github/pulls_reviewers_test.go @@ -14,17 +14,17 @@ import ( ) func TestRequestReviewers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/requested_reviewers", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") - testBody(t, r, `{"reviewers":["octocat","googlebot"]}`+"\n") + testBody(t, r, `{"reviewers":["octocat","googlebot"],"team_reviewers":["justice-league","injustice-league"]}`+"\n") fmt.Fprint(w, `{"number":1}`) }) // This returns a PR, unmarshalling of which is tested elsewhere - pull, _, err := client.PullRequests.RequestReviewers(context.Background(), "o", "r", 1, []string{"octocat", "googlebot"}) + pull, _, err := client.PullRequests.RequestReviewers(context.Background(), "o", "r", 1, ReviewersRequest{Reviewers: []string{"octocat", "googlebot"}, TeamReviewers: []string{"justice-league", "injustice-league"}}) if err != nil { t.Errorf("PullRequests.RequestReviewers returned error: %v", err) } @@ -35,27 +35,27 @@ func TestRequestReviewers(t *testing.T) { } func TestRemoveReviewers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/requested_reviewers", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testBody(t, r, `{"reviewers":["octocat","googlebot"]}`+"\n") + testBody(t, r, `{"reviewers":["octocat","googlebot"],"team_reviewers":["justice-league"]}`+"\n") }) - _, err := client.PullRequests.RemoveReviewers(context.Background(), "o", "r", 1, []string{"octocat", "googlebot"}) + _, err := client.PullRequests.RemoveReviewers(context.Background(), "o", "r", 1, ReviewersRequest{Reviewers: []string{"octocat", "googlebot"}, TeamReviewers: []string{"justice-league"}}) if err != nil { t.Errorf("PullRequests.RemoveReviewers returned error: %v", err) } } func TestListReviewers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/requested_reviewers", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - fmt.Fprint(w, `[{"login":"octocat","id":1}]`) + fmt.Fprint(w, `{"users":[{"login":"octocat","id":1}],"teams":[{"id":1,"name":"Justice League"}]}`) }) reviewers, _, err := client.PullRequests.ListReviewers(context.Background(), "o", "r", 1, nil) @@ -63,10 +63,18 @@ func TestListReviewers(t *testing.T) { t.Errorf("PullRequests.ListReviewers returned error: %v", err) } - want := []*User{ - { - Login: String("octocat"), - 
ID: Int(1), + want := &Reviewers{ + Users: []*User{ + { + Login: String("octocat"), + ID: Int64(1), + }, + }, + Teams: []*Team{ + { + ID: Int64(1), + Name: String("Justice League"), + }, }, } if !reflect.DeepEqual(reviewers, want) { @@ -75,7 +83,7 @@ func TestListReviewers(t *testing.T) { } func TestListReviewers_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/requested_reviewers", func(w http.ResponseWriter, r *http.Request) { @@ -83,7 +91,7 @@ func TestListReviewers_withOptions(t *testing.T) { testFormValues(t, r, values{ "page": "2", }) - fmt.Fprint(w, `[]`) + fmt.Fprint(w, `{}`) }) _, _, err := client.PullRequests.ListReviewers(context.Background(), "o", "r", 1, &ListOptions{Page: 2}) diff --git a/vendor/github.com/google/go-github/github/pulls_reviews.go b/vendor/github.com/google/go-github/github/pulls_reviews.go index c071338..57d3c63 100644 --- a/vendor/github.com/google/go-github/github/pulls_reviews.go +++ b/vendor/github.com/google/go-github/github/pulls_reviews.go @@ -13,7 +13,7 @@ import ( // PullRequestReview represents a review of a pull request. type PullRequestReview struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` User *User `json:"user,omitempty"` Body *string `json:"body,omitempty"` SubmittedAt *time.Time `json:"submitted_at,omitempty"` @@ -40,6 +40,7 @@ func (c DraftReviewComment) String() string { // PullRequestReviewRequest represents a request to create a review. type PullRequestReviewRequest struct { + CommitID *string `json:"commit_id,omitempty"` Body *string `json:"body,omitempty"` Event *string `json:"event,omitempty"` Comments []*DraftReviewComment `json:"comments,omitempty"` @@ -93,7 +94,7 @@ func (s *PullRequestsService) ListReviews(ctx context.Context, owner, repo strin // Read more about it here - https://github.com/google/go-github/issues/540 // // GitHub API docs: https://developer.github.com/v3/pulls/reviews/#get-a-single-review -func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, number, reviewID int) (*PullRequestReview, *Response, error) { +func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) req, err := s.client.NewRequest("GET", u, nil) @@ -117,7 +118,7 @@ func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, // Read more about it here - https://github.com/google/go-github/issues/540 // // GitHub API docs: https://developer.github.com/v3/pulls/reviews/#delete-a-pending-review -func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, repo string, number, reviewID int) (*PullRequestReview, *Response, error) { +func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) req, err := s.client.NewRequest("DELETE", u, nil) @@ -141,7 +142,7 @@ func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, re // Read more about it here - https://github.com/google/go-github/issues/540 // // GitHub API docs: https://developer.github.com/v3/pulls/reviews/#get-comments-for-a-single-review -func (s *PullRequestsService) ListReviewComments(ctx context.Context, owner, repo string, number, reviewID int, opt 
*ListOptions) ([]*PullRequestComment, *Response, error) { +func (s *PullRequestsService) ListReviewComments(ctx context.Context, owner, repo string, number int, reviewID int64, opt *ListOptions) ([]*PullRequestComment, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/comments", owner, repo, number, reviewID) u, err := addOptions(u, opt) if err != nil { @@ -193,7 +194,7 @@ func (s *PullRequestsService) CreateReview(ctx context.Context, owner, repo stri // Read more about it here - https://github.com/google/go-github/issues/540 // // GitHub API docs: https://developer.github.com/v3/pulls/reviews/#submit-a-pull-request-review -func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo string, number, reviewID int, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { +func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/events", owner, repo, number, reviewID) req, err := s.client.NewRequest("POST", u, review) @@ -217,7 +218,7 @@ func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo stri // Read more about it here - https://github.com/google/go-github/issues/540 // // GitHub API docs: https://developer.github.com/v3/pulls/reviews/#dismiss-a-pull-request-review -func (s *PullRequestsService) DismissReview(ctx context.Context, owner, repo string, number, reviewID int, review *PullRequestReviewDismissalRequest) (*PullRequestReview, *Response, error) { +func (s *PullRequestsService) DismissReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewDismissalRequest) (*PullRequestReview, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/dismissals", owner, repo, number, reviewID) req, err := s.client.NewRequest("PUT", u, review) diff --git a/vendor/github.com/google/go-github/github/pulls_reviews_test.go b/vendor/github.com/google/go-github/github/pulls_reviews_test.go index d72de47..58e301b 100644 --- a/vendor/github.com/google/go-github/github/pulls_reviews_test.go +++ b/vendor/github.com/google/go-github/github/pulls_reviews_test.go @@ -15,7 +15,7 @@ import ( ) func TestPullRequestsService_ListReviews(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/reviews", func(w http.ResponseWriter, r *http.Request) { @@ -33,8 +33,8 @@ func TestPullRequestsService_ListReviews(t *testing.T) { } want := []*PullRequestReview{ - {ID: Int(1)}, - {ID: Int(2)}, + {ID: Int64(1)}, + {ID: Int64(2)}, } if !reflect.DeepEqual(reviews, want) { t.Errorf("PullRequests.ListReviews returned %+v, want %+v", reviews, want) @@ -42,12 +42,15 @@ func TestPullRequestsService_ListReviews(t *testing.T) { } func TestPullRequestsService_ListReviews_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.ListReviews(context.Background(), "%", "r", 1, nil) testURLParseError(t, err) } func TestPullRequestsService_GetReview(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/reviews/1", func(w http.ResponseWriter, r *http.Request) { @@ -60,19 +63,22 @@ func TestPullRequestsService_GetReview(t *testing.T) { t.Errorf("PullRequests.GetReview returned error: %v", err) } - want := &PullRequestReview{ID: Int(1)} + want := 
&PullRequestReview{ID: Int64(1)} if !reflect.DeepEqual(review, want) { t.Errorf("PullRequests.GetReview returned %+v, want %+v", review, want) } } func TestPullRequestsService_GetReview_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.GetReview(context.Background(), "%", "r", 1, 1) testURLParseError(t, err) } func TestPullRequestsService_DeletePendingReview(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/reviews/1", func(w http.ResponseWriter, r *http.Request) { @@ -85,19 +91,22 @@ func TestPullRequestsService_DeletePendingReview(t *testing.T) { t.Errorf("PullRequests.DeletePendingReview returned error: %v", err) } - want := &PullRequestReview{ID: Int(1)} + want := &PullRequestReview{ID: Int64(1)} if !reflect.DeepEqual(review, want) { t.Errorf("PullRequests.DeletePendingReview returned %+v, want %+v", review, want) } } func TestPullRequestsService_DeletePendingReview_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.DeletePendingReview(context.Background(), "%", "r", 1, 1) testURLParseError(t, err) } func TestPullRequestsService_ListReviewComments(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/reviews/1/comments", func(w http.ResponseWriter, r *http.Request) { @@ -111,8 +120,8 @@ func TestPullRequestsService_ListReviewComments(t *testing.T) { } want := []*PullRequestComment{ - {ID: Int(1)}, - {ID: Int(2)}, + {ID: Int64(1)}, + {ID: Int64(2)}, } if !reflect.DeepEqual(comments, want) { t.Errorf("PullRequests.ListReviewComments returned %+v, want %+v", comments, want) @@ -120,7 +129,7 @@ func TestPullRequestsService_ListReviewComments(t *testing.T) { } func TestPullRequestsService_ListReviewComments_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/reviews/1/comments", func(w http.ResponseWriter, r *http.Request) { @@ -138,17 +147,21 @@ func TestPullRequestsService_ListReviewComments_withOptions(t *testing.T) { } func TestPullRequestsService_ListReviewComments_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.ListReviewComments(context.Background(), "%", "r", 1, 1, nil) testURLParseError(t, err) } func TestPullRequestsService_CreateReview(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &PullRequestReviewRequest{ - Body: String("b"), - Event: String("APPROVE"), + CommitID: String("commit_id"), + Body: String("b"), + Event: String("APPROVE"), } mux.HandleFunc("/repos/o/r/pulls/1/reviews", func(w http.ResponseWriter, r *http.Request) { @@ -168,19 +181,22 @@ func TestPullRequestsService_CreateReview(t *testing.T) { t.Errorf("PullRequests.CreateReview returned error: %v", err) } - want := &PullRequestReview{ID: Int(1)} + want := &PullRequestReview{ID: Int64(1)} if !reflect.DeepEqual(review, want) { t.Errorf("PullRequests.CreateReview returned %+v, want %+v", review, want) } } func TestPullRequestsService_CreateReview_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.CreateReview(context.Background(), "%", "r", 1, &PullRequestReviewRequest{}) testURLParseError(t, err) } func TestPullRequestsService_SubmitReview(t *testing.T) { - setup() + client, mux, _, teardown := setup() 
defer teardown() input := &PullRequestReviewRequest{ @@ -205,19 +221,22 @@ func TestPullRequestsService_SubmitReview(t *testing.T) { t.Errorf("PullRequests.SubmitReview returned error: %v", err) } - want := &PullRequestReview{ID: Int(1)} + want := &PullRequestReview{ID: Int64(1)} if !reflect.DeepEqual(review, want) { t.Errorf("PullRequests.SubmitReview returned %+v, want %+v", review, want) } } func TestPullRequestsService_SubmitReview_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.SubmitReview(context.Background(), "%", "r", 1, 1, &PullRequestReviewRequest{}) testURLParseError(t, err) } func TestPullRequestsService_DismissReview(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &PullRequestReviewDismissalRequest{Message: String("m")} @@ -239,13 +258,16 @@ func TestPullRequestsService_DismissReview(t *testing.T) { t.Errorf("PullRequests.DismissReview returned error: %v", err) } - want := &PullRequestReview{ID: Int(1)} + want := &PullRequestReview{ID: Int64(1)} if !reflect.DeepEqual(review, want) { t.Errorf("PullRequests.DismissReview returned %+v, want %+v", review, want) } } func TestPullRequestsService_DismissReview_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.DismissReview(context.Background(), "%", "r", 1, 1, &PullRequestReviewDismissalRequest{}) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/pulls_test.go b/vendor/github.com/google/go-github/github/pulls_test.go index b458831..602f03b 100644 --- a/vendor/github.com/google/go-github/github/pulls_test.go +++ b/vendor/github.com/google/go-github/github/pulls_test.go @@ -17,11 +17,13 @@ import ( ) func TestPullRequestsService_List(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview, mediaTypeDraftPreview} mux.HandleFunc("/repos/o/r/pulls", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "state": "closed", "head": "h", @@ -46,16 +48,21 @@ func TestPullRequestsService_List(t *testing.T) { } func TestPullRequestsService_List_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.List(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestPullRequestsService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview, mediaTypeDraftPreview} mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"number":1}`) }) @@ -70,9 +77,10 @@ func TestPullRequestsService_Get(t *testing.T) { } } -func TestPullRequestsService_GetRawDiff(t *testing.T) { - setup() +func TestPullRequestsService_GetRaw_diff(t *testing.T) { + client, mux, _, teardown := setup() defer teardown() + const rawStr = "@@diff content" mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { @@ -81,19 +89,20 @@ func TestPullRequestsService_GetRawDiff(t *testing.T) { fmt.Fprint(w, rawStr) }) - ret, _, err := client.PullRequests.GetRaw(context.Background(), "o", 
"r", 1, RawOptions{Diff}) + got, _, err := client.PullRequests.GetRaw(context.Background(), "o", "r", 1, RawOptions{Diff}) if err != nil { t.Fatalf("PullRequests.GetRaw returned error: %v", err) } - - if ret != rawStr { - t.Errorf("PullRequests.GetRaw returned %s want %s", ret, rawStr) + want := rawStr + if got != want { + t.Errorf("PullRequests.GetRaw returned %s want %s", got, want) } } -func TestPullRequestsService_GetRawPatch(t *testing.T) { - setup() +func TestPullRequestsService_GetRaw_patch(t *testing.T) { + client, mux, _, teardown := setup() defer teardown() + const rawStr = "@@patch content" mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { @@ -102,18 +111,18 @@ func TestPullRequestsService_GetRawPatch(t *testing.T) { fmt.Fprint(w, rawStr) }) - ret, _, err := client.PullRequests.GetRaw(context.Background(), "o", "r", 1, RawOptions{Patch}) + got, _, err := client.PullRequests.GetRaw(context.Background(), "o", "r", 1, RawOptions{Patch}) if err != nil { t.Fatalf("PullRequests.GetRaw returned error: %v", err) } - - if ret != rawStr { - t.Errorf("PullRequests.GetRaw returned %s want %s", ret, rawStr) + want := rawStr + if got != want { + t.Errorf("PullRequests.GetRaw returned %s want %s", got, want) } } -func TestPullRequestsService_GetRawInvalid(t *testing.T) { - setup() +func TestPullRequestsService_GetRaw_invalid(t *testing.T) { + client, _, _, teardown := setup() defer teardown() _, _, err := client.PullRequests.GetRaw(context.Background(), "o", "r", 1, RawOptions{100}) @@ -125,8 +134,61 @@ func TestPullRequestsService_GetRawInvalid(t *testing.T) { } } +func TestPullRequestsService_Get_links(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + fmt.Fprint(w, `{ + "number":1, + "_links":{ + "self":{"href":"https://api.github.com/repos/octocat/Hello-World/pulls/1347"}, + "html":{"href":"https://github.com/octocat/Hello-World/pull/1347"}, + "issue":{"href":"https://api.github.com/repos/octocat/Hello-World/issues/1347"}, + "comments":{"href":"https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"}, + "review_comments":{"href":"https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments"}, + "review_comment":{"href":"https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}"}, + "commits":{"href":"https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits"}, + "statuses":{"href":"https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"} + } + }`) + }) + + pull, _, err := client.PullRequests.Get(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("PullRequests.Get returned error: %v", err) + } + + want := &PullRequest{ + Number: Int(1), + Links: &PRLinks{ + Self: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/pulls/1347"), + }, HTML: &PRLink{ + HRef: String("https://github.com/octocat/Hello-World/pull/1347"), + }, Issue: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/issues/1347"), + }, Comments: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"), + }, ReviewComments: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments"), + }, ReviewComment: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}"), + }, Commits: &PRLink{ + HRef: 
String("https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits"), + }, Statuses: &PRLink{ + HRef: String("https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"), + }, + }, + } + if !reflect.DeepEqual(pull, want) { + t.Errorf("PullRequests.Get returned %+v, want %+v", pull, want) + } +} + func TestPullRequestsService_Get_headAndBase(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { @@ -143,11 +205,11 @@ func TestPullRequestsService_Get_headAndBase(t *testing.T) { Number: Int(1), Head: &PullRequestBranch{ Ref: String("r2"), - Repo: &Repository{ID: Int(2)}, + Repo: &Repository{ID: Int64(2)}, }, Base: &PullRequestBranch{ Ref: String("r1"), - Repo: &Repository{ID: Int(1)}, + Repo: &Repository{ID: Int64(1)}, }, } if !reflect.DeepEqual(pull, want) { @@ -156,7 +218,7 @@ func TestPullRequestsService_Get_headAndBase(t *testing.T) { } func TestPullRequestsService_Get_urlFields(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1", func(w http.ResponseWriter, r *http.Request) { @@ -195,12 +257,15 @@ func TestPullRequestsService_Get_urlFields(t *testing.T) { } func TestPullRequestsService_Get_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.Get(context.Background(), "%", "r", 1) testURLParseError(t, err) } func TestPullRequestsService_Create(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &NewPullRequest{Title: String("t")} @@ -210,6 +275,7 @@ func TestPullRequestsService_Create(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeLabelDescriptionSearchPreview) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -229,12 +295,15 @@ func TestPullRequestsService_Create(t *testing.T) { } func TestPullRequestsService_Create_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.Create(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestPullRequestsService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() tests := []struct { @@ -264,8 +333,10 @@ func TestPullRequestsService_Edit(t *testing.T) { for i, tt := range tests { madeRequest := false + wantAcceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} mux.HandleFunc(fmt.Sprintf("/repos/o/r/pulls/%v", i), func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testBody(t, r, tt.wantUpdate+"\n") io.WriteString(w, tt.sendResponse) madeRequest = true @@ -287,12 +358,15 @@ func TestPullRequestsService_Edit(t *testing.T) { } func TestPullRequestsService_Edit_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.PullRequests.Edit(context.Background(), "%", "r", 1, &PullRequest{}) testURLParseError(t, err) } func TestPullRequestsService_ListCommits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/commits", func(w http.ResponseWriter, r *http.Request) { @@ -349,7 +423,7 @@ func TestPullRequestsService_ListCommits(t *testing.T) { } func TestPullRequestsService_ListFiles(t 
*testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/files", func(w http.ResponseWriter, r *http.Request) { @@ -411,7 +485,7 @@ func TestPullRequestsService_ListFiles(t *testing.T) { } func TestPullRequestsService_IsMerged(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/merge", func(w http.ResponseWriter, r *http.Request) { @@ -431,12 +505,11 @@ func TestPullRequestsService_IsMerged(t *testing.T) { } func TestPullRequestsService_Merge(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/1/merge", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PUT") - testHeader(t, r, "Accept", mediaTypeSquashPreview) fmt.Fprint(w, ` { "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e", @@ -463,7 +536,7 @@ func TestPullRequestsService_Merge(t *testing.T) { // Test that different merge options produce expected PUT requests. See issue https://github.com/google/go-github/issues/500. func TestPullRequestsService_Merge_options(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() tests := []struct { @@ -500,7 +573,6 @@ func TestPullRequestsService_Merge_options(t *testing.T) { madeRequest := false mux.HandleFunc(fmt.Sprintf("/repos/o/r/pulls/%d/merge", i), func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PUT") - testHeader(t, r, "Accept", mediaTypeSquashPreview) testBody(t, r, test.wantBody+"\n") madeRequest = true }) diff --git a/vendor/github.com/google/go-github/github/reactions.go b/vendor/github.com/google/go-github/github/reactions.go index 739413d..0865f8c 100644 --- a/vendor/github.com/google/go-github/github/reactions.go +++ b/vendor/github.com/google/go-github/github/reactions.go @@ -19,8 +19,9 @@ type ReactionsService service // Reaction represents a GitHub reaction. type Reaction struct { // ID is the Reaction ID. - ID *int `json:"id,omitempty"` - User *User `json:"user,omitempty"` + ID *int64 `json:"id,omitempty"` + User *User `json:"user,omitempty"` + NodeID *string `json:"node_id,omitempty"` // Content is the type of reaction. // Possible values are: // "+1", "-1", "laugh", "confused", "heart", "hooray". @@ -46,7 +47,7 @@ func (r Reaction) String() string { // ListCommentReactions lists the reactions for a commit comment. // // GitHub API docs: https://developer.github.com/v3/reactions/#list-reactions-for-a-commit-comment -func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo string, id int, opt *ListOptions) ([]*Reaction, *Response, error) { +func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo string, id int64, opt *ListOptions) ([]*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) u, err := addOptions(u, opt) if err != nil { @@ -58,7 +59,7 @@ func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction @@ -73,9 +74,10 @@ func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo // CreateCommentReaction creates a reaction for a commit comment. // Note that if you have already created a reaction of type content, the // previously created reaction will be returned with Status: 200 OK. 
+// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". // // GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-a-commit-comment -func (s ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo string, id int, content string) (*Reaction, *Response, error) { +func (s ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) body := &Reaction{Content: String(content)} @@ -84,7 +86,7 @@ func (s ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} @@ -111,7 +113,7 @@ func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction @@ -126,6 +128,7 @@ func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo s // CreateIssueReaction creates a reaction for an issue. // Note that if you have already created a reaction of type content, the // previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". // // GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-an-issue func (s ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo string, number int, content string) (*Reaction, *Response, error) { @@ -137,7 +140,7 @@ func (s ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} @@ -152,7 +155,7 @@ func (s ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo s // ListIssueCommentReactions lists the reactions for an issue comment. // // GitHub API docs: https://developer.github.com/v3/reactions/#list-reactions-for-an-issue-comment -func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, repo string, id int, opt *ListOptions) ([]*Reaction, *Response, error) { +func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, repo string, id int64, opt *ListOptions) ([]*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) u, err := addOptions(u, opt) if err != nil { @@ -164,7 +167,7 @@ func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction @@ -179,9 +182,10 @@ func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, // CreateIssueCommentReaction creates a reaction for an issue comment. 
// Note that if you have already created a reaction of type content, the // previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". // // GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-an-issue-comment -func (s ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, repo string, id int, content string) (*Reaction, *Response, error) { +func (s ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) body := &Reaction{Content: String(content)} @@ -190,7 +194,7 @@ func (s ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} @@ -205,7 +209,7 @@ func (s ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, // ListPullRequestCommentReactions lists the reactions for a pull request review comment. // // GitHub API docs: https://developer.github.com/v3/reactions/#list-reactions-for-an-issue-comment -func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, owner, repo string, id int, opt *ListOptions) ([]*Reaction, *Response, error) { +func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, owner, repo string, id int64, opt *ListOptions) ([]*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) u, err := addOptions(u, opt) if err != nil { @@ -217,7 +221,7 @@ func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction @@ -232,9 +236,10 @@ func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, // CreatePullRequestCommentReaction creates a reaction for a pull request review comment. // Note that if you have already created a reaction of type content, the // previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". // // GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-an-issue-comment -func (s ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, owner, repo string, id int, content string) (*Reaction, *Response, error) { +func (s ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) body := &Reaction{Content: String(content)} @@ -243,7 +248,106 @@ func (s ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. 
+ req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// ListTeamDiscussionReactions lists the reactions for a team discussion. +// +// GitHub API docs: https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion +func (s *ReactionsService) ListTeamDiscussionReactions(ctx context.Context, teamID int64, discussionNumber int, opt *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreateTeamDiscussionReaction creates a reaction for a team discussion. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". +// +// GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion +func (s *ReactionsService) CreateTeamDiscussionReaction(ctx context.Context, teamID int64, discussionNumber int, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// ListTeamDiscussionCommentReactions lists the reactions for a team discussion comment. +// +// GitHub API docs: https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment +func (s *ReactionsService) ListTeamDiscussionCommentReactions(ctx context.Context, teamID int64, discussionNumber, commentNumber int, opt *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, nil, err + } + return m, resp, nil +} + +// CreateTeamDiscussionCommentReaction creates a reaction for a team discussion comment. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray". 
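// Editor's sketch (illustrative only, not part of this vendored diff): the new
// team discussion reaction helpers are addressed by numeric team ID and
// discussion number, and content must be one of the listed reaction values.
// client and ctx are assumed, as in the tests below.
reaction, _, err := client.Reactions.CreateTeamDiscussionReaction(ctx, 1, 2, "heart")
if err != nil {
	// handle the error
}
_ = reaction // Reaction IDs are now int64 and include the new NodeID field.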
+// +// GitHub API docs: https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment +func (s *ReactionsService) CreateTeamDiscussionCommentReaction(ctx context.Context, teamID int64, discussionNumber, commentNumber int, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} @@ -258,7 +362,7 @@ func (s ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, // DeleteReaction deletes a reaction. // // GitHub API docs: https://developer.github.com/v3/reaction/reactions/#delete-a-reaction-archive -func (s *ReactionsService) DeleteReaction(ctx context.Context, id int) (*Response, error) { +func (s *ReactionsService) DeleteReaction(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("reactions/%v", id) req, err := s.client.NewRequest("DELETE", u, nil) diff --git a/vendor/github.com/google/go-github/github/reactions_test.go b/vendor/github.com/google/go-github/github/reactions_test.go index 2725298..e1baec3 100644 --- a/vendor/github.com/google/go-github/github/reactions_test.go +++ b/vendor/github.com/google/go-github/github/reactions_test.go @@ -13,7 +13,7 @@ import ( ) func TestReactionsService_ListCommentReactions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -28,13 +28,14 @@ func TestReactionsService_ListCommentReactions(t *testing.T) { if err != nil { t.Errorf("ListCommentReactions returned error: %v", err) } - if want := []*Reaction{{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")}}; !reflect.DeepEqual(got, want) { + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { t.Errorf("ListCommentReactions = %+v, want %+v", got, want) } } func TestReactionsService_CreateCommentReaction(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -49,14 +50,14 @@ func TestReactionsService_CreateCommentReaction(t *testing.T) { if err != nil { t.Errorf("CreateCommentReaction returned error: %v", err) } - want := &Reaction{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")} + want := &Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} if !reflect.DeepEqual(got, want) { t.Errorf("CreateCommentReaction = %+v, want %+v", got, want) } } func TestReactionsService_ListIssueReactions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -71,13 +72,14 @@ func TestReactionsService_ListIssueReactions(t *testing.T) { if err != nil { t.Errorf("ListIssueReactions returned error: %v", err) } - if want := []*Reaction{{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")}}; !reflect.DeepEqual(got, want) { + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { 
t.Errorf("ListIssueReactions = %+v, want %+v", got, want) } } func TestReactionsService_CreateIssueReaction(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -92,14 +94,14 @@ func TestReactionsService_CreateIssueReaction(t *testing.T) { if err != nil { t.Errorf("CreateIssueReaction returned error: %v", err) } - want := &Reaction{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")} + want := &Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} if !reflect.DeepEqual(got, want) { t.Errorf("CreateIssueReaction = %+v, want %+v", got, want) } } func TestReactionsService_ListIssueCommentReactions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -114,13 +116,14 @@ func TestReactionsService_ListIssueCommentReactions(t *testing.T) { if err != nil { t.Errorf("ListIssueCommentReactions returned error: %v", err) } - if want := []*Reaction{{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")}}; !reflect.DeepEqual(got, want) { + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { t.Errorf("ListIssueCommentReactions = %+v, want %+v", got, want) } } func TestReactionsService_CreateIssueCommentReaction(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/issues/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -135,14 +138,14 @@ func TestReactionsService_CreateIssueCommentReaction(t *testing.T) { if err != nil { t.Errorf("CreateIssueCommentReaction returned error: %v", err) } - want := &Reaction{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")} + want := &Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} if !reflect.DeepEqual(got, want) { t.Errorf("CreateIssueCommentReaction = %+v, want %+v", got, want) } } func TestReactionsService_ListPullRequestCommentReactions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -157,13 +160,14 @@ func TestReactionsService_ListPullRequestCommentReactions(t *testing.T) { if err != nil { t.Errorf("ListPullRequestCommentReactions returned error: %v", err) } - if want := []*Reaction{{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")}}; !reflect.DeepEqual(got, want) { + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { t.Errorf("ListPullRequestCommentReactions = %+v, want %+v", got, want) } } func TestReactionsService_CreatePullRequestCommentReaction(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pulls/comments/1/reactions", func(w http.ResponseWriter, r *http.Request) { @@ -178,14 +182,102 @@ func TestReactionsService_CreatePullRequestCommentReaction(t *testing.T) { if err != nil { t.Errorf("CreatePullRequestCommentReaction returned error: %v", err) } - want := &Reaction{ID: Int(1), User: &User{Login: String("l"), ID: Int(2)}, Content: String("+1")} + want := 
&Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} if !reflect.DeepEqual(got, want) { t.Errorf("CreatePullRequestCommentReaction = %+v, want %+v", got, want) } } +func TestReactionsService_ListTeamDiscussionReactions(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/discussions/2/reactions", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeReactionsPreview) + + w.WriteHeader(http.StatusOK) + w.Write([]byte(`[{"id":1,"user":{"login":"l","id":2},"content":"+1"}]`)) + }) + + got, _, err := client.Reactions.ListTeamDiscussionReactions(context.Background(), 1, 2, nil) + if err != nil { + t.Errorf("ListTeamDiscussionReactions returned error: %v", err) + } + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { + t.Errorf("ListTeamDiscussionReactions = %+v, want %+v", got, want) + } +} + +func TestReactionsService_CreateTeamDiscussionReaction(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/discussions/2/reactions", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeReactionsPreview) + + w.WriteHeader(http.StatusCreated) + w.Write([]byte(`{"id":1,"user":{"login":"l","id":2},"content":"+1"}`)) + }) + + got, _, err := client.Reactions.CreateTeamDiscussionReaction(context.Background(), 1, 2, "+1") + if err != nil { + t.Errorf("CreateTeamDiscussionReaction returned error: %v", err) + } + want := &Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} + if !reflect.DeepEqual(got, want) { + t.Errorf("CreateTeamDiscussionReaction = %+v, want %+v", got, want) + } +} + +func TestReactionService_ListTeamDiscussionCommentReactions(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/discussions/2/comments/3/reactions", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeReactionsPreview) + + w.WriteHeader(http.StatusOK) + w.Write([]byte(`[{"id":1,"user":{"login":"l","id":2},"content":"+1"}]`)) + }) + + got, _, err := client.Reactions.ListTeamDiscussionCommentReactions(context.Background(), 1, 2, 3, nil) + if err != nil { + t.Errorf("ListTeamDiscussionCommentReactions returned error: %v", err) + } + want := []*Reaction{{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")}} + if !reflect.DeepEqual(got, want) { + t.Errorf("ListTeamDiscussionCommentReactions = %+v, want %+v", got, want) + } +} + +func TestReactionService_CreateTeamDiscussionCommentReaction(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/discussions/2/comments/3/reactions", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeReactionsPreview) + + w.WriteHeader(http.StatusCreated) + w.Write([]byte(`{"id":1,"user":{"login":"l","id":2},"content":"+1"}`)) + }) + + got, _, err := client.Reactions.CreateTeamDiscussionCommentReaction(context.Background(), 1, 2, 3, "+1") + if err != nil { + t.Errorf("CreateTeamDiscussionCommentReaction returned error: %v", err) + } + want := &Reaction{ID: Int64(1), User: &User{Login: String("l"), ID: Int64(2)}, Content: String("+1")} + if !reflect.DeepEqual(got, want) { + 
t.Errorf("CreateTeamDiscussionCommentReaction = %+v, want %+v", got, want) + } +} + func TestReactionsService_DeleteReaction(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/reactions/1", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/repos.go b/vendor/github.com/google/go-github/github/repos.go index 38bbc15..617c20d 100644 --- a/vendor/github.com/google/go-github/github/repos.go +++ b/vendor/github.com/google/go-github/github/repos.go @@ -7,7 +7,6 @@ package github import ( "context" - "encoding/json" "fmt" "strings" ) @@ -20,7 +19,8 @@ type RepositoriesService service // Repository represents a GitHub repository. type Repository struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` Owner *User `json:"owner,omitempty"` Name *string `json:"name,omitempty"` FullName *string `json:"full_name,omitempty"` @@ -39,7 +39,7 @@ type Repository struct { SSHURL *string `json:"ssh_url,omitempty"` SVNURL *string `json:"svn_url,omitempty"` Language *string `json:"language,omitempty"` - Fork *bool `json:"fork"` + Fork *bool `json:"fork,omitempty"` ForksCount *int `json:"forks_count,omitempty"` NetworkCount *int `json:"network_count,omitempty"` OpenIssuesCount *int `json:"open_issues_count,omitempty"` @@ -55,21 +55,24 @@ type Repository struct { AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` + Topics []string `json:"topics,omitempty"` + Archived *bool `json:"archived,omitempty"` // Only provided when using RepositoriesService.Get while in preview License *License `json:"license,omitempty"` // Additional mutable fields when creating and editing a repository - Private *bool `json:"private"` - HasIssues *bool `json:"has_issues"` - HasWiki *bool `json:"has_wiki"` - HasPages *bool `json:"has_pages"` - HasDownloads *bool `json:"has_downloads"` + Private *bool `json:"private,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + HasPages *bool `json:"has_pages,omitempty"` + HasProjects *bool `json:"has_projects,omitempty"` + HasDownloads *bool `json:"has_downloads,omitempty"` LicenseTemplate *string `json:"license_template,omitempty"` GitignoreTemplate *string `json:"gitignore_template,omitempty"` // Creating an organization repository. Required for non-owners. - TeamID *int `json:"team_id"` + TeamID *int64 `json:"team_id,omitempty"` // API URLs URL *string `json:"url,omitempty"` @@ -175,8 +178,9 @@ func (s *RepositoriesService) List(ctx context.Context, user string, opt *Reposi return nil, nil, err } - // TODO: remove custom Accept header when license support fully launches - req.Header.Set("Accept", mediaTypeLicensesPreview) + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var repos []*Repository resp, err := s.client.Do(ctx, req, &repos) @@ -212,8 +216,9 @@ func (s *RepositoriesService) ListByOrg(ctx context.Context, org string, opt *Re return nil, nil, err } - // TODO: remove custom Accept header when license support fully launches - req.Header.Set("Accept", mediaTypeLicensesPreview) + // TODO: remove custom Accept headers when APIs fully launch. 
+ acceptHeaders := []string{mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var repos []*Repository resp, err := s.client.Do(ctx, req, &repos) @@ -228,9 +233,7 @@ func (s *RepositoriesService) ListByOrg(ctx context.Context, org string, opt *Re // RepositoriesService.ListAll method. type RepositoryListAllOptions struct { // ID of the last repository seen - Since int `url:"since,omitempty"` - - ListOptions + Since int64 `url:"since,omitempty"` } // ListAll lists all GitHub repositories in the order that they were created. @@ -256,10 +259,40 @@ func (s *RepositoriesService) ListAll(ctx context.Context, opt *RepositoryListAl return repos, resp, nil } +// createRepoRequest is a subset of Repository and is used internally +// by Create to pass only the known fields for the endpoint. +// +// See https://github.com/google/go-github/issues/1014 for more +// information. +type createRepoRequest struct { + // Name is required when creating a repo. + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + Homepage *string `json:"homepage,omitempty"` + + Private *bool `json:"private,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasProjects *bool `json:"has_projects,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + + // Creating an organization repository. Required for non-owners. + TeamID *int64 `json:"team_id,omitempty"` + + AutoInit *bool `json:"auto_init,omitempty"` + GitignoreTemplate *string `json:"gitignore_template,omitempty"` + LicenseTemplate *string `json:"license_template,omitempty"` + AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` + AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` + AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` +} + // Create a new repository. If an organization is specified, the new // repository will be created under that org. If the empty string is // specified, it will be created for the authenticated user. // +// Note that only a subset of the repo fields are used and repo must +// not be nil. 
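// Editor's sketch (illustrative only, not part of this vendored diff): callers
// still pass a *Repository to Create; only the fields mirrored into
// createRepoRequest are sent to the endpoint. client and ctx are assumed.
repo := &github.Repository{
	Name:    github.String("example-repo"),
	Private: github.Bool(true),
	HasWiki: github.Bool(false),
}
created, _, err := client.Repositories.Create(ctx, "", repo) // empty org creates under the authenticated user
if err != nil {
	// handle the error
}
_ = created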
+// // GitHub API docs: https://developer.github.com/v3/repos/#create func (s *RepositoriesService) Create(ctx context.Context, org string, repo *Repository) (*Repository, *Response, error) { var u string @@ -269,7 +302,24 @@ func (s *RepositoriesService) Create(ctx context.Context, org string, repo *Repo u = "user/repos" } - req, err := s.client.NewRequest("POST", u, repo) + repoReq := &createRepoRequest{ + Name: repo.Name, + Description: repo.Description, + Homepage: repo.Homepage, + Private: repo.Private, + HasIssues: repo.HasIssues, + HasProjects: repo.HasProjects, + HasWiki: repo.HasWiki, + TeamID: repo.TeamID, + AutoInit: repo.AutoInit, + GitignoreTemplate: repo.GitignoreTemplate, + LicenseTemplate: repo.LicenseTemplate, + AllowSquashMerge: repo.AllowSquashMerge, + AllowMergeCommit: repo.AllowMergeCommit, + AllowRebaseMerge: repo.AllowRebaseMerge, + } + + req, err := s.client.NewRequest("POST", u, repoReq) if err != nil { return nil, nil, err } @@ -295,7 +345,7 @@ func (s *RepositoriesService) Get(ctx context.Context, owner, repo string) (*Rep // TODO: remove custom Accept header when the license support fully launches // https://developer.github.com/v3/licenses/#get-a-repositorys-license - acceptHeaders := []string{mediaTypeLicensesPreview, mediaTypeSquashPreview, mediaTypeCodesOfConductPreview} + acceptHeaders := []string{mediaTypeCodesOfConductPreview, mediaTypeTopicsPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) repository := new(Repository) @@ -332,17 +382,13 @@ func (s *RepositoriesService) GetCodeOfConduct(ctx context.Context, owner, repo // GetByID fetches a repository. // // Note: GetByID uses the undocumented GitHub API endpoint /repositories/:id. -func (s *RepositoriesService) GetByID(ctx context.Context, id int) (*Repository, *Response, error) { +func (s *RepositoriesService) GetByID(ctx context.Context, id int64) (*Repository, *Response, error) { u := fmt.Sprintf("repositories/%d", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when the license support fully launches - // https://developer.github.com/v3/licenses/#get-a-repositorys-license - req.Header.Set("Accept", mediaTypeLicensesPreview) - repository := new(Repository) resp, err := s.client.Do(ctx, req, repository) if err != nil { @@ -362,9 +408,6 @@ func (s *RepositoriesService) Edit(ctx context.Context, owner, repo string, repo return nil, nil, err } - // TODO: Remove this preview header after API is fully vetted. 
- req.Header.Set("Accept", mediaTypeSquashPreview) - r := new(Repository) resp, err := s.client.Do(ctx, req, r) if err != nil { @@ -390,7 +433,7 @@ func (s *RepositoriesService) Delete(ctx context.Context, owner, repo string) (* // Contributor represents a repository contributor type Contributor struct { Login *string `json:"login,omitempty"` - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` AvatarURL *string `json:"avatar_url,omitempty"` GravatarID *string `json:"gravatar_id,omitempty"` URL *string `json:"url,omitempty"` @@ -405,7 +448,7 @@ type Contributor struct { EventsURL *string `json:"events_url,omitempty"` ReceivedEventsURL *string `json:"received_events_url,omitempty"` Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin"` + SiteAdmin *bool `json:"site_admin,omitempty"` Contributions *int `json:"contributions,omitempty"` } @@ -483,6 +526,8 @@ func (s *RepositoriesService) ListTeams(ctx context.Context, owner string, repo return nil, nil, err } + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + var teams []*Team resp, err := s.client.Do(ctx, req, &teams) if err != nil { @@ -556,55 +601,55 @@ type RequiredStatusChecks struct { Contexts []string `json:"contexts"` } +// RequiredStatusChecksRequest represents a request to edit a protected branch's status checks. +type RequiredStatusChecksRequest struct { + Strict *bool `json:"strict,omitempty"` + Contexts []string `json:"contexts,omitempty"` +} + // PullRequestReviewsEnforcement represents the pull request reviews enforcement of a protected branch. type PullRequestReviewsEnforcement struct { - // Specifies which users and teams can dismiss pull requets reviews. + // Specifies which users and teams can dismiss pull request reviews. DismissalRestrictions DismissalRestrictions `json:"dismissal_restrictions"` // Specifies if approved reviews are dismissed automatically, when a new commit is pushed. DismissStaleReviews bool `json:"dismiss_stale_reviews"` + // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. + RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // PullRequestReviewsEnforcementRequest represents request to set the pull request review // enforcement of a protected branch. It is separate from PullRequestReviewsEnforcement above // because the request structure is different from the response structure. type PullRequestReviewsEnforcementRequest struct { - // Specifies which users and teams should be allowed to dismiss pull requets reviews. Can be nil to disable the restrictions. - DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions"` + // Specifies which users and teams should be allowed to dismiss pull request reviews. + // User and team dismissal restrictions are only available for + // organization-owned repositories. Must be nil for personal repositories. + DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. (Required) DismissStaleReviews bool `json:"dismiss_stale_reviews"` -} - -// MarshalJSON implements the json.Marshaler interface. 
-// Converts nil value of PullRequestReviewsEnforcementRequest.DismissalRestrictionsRequest to empty array -func (req PullRequestReviewsEnforcementRequest) MarshalJSON() ([]byte, error) { - if req.DismissalRestrictionsRequest == nil { - newReq := struct { - R []interface{} `json:"dismissal_restrictions"` - D bool `json:"dismiss_stale_reviews"` - }{ - R: []interface{}{}, - D: req.DismissStaleReviews, - } - return json.Marshal(newReq) - } - newReq := struct { - R *DismissalRestrictionsRequest `json:"dismissal_restrictions"` - D bool `json:"dismiss_stale_reviews"` - }{ - R: req.DismissalRestrictionsRequest, - D: req.DismissStaleReviews, - } - return json.Marshal(newReq) + // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. + RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // PullRequestReviewsEnforcementUpdate represents request to patch the pull request review // enforcement of a protected branch. It is separate from PullRequestReviewsEnforcementRequest above // because the patch request does not require all fields to be initialized. type PullRequestReviewsEnforcementUpdate struct { - // Specifies which users and teams can dismiss pull requets reviews. Can be ommitted. + // Specifies which users and teams can dismiss pull request reviews. Can be omitted. DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. Can be ommited. + // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. Can be omitted. DismissStaleReviews *bool `json:"dismiss_stale_reviews,omitempty"` + // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. + RequireCodeOwnerReviews bool `json:"require_code_owner_reviews,omitempty"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1 - 6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // AdminEnforcement represents the configuration to enforce required status checks for repository administrators. @@ -645,11 +690,19 @@ type DismissalRestrictions struct { // restriction to allows only specific users or teams to dimiss pull request reviews. It is // separate from DismissalRestrictions above because the request structure is // different from the response structure. +// Note: Both Users and Teams must be nil, or both must be non-nil. type DismissalRestrictionsRequest struct { - // The list of user logins who can dismiss pull request reviews. (Required; use []string{} instead of nil for empty list.) - Users []string `json:"users"` - // The list of team slugs which can dismiss pull request reviews. (Required; use []string{} instead of nil for empty list.) - Teams []string `json:"teams"` + // The list of user logins who can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) + Users *[]string `json:"users,omitempty"` + // The list of team slugs which can dismiss pull request reviews. 
(Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) + Teams *[]string `json:"teams,omitempty"` +} + +// SignaturesProtectedBranch represents the protection status of an individual branch. +type SignaturesProtectedBranch struct { + URL *string `json:"url,omitempty"` + // Commits pushed to matching branches must have verified signatures. + Enabled *bool `json:"enabled,omitempty"` } // ListBranches lists branches for the specified repository. @@ -668,7 +721,7 @@ func (s *RepositoriesService) ListBranches(ctx context.Context, owner string, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) var branches []*Branch resp, err := s.client.Do(ctx, req, &branches) @@ -690,7 +743,7 @@ func (s *RepositoriesService) GetBranch(ctx context.Context, owner, repo, branch } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) b := new(Branch) resp, err := s.client.Do(ctx, req, b) @@ -712,7 +765,7 @@ func (s *RepositoriesService) GetBranchProtection(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(Protection) resp, err := s.client.Do(ctx, req, p) @@ -734,7 +787,7 @@ func (s *RepositoriesService) GetRequiredStatusChecks(ctx context.Context, owner } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(RequiredStatusChecks) resp, err := s.client.Do(ctx, req, p) @@ -756,7 +809,7 @@ func (s *RepositoriesService) ListRequiredStatusChecksContexts(ctx context.Conte } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) resp, err = s.client.Do(ctx, req, &contexts) if err != nil { @@ -777,7 +830,7 @@ func (s *RepositoriesService) UpdateBranchProtection(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(Protection) resp, err := s.client.Do(ctx, req, p) @@ -799,11 +852,91 @@ func (s *RepositoriesService) RemoveBranchProtection(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + return s.client.Do(ctx, req, nil) +} + +// GetSignaturesProtectedBranch gets required signatures of protected branch. 
+// +// GitHub API docs: https://developer.github.com/v3/repos/branches/#get-required-signatures-of-protected-branch +func (s *RepositoriesService) GetSignaturesProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, branch) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) + + p := new(SignaturesProtectedBranch) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// RequireSignaturesOnProtectedBranch makes signed commits required on a protected branch. +// It requires admin access and branch protection to be enabled. +// +// GitHub API docs: https://developer.github.com/v3/repos/branches/#add-required-signatures-of-protected-branch +func (s *RepositoriesService) RequireSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, branch) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) + + r := new(SignaturesProtectedBranch) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, err +} + +// OptionalSignaturesOnProtectedBranch removes required signed commits on a given branch. +// +// GitHub API docs: https://developer.github.com/v3/repos/branches/#remove-required-signatures-of-protected-branch +func (s *RepositoriesService) OptionalSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, branch) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) return s.client.Do(ctx, req, nil) } +// UpdateRequiredStatusChecks updates the required status checks for a given protected branch. +// +// GitHub API docs: https://developer.github.com/v3/repos/branches/#update-required-status-checks-of-protected-branch +func (s *RepositoriesService) UpdateRequiredStatusChecks(ctx context.Context, owner, repo, branch string, sreq *RequiredStatusChecksRequest) (*RequiredStatusChecks, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, branch) + req, err := s.client.NewRequest("PATCH", u, sreq) + if err != nil { + return nil, nil, err + } + + sc := new(RequiredStatusChecks) + resp, err := s.client.Do(ctx, req, sc) + if err != nil { + return nil, resp, err + } + + return sc, resp, nil +} + // License gets the contents of a repository's license if one is detected. 
// // GitHub API docs: https://developer.github.com/v3/licenses/#get-the-contents-of-a-repositorys-license @@ -834,7 +967,7 @@ func (s *RepositoriesService) GetPullRequestReviewEnforcement(ctx context.Contex } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -857,7 +990,7 @@ func (s *RepositoriesService) UpdatePullRequestReviewEnforcement(ctx context.Con } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -885,7 +1018,7 @@ func (s *RepositoriesService) DisableDismissalRestrictions(ctx context.Context, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -907,7 +1040,7 @@ func (s *RepositoriesService) RemovePullRequestReviewEnforcement(ctx context.Con } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) return s.client.Do(ctx, req, nil) } @@ -923,7 +1056,7 @@ func (s *RepositoriesService) GetAdminEnforcement(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(AdminEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -946,7 +1079,7 @@ func (s *RepositoriesService) AddAdminEnforcement(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(AdminEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -968,7 +1101,97 @@ func (s *RepositoriesService) RemoveAdminEnforcement(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) return s.client.Do(ctx, req, nil) } + +// repositoryTopics represents a collection of repository topics. +type repositoryTopics struct { + Names []string `json:"names"` +} + +// ListAllTopics lists topics for a repository. +// +// GitHub API docs: https://developer.github.com/v3/repos/#list-all-topics-for-a-repository +func (s *RepositoriesService) ListAllTopics(ctx context.Context, owner, repo string) ([]string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTopicsPreview) + + topics := new(repositoryTopics) + resp, err := s.client.Do(ctx, req, topics) + if err != nil { + return nil, resp, err + } + + return topics.Names, resp, nil +} + +// ReplaceAllTopics replaces topics for a repository. 
+// +// GitHub API docs: https://developer.github.com/v3/repos/#replace-all-topics-for-a-repository +func (s *RepositoriesService) ReplaceAllTopics(ctx context.Context, owner, repo string, topics []string) ([]string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) + t := &repositoryTopics{ + Names: topics, + } + if t.Names == nil { + t.Names = []string{} + } + req, err := s.client.NewRequest("PUT", u, t) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTopicsPreview) + + t = new(repositoryTopics) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t.Names, resp, nil +} + +// TransferRequest represents a request to transfer a repository. +type TransferRequest struct { + NewOwner string `json:"new_owner"` + TeamID []int64 `json:"team_ids,omitempty"` +} + +// Transfer transfers a repository from one account or organization to another. +// +// This method might return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it has now scheduled the transfer of the repository in a background task. +// A follow up request, after a delay of a second or so, should result +// in a successful request. +// +// GitHub API docs: https://developer.github.com/v3/repos/#transfer-a-repository +func (s *RepositoriesService) Transfer(ctx context.Context, owner, repo string, transfer TransferRequest) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/transfer", owner, repo) + + req, err := s.client.NewRequest("POST", u, &transfer) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeRepositoryTransferPreview) + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/repos_collaborators.go b/vendor/github.com/google/go-github/github/repos_collaborators.go index 61b6331..757e9f3 100644 --- a/vendor/github.com/google/go-github/github/repos_collaborators.go +++ b/vendor/github.com/google/go-github/github/repos_collaborators.go @@ -10,10 +10,26 @@ import ( "fmt" ) +// ListCollaboratorsOptions specifies the optional parameters to the +// RepositoriesService.ListCollaborators method. +type ListCollaboratorsOptions struct { + // Affiliation specifies how collaborators should be filtered by their affiliation. + // Possible values are: + // outside - All outside collaborators of an organization-owned repository + // direct - All collaborators with permissions to an organization-owned repository, + // regardless of organization membership status + // all - All collaborators the authenticated user can see + // + // Default value is "all". + Affiliation string `url:"affiliation,omitempty"` + + ListOptions +} + // ListCollaborators lists the GitHub users that have access to the repository. 
// -// GitHub API docs: https://developer.github.com/v3/repos/collaborators/#list -func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo string, opt *ListOptions) ([]*User, *Response, error) { +// GitHub API docs: https://developer.github.com/v3/repos/collaborators/#list-collaborators +func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo string, opt *ListCollaboratorsOptions) ([]*User, *Response, error) { u := fmt.Sprintf("repos/%v/%v/collaborators", owner, repo) u, err := addOptions(u, opt) if err != nil { @@ -25,6 +41,8 @@ func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo return nil, nil, err } + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + var users []*User resp, err := s.client.Do(ctx, req, &users) if err != nil { @@ -102,9 +120,6 @@ func (s *RepositoriesService) AddCollaborator(ctx context.Context, owner, repo, return nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/repos_collaborators_test.go b/vendor/github.com/google/go-github/github/repos_collaborators_test.go index d2e125c..a4d3210 100644 --- a/vendor/github.com/google/go-github/github/repos_collaborators_test.go +++ b/vendor/github.com/google/go-github/github/repos_collaborators_test.go @@ -15,34 +15,65 @@ import ( ) func TestRepositoriesService_ListCollaborators(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/collaborators", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) }) - opt := &ListOptions{Page: 2} + opt := &ListCollaboratorsOptions{ + ListOptions: ListOptions{Page: 2}, + } users, _, err := client.Repositories.ListCollaborators(context.Background(), "o", "r", opt) if err != nil { t.Errorf("Repositories.ListCollaborators returned error: %v", err) } - want := []*User{{ID: Int(1)}, {ID: Int(2)}} + want := []*User{{ID: Int64(1)}, {ID: Int64(2)}} + if !reflect.DeepEqual(users, want) { + t.Errorf("Repositories.ListCollaborators returned %+v, want %+v", users, want) + } +} + +func TestRepositoriesService_ListCollaborators_withAffiliation(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/collaborators", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{"affiliation": "all", "page": "2"}) + fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) + }) + + opt := &ListCollaboratorsOptions{ + ListOptions: ListOptions{Page: 2}, + Affiliation: "all", + } + users, _, err := client.Repositories.ListCollaborators(context.Background(), "o", "r", opt) + if err != nil { + t.Errorf("Repositories.ListCollaborators returned error: %v", err) + } + + want := []*User{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(users, want) { t.Errorf("Repositories.ListCollaborators returned %+v, want %+v", users, want) } } func TestRepositoriesService_ListCollaborators_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListCollaborators(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_IsCollaborator_True(t *testing.T) { - setup() + client, mux, _, 
teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/collaborators/u", func(w http.ResponseWriter, r *http.Request) { @@ -61,7 +92,7 @@ func TestRepositoriesService_IsCollaborator_True(t *testing.T) { } func TestRepositoriesService_IsCollaborator_False(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/collaborators/u", func(w http.ResponseWriter, r *http.Request) { @@ -80,12 +111,15 @@ func TestRepositoriesService_IsCollaborator_False(t *testing.T) { } func TestRepositoriesService_IsCollaborator_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.IsCollaborator(context.Background(), "%", "%", "%") testURLParseError(t, err) } func TestRepositoryService_GetPermissionLevel(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/collaborators/u/permission", func(w http.ResponseWriter, r *http.Request) { @@ -111,7 +145,7 @@ func TestRepositoryService_GetPermissionLevel(t *testing.T) { } func TestRepositoriesService_AddCollaborator(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() opt := &RepositoryAddCollaboratorOptions{Permission: "admin"} @@ -121,7 +155,6 @@ func TestRepositoriesService_AddCollaborator(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "PUT") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) if !reflect.DeepEqual(v, opt) { t.Errorf("Request body = %+v, want %+v", v, opt) } @@ -136,12 +169,15 @@ func TestRepositoriesService_AddCollaborator(t *testing.T) { } func TestRepositoriesService_AddCollaborator_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.AddCollaborator(context.Background(), "%", "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_RemoveCollaborator(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/collaborators/u", func(w http.ResponseWriter, r *http.Request) { @@ -156,6 +192,9 @@ func TestRepositoriesService_RemoveCollaborator(t *testing.T) { } func TestRepositoriesService_RemoveCollaborator_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.RemoveCollaborator(context.Background(), "%", "%", "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/repos_comments.go b/vendor/github.com/google/go-github/github/repos_comments.go index 4830ee2..fa2377d 100644 --- a/vendor/github.com/google/go-github/github/repos_comments.go +++ b/vendor/github.com/google/go-github/github/repos_comments.go @@ -15,7 +15,7 @@ import ( type RepositoryComment struct { HTMLURL *string `json:"html_url,omitempty"` URL *string `json:"url,omitempty"` - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` CommitID *string `json:"commit_id,omitempty"` User *User `json:"user,omitempty"` Reactions *Reactions `json:"reactions,omitempty"` @@ -110,7 +110,7 @@ func (s *RepositoriesService) CreateComment(ctx context.Context, owner, repo, sh // GetComment gets a single comment from a repository. 
// // GitHub API docs: https://developer.github.com/v3/repos/comments/#get-a-single-commit-comment -func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string, id int) (*RepositoryComment, *Response, error) { +func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string, id int64) (*RepositoryComment, *Response, error) { u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -132,7 +132,7 @@ func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string // UpdateComment updates the body of a single comment. // // GitHub API docs: https://developer.github.com/v3/repos/comments/#update-a-commit-comment -func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo string, id int, comment *RepositoryComment) (*RepositoryComment, *Response, error) { +func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo string, id int64, comment *RepositoryComment) (*RepositoryComment, *Response, error) { u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) req, err := s.client.NewRequest("PATCH", u, comment) if err != nil { @@ -151,7 +151,7 @@ func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo str // DeleteComment deletes a single comment from a repository. // // GitHub API docs: https://developer.github.com/v3/repos/comments/#delete-a-commit-comment -func (s *RepositoriesService) DeleteComment(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) DeleteComment(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/repos_comments_test.go b/vendor/github.com/google/go-github/github/repos_comments_test.go index 249a37a..0eb54eb 100644 --- a/vendor/github.com/google/go-github/github/repos_comments_test.go +++ b/vendor/github.com/google/go-github/github/repos_comments_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListComments(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/comments", func(w http.ResponseWriter, r *http.Request) { @@ -31,19 +31,22 @@ func TestRepositoriesService_ListComments(t *testing.T) { t.Errorf("Repositories.ListComments returned error: %v", err) } - want := []*RepositoryComment{{ID: Int(1)}, {ID: Int(2)}} + want := []*RepositoryComment{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(comments, want) { t.Errorf("Repositories.ListComments returned %+v, want %+v", comments, want) } } func TestRepositoriesService_ListComments_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListComments(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_ListCommitComments(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/commits/s/comments", func(w http.ResponseWriter, r *http.Request) { @@ -59,19 +62,22 @@ func TestRepositoriesService_ListCommitComments(t *testing.T) { t.Errorf("Repositories.ListCommitComments returned error: %v", err) } - want := []*RepositoryComment{{ID: Int(1)}, {ID: Int(2)}} + want := []*RepositoryComment{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(comments, want) { 
t.Errorf("Repositories.ListCommitComments returned %+v, want %+v", comments, want) } } func TestRepositoriesService_ListCommitComments_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListCommitComments(context.Background(), "%", "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_CreateComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &RepositoryComment{Body: String("b")} @@ -93,19 +99,22 @@ func TestRepositoriesService_CreateComment(t *testing.T) { t.Errorf("Repositories.CreateComment returned error: %v", err) } - want := &RepositoryComment{ID: Int(1)} + want := &RepositoryComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Repositories.CreateComment returned %+v, want %+v", comment, want) } } func TestRepositoriesService_CreateComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.CreateComment(context.Background(), "%", "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_GetComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -119,19 +128,22 @@ func TestRepositoriesService_GetComment(t *testing.T) { t.Errorf("Repositories.GetComment returned error: %v", err) } - want := &RepositoryComment{ID: Int(1)} + want := &RepositoryComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Repositories.GetComment returned %+v, want %+v", comment, want) } } func TestRepositoriesService_GetComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.GetComment(context.Background(), "%", "%", 1) testURLParseError(t, err) } func TestRepositoriesService_UpdateComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &RepositoryComment{Body: String("b")} @@ -153,19 +165,22 @@ func TestRepositoriesService_UpdateComment(t *testing.T) { t.Errorf("Repositories.UpdateComment returned error: %v", err) } - want := &RepositoryComment{ID: Int(1)} + want := &RepositoryComment{ID: Int64(1)} if !reflect.DeepEqual(comment, want) { t.Errorf("Repositories.UpdateComment returned %+v, want %+v", comment, want) } } func TestRepositoriesService_UpdateComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.UpdateComment(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } func TestRepositoriesService_DeleteComment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/comments/1", func(w http.ResponseWriter, r *http.Request) { @@ -179,6 +194,9 @@ func TestRepositoriesService_DeleteComment(t *testing.T) { } func TestRepositoriesService_DeleteComment_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.DeleteComment(context.Background(), "%", "%", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/repos_commits.go b/vendor/github.com/google/go-github/github/repos_commits.go index 7ecf531..a4c6215 100644 --- a/vendor/github.com/google/go-github/github/repos_commits.go +++ b/vendor/github.com/google/go-github/github/repos_commits.go @@ -9,6 +9,7 @@ import ( "bytes" "context" "fmt" + "net/url" "time" ) @@ 
-48,16 +49,17 @@ func (c CommitStats) String() string { // CommitFile represents a file modified in a commit. type CommitFile struct { - SHA *string `json:"sha,omitempty"` - Filename *string `json:"filename,omitempty"` - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - Changes *int `json:"changes,omitempty"` - Status *string `json:"status,omitempty"` - Patch *string `json:"patch,omitempty"` - BlobURL *string `json:"blob_url,omitempty"` - RawURL *string `json:"raw_url,omitempty"` - ContentsURL *string `json:"contents_url,omitempty"` + SHA *string `json:"sha,omitempty"` + Filename *string `json:"filename,omitempty"` + Additions *int `json:"additions,omitempty"` + Deletions *int `json:"deletions,omitempty"` + Changes *int `json:"changes,omitempty"` + Status *string `json:"status,omitempty"` + Patch *string `json:"patch,omitempty"` + BlobURL *string `json:"blob_url,omitempty"` + RawURL *string `json:"raw_url,omitempty"` + ContentsURL *string `json:"contents_url,omitempty"` + PreviousFilename *string `json:"previous_filename,omitempty"` } func (c CommitFile) String() string { @@ -137,10 +139,9 @@ func (s *RepositoriesService) ListCommits(ctx context.Context, owner, repo strin } // GetCommit fetches the specified commit, including all details about it. -// todo: support media formats - https://github.com/google/go-github/issues/6 // // GitHub API docs: https://developer.github.com/v3/repos/commits/#get-a-single-commit -// See also: https://developer.github.com//v3/git/commits/#get-a-single-commit provides the same functionality +// See also: https://developer.github.com/v3/git/commits/#get-a-single-commit provides the same functionality func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha string) (*RepositoryCommit, *Response, error) { u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) @@ -149,9 +150,6 @@ func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha st return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGitSigningPreview) - commit := new(RepositoryCommit) resp, err := s.client.Do(ctx, req, commit) if err != nil { @@ -161,12 +159,38 @@ func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha st return commit, resp, nil } +// GetCommitRaw fetches the specified commit in raw (diff or patch) format. +func (s *RepositoriesService) GetCommitRaw(ctx context.Context, owner string, repo string, sha string, opt RawOptions) (string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + + switch opt.Type { + case Diff: + req.Header.Set("Accept", mediaTypeV3Diff) + case Patch: + req.Header.Set("Accept", mediaTypeV3Patch) + default: + return "", nil, fmt.Errorf("unsupported raw type %d", opt.Type) + } + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + // GetCommitSHA1 gets the SHA-1 of a commit reference. If a last-known SHA1 is // supplied and no new commits have occurred, a 304 Unmodified response is returned. 
// // GitHub API docs: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference func (s *RepositoriesService) GetCommitSHA1(ctx context.Context, owner, repo, ref, lastSHA string) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, url.QueryEscape(ref)) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -190,7 +214,7 @@ func (s *RepositoriesService) GetCommitSHA1(ctx context.Context, owner, repo, re // CompareCommits compares a range of commits with each other. // todo: support media formats - https://github.com/google/go-github/issues/6 // -// GitHub API docs: https://developer.github.com/v3/repos/commits/index.html#compare-two-commits +// GitHub API docs: https://developer.github.com/v3/repos/commits/#compare-two-commits func (s *RepositoriesService) CompareCommits(ctx context.Context, owner, repo string, base, head string) (*CommitsComparison, *Response, error) { u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, base, head) diff --git a/vendor/github.com/google/go-github/github/repos_commits_test.go b/vendor/github.com/google/go-github/github/repos_commits_test.go index 7790a33..e076feb 100644 --- a/vendor/github.com/google/go-github/github/repos_commits_test.go +++ b/vendor/github.com/google/go-github/github/repos_commits_test.go @@ -10,12 +10,13 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" "time" ) func TestRepositoriesService_ListCommits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() // given @@ -51,12 +52,11 @@ func TestRepositoriesService_ListCommits(t *testing.T) { } func TestRepositoriesService_GetCommit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/commits/s", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) fmt.Fprintf(w, `{ "sha": "s", "commit": { "message": "m" }, @@ -125,8 +125,65 @@ func TestRepositoriesService_GetCommit(t *testing.T) { } } +func TestRepositoriesService_GetCommitRaw_diff(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + const rawStr = "@@diff content" + + mux.HandleFunc("/repos/o/r/commits/s", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeV3Diff) + fmt.Fprint(w, rawStr) + }) + + got, _, err := client.Repositories.GetCommitRaw(context.Background(), "o", "r", "s", RawOptions{Type: Diff}) + if err != nil { + t.Fatalf("Repositories.GetCommitRaw returned error: %v", err) + } + want := rawStr + if got != want { + t.Errorf("Repositories.GetCommitRaw returned %s want %s", got, want) + } +} + +func TestRepositoriesService_GetCommitRaw_patch(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + const rawStr = "@@patch content" + + mux.HandleFunc("/repos/o/r/commits/s", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeV3Patch) + fmt.Fprint(w, rawStr) + }) + + got, _, err := client.Repositories.GetCommitRaw(context.Background(), "o", "r", "s", RawOptions{Type: Patch}) + if err != nil { + t.Fatalf("Repositories.GetCommitRaw returned error: %v", err) + } + want := rawStr + if got != want { + t.Errorf("Repositories.GetCommitRaw returned %s want %s", got, want) + } +} + +func TestRepositoriesService_GetCommitRaw_invalid(t *testing.T) { + client, _, _, teardown := 
setup() + defer teardown() + + _, _, err := client.Repositories.GetCommitRaw(context.Background(), "o", "r", "s", RawOptions{100}) + if err == nil { + t.Fatal("Repositories.GetCommitRaw should return error") + } + if !strings.Contains(err.Error(), "unsupported raw type") { + t.Error("Repositories.GetCommitRaw should return unsupported raw type error") + } +} + func TestRepositoriesService_GetCommitSHA1(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() const sha1 = "01234abcde" @@ -166,8 +223,47 @@ func TestRepositoriesService_GetCommitSHA1(t *testing.T) { } } +func TestRepositoriesService_NonAlphabetCharacter_GetCommitSHA1(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + const sha1 = "01234abcde" + + mux.HandleFunc("/repos/o/r/commits/master%20hash", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeV3SHA) + + fmt.Fprintf(w, sha1) + }) + + got, _, err := client.Repositories.GetCommitSHA1(context.Background(), "o", "r", "master%20hash", "") + if err != nil { + t.Errorf("Repositories.GetCommitSHA1 returned error: %v", err) + } + + if want := sha1; got != want { + t.Errorf("Repositories.GetCommitSHA1 = %v, want %v", got, want) + } + + mux.HandleFunc("/repos/o/r/commits/tag", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeV3SHA) + testHeader(t, r, "If-None-Match", `"`+sha1+`"`) + + w.WriteHeader(http.StatusNotModified) + }) + + got, _, err = client.Repositories.GetCommitSHA1(context.Background(), "o", "r", "tag", sha1) + if err == nil { + t.Errorf("Expected HTTP 304 response") + } + + if want := ""; got != want { + t.Errorf("Repositories.GetCommitSHA1 = %v, want %v", got, want) + } +} + func TestRepositoriesService_CompareCommits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/compare/b...h", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/repos_community_health.go b/vendor/github.com/google/go-github/github/repos_community_health.go index b5c75d6..73d1d57 100644 --- a/vendor/github.com/google/go-github/github/repos_community_health.go +++ b/vendor/github.com/google/go-github/github/repos_community_health.go @@ -21,10 +21,12 @@ type Metric struct { // CommunityHealthFiles represents the different files in the community health metrics response. type CommunityHealthFiles struct { - CodeOfConduct *Metric `json:"code_of_conduct"` - Contributing *Metric `json:"contributing"` - License *Metric `json:"license"` - Readme *Metric `json:"readme"` + CodeOfConduct *Metric `json:"code_of_conduct"` + Contributing *Metric `json:"contributing"` + IssueTemplate *Metric `json:"issue_template"` + PullRequestTemplate *Metric `json:"pull_request_template"` + License *Metric `json:"license"` + Readme *Metric `json:"readme"` } // CommunityHealthMetrics represents a response containing the community metrics of a repository. 
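
For orientation, here is a minimal, illustrative sketch of calling a few of the methods introduced in the hunks above (GetCommitRaw, ReplaceAllTopics and Transfer). The owner/repo/SHA values and the unauthenticated client are placeholders rather than part of this patch; real use would normally supply an authenticated http.Client.

// Sketch only: exercises the new RepositoriesService methods added in this patch.
// All literal arguments below ("owner", "repo", "sha", "neworg", the topic names)
// are illustrative placeholders.
package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; typically built from an OAuth2 http.Client

	// Fetch a commit as a raw diff via the new GetCommitRaw method.
	// RawOptions{Type: github.Diff} selects the v3 diff media type, as in the tests above.
	diff, _, err := client.Repositories.GetCommitRaw(ctx, "owner", "repo", "sha", github.RawOptions{Type: github.Diff})
	if err != nil {
		fmt.Println("GetCommitRaw:", err)
	} else {
		fmt.Println(diff)
	}

	// Replace a repository's topics via the new ReplaceAllTopics method.
	topics, _, err := client.Repositories.ReplaceAllTopics(ctx, "owner", "repo", []string{"go", "api-client"})
	if err != nil {
		fmt.Println("ReplaceAllTopics:", err)
	} else {
		fmt.Println(topics)
	}

	// Start a repository transfer via the new Transfer method; per the doc comment
	// above, GitHub responds 202 Accepted, which go-github surfaces as *AcceptedError.
	if _, _, err := client.Repositories.Transfer(ctx, "owner", "repo", github.TransferRequest{NewOwner: "neworg"}); err != nil {
		fmt.Println("Transfer:", err)
	}
}
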
diff --git a/vendor/github.com/google/go-github/github/repos_community_health_test.go b/vendor/github.com/google/go-github/github/repos_community_health_test.go index a90c8c0..6092af0 100644 --- a/vendor/github.com/google/go-github/github/repos_community_health_test.go +++ b/vendor/github.com/google/go-github/github/repos_community_health_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_GetCommunityHealthMetrics(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/community/profile", func(w http.ResponseWriter, r *http.Request) { @@ -54,7 +54,7 @@ func TestRepositoriesService_GetCommunityHealthMetrics(t *testing.T) { t.Errorf("Repositories.GetCommunityHealthMetrics returned error: %v", err) } - updatedAt := time.Date(2017, 02, 28, 0, 0, 0, 0, time.UTC) + updatedAt := time.Date(2017, time.February, 28, 0, 0, 0, 0, time.UTC) want := &CommunityHealthMetrics{ HealthPercentage: Int(100), UpdatedAt: &updatedAt, diff --git a/vendor/github.com/google/go-github/github/repos_contents.go b/vendor/github.com/google/go-github/github/repos_contents.go index f9adaf7..bf6cabc 100644 --- a/vendor/github.com/google/go-github/github/repos_contents.go +++ b/vendor/github.com/google/go-github/github/repos_contents.go @@ -21,7 +21,10 @@ import ( // RepositoryContent represents a file or directory in a github repository. type RepositoryContent struct { - Type *string `json:"type,omitempty"` + Type *string `json:"type,omitempty"` + // Target is only set if the type is "symlink" and the target is not a normal file. + // If Target is set, Path will be the symlink path. + Target *string `json:"target,omitempty"` Encoding *string `json:"encoding,omitempty"` Size *int `json:"size,omitempty"` Name *string `json:"name,omitempty"` @@ -248,7 +251,7 @@ func (s *RepositoriesService) GetArchiveLink(ctx context.Context, owner, repo st } var resp *http.Response // Use http.DefaultTransport if no custom Transport is configured - ctx, req = withContext(ctx, req) + req = withContext(ctx, req) if s.client.client.Transport == nil { resp, err = http.DefaultTransport.RoundTrip(req) } else { diff --git a/vendor/github.com/google/go-github/github/repos_contents_test.go b/vendor/github.com/google/go-github/github/repos_contents_test.go index 498dfc8..29bab8d 100644 --- a/vendor/github.com/google/go-github/github/repos_contents_test.go +++ b/vendor/github.com/google/go-github/github/repos_contents_test.go @@ -56,10 +56,12 @@ func TestRepositoryContent_GetContent(t *testing.T) { r := RepositoryContent{Encoding: tt.encoding, Content: tt.content} got, err := r.GetContent() if err != nil && !tt.wantErr { - t.Errorf("RepositoryContent(%q, %q) returned unexpected error: %v", tt.encoding, tt.content, err) + t.Errorf("RepositoryContent(%s, %s) returned unexpected error: %v", + stringOrNil(tt.encoding), stringOrNil(tt.content), err) } if err == nil && tt.wantErr { - t.Errorf("RepositoryContent(%q, %q) did not return unexpected error", tt.encoding, tt.content) + t.Errorf("RepositoryContent(%s, %s) did not return unexpected error", + stringOrNil(tt.encoding), stringOrNil(tt.content)) } if want := tt.want; got != want { t.Errorf("RepositoryContent.GetContent returned %+v, want %+v", got, want) @@ -67,8 +69,17 @@ func TestRepositoryContent_GetContent(t *testing.T) { } } +// stringOrNil converts a potentially null string pointer to string. +// For non-nil input pointer, the returned string is enclosed in double-quotes. 
+func stringOrNil(s *string) string { + if s == nil { + return "" + } + return fmt.Sprintf("%q", *s) +} + func TestRepositoriesService_GetReadme(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/readme", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -91,14 +102,14 @@ func TestRepositoriesService_GetReadme(t *testing.T) { } func TestRepositoriesService_DownloadContents_Success(t *testing.T) { - setup() + client, mux, serverURL, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/d", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") fmt.Fprint(w, `[{ "type": "file", "name": "f", - "download_url": "`+server.URL+`/download/f" + "download_url": "`+serverURL+baseURLPath+`/download/f" }]`) }) mux.HandleFunc("/download/f", func(w http.ResponseWriter, r *http.Request) { @@ -123,7 +134,7 @@ func TestRepositoriesService_DownloadContents_Success(t *testing.T) { } func TestRepositoriesService_DownloadContents_NoDownloadURL(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/d", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -140,7 +151,7 @@ func TestRepositoriesService_DownloadContents_NoDownloadURL(t *testing.T) { } func TestRepositoriesService_DownloadContents_NoFile(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/d", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -154,7 +165,7 @@ func TestRepositoriesService_DownloadContents_NoFile(t *testing.T) { } func TestRepositoriesService_GetContents_File(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/p", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -177,7 +188,7 @@ func TestRepositoriesService_GetContents_File(t *testing.T) { } func TestRepositoriesService_GetContents_FilenameNeedsEscape(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/p#?%/中.go", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -190,7 +201,7 @@ func TestRepositoriesService_GetContents_FilenameNeedsEscape(t *testing.T) { } func TestRepositoriesService_GetContents_DirectoryWithSpaces(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/some directory/file.go", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -203,7 +214,7 @@ func TestRepositoriesService_GetContents_DirectoryWithSpaces(t *testing.T) { } func TestRepositoriesService_GetContents_DirectoryWithPlusChars(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/some directory+name/file.go", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -216,7 +227,7 @@ func TestRepositoriesService_GetContents_DirectoryWithPlusChars(t *testing.T) { } func TestRepositoriesService_GetContents_Directory(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/p", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") @@ -244,7 +255,7 @@ func TestRepositoriesService_GetContents_Directory(t *testing.T) { } func TestRepositoriesService_CreateFile(t *testing.T) { - setup() + client, mux, _, teardown := setup() 
defer teardown() mux.HandleFunc("/repos/o/r/contents/p", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PUT") @@ -282,7 +293,7 @@ func TestRepositoriesService_CreateFile(t *testing.T) { } func TestRepositoriesService_UpdateFile(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/p", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PUT") @@ -322,7 +333,7 @@ func TestRepositoriesService_UpdateFile(t *testing.T) { } func TestRepositoriesService_DeleteFile(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contents/p", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") @@ -358,7 +369,7 @@ func TestRepositoriesService_DeleteFile(t *testing.T) { } func TestRepositoriesService_GetArchiveLink(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/tarball", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") diff --git a/vendor/github.com/google/go-github/github/repos_deployments.go b/vendor/github.com/google/go-github/github/repos_deployments.go index 9054ca9..604632e 100644 --- a/vendor/github.com/google/go-github/github/repos_deployments.go +++ b/vendor/github.com/google/go-github/github/repos_deployments.go @@ -9,12 +9,13 @@ import ( "context" "encoding/json" "fmt" + "strings" ) // Deployment represents a deployment in a repo type Deployment struct { URL *string `json:"url,omitempty"` - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` SHA *string `json:"sha,omitempty"` Ref *string `json:"ref,omitempty"` Task *string `json:"task,omitempty"` @@ -23,9 +24,10 @@ type Deployment struct { Description *string `json:"description,omitempty"` Creator *User `json:"creator,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"pushed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` StatusesURL *string `json:"statuses_url,omitempty"` RepositoryURL *string `json:"repository_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` } // DeploymentRequest represents a deployment request @@ -86,7 +88,7 @@ func (s *RepositoriesService) ListDeployments(ctx context.Context, owner, repo s // GetDeployment returns a single deployment of a repository. // // GitHub API docs: https://developer.github.com/v3/repos/deployments/#get-a-single-deployment -func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo string, deploymentID int) (*Deployment, *Response, error) { +func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Deployment, *Response, error) { u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) req, err := s.client.NewRequest("GET", u, nil) @@ -114,8 +116,9 @@ func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when deployment support fully launches - req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) + // TODO: remove custom Accept headers when APIs fully launch. 
+ acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) d := new(Deployment) resp, err := s.client.Do(ctx, req, d) @@ -129,7 +132,7 @@ func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo // DeploymentStatus represents the status of a // particular deployment. type DeploymentStatus struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` // State is the deployment state. // Possible values are: "pending", "success", "failure", "error", "inactive". State *string `json:"state,omitempty"` @@ -137,9 +140,10 @@ type DeploymentStatus struct { Description *string `json:"description,omitempty"` TargetURL *string `json:"target_url,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"pushed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` DeploymentURL *string `json:"deployment_url,omitempty"` RepositoryURL *string `json:"repository_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` } // DeploymentStatusRequest represents a deployment request @@ -147,6 +151,7 @@ type DeploymentStatusRequest struct { State *string `json:"state,omitempty"` LogURL *string `json:"log_url,omitempty"` Description *string `json:"description,omitempty"` + Environment *string `json:"environment,omitempty"` EnvironmentURL *string `json:"environment_url,omitempty"` AutoInactive *bool `json:"auto_inactive,omitempty"` } @@ -154,7 +159,7 @@ type DeploymentStatusRequest struct { // ListDeploymentStatuses lists the statuses of a given deployment of a repository. // // GitHub API docs: https://developer.github.com/v3/repos/deployments/#list-deployment-statuses -func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, repo string, deployment int, opt *ListOptions) ([]*DeploymentStatus, *Response, error) { +func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, repo string, deployment int64, opt *ListOptions) ([]*DeploymentStatus, *Response, error) { u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) u, err := addOptions(u, opt) if err != nil { @@ -178,7 +183,7 @@ func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, // GetDeploymentStatus returns a single deployment status of a repository. // // GitHub API docs: https://developer.github.com/v3/repos/deployments/#get-a-single-deployment-status -func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, repo string, deploymentID, deploymentStatusID int) (*DeploymentStatus, *Response, error) { +func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, repo string, deploymentID, deploymentStatusID int64) (*DeploymentStatus, *Response, error) { u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses/%v", owner, repo, deploymentID, deploymentStatusID) req, err := s.client.NewRequest("GET", u, nil) @@ -186,8 +191,9 @@ func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, re return nil, nil, err } - // TODO: remove custom Accept header when deployment support fully launches - req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) + // TODO: remove custom Accept headers when APIs fully launch. 
+ acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) d := new(DeploymentStatus) resp, err := s.client.Do(ctx, req, d) @@ -201,7 +207,7 @@ func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, re // CreateDeploymentStatus creates a new status for a deployment. // // GitHub API docs: https://developer.github.com/v3/repos/deployments/#create-a-deployment-status -func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, repo string, deployment int, request *DeploymentStatusRequest) (*DeploymentStatus, *Response, error) { +func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, repo string, deployment int64, request *DeploymentStatusRequest) (*DeploymentStatus, *Response, error) { u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) req, err := s.client.NewRequest("POST", u, request) @@ -209,8 +215,9 @@ func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, return nil, nil, err } - // TODO: remove custom Accept header when deployment support fully launches - req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) d := new(DeploymentStatus) resp, err := s.client.Do(ctx, req, d) diff --git a/vendor/github.com/google/go-github/github/repos_deployments_test.go b/vendor/github.com/google/go-github/github/repos_deployments_test.go index 9fa44b2..4e6ae6d 100644 --- a/vendor/github.com/google/go-github/github/repos_deployments_test.go +++ b/vendor/github.com/google/go-github/github/repos_deployments_test.go @@ -11,11 +11,12 @@ import ( "fmt" "net/http" "reflect" + "strings" "testing" ) func TestRepositoriesService_ListDeployments(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/deployments", func(w http.ResponseWriter, r *http.Request) { @@ -30,14 +31,14 @@ func TestRepositoriesService_ListDeployments(t *testing.T) { t.Errorf("Repositories.ListDeployments returned error: %v", err) } - want := []*Deployment{{ID: Int(1)}, {ID: Int(2)}} + want := []*Deployment{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(deployments, want) { t.Errorf("Repositories.ListDeployments returned %+v, want %+v", deployments, want) } } func TestRepositoriesService_GetDeployment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/deployments/3", func(w http.ResponseWriter, r *http.Request) { @@ -50,7 +51,7 @@ func TestRepositoriesService_GetDeployment(t *testing.T) { t.Errorf("Repositories.GetDeployment returned error: %v", err) } - want := &Deployment{ID: Int(3)} + want := &Deployment{ID: Int64(3)} if !reflect.DeepEqual(deployment, want) { t.Errorf("Repositories.GetDeployment returned %+v, want %+v", deployment, want) @@ -58,7 +59,7 @@ func TestRepositoriesService_GetDeployment(t *testing.T) { } func TestRepositoriesService_CreateDeployment(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &DeploymentRequest{Ref: String("1111"), Task: String("deploy"), TransientEnvironment: Bool(true)} @@ -68,7 +69,8 @@ func TestRepositoriesService_CreateDeployment(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, 
r, "POST") - testHeader(t, r, "Accept", mediaTypeDeploymentStatusPreview) + wantAcceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } @@ -88,7 +90,7 @@ func TestRepositoriesService_CreateDeployment(t *testing.T) { } func TestRepositoriesService_ListDeploymentStatuses(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/deployments/1/statuses", func(w http.ResponseWriter, r *http.Request) { @@ -103,19 +105,20 @@ func TestRepositoriesService_ListDeploymentStatuses(t *testing.T) { t.Errorf("Repositories.ListDeploymentStatuses returned error: %v", err) } - want := []*DeploymentStatus{{ID: Int(1)}, {ID: Int(2)}} + want := []*DeploymentStatus{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(statutses, want) { t.Errorf("Repositories.ListDeploymentStatuses returned %+v, want %+v", statutses, want) } } func TestRepositoriesService_GetDeploymentStatus(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} mux.HandleFunc("/repos/o/r/deployments/3/statuses/4", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeDeploymentStatusPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"id":4}`) }) @@ -124,14 +127,14 @@ func TestRepositoriesService_GetDeploymentStatus(t *testing.T) { t.Errorf("Repositories.GetDeploymentStatus returned error: %v", err) } - want := &DeploymentStatus{ID: Int(4)} + want := &DeploymentStatus{ID: Int64(4)} if !reflect.DeepEqual(deploymentStatus, want) { t.Errorf("Repositories.GetDeploymentStatus returned %+v, want %+v", deploymentStatus, want) } } func TestRepositoriesService_CreateDeploymentStatus(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &DeploymentStatusRequest{State: String("inactive"), Description: String("deploy"), AutoInactive: Bool(false)} @@ -141,7 +144,8 @@ func TestRepositoriesService_CreateDeploymentStatus(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") - testHeader(t, r, "Accept", mediaTypeDeploymentStatusPreview) + wantAcceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } diff --git a/vendor/github.com/google/go-github/github/repos_forks.go b/vendor/github.com/google/go-github/github/repos_forks.go index 6b5e4ea..bfff27b 100644 --- a/vendor/github.com/google/go-github/github/repos_forks.go +++ b/vendor/github.com/google/go-github/github/repos_forks.go @@ -8,6 +8,8 @@ package github import ( "context" "fmt" + + "encoding/json" ) // RepositoryListForksOptions specifies the optional parameters to the @@ -35,6 +37,9 @@ func (s *RepositoriesService) ListForks(ctx context.Context, owner, repo string, return nil, nil, err } + // TODO: remove custom Accept header when topics API fully launches. 
+ req.Header.Set("Accept", mediaTypeTopicsPreview) + var repos []*Repository resp, err := s.client.Do(ctx, req, &repos) if err != nil { @@ -55,7 +60,8 @@ type RepositoryCreateForkOptions struct { // // This method might return an *AcceptedError and a status code of // 202. This is because this is the status that GitHub returns to signify that -// it is now computing creating the fork in a background task. +// it is now computing creating the fork in a background task. In this event, +// the Repository value will be returned, which includes the details about the pending fork. // A follow up request, after a delay of a second or so, should result // in a successful request. // @@ -75,6 +81,14 @@ func (s *RepositoriesService) CreateFork(ctx context.Context, owner, repo string fork := new(Repository) resp, err := s.client.Do(ctx, req, fork) if err != nil { + // Persist AcceptedError's metadata to the Repository object. + if aerr, ok := err.(*AcceptedError); ok { + if err := json.Unmarshal(aerr.Raw, fork); err != nil { + return fork, resp, err + } + + return fork, resp, err + } return nil, resp, err } diff --git a/vendor/github.com/google/go-github/github/repos_forks_test.go b/vendor/github.com/google/go-github/github/repos_forks_test.go index 40916ea..065f592 100644 --- a/vendor/github.com/google/go-github/github/repos_forks_test.go +++ b/vendor/github.com/google/go-github/github/repos_forks_test.go @@ -14,11 +14,12 @@ import ( ) func TestRepositoriesService_ListForks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/forks", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) testFormValues(t, r, values{ "sort": "newest", "page": "3", @@ -35,19 +36,22 @@ func TestRepositoriesService_ListForks(t *testing.T) { t.Errorf("Repositories.ListForks returned error: %v", err) } - want := []*Repository{{ID: Int(1)}, {ID: Int(2)}} + want := []*Repository{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.ListForks returned %+v, want %+v", repos, want) } } func TestRepositoriesService_ListForks_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListForks(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestRepositoriesService_CreateFork(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/forks", func(w http.ResponseWriter, r *http.Request) { @@ -62,13 +66,40 @@ func TestRepositoriesService_CreateFork(t *testing.T) { t.Errorf("Repositories.CreateFork returned error: %v", err) } - want := &Repository{ID: Int(1)} + want := &Repository{ID: Int64(1)} + if !reflect.DeepEqual(repo, want) { + t.Errorf("Repositories.CreateFork returned %+v, want %+v", repo, want) + } +} + +func TestRepositoriesService_CreateFork_deferred(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/forks", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testFormValues(t, r, values{"organization": "o"}) + // This response indicates the fork will happen asynchronously. 
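Note on the CreateFork change above: on a 202 response the method now unmarshals AcceptedError.Raw into the returned Repository before handing the *AcceptedError back, so callers can read the pending fork's metadata immediately. A minimal caller-side sketch (repository names and the unauthenticated client are illustrative):

    package main

    import (
        "context"
        "log"

        "github.com/google/go-github/github"
    )

    func main() {
        ctx := context.Background()
        client := github.NewClient(nil)

        fork, _, err := client.Repositories.CreateFork(ctx, "octocat", "hello-world", nil)
        if _, ok := err.(*github.AcceptedError); ok {
            // GitHub is creating the fork in a background job; fork already carries its metadata.
            log.Printf("fork pending: %v", fork.GetFullName())
        } else if err != nil {
            log.Fatal(err)
        }
    }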
+ w.WriteHeader(http.StatusAccepted) + fmt.Fprint(w, `{"id":1}`) + }) + + opt := &RepositoryCreateForkOptions{Organization: "o"} + repo, _, err := client.Repositories.CreateFork(context.Background(), "o", "r", opt) + if _, ok := err.(*AcceptedError); !ok { + t.Errorf("Repositories.CreateFork returned error: %v (want AcceptedError)", err) + } + + want := &Repository{ID: Int64(1)} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.CreateFork returned %+v, want %+v", repo, want) } } func TestRepositoriesService_CreateFork_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.CreateFork(context.Background(), "%", "r", nil) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/repos_hooks.go b/vendor/github.com/google/go-github/github/repos_hooks.go index 67ce96a..7674947 100644 --- a/vendor/github.com/google/go-github/github/repos_hooks.go +++ b/vendor/github.com/google/go-github/github/repos_hooks.go @@ -69,27 +69,53 @@ func (w WebHookAuthor) String() string { // Hook represents a GitHub (web and service) hook for a repository. type Hook struct { - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - Events []string `json:"events,omitempty"` - Active *bool `json:"active,omitempty"` - Config map[string]interface{} `json:"config,omitempty"` - ID *int `json:"id,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + ID *int64 `json:"id,omitempty"` + + // Only the following fields are used when creating a hook. + // Config is required. + Config map[string]interface{} `json:"config,omitempty"` + Events []string `json:"events,omitempty"` + Active *bool `json:"active,omitempty"` } func (h Hook) String() string { return Stringify(h) } +// createHookRequest is a subset of Hook and is used internally +// by CreateHook to pass only the known fields for the endpoint. +// +// See https://github.com/google/go-github/issues/1015 for more +// information. +type createHookRequest struct { + // Config is required. + Name string `json:"name"` + Config map[string]interface{} `json:"config,omitempty"` + Events []string `json:"events,omitempty"` + Active *bool `json:"active,omitempty"` +} + // CreateHook creates a Hook for the specified repository. -// Name and Config are required fields. +// Config is a required field. +// +// Note that only a subset of the hook fields are used and hook must +// not be nil. // // GitHub API docs: https://developer.github.com/v3/repos/hooks/#create-a-hook func (s *RepositoriesService) CreateHook(ctx context.Context, owner, repo string, hook *Hook) (*Hook, *Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) - req, err := s.client.NewRequest("POST", u, hook) + + hookReq := &createHookRequest{ + Name: "web", + Events: hook.Events, + Active: hook.Active, + Config: hook.Config, + } + + req, err := s.client.NewRequest("POST", u, hookReq) if err != nil { return nil, nil, err } @@ -130,21 +156,25 @@ func (s *RepositoriesService) ListHooks(ctx context.Context, owner, repo string, // GetHook returns a single specified Hook. 
// // GitHub API docs: https://developer.github.com/v3/repos/hooks/#get-single-hook -func (s *RepositoriesService) GetHook(ctx context.Context, owner, repo string, id int) (*Hook, *Response, error) { +func (s *RepositoriesService) GetHook(ctx context.Context, owner, repo string, id int64) (*Hook, *Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - hook := new(Hook) - resp, err := s.client.Do(ctx, req, hook) - return hook, resp, err + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil } // EditHook updates a specified Hook. // // GitHub API docs: https://developer.github.com/v3/repos/hooks/#edit-a-hook -func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, id int, hook *Hook) (*Hook, *Response, error) { +func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, id int64, hook *Hook) (*Hook, *Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) req, err := s.client.NewRequest("PATCH", u, hook) if err != nil { @@ -152,13 +182,17 @@ func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, } h := new(Hook) resp, err := s.client.Do(ctx, req, h) - return h, resp, err + if err != nil { + return nil, resp, err + } + + return h, resp, nil } // DeleteHook deletes a specified Hook. // // GitHub API docs: https://developer.github.com/v3/repos/hooks/#delete-a-hook -func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { @@ -170,7 +204,7 @@ func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string // PingHook triggers a 'ping' event to be sent to the Hook. // // GitHub API docs: https://developer.github.com/v3/repos/hooks/#ping-a-hook -func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks/%d/pings", owner, repo, id) req, err := s.client.NewRequest("POST", u, nil) if err != nil { @@ -182,7 +216,7 @@ func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, // TestHook triggers a test Hook by github. 
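Note on the CreateHook change above: the request body is now built from createHookRequest with Name hard-coded to "web", so only Config, Events, and Active from the supplied Hook reach the API, and the hook argument must not be nil. A minimal caller-side sketch (the webhook URL and repository names are illustrative):

    package main

    import (
        "context"
        "log"

        "github.com/google/go-github/github"
    )

    func main() {
        ctx := context.Background()
        client := github.NewClient(nil)

        hook := &github.Hook{
            Events: []string{"push"},
            Active: github.Bool(true),
            Config: map[string]interface{}{
                "url":          "https://example.com/webhook",
                "content_type": "json",
            },
        }
        created, _, err := client.Repositories.CreateHook(ctx, "octocat", "hello-world", hook)
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("created hook %d", created.GetID())
    }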
// // GitHub API docs: https://developer.github.com/v3/repos/hooks/#test-a-push-hook -func (s *RepositoriesService) TestHook(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) TestHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/hooks/%d/tests", owner, repo, id) req, err := s.client.NewRequest("POST", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/repos_hooks_test.go b/vendor/github.com/google/go-github/github/repos_hooks_test.go index 364f458..9e4691c 100644 --- a/vendor/github.com/google/go-github/github/repos_hooks_test.go +++ b/vendor/github.com/google/go-github/github/repos_hooks_test.go @@ -15,18 +15,19 @@ import ( ) func TestRepositoriesService_CreateHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &Hook{Name: String("t")} + input := &Hook{CreatedAt: &referenceTime} mux.HandleFunc("/repos/o/r/hooks", func(w http.ResponseWriter, r *http.Request) { - v := new(Hook) + v := new(createHookRequest) json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) + want := &createHookRequest{Name: "web"} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) } fmt.Fprint(w, `{"id":1}`) @@ -37,19 +38,14 @@ func TestRepositoriesService_CreateHook(t *testing.T) { t.Errorf("Repositories.CreateHook returned error: %v", err) } - want := &Hook{ID: Int(1)} + want := &Hook{ID: Int64(1)} if !reflect.DeepEqual(hook, want) { t.Errorf("Repositories.CreateHook returned %+v, want %+v", hook, want) } } -func TestRepositoriesService_CreateHook_invalidOwner(t *testing.T) { - _, _, err := client.Repositories.CreateHook(context.Background(), "%", "%", nil) - testURLParseError(t, err) -} - func TestRepositoriesService_ListHooks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/hooks", func(w http.ResponseWriter, r *http.Request) { @@ -65,19 +61,22 @@ func TestRepositoriesService_ListHooks(t *testing.T) { t.Errorf("Repositories.ListHooks returned error: %v", err) } - want := []*Hook{{ID: Int(1)}, {ID: Int(2)}} + want := []*Hook{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(hooks, want) { t.Errorf("Repositories.ListHooks returned %+v, want %+v", hooks, want) } } func TestRepositoriesService_ListHooks_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListHooks(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_GetHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/hooks/1", func(w http.ResponseWriter, r *http.Request) { @@ -90,22 +89,25 @@ func TestRepositoriesService_GetHook(t *testing.T) { t.Errorf("Repositories.GetHook returned error: %v", err) } - want := &Hook{ID: Int(1)} + want := &Hook{ID: Int64(1)} if !reflect.DeepEqual(hook, want) { t.Errorf("Repositories.GetHook returned %+v, want %+v", hook, want) } } func TestRepositoriesService_GetHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.GetHook(context.Background(), "%", "%", 1) testURLParseError(t, err) } func TestRepositoriesService_EditHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &Hook{Name: 
String("t")} + input := &Hook{} mux.HandleFunc("/repos/o/r/hooks/1", func(w http.ResponseWriter, r *http.Request) { v := new(Hook) @@ -124,19 +126,22 @@ func TestRepositoriesService_EditHook(t *testing.T) { t.Errorf("Repositories.EditHook returned error: %v", err) } - want := &Hook{ID: Int(1)} + want := &Hook{ID: Int64(1)} if !reflect.DeepEqual(hook, want) { t.Errorf("Repositories.EditHook returned %+v, want %+v", hook, want) } } func TestRepositoriesService_EditHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.EditHook(context.Background(), "%", "%", 1, nil) testURLParseError(t, err) } func TestRepositoriesService_DeleteHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/hooks/1", func(w http.ResponseWriter, r *http.Request) { @@ -150,12 +155,15 @@ func TestRepositoriesService_DeleteHook(t *testing.T) { } func TestRepositoriesService_DeleteHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.DeleteHook(context.Background(), "%", "%", 1) testURLParseError(t, err) } func TestRepositoriesService_PingHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/hooks/1/pings", func(w http.ResponseWriter, r *http.Request) { @@ -169,7 +177,7 @@ func TestRepositoriesService_PingHook(t *testing.T) { } func TestRepositoriesService_TestHook(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/hooks/1/tests", func(w http.ResponseWriter, r *http.Request) { @@ -183,6 +191,9 @@ func TestRepositoriesService_TestHook(t *testing.T) { } func TestRepositoriesService_TestHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.TestHook(context.Background(), "%", "%", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/repos_invitations.go b/vendor/github.com/google/go-github/github/repos_invitations.go index 0a2b7c1..b88e935 100644 --- a/vendor/github.com/google/go-github/github/repos_invitations.go +++ b/vendor/github.com/google/go-github/github/repos_invitations.go @@ -12,7 +12,7 @@ import ( // RepositoryInvitation represents an invitation to collaborate on a repo. type RepositoryInvitation struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Repo *Repository `json:"repository,omitempty"` Invitee *User `json:"invitee,omitempty"` Inviter *User `json:"inviter,omitempty"` @@ -40,9 +40,6 @@ func (s *RepositoriesService) ListInvitations(ctx context.Context, owner, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - invites := []*RepositoryInvitation{} resp, err := s.client.Do(ctx, req, &invites) if err != nil { @@ -55,16 +52,13 @@ func (s *RepositoriesService) ListInvitations(ctx context.Context, owner, repo s // DeleteInvitation deletes a repository invitation. 
// // GitHub API docs: https://developer.github.com/v3/repos/invitations/#delete-a-repository-invitation -func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo string, invitationID int) (*Response, error) { +func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo string, invitationID int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - return s.client.Do(ctx, req, nil) } @@ -75,7 +69,7 @@ func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo // on the repository. Possible values are: "read", "write", "admin". // // GitHub API docs: https://developer.github.com/v3/repos/invitations/#update-a-repository-invitation -func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo string, invitationID int, permissions string) (*RepositoryInvitation, *Response, error) { +func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo string, invitationID int64, permissions string) (*RepositoryInvitation, *Response, error) { opts := &struct { Permissions string `json:"permissions"` }{Permissions: permissions} @@ -85,9 +79,6 @@ func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - invite := &RepositoryInvitation{} resp, err := s.client.Do(ctx, req, invite) if err != nil { diff --git a/vendor/github.com/google/go-github/github/repos_invitations_test.go b/vendor/github.com/google/go-github/github/repos_invitations_test.go index 01ac666..b3df03c 100644 --- a/vendor/github.com/google/go-github/github/repos_invitations_test.go +++ b/vendor/github.com/google/go-github/github/repos_invitations_test.go @@ -14,12 +14,11 @@ import ( ) func TestRepositoriesService_ListInvitations(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/invitations", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) }) @@ -30,19 +29,18 @@ func TestRepositoriesService_ListInvitations(t *testing.T) { t.Errorf("Repositories.ListInvitations returned error: %v", err) } - want := []*RepositoryInvitation{{ID: Int(1)}, {ID: Int(2)}} + want := []*RepositoryInvitation{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(got, want) { t.Errorf("Repositories.ListInvitations = %+v, want %+v", got, want) } } func TestRepositoriesService_DeleteInvitation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/invitations/2", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) w.WriteHeader(http.StatusNoContent) }) @@ -53,12 +51,11 @@ func TestRepositoriesService_DeleteInvitation(t *testing.T) { } func TestRepositoriesService_UpdateInvitation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/invitations/2", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, 
"PATCH") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) fmt.Fprintf(w, `{"id":1}`) }) @@ -67,7 +64,7 @@ func TestRepositoriesService_UpdateInvitation(t *testing.T) { t.Errorf("Repositories.UpdateInvitation returned error: %v", err) } - want := &RepositoryInvitation{ID: Int(1)} + want := &RepositoryInvitation{ID: Int64(1)} if !reflect.DeepEqual(got, want) { t.Errorf("Repositories.UpdateInvitation = %+v, want %+v", got, want) } diff --git a/vendor/github.com/google/go-github/github/repos_keys.go b/vendor/github.com/google/go-github/github/repos_keys.go index f5a8658..b484f84 100644 --- a/vendor/github.com/google/go-github/github/repos_keys.go +++ b/vendor/github.com/google/go-github/github/repos_keys.go @@ -39,7 +39,7 @@ func (s *RepositoriesService) ListKeys(ctx context.Context, owner string, repo s // GetKey fetches a single deploy key. // // GitHub API docs: https://developer.github.com/v3/repos/keys/#get -func (s *RepositoriesService) GetKey(ctx context.Context, owner string, repo string, id int) (*Key, *Response, error) { +func (s *RepositoriesService) GetKey(ctx context.Context, owner string, repo string, id int64) (*Key, *Response, error) { u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) @@ -79,7 +79,7 @@ func (s *RepositoriesService) CreateKey(ctx context.Context, owner string, repo // EditKey edits a deploy key. // // GitHub API docs: https://developer.github.com/v3/repos/keys/#edit -func (s *RepositoriesService) EditKey(ctx context.Context, owner string, repo string, id int, key *Key) (*Key, *Response, error) { +func (s *RepositoriesService) EditKey(ctx context.Context, owner string, repo string, id int64, key *Key) (*Key, *Response, error) { u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) req, err := s.client.NewRequest("PATCH", u, key) @@ -99,7 +99,7 @@ func (s *RepositoriesService) EditKey(ctx context.Context, owner string, repo st // DeleteKey deletes a deploy key. 
// // GitHub API docs: https://developer.github.com/v3/repos/keys/#delete -func (s *RepositoriesService) DeleteKey(ctx context.Context, owner string, repo string, id int) (*Response, error) { +func (s *RepositoriesService) DeleteKey(ctx context.Context, owner string, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) req, err := s.client.NewRequest("DELETE", u, nil) diff --git a/vendor/github.com/google/go-github/github/repos_keys_test.go b/vendor/github.com/google/go-github/github/repos_keys_test.go index 2ef04fd..75a1735 100644 --- a/vendor/github.com/google/go-github/github/repos_keys_test.go +++ b/vendor/github.com/google/go-github/github/repos_keys_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListKeys(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/keys", func(w http.ResponseWriter, r *http.Request) { @@ -30,19 +30,22 @@ func TestRepositoriesService_ListKeys(t *testing.T) { t.Errorf("Repositories.ListKeys returned error: %v", err) } - want := []*Key{{ID: Int(1)}} + want := []*Key{{ID: Int64(1)}} if !reflect.DeepEqual(keys, want) { t.Errorf("Repositories.ListKeys returned %+v, want %+v", keys, want) } } func TestRepositoriesService_ListKeys_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListKeys(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_GetKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/keys/1", func(w http.ResponseWriter, r *http.Request) { @@ -55,19 +58,22 @@ func TestRepositoriesService_GetKey(t *testing.T) { t.Errorf("Repositories.GetKey returned error: %v", err) } - want := &Key{ID: Int(1)} + want := &Key{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Repositories.GetKey returned %+v, want %+v", key, want) } } func TestRepositoriesService_GetKey_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.GetKey(context.Background(), "%", "%", 1) testURLParseError(t, err) } func TestRepositoriesService_CreateKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Key{Key: String("k"), Title: String("t")} @@ -89,19 +95,22 @@ func TestRepositoriesService_CreateKey(t *testing.T) { t.Errorf("Repositories.GetKey returned error: %v", err) } - want := &Key{ID: Int(1)} + want := &Key{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Repositories.GetKey returned %+v, want %+v", key, want) } } func TestRepositoriesService_CreateKey_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.CreateKey(context.Background(), "%", "%", nil) testURLParseError(t, err) } func TestRepositoriesService_EditKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Key{Key: String("k"), Title: String("t")} @@ -123,19 +132,22 @@ func TestRepositoriesService_EditKey(t *testing.T) { t.Errorf("Repositories.EditKey returned error: %v", err) } - want := &Key{ID: Int(1)} + want := &Key{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Repositories.EditKey returned %+v, want %+v", key, want) } } func TestRepositoriesService_EditKey_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.EditKey(context.Background(), 
"%", "%", 1, nil) testURLParseError(t, err) } func TestRepositoriesService_DeleteKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/keys/1", func(w http.ResponseWriter, r *http.Request) { @@ -149,6 +161,9 @@ func TestRepositoriesService_DeleteKey(t *testing.T) { } func TestRepositoriesService_DeleteKey_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Repositories.DeleteKey(context.Background(), "%", "%", 1) testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/repos_merging_test.go b/vendor/github.com/google/go-github/github/repos_merging_test.go index e2e264f..086f24b 100644 --- a/vendor/github.com/google/go-github/github/repos_merging_test.go +++ b/vendor/github.com/google/go-github/github/repos_merging_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_Merge(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &RepositoryMergeRequest{ diff --git a/vendor/github.com/google/go-github/github/repos_pages.go b/vendor/github.com/google/go-github/github/repos_pages.go index 075f574..94a95f2 100644 --- a/vendor/github.com/google/go-github/github/repos_pages.go +++ b/vendor/github.com/google/go-github/github/repos_pages.go @@ -104,7 +104,7 @@ func (s *RepositoriesService) GetLatestPagesBuild(ctx context.Context, owner, re // GetPageBuild fetches the specific build information for a GitHub pages site. // // GitHub API docs: https://developer.github.com/v3/repos/pages/#list-a-specific-pages-build -func (s *RepositoriesService) GetPageBuild(ctx context.Context, owner, repo string, id int) (*PagesBuild, *Response, error) { +func (s *RepositoriesService) GetPageBuild(ctx context.Context, owner, repo string, id int64) (*PagesBuild, *Response, error) { u := fmt.Sprintf("repos/%v/%v/pages/builds/%v", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/repos_pages_test.go b/vendor/github.com/google/go-github/github/repos_pages_test.go index c9e19a1..21a00f9 100644 --- a/vendor/github.com/google/go-github/github/repos_pages_test.go +++ b/vendor/github.com/google/go-github/github/repos_pages_test.go @@ -14,7 +14,7 @@ import ( ) func TestRepositoriesService_GetPagesInfo(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages", func(w http.ResponseWriter, r *http.Request) { @@ -35,7 +35,7 @@ func TestRepositoriesService_GetPagesInfo(t *testing.T) { } func TestRepositoriesService_ListPagesBuilds(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages/builds", func(w http.ResponseWriter, r *http.Request) { @@ -55,7 +55,7 @@ func TestRepositoriesService_ListPagesBuilds(t *testing.T) { } func TestRepositoriesService_ListPagesBuilds_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages/builds", func(w http.ResponseWriter, r *http.Request) { @@ -73,7 +73,7 @@ func TestRepositoriesService_ListPagesBuilds_withOptions(t *testing.T) { } func TestRepositoriesService_GetLatestPagesBuild(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages/builds/latest", func(w http.ResponseWriter, r *http.Request) { @@ -93,7 +93,7 @@ func TestRepositoriesService_GetLatestPagesBuild(t *testing.T) { } func 
TestRepositoriesService_GetPageBuild(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages/builds/1", func(w http.ResponseWriter, r *http.Request) { @@ -113,7 +113,7 @@ func TestRepositoriesService_GetPageBuild(t *testing.T) { } func TestRepositoriesService_RequestPageBuild(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/pages/builds", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go new file mode 100644 index 0000000..cab09f7 --- /dev/null +++ b/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go @@ -0,0 +1,110 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// PreReceiveHook represents a GitHub pre-receive hook for a repository. +type PreReceiveHook struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Enforcement *string `json:"enforcement,omitempty"` + ConfigURL *string `json:"configuration_url,omitempty"` +} + +func (p PreReceiveHook) String() string { + return Stringify(p) +} + +// ListPreReceiveHooks lists all pre-receive hooks for the specified repository. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#list-pre-receive-hooks +func (s *RepositoriesService) ListPreReceiveHooks(ctx context.Context, owner, repo string, opt *ListOptions) ([]*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks", owner, repo) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + var hooks []*PreReceiveHook + resp, err := s.client.Do(ctx, req, &hooks) + if err != nil { + return nil, resp, err + } + + return hooks, resp, nil +} + +// GetPreReceiveHook returns a single specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#get-a-single-pre-receive-hook +func (s *RepositoriesService) GetPreReceiveHook(ctx context.Context, owner, repo string, id int64) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// UpdatePreReceiveHook updates a specified pre-receive hook. 
+// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#update-pre-receive-hook-enforcement +func (s *RepositoriesService) UpdatePreReceiveHook(ctx context.Context, owner, repo string, id int64, hook *PreReceiveHook) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, hook) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// DeletePreReceiveHook deletes a specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#remove-enforcement-overrides-for-a-pre-receive-hook +func (s *RepositoriesService) DeletePreReceiveHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/repos_prereceive_hooks_test.go b/vendor/github.com/google/go-github/github/repos_prereceive_hooks_test.go new file mode 100644 index 0000000..43ff614 --- /dev/null +++ b/vendor/github.com/google/go-github/github/repos_prereceive_hooks_test.go @@ -0,0 +1,135 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
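Note on the new pre-receive hook endpoints above: they are GitHub Enterprise preview APIs, keyed by int64 IDs and guarded by the mediaTypePreReceiveHooksPreview Accept header. A hedged usage sketch; the Enterprise URLs, repository names, and hook ID are illustrative and assume github.NewEnterpriseClient, which belongs to this library but is not part of this diff:

    package main

    import (
        "context"
        "log"

        "github.com/google/go-github/github"
    )

    func main() {
        ctx := context.Background()
        client, err := github.NewEnterpriseClient(
            "https://ghe.example.com/api/v3/", "https://ghe.example.com/api/uploads/", nil)
        if err != nil {
            log.Fatal(err)
        }

        // Switch one repository's pre-receive hook to "enabled" enforcement.
        updated, _, err := client.Repositories.UpdatePreReceiveHook(ctx, "octocat", "hello-world", 42,
            &github.PreReceiveHook{Enforcement: github.String("enabled")})
        if err != nil {
            log.Fatal(err)
        }
        log.Println(updated) // PreReceiveHook has a Stringify-based String() per this diff
    }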
+ +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestRepositoriesService_ListPreReceiveHooks(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/pre-receive-hooks", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypePreReceiveHooksPreview) + testFormValues(t, r, values{"page": "2"}) + fmt.Fprint(w, `[{"id":1}, {"id":2}]`) + }) + + opt := &ListOptions{Page: 2} + + hooks, _, err := client.Repositories.ListPreReceiveHooks(context.Background(), "o", "r", opt) + if err != nil { + t.Errorf("Repositories.ListHooks returned error: %v", err) + } + + want := []*PreReceiveHook{{ID: Int64(1)}, {ID: Int64(2)}} + if !reflect.DeepEqual(hooks, want) { + t.Errorf("Repositories.ListPreReceiveHooks returned %+v, want %+v", hooks, want) + } +} + +func TestRepositoriesService_ListPreReceiveHooks_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Repositories.ListPreReceiveHooks(context.Background(), "%", "%", nil) + testURLParseError(t, err) +} + +func TestRepositoriesService_GetPreReceiveHook(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/pre-receive-hooks/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypePreReceiveHooksPreview) + fmt.Fprint(w, `{"id":1}`) + }) + + hook, _, err := client.Repositories.GetPreReceiveHook(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("Repositories.GetPreReceiveHook returned error: %v", err) + } + + want := &PreReceiveHook{ID: Int64(1)} + if !reflect.DeepEqual(hook, want) { + t.Errorf("Repositories.GetPreReceiveHook returned %+v, want %+v", hook, want) + } +} + +func TestRepositoriesService_GetPreReceiveHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Repositories.GetPreReceiveHook(context.Background(), "%", "%", 1) + testURLParseError(t, err) +} + +func TestRepositoriesService_UpdatePreReceiveHook(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &PreReceiveHook{} + + mux.HandleFunc("/repos/o/r/pre-receive-hooks/1", func(w http.ResponseWriter, r *http.Request) { + v := new(PreReceiveHook) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PATCH") + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"id":1}`) + }) + + hook, _, err := client.Repositories.UpdatePreReceiveHook(context.Background(), "o", "r", 1, input) + if err != nil { + t.Errorf("Repositories.UpdatePreReceiveHook returned error: %v", err) + } + + want := &PreReceiveHook{ID: Int64(1)} + if !reflect.DeepEqual(hook, want) { + t.Errorf("Repositories.UpdatePreReceiveHook returned %+v, want %+v", hook, want) + } +} + +func TestRepositoriesService_PreReceiveHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Repositories.UpdatePreReceiveHook(context.Background(), "%", "%", 1, nil) + testURLParseError(t, err) +} + +func TestRepositoriesService_DeletePreReceiveHook(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/pre-receive-hooks/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + }) + + _, err := 
client.Repositories.DeletePreReceiveHook(context.Background(), "o", "r", 1) + if err != nil { + t.Errorf("Repositories.DeletePreReceiveHook returned error: %v", err) + } +} + +func TestRepositoriesService_DeletePreReceiveHook_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, err := client.Repositories.DeletePreReceiveHook(context.Background(), "%", "%", 1) + testURLParseError(t, err) +} diff --git a/vendor/github.com/google/go-github/github/repos_projects.go b/vendor/github.com/google/go-github/github/repos_projects.go index 770ffc7..d6486d2 100644 --- a/vendor/github.com/google/go-github/github/repos_projects.go +++ b/vendor/github.com/google/go-github/github/repos_projects.go @@ -34,7 +34,7 @@ func (s *RepositoriesService) ListProjects(ctx context.Context, owner, repo stri return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) var projects []*Project @@ -56,7 +56,7 @@ func (s *RepositoriesService) CreateProject(ctx context.Context, owner, repo str return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. + // TODO: remove custom Accept headers when APIs fully launch. req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} diff --git a/vendor/github.com/google/go-github/github/repos_projects_test.go b/vendor/github.com/google/go-github/github/repos_projects_test.go index 57dbdd1..2d54eaa 100644 --- a/vendor/github.com/google/go-github/github/repos_projects_test.go +++ b/vendor/github.com/google/go-github/github/repos_projects_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListProjects(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/projects", func(w http.ResponseWriter, r *http.Request) { @@ -31,17 +31,17 @@ func TestRepositoriesService_ListProjects(t *testing.T) { t.Errorf("Repositories.ListProjects returned error: %v", err) } - want := []*Project{{ID: Int(1)}} + want := []*Project{{ID: Int64(1)}} if !reflect.DeepEqual(projects, want) { t.Errorf("Repositories.ListProjects returned %+v, want %+v", projects, want) } } func TestRepositoriesService_CreateProject(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &ProjectOptions{Name: "Project Name", Body: "Project body."} + input := &ProjectOptions{Name: String("Project Name"), Body: String("Project body.")} mux.HandleFunc("/repos/o/r/projects", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") @@ -61,7 +61,7 @@ func TestRepositoriesService_CreateProject(t *testing.T) { t.Errorf("Repositories.CreateProject returned error: %v", err) } - want := &Project{ID: Int(1)} + want := &Project{ID: Int64(1)} if !reflect.DeepEqual(project, want) { t.Errorf("Repositories.CreateProject returned %+v, want %+v", project, want) } diff --git a/vendor/github.com/google/go-github/github/repos_releases.go b/vendor/github.com/google/go-github/github/repos_releases.go index 5c27565..5c0a1ce 100644 --- a/vendor/github.com/google/go-github/github/repos_releases.go +++ b/vendor/github.com/google/go-github/github/repos_releases.go @@ -19,23 +19,26 @@ import ( // RepositoryRelease represents a GitHub release in a repository. 
type RepositoryRelease struct { - ID *int `json:"id,omitempty"` - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - AssetsURL *string `json:"assets_url,omitempty"` - Assets []ReleaseAsset `json:"assets,omitempty"` - UploadURL *string `json:"upload_url,omitempty"` - ZipballURL *string `json:"zipball_url,omitempty"` - TarballURL *string `json:"tarball_url,omitempty"` - Author *User `json:"author,omitempty"` + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Draft *bool `json:"draft,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` + + // The following fields are not used in CreateRelease or EditRelease: + ID *int64 `json:"id,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PublishedAt *Timestamp `json:"published_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + AssetsURL *string `json:"assets_url,omitempty"` + Assets []ReleaseAsset `json:"assets,omitempty"` + UploadURL *string `json:"upload_url,omitempty"` + ZipballURL *string `json:"zipball_url,omitempty"` + TarballURL *string `json:"tarball_url,omitempty"` + Author *User `json:"author,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (r RepositoryRelease) String() string { @@ -44,7 +47,7 @@ func (r RepositoryRelease) String() string { // ReleaseAsset represents a GitHub release asset in a repository. type ReleaseAsset struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` Name *string `json:"name,omitempty"` Label *string `json:"label,omitempty"` @@ -56,6 +59,7 @@ type ReleaseAsset struct { UpdatedAt *Timestamp `json:"updated_at,omitempty"` BrowserDownloadURL *string `json:"browser_download_url,omitempty"` Uploader *User `json:"uploader,omitempty"` + NodeID *string `json:"node_id,omitempty"` } func (r ReleaseAsset) String() string { @@ -88,7 +92,7 @@ func (s *RepositoriesService) ListReleases(ctx context.Context, owner, repo stri // GetRelease fetches a single release. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#get-a-single-release -func (s *RepositoriesService) GetRelease(ctx context.Context, owner, repo string, id int) (*RepositoryRelease, *Response, error) { +func (s *RepositoriesService) GetRelease(ctx context.Context, owner, repo string, id int64) (*RepositoryRelease, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) return s.getSingleRelease(ctx, u) } @@ -123,13 +127,40 @@ func (s *RepositoriesService) getSingleRelease(ctx context.Context, url string) return release, resp, nil } +// repositoryReleaseRequest is a subset of RepositoryRelease and +// is used internally by CreateRelease and EditRelease to pass +// only the known fields for these endpoints. +// +// See https://github.com/google/go-github/issues/992 for more +// information. 
+type repositoryReleaseRequest struct { + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Draft *bool `json:"draft,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` +} + // CreateRelease adds a new release for a repository. // +// Note that only a subset of the release fields are used. +// See RepositoryRelease for more information. +// // GitHub API docs: https://developer.github.com/v3/repos/releases/#create-a-release func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo string, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) - req, err := s.client.NewRequest("POST", u, release) + releaseReq := &repositoryReleaseRequest{ + TagName: release.TagName, + TargetCommitish: release.TargetCommitish, + Name: release.Name, + Body: release.Body, + Draft: release.Draft, + Prerelease: release.Prerelease, + } + + req, err := s.client.NewRequest("POST", u, releaseReq) if err != nil { return nil, nil, err } @@ -144,11 +175,23 @@ func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo str // EditRelease edits a repository release. // +// Note that only a subset of the release fields are used. +// See RepositoryRelease for more information. +// // GitHub API docs: https://developer.github.com/v3/repos/releases/#edit-a-release -func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo string, id int, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { +func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo string, id int64, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, release) + releaseReq := &repositoryReleaseRequest{ + TagName: release.TagName, + TargetCommitish: release.TargetCommitish, + Name: release.Name, + Body: release.Body, + Draft: release.Draft, + Prerelease: release.Prerelease, + } + + req, err := s.client.NewRequest("PATCH", u, releaseReq) if err != nil { return nil, nil, err } @@ -164,7 +207,7 @@ func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo strin // DeleteRelease delete a single release from a repository. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#delete-a-release -func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) req, err := s.client.NewRequest("DELETE", u, nil) @@ -177,7 +220,7 @@ func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo str // ListReleaseAssets lists the release's assets. 
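Note on the CreateRelease/EditRelease changes above: both methods now serialize only TagName, TargetCommitish, Name, Body, Draft, and Prerelease via repositoryReleaseRequest; read-only fields such as ID, URL, Assets, or Author on the supplied RepositoryRelease are dropped from the request body. A minimal caller-side sketch (tag, text, and repository names are illustrative):

    package main

    import (
        "context"
        "log"

        "github.com/google/go-github/github"
    )

    func main() {
        ctx := context.Background()
        client := github.NewClient(nil)

        release := &github.RepositoryRelease{
            TagName: github.String("v1.2.3"),
            Name:    github.String("v1.2.3"),
            Body:    github.String("Bug fixes."),
            Draft:   github.Bool(true),
            // Setting ID, URL, Author, etc. here would be silently ignored after this change.
        }
        created, _, err := client.Repositories.CreateRelease(ctx, "octocat", "hello-world", release)
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("created release %d", created.GetID())
    }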
// // GitHub API docs: https://developer.github.com/v3/repos/releases/#list-assets-for-a-release -func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo string, id int, opt *ListOptions) ([]*ReleaseAsset, *Response, error) { +func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo string, id int64, opt *ListOptions) ([]*ReleaseAsset, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) u, err := addOptions(u, opt) if err != nil { @@ -200,7 +243,7 @@ func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo // GetReleaseAsset fetches a single release asset. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#get-a-single-release-asset -func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo string, id int) (*ReleaseAsset, *Response, error) { +func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo string, id int64) (*ReleaseAsset, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) @@ -224,7 +267,7 @@ func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo s // of the io.ReadCloser. Exactly one of rc and redirectURL will be zero. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#get-a-single-release-asset -func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, repo string, id int) (rc io.ReadCloser, redirectURL string, err error) { +func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, repo string, id int64) (rc io.ReadCloser, redirectURL string, err error) { u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) req, err := s.client.NewRequest("GET", u, nil) @@ -244,7 +287,7 @@ func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, r } defer func() { s.client.client.CheckRedirect = saveRedirect }() - ctx, req = withContext(ctx, req) + req = withContext(ctx, req) resp, err := s.client.client.Do(req) if err != nil { if !strings.Contains(err.Error(), "disable redirect") { @@ -264,7 +307,7 @@ func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, r // EditReleaseAsset edits a repository release asset. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#edit-a-release-asset -func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo string, id int, release *ReleaseAsset) (*ReleaseAsset, *Response, error) { +func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo string, id int64, release *ReleaseAsset) (*ReleaseAsset, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) req, err := s.client.NewRequest("PATCH", u, release) @@ -283,7 +326,7 @@ func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo // DeleteReleaseAsset delete a single release asset from a repository. 
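Note on DownloadReleaseAsset above: the asset ID is now int64, and the method still returns exactly one of an io.ReadCloser or a redirect URL, never both. A minimal sketch of handling both branches (the asset ID, repository names, and writing to stdout are illustrative):

    package main

    import (
        "context"
        "io"
        "log"
        "net/http"
        "os"

        "github.com/google/go-github/github"
    )

    func main() {
        ctx := context.Background()
        client := github.NewClient(nil)

        rc, redirectURL, err := client.Repositories.DownloadReleaseAsset(ctx, "octocat", "hello-world", 1)
        if err != nil {
            log.Fatal(err)
        }
        if rc != nil {
            defer rc.Close()
            if _, err := io.Copy(os.Stdout, rc); err != nil { // asset streamed directly
                log.Fatal(err)
            }
            return
        }
        // Otherwise the asset lives on another host; follow the redirect ourselves.
        resp, err := http.Get(redirectURL)
        if err != nil {
            log.Fatal(err)
        }
        defer resp.Body.Close()
        if _, err := io.Copy(os.Stdout, resp.Body); err != nil {
            log.Fatal(err)
        }
    }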
// // GitHub API docs: https://developer.github.com/v3/repos/releases/#delete-a-release-asset -func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, repo string, id int) (*Response, error) { +func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, repo string, id int64) (*Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) req, err := s.client.NewRequest("DELETE", u, nil) @@ -297,7 +340,7 @@ func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, rep // To upload assets that cannot be represented by an os.File, call NewUploadRequest directly. // // GitHub API docs: https://developer.github.com/v3/repos/releases/#upload-a-release-asset -func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, repo string, id int, opt *UploadOptions, file *os.File) (*ReleaseAsset, *Response, error) { +func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, repo string, id int64, opt *UploadOptions, file *os.File) (*ReleaseAsset, *Response, error) { u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) u, err := addOptions(u, opt) if err != nil { @@ -313,6 +356,10 @@ func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, rep } mediaType := mime.TypeByExtension(filepath.Ext(file.Name())) + if opt.MediaType != "" { + mediaType = opt.MediaType + } + req, err := s.client.NewUploadRequest(u, file, stat.Size(), mediaType) if err != nil { return nil, nil, err diff --git a/vendor/github.com/google/go-github/github/repos_releases_test.go b/vendor/github.com/google/go-github/github/repos_releases_test.go index 9a2c4c8..9e38d2d 100644 --- a/vendor/github.com/google/go-github/github/repos_releases_test.go +++ b/vendor/github.com/google/go-github/github/repos_releases_test.go @@ -19,7 +19,7 @@ import ( ) func TestRepositoriesService_ListReleases(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases", func(w http.ResponseWriter, r *http.Request) { @@ -33,14 +33,14 @@ func TestRepositoriesService_ListReleases(t *testing.T) { if err != nil { t.Errorf("Repositories.ListReleases returned error: %v", err) } - want := []*RepositoryRelease{{ID: Int(1)}} + want := []*RepositoryRelease{{ID: Int64(1)}} if !reflect.DeepEqual(releases, want) { t.Errorf("Repositories.ListReleases returned %+v, want %+v", releases, want) } } func TestRepositoriesService_GetRelease(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/1", func(w http.ResponseWriter, r *http.Request) { @@ -53,14 +53,14 @@ func TestRepositoriesService_GetRelease(t *testing.T) { t.Errorf("Repositories.GetRelease returned error: %v\n%v", err, resp.Body) } - want := &RepositoryRelease{ID: Int(1), Author: &User{Login: String("l")}} + want := &RepositoryRelease{ID: Int64(1), Author: &User{Login: String("l")}} if !reflect.DeepEqual(release, want) { t.Errorf("Repositories.GetRelease returned %+v, want %+v", release, want) } } func TestRepositoriesService_GetLatestRelease(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/latest", func(w http.ResponseWriter, r *http.Request) { @@ -73,14 +73,14 @@ func TestRepositoriesService_GetLatestRelease(t *testing.T) { t.Errorf("Repositories.GetLatestRelease returned error: %v\n%v", err, resp.Body) } - want := &RepositoryRelease{ID: Int(3)} + want := &RepositoryRelease{ID: 
Int64(3)} if !reflect.DeepEqual(release, want) { t.Errorf("Repositories.GetLatestRelease returned %+v, want %+v", release, want) } } func TestRepositoriesService_GetReleaseByTag(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/tags/foo", func(w http.ResponseWriter, r *http.Request) { @@ -93,25 +93,41 @@ func TestRepositoriesService_GetReleaseByTag(t *testing.T) { t.Errorf("Repositories.GetReleaseByTag returned error: %v\n%v", err, resp.Body) } - want := &RepositoryRelease{ID: Int(13)} + want := &RepositoryRelease{ID: Int64(13)} if !reflect.DeepEqual(release, want) { t.Errorf("Repositories.GetReleaseByTag returned %+v, want %+v", release, want) } } func TestRepositoriesService_CreateRelease(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &RepositoryRelease{Name: String("v1.0")} + input := &RepositoryRelease{ + Name: String("v1.0"), + // Fields to be removed: + ID: Int64(2), + CreatedAt: &Timestamp{referenceTime}, + PublishedAt: &Timestamp{referenceTime}, + URL: String("http://url/"), + HTMLURL: String("http://htmlurl/"), + AssetsURL: String("http://assetsurl/"), + Assets: []ReleaseAsset{{ID: Int64(5)}}, + UploadURL: String("http://uploadurl/"), + ZipballURL: String("http://zipballurl/"), + TarballURL: String("http://tarballurl/"), + Author: &User{Name: String("octocat")}, + NodeID: String("nodeid"), + } mux.HandleFunc("/repos/o/r/releases", func(w http.ResponseWriter, r *http.Request) { - v := new(RepositoryRelease) + v := new(repositoryReleaseRequest) json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) + want := &repositoryReleaseRequest{Name: String("v1.0")} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) } fmt.Fprint(w, `{"id":1}`) }) @@ -121,25 +137,41 @@ func TestRepositoriesService_CreateRelease(t *testing.T) { t.Errorf("Repositories.CreateRelease returned error: %v", err) } - want := &RepositoryRelease{ID: Int(1)} + want := &RepositoryRelease{ID: Int64(1)} if !reflect.DeepEqual(release, want) { t.Errorf("Repositories.CreateRelease returned %+v, want %+v", release, want) } } func TestRepositoriesService_EditRelease(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &RepositoryRelease{Name: String("n")} + input := &RepositoryRelease{ + Name: String("n"), + // Fields to be removed: + ID: Int64(2), + CreatedAt: &Timestamp{referenceTime}, + PublishedAt: &Timestamp{referenceTime}, + URL: String("http://url/"), + HTMLURL: String("http://htmlurl/"), + AssetsURL: String("http://assetsurl/"), + Assets: []ReleaseAsset{{ID: Int64(5)}}, + UploadURL: String("http://uploadurl/"), + ZipballURL: String("http://zipballurl/"), + TarballURL: String("http://tarballurl/"), + Author: &User{Name: String("octocat")}, + NodeID: String("nodeid"), + } mux.HandleFunc("/repos/o/r/releases/1", func(w http.ResponseWriter, r *http.Request) { - v := new(RepositoryRelease) + v := new(repositoryReleaseRequest) json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "PATCH") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) + want := &repositoryReleaseRequest{Name: String("n")} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) } fmt.Fprint(w, `{"id":1}`) }) @@ -148,14 +180,14 @@ func TestRepositoriesService_EditRelease(t *testing.T) { if err != nil { 
t.Errorf("Repositories.EditRelease returned error: %v", err) } - want := &RepositoryRelease{ID: Int(1)} + want := &RepositoryRelease{ID: Int64(1)} if !reflect.DeepEqual(release, want) { t.Errorf("Repositories.EditRelease returned = %+v, want %+v", release, want) } } func TestRepositoriesService_DeleteRelease(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/1", func(w http.ResponseWriter, r *http.Request) { @@ -169,7 +201,7 @@ func TestRepositoriesService_DeleteRelease(t *testing.T) { } func TestRepositoriesService_ListReleaseAssets(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/1/assets", func(w http.ResponseWriter, r *http.Request) { @@ -183,14 +215,14 @@ func TestRepositoriesService_ListReleaseAssets(t *testing.T) { if err != nil { t.Errorf("Repositories.ListReleaseAssets returned error: %v", err) } - want := []*ReleaseAsset{{ID: Int(1)}} + want := []*ReleaseAsset{{ID: Int64(1)}} if !reflect.DeepEqual(assets, want) { t.Errorf("Repositories.ListReleaseAssets returned %+v, want %+v", assets, want) } } func TestRepositoriesService_GetReleaseAsset(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/assets/1", func(w http.ResponseWriter, r *http.Request) { @@ -202,14 +234,14 @@ func TestRepositoriesService_GetReleaseAsset(t *testing.T) { if err != nil { t.Errorf("Repositories.GetReleaseAsset returned error: %v", err) } - want := &ReleaseAsset{ID: Int(1)} + want := &ReleaseAsset{ID: Int64(1)} if !reflect.DeepEqual(asset, want) { t.Errorf("Repositories.GetReleaseAsset returned %+v, want %+v", asset, want) } } func TestRepositoriesService_DownloadReleaseAsset_Stream(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/assets/1", func(w http.ResponseWriter, r *http.Request) { @@ -235,7 +267,7 @@ func TestRepositoriesService_DownloadReleaseAsset_Stream(t *testing.T) { } func TestRepositoriesService_DownloadReleaseAsset_Redirect(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/assets/1", func(w http.ResponseWriter, r *http.Request) { @@ -255,7 +287,7 @@ func TestRepositoriesService_DownloadReleaseAsset_Redirect(t *testing.T) { } func TestRepositoriesService_DownloadReleaseAsset_APIError(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/assets/1", func(w http.ResponseWriter, r *http.Request) { @@ -281,7 +313,7 @@ func TestRepositoriesService_DownloadReleaseAsset_APIError(t *testing.T) { } func TestRepositoriesService_EditReleaseAsset(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ReleaseAsset{Name: String("n")} @@ -301,14 +333,14 @@ func TestRepositoriesService_EditReleaseAsset(t *testing.T) { if err != nil { t.Errorf("Repositories.EditReleaseAsset returned error: %v", err) } - want := &ReleaseAsset{ID: Int(1)} + want := &ReleaseAsset{ID: Int64(1)} if !reflect.DeepEqual(asset, want) { t.Errorf("Repositories.EditReleaseAsset returned = %+v, want %+v", asset, want) } } func TestRepositoriesService_DeleteReleaseAsset(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/releases/assets/1", func(w http.ResponseWriter, r *http.Request) { @@ -322,32 +354,89 @@ func TestRepositoriesService_DeleteReleaseAsset(t 
*testing.T) { } func TestRepositoriesService_UploadReleaseAsset(t *testing.T) { - setup() - defer teardown() - - mux.HandleFunc("/repos/o/r/releases/1/assets", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testHeader(t, r, "Content-Type", "text/plain; charset=utf-8") - testHeader(t, r, "Content-Length", "12") - testFormValues(t, r, values{"name": "n"}) - testBody(t, r, "Upload me !\n") + var ( + defaultUploadOptions = &UploadOptions{Name: "n"} + defaultExpectedFormValue = values{"name": "n"} + mediaTypeTextPlain = "text/plain; charset=utf-8" + ) + uploadTests := []struct { + uploadOpts *UploadOptions + fileName string + expectedFormValues values + expectedMediaType string + }{ + // No file extension and no explicit media type. + { + defaultUploadOptions, + "upload", + defaultExpectedFormValue, + defaultMediaType, + }, + // File extension and no explicit media type. + { + defaultUploadOptions, + "upload.txt", + defaultExpectedFormValue, + mediaTypeTextPlain, + }, + // No file extension and explicit media type. + { + &UploadOptions{Name: "n", MediaType: "image/png"}, + "upload", + defaultExpectedFormValue, + "image/png", + }, + // File extension and explicit media type. + { + &UploadOptions{Name: "n", MediaType: "image/png"}, + "upload.png", + defaultExpectedFormValue, + "image/png", + }, + // Label provided. + { + &UploadOptions{Name: "n", Label: "l"}, + "upload.txt", + values{"name": "n", "label": "l"}, + mediaTypeTextPlain, + }, + // No label provided. + { + defaultUploadOptions, + "upload.txt", + defaultExpectedFormValue, + mediaTypeTextPlain, + }, + } - fmt.Fprintf(w, `{"id":1}`) - }) + client, mux, _, teardown := setup() + defer teardown() - file, dir, err := openTestFile("upload.txt", "Upload me !\n") - if err != nil { - t.Fatalf("Unable to create temp file: %v", err) - } - defer os.RemoveAll(dir) + for key, test := range uploadTests { + releaseEndpoint := fmt.Sprintf("/repos/o/r/releases/%d/assets", key) + mux.HandleFunc(releaseEndpoint, func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Content-Type", test.expectedMediaType) + testHeader(t, r, "Content-Length", "12") + testFormValues(t, r, test.expectedFormValues) + testBody(t, r, "Upload me !\n") + + fmt.Fprintf(w, `{"id":1}`) + }) + + file, dir, err := openTestFile(test.fileName, "Upload me !\n") + if err != nil { + t.Fatalf("Unable to create temp file: %v", err) + } + defer os.RemoveAll(dir) - opt := &UploadOptions{Name: "n"} - asset, _, err := client.Repositories.UploadReleaseAsset(context.Background(), "o", "r", 1, opt, file) - if err != nil { - t.Errorf("Repositories.UploadReleaseAssert returned error: %v", err) - } - want := &ReleaseAsset{ID: Int(1)} - if !reflect.DeepEqual(asset, want) { - t.Errorf("Repositories.UploadReleaseAssert returned %+v, want %+v", asset, want) + asset, _, err := client.Repositories.UploadReleaseAsset(context.Background(), "o", "r", int64(key), test.uploadOpts, file) + if err != nil { + t.Errorf("Repositories.UploadReleaseAssert returned error: %v", err) + } + want := &ReleaseAsset{ID: Int64(1)} + if !reflect.DeepEqual(asset, want) { + t.Errorf("Repositories.UploadReleaseAssert returned %+v, want %+v", asset, want) + } } } diff --git a/vendor/github.com/google/go-github/github/repos_stats.go b/vendor/github.com/google/go-github/github/repos_stats.go index 30fc7bd..bb355ae 100644 --- a/vendor/github.com/google/go-github/github/repos_stats.go +++ b/vendor/github.com/google/go-github/github/repos_stats.go @@ -182,7 +182,7 @@ func 
(s *RepositoriesService) ListParticipation(ctx context.Context, owner, repo } // PunchCard represents the number of commits made during a given hour of a -// day of thew eek. +// day of the week. type PunchCard struct { Day *int // Day of the week (0-6: =Sunday - Saturday). Hour *int // Hour of day (0-23). diff --git a/vendor/github.com/google/go-github/github/repos_stats_test.go b/vendor/github.com/google/go-github/github/repos_stats_test.go index 1da94d0..055504d 100644 --- a/vendor/github.com/google/go-github/github/repos_stats_test.go +++ b/vendor/github.com/google/go-github/github/repos_stats_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListContributorsStats(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stats/contributors", func(w http.ResponseWriter, r *http.Request) { @@ -49,12 +49,12 @@ func TestRepositoriesService_ListContributorsStats(t *testing.T) { want := []*ContributorStats{ { Author: &Contributor{ - ID: Int(1), + ID: Int64(1), }, Total: Int(135), Weeks: []WeeklyStats{ { - Week: &Timestamp{time.Date(2013, 05, 05, 00, 00, 00, 0, time.UTC).Local()}, + Week: &Timestamp{time.Date(2013, time.May, 05, 00, 00, 00, 0, time.UTC).Local()}, Additions: Int(6898), Deletions: Int(77), Commits: Int(10), @@ -69,7 +69,7 @@ func TestRepositoriesService_ListContributorsStats(t *testing.T) { } func TestRepositoriesService_ListCommitActivity(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stats/commit_activity", func(w http.ResponseWriter, r *http.Request) { @@ -95,7 +95,7 @@ func TestRepositoriesService_ListCommitActivity(t *testing.T) { { Days: []int{0, 3, 26, 20, 39, 1, 0}, Total: Int(89), - Week: &Timestamp{time.Date(2012, 05, 06, 05, 00, 00, 0, time.UTC).Local()}, + Week: &Timestamp{time.Date(2012, time.May, 06, 05, 00, 00, 0, time.UTC).Local()}, }, } @@ -105,7 +105,7 @@ func TestRepositoriesService_ListCommitActivity(t *testing.T) { } func TestRepositoriesService_ListCodeFrequency(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stats/code_frequency", func(w http.ResponseWriter, r *http.Request) { @@ -120,7 +120,7 @@ func TestRepositoriesService_ListCodeFrequency(t *testing.T) { } want := []*WeeklyStats{{ - Week: &Timestamp{time.Date(2011, 04, 17, 00, 00, 00, 0, time.UTC).Local()}, + Week: &Timestamp{time.Date(2011, time.April, 17, 00, 00, 00, 0, time.UTC).Local()}, Additions: Int(1124), Deletions: Int(-435), }} @@ -131,7 +131,7 @@ func TestRepositoriesService_ListCodeFrequency(t *testing.T) { } func TestRepositoriesService_Participation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stats/participation", func(w http.ResponseWriter, r *http.Request) { @@ -181,7 +181,7 @@ func TestRepositoriesService_Participation(t *testing.T) { } func TestRepositoriesService_ListPunchCard(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/stats/punch_card", func(w http.ResponseWriter, r *http.Request) { @@ -209,3 +209,28 @@ func TestRepositoriesService_ListPunchCard(t *testing.T) { t.Errorf("RepositoriesService.ListPunchCard returned %+v, want %+v", card, want) } } + +func TestRepositoriesService_AcceptedError(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/stats/contributors", func(w http.ResponseWriter, r *http.Request) { + 
testMethod(t, r, "GET") + // This response indicates the fork will happen asynchronously. + w.WriteHeader(http.StatusAccepted) + fmt.Fprint(w, `{"id":1}`) + }) + + stats, _, err := client.Repositories.ListContributorsStats(context.Background(), "o", "r") + if err == nil { + t.Errorf("RepositoriesService.AcceptedError should have returned an error") + } + + if _, ok := err.(*AcceptedError); !ok { + t.Errorf("RepositoriesService.AcceptedError returned an AcceptedError: %v", err) + } + + if stats != nil { + t.Errorf("RepositoriesService.AcceptedError expected stats to be nil: %v", stats) + } +} diff --git a/vendor/github.com/google/go-github/github/repos_statuses.go b/vendor/github.com/google/go-github/github/repos_statuses.go index 6db5010..c889b31 100644 --- a/vendor/github.com/google/go-github/github/repos_statuses.go +++ b/vendor/github.com/google/go-github/github/repos_statuses.go @@ -8,12 +8,13 @@ package github import ( "context" "fmt" + "net/url" "time" ) // RepoStatus represents the status of a repository at a particular reference. type RepoStatus struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` URL *string `json:"url,omitempty"` // State is the current state of the repository. Possible values are: @@ -44,7 +45,7 @@ func (r RepoStatus) String() string { // // GitHub API docs: https://developer.github.com/v3/repos/statuses/#list-statuses-for-a-specific-ref func (s *RepositoriesService) ListStatuses(ctx context.Context, owner, repo, ref string, opt *ListOptions) ([]*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/statuses", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/commits/%v/statuses", owner, repo, url.QueryEscape(ref)) u, err := addOptions(u, opt) if err != nil { return nil, nil, err @@ -69,7 +70,7 @@ func (s *RepositoriesService) ListStatuses(ctx context.Context, owner, repo, ref // // GitHub API docs: https://developer.github.com/v3/repos/statuses/#create-a-status func (s *RepositoriesService) CreateStatus(ctx context.Context, owner, repo, ref string, status *RepoStatus) (*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/statuses/%v", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/statuses/%v", owner, repo, url.QueryEscape(ref)) req, err := s.client.NewRequest("POST", u, status) if err != nil { return nil, nil, err @@ -108,7 +109,7 @@ func (s CombinedStatus) String() string { // // GitHub API docs: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref func (s *RepositoriesService) GetCombinedStatus(ctx context.Context, owner, repo, ref string, opt *ListOptions) (*CombinedStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/status", owner, repo, ref) + u := fmt.Sprintf("repos/%v/%v/commits/%v/status", owner, repo, url.QueryEscape(ref)) u, err := addOptions(u, opt) if err != nil { return nil, nil, err diff --git a/vendor/github.com/google/go-github/github/repos_statuses_test.go b/vendor/github.com/google/go-github/github/repos_statuses_test.go index 8e6c937..b556b71 100644 --- a/vendor/github.com/google/go-github/github/repos_statuses_test.go +++ b/vendor/github.com/google/go-github/github/repos_statuses_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListStatuses(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/commits/r/statuses", func(w http.ResponseWriter, r *http.Request) { @@ -30,19 +30,22 @@ func TestRepositoriesService_ListStatuses(t *testing.T) { 
t.Errorf("Repositories.ListStatuses returned error: %v", err) } - want := []*RepoStatus{{ID: Int(1)}} + want := []*RepoStatus{{ID: Int64(1)}} if !reflect.DeepEqual(statuses, want) { t.Errorf("Repositories.ListStatuses returned %+v, want %+v", statuses, want) } } func TestRepositoriesService_ListStatuses_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListStatuses(context.Background(), "%", "r", "r", nil) testURLParseError(t, err) } func TestRepositoriesService_CreateStatus(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &RepoStatus{State: String("s"), TargetURL: String("t"), Description: String("d")} @@ -63,19 +66,22 @@ func TestRepositoriesService_CreateStatus(t *testing.T) { t.Errorf("Repositories.CreateStatus returned error: %v", err) } - want := &RepoStatus{ID: Int(1)} + want := &RepoStatus{ID: Int64(1)} if !reflect.DeepEqual(status, want) { t.Errorf("Repositories.CreateStatus returned %+v, want %+v", status, want) } } func TestRepositoriesService_CreateStatus_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.CreateStatus(context.Background(), "%", "r", "r", nil) testURLParseError(t, err) } func TestRepositoriesService_GetCombinedStatus(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/commits/r/status", func(w http.ResponseWriter, r *http.Request) { @@ -90,7 +96,7 @@ func TestRepositoriesService_GetCombinedStatus(t *testing.T) { t.Errorf("Repositories.GetCombinedStatus returned error: %v", err) } - want := &CombinedStatus{State: String("success"), Statuses: []RepoStatus{{ID: Int(1)}}} + want := &CombinedStatus{State: String("success"), Statuses: []RepoStatus{{ID: Int64(1)}}} if !reflect.DeepEqual(status, want) { t.Errorf("Repositories.GetCombinedStatus returned %+v, want %+v", status, want) } diff --git a/vendor/github.com/google/go-github/github/repos_test.go b/vendor/github.com/google/go-github/github/repos_test.go index f025bb1..6f4a8a3 100644 --- a/vendor/github.com/google/go-github/github/repos_test.go +++ b/vendor/github.com/google/go-github/github/repos_test.go @@ -16,12 +16,13 @@ import ( ) func TestRepositoriesService_List_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeTopicsPreview} mux.HandleFunc("/user/repos", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `[{"id":1},{"id":2}]`) }) @@ -30,19 +31,20 @@ func TestRepositoriesService_List_authenticatedUser(t *testing.T) { t.Errorf("Repositories.List returned error: %v", err) } - want := []*Repository{{ID: Int(1)}, {ID: Int(2)}} + want := []*Repository{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.List returned %+v, want %+v", repos, want) } } func TestRepositoriesService_List_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeTopicsPreview} mux.HandleFunc("/users/u/repos", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "visibility": 
"public", "affiliation": "owner,collaborator", @@ -65,19 +67,20 @@ func TestRepositoriesService_List_specifiedUser(t *testing.T) { t.Errorf("Repositories.List returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.List returned %+v, want %+v", repos, want) } } func TestRepositoriesService_List_specifiedUser_type(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeTopicsPreview} mux.HandleFunc("/users/u/repos", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "type": "owner", }) @@ -92,24 +95,28 @@ func TestRepositoriesService_List_specifiedUser_type(t *testing.T) { t.Errorf("Repositories.List returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.List returned %+v, want %+v", repos, want) } } func TestRepositoriesService_List_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.List(context.Background(), "%", nil) testURLParseError(t, err) } func TestRepositoriesService_ListByOrg(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() + wantAcceptHeaders := []string{mediaTypeTopicsPreview} mux.HandleFunc("/orgs/o/repos", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) testFormValues(t, r, values{ "type": "forks", "page": "2", @@ -123,56 +130,61 @@ func TestRepositoriesService_ListByOrg(t *testing.T) { t.Errorf("Repositories.ListByOrg returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.ListByOrg returned %+v, want %+v", repos, want) } } func TestRepositoriesService_ListByOrg_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.ListByOrg(context.Background(), "%", nil) testURLParseError(t, err) } func TestRepositoriesService_ListAll(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repositories", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testFormValues(t, r, values{ - "since": "1", - "page": "2", - "per_page": "3", + "since": "1", }) fmt.Fprint(w, `[{"id":1}]`) }) - opt := &RepositoryListAllOptions{1, ListOptions{2, 3}} + opt := &RepositoryListAllOptions{1} repos, _, err := client.Repositories.ListAll(context.Background(), opt) if err != nil { t.Errorf("Repositories.ListAll returned error: %v", err) } - want := []*Repository{{ID: Int(1)}} + want := []*Repository{{ID: Int64(1)}} if !reflect.DeepEqual(repos, want) { t.Errorf("Repositories.ListAll returned %+v, want %+v", repos, want) } } func TestRepositoriesService_Create_user(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &Repository{Name: String("n")} + input := &Repository{ + Name: String("n"), + Archived: Bool(true), // not passed along. 
+ } mux.HandleFunc("/user/repos", func(w http.ResponseWriter, r *http.Request) { - v := new(Repository) + v := new(createRepoRequest) json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) + want := &createRepoRequest{Name: String("n")} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) } fmt.Fprint(w, `{"id":1}`) @@ -183,25 +195,29 @@ func TestRepositoriesService_Create_user(t *testing.T) { t.Errorf("Repositories.Create returned error: %v", err) } - want := &Repository{ID: Int(1)} + want := &Repository{ID: Int64(1)} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.Create returned %+v, want %+v", repo, want) } } func TestRepositoriesService_Create_org(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - input := &Repository{Name: String("n")} + input := &Repository{ + Name: String("n"), + Archived: Bool(true), // not passed along. + } mux.HandleFunc("/orgs/o/repos", func(w http.ResponseWriter, r *http.Request) { - v := new(Repository) + v := new(createRepoRequest) json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") - if !reflect.DeepEqual(v, input) { - t.Errorf("Request body = %+v, want %+v", v, input) + want := &createRepoRequest{Name: String("n")} + if !reflect.DeepEqual(v, want) { + t.Errorf("Request body = %+v, want %+v", v, want) } fmt.Fprint(w, `{"id":1}`) @@ -212,25 +228,20 @@ func TestRepositoriesService_Create_org(t *testing.T) { t.Errorf("Repositories.Create returned error: %v", err) } - want := &Repository{ID: Int(1)} + want := &Repository{ID: Int64(1)} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.Create returned %+v, want %+v", repo, want) } } -func TestRepositoriesService_Create_invalidOrg(t *testing.T) { - _, _, err := client.Repositories.Create(context.Background(), "%", nil) - testURLParseError(t, err) -} - func TestRepositoriesService_Get(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() - acceptHeader := []string{mediaTypeLicensesPreview, mediaTypeSquashPreview, mediaTypeCodesOfConductPreview} + wantAcceptHeaders := []string{mediaTypeCodesOfConductPreview, mediaTypeTopicsPreview} mux.HandleFunc("/repos/o/r", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", strings.Join(acceptHeader, ", ")) + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"id":1,"name":"n","description":"d","owner":{"login":"l"},"license":{"key":"mit"}}`) }) @@ -239,14 +250,14 @@ func TestRepositoriesService_Get(t *testing.T) { t.Errorf("Repositories.Get returned error: %v", err) } - want := &Repository{ID: Int(1), Name: String("n"), Description: String("d"), Owner: &User{Login: String("l")}, License: &License{Key: String("mit")}} + want := &Repository{ID: Int64(1), Name: String("n"), Description: String("d"), Owner: &User{Login: String("l")}, License: &License{Key: String("mit")}} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.Get returned %+v, want %+v", repo, want) } } func TestRepositoriesService_GetCodeOfConduct(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/community/code_of_conduct", func(w http.ResponseWriter, r *http.Request) { @@ -278,12 +289,11 @@ func TestRepositoriesService_GetCodeOfConduct(t *testing.T) { } func TestRepositoriesService_GetByID(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer 
teardown() mux.HandleFunc("/repositories/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeLicensesPreview) fmt.Fprint(w, `{"id":1,"name":"n","description":"d","owner":{"login":"l"},"license":{"key":"mit"}}`) }) @@ -292,14 +302,14 @@ func TestRepositoriesService_GetByID(t *testing.T) { t.Fatalf("Repositories.GetByID returned error: %v", err) } - want := &Repository{ID: Int(1), Name: String("n"), Description: String("d"), Owner: &User{Login: String("l")}, License: &License{Key: String("mit")}} + want := &Repository{ID: Int64(1), Name: String("n"), Description: String("d"), Owner: &User{Login: String("l")}, License: &License{Key: String("mit")}} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.GetByID returned %+v, want %+v", repo, want) } } func TestRepositoriesService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() i := true @@ -321,14 +331,14 @@ func TestRepositoriesService_Edit(t *testing.T) { t.Errorf("Repositories.Edit returned error: %v", err) } - want := &Repository{ID: Int(1)} + want := &Repository{ID: Int64(1)} if !reflect.DeepEqual(repo, want) { t.Errorf("Repositories.Edit returned %+v, want %+v", repo, want) } } func TestRepositoriesService_Delete(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r", func(w http.ResponseWriter, r *http.Request) { @@ -342,17 +352,23 @@ func TestRepositoriesService_Delete(t *testing.T) { } func TestRepositoriesService_Get_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.Get(context.Background(), "%", "r") testURLParseError(t, err) } func TestRepositoriesService_Edit_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Repositories.Edit(context.Background(), "%", "r", nil) testURLParseError(t, err) } func TestRepositoriesService_ListContributors(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/contributors", func(w http.ResponseWriter, r *http.Request) { @@ -377,7 +393,7 @@ func TestRepositoriesService_ListContributors(t *testing.T) { } func TestRepositoriesService_ListLanguages(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/languages", func(w http.ResponseWriter, r *http.Request) { @@ -397,11 +413,12 @@ func TestRepositoriesService_ListLanguages(t *testing.T) { } func TestRepositoriesService_ListTeams(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/teams", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"id":1}]`) }) @@ -412,14 +429,14 @@ func TestRepositoriesService_ListTeams(t *testing.T) { t.Errorf("Repositories.ListTeams returned error: %v", err) } - want := []*Team{{ID: Int(1)}} + want := []*Team{{ID: Int64(1)}} if !reflect.DeepEqual(teams, want) { t.Errorf("Repositories.ListTeams returned %+v, want %+v", teams, want) } } func TestRepositoriesService_ListTags(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/tags", func(w http.ResponseWriter, r *http.Request) { @@ -451,12 +468,13 @@ func TestRepositoriesService_ListTags(t *testing.T) { } func TestRepositoriesService_ListBranches(t 
*testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"name":"master", "commit" : {"sha" : "a57781", "url" : "https://api.github.com/repos/o/r/commits/a57781"}}]`) }) @@ -474,12 +492,13 @@ func TestRepositoriesService_ListBranches(t *testing.T) { } func TestRepositoriesService_GetBranch(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) fmt.Fprint(w, `{"name":"n", "commit":{"sha":"s","commit":{"message":"m"}}, "protected":true}`) }) @@ -505,7 +524,7 @@ func TestRepositoriesService_GetBranch(t *testing.T) { } func TestRepositoriesService_GetBranchProtection(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection", func(w http.ResponseWriter, r *http.Request) { @@ -513,8 +532,37 @@ func TestRepositoriesService_GetBranchProtection(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) - fmt.Fprintf(w, `{"required_status_checks":{"strict":true,"contexts":["continuous-integration"]},"required_pull_request_reviews":{"dismissal_restrictions":{"users":[{"id":3,"login":"u"}],"teams":[{"id":4,"slug":"t"}]},"dismiss_stale_reviews":true},"enforce_admins":{"url":"/repos/o/r/branches/b/protection/enforce_admins","enabled":true},"restrictions":{"users":[{"id":1,"login":"u"}],"teams":[{"id":2,"slug":"t"}]}}`) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) + fmt.Fprintf(w, `{ + "required_status_checks":{ + "strict":true, + "contexts":["continuous-integration"] + }, + "required_pull_request_reviews":{ + "dismissal_restrictions":{ + "users":[{ + "id":3, + "login":"u" + }], + "teams":[{ + "id":4, + "slug":"t" + }] + }, + "dismiss_stale_reviews":true, + "require_code_owner_reviews":true, + "required_approving_review_count":1 + }, + "enforce_admins":{ + "url":"/repos/o/r/branches/b/protection/enforce_admins", + "enabled":true + }, + "restrictions":{ + "users":[{"id":1,"login":"u"}], + "teams":[{"id":2,"slug":"t"}] + } + }`) }) protection, _, err := client.Repositories.GetBranchProtection(context.Background(), "o", "r", "b") @@ -531,12 +579,14 @@ func TestRepositoriesService_GetBranchProtection(t *testing.T) { DismissStaleReviews: true, DismissalRestrictions: DismissalRestrictions{ Users: []*User{ - {Login: String("u"), ID: Int(3)}, + {Login: String("u"), ID: Int64(3)}, }, Teams: []*Team{ - {Slug: String("t"), ID: Int(4)}, + {Slug: String("t"), ID: Int64(4)}, }, }, + RequireCodeOwnerReviews: true, + RequiredApprovingReviewCount: 1, }, EnforceAdmins: &AdminEnforcement{ URL: String("/repos/o/r/branches/b/protection/enforce_admins"), @@ -544,10 +594,10 @@ func TestRepositoriesService_GetBranchProtection(t *testing.T) { }, Restrictions: 
&BranchRestrictions{ Users: []*User{ - {Login: String("u"), ID: Int(1)}, + {Login: String("u"), ID: Int64(1)}, }, Teams: []*Team{ - {Slug: String("t"), ID: Int(2)}, + {Slug: String("t"), ID: Int64(2)}, }, }, } @@ -557,7 +607,7 @@ func TestRepositoriesService_GetBranchProtection(t *testing.T) { } func TestRepositoriesService_UpdateBranchProtection(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &ProtectionRequest{ @@ -568,8 +618,8 @@ func TestRepositoriesService_UpdateBranchProtection(t *testing.T) { RequiredPullRequestReviews: &PullRequestReviewsEnforcementRequest{ DismissStaleReviews: true, DismissalRestrictionsRequest: &DismissalRestrictionsRequest{ - Users: []string{"uu"}, - Teams: []string{"tt"}, + Users: &[]string{"uu"}, + Teams: &[]string{"tt"}, }, }, Restrictions: &BranchRestrictionsRequest{ @@ -586,8 +636,33 @@ func TestRepositoriesService_UpdateBranchProtection(t *testing.T) { if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) - fmt.Fprintf(w, `{"required_status_checks":{"strict":true,"contexts":["continuous-integration"]},"required_pull_request_reviews":{"dismissal_restrictions":{"users":[{"id":3,"login":"uu"}],"teams":[{"id":4,"slug":"tt"}]},"dismiss_stale_reviews":true},"restrictions":{"users":[{"id":1,"login":"u"}],"teams":[{"id":2,"slug":"t"}]}}`) + + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) + fmt.Fprintf(w, `{ + "required_status_checks":{ + "strict":true, + "contexts":["continuous-integration"] + }, + "required_pull_request_reviews":{ + "dismissal_restrictions":{ + "users":[{ + "id":3, + "login":"uu" + }], + "teams":[{ + "id":4, + "slug":"tt" + }] + }, + "dismiss_stale_reviews":true, + "require_code_owner_reviews":true + }, + "restrictions":{ + "users":[{"id":1,"login":"u"}], + "teams":[{"id":2,"slug":"t"}] + } + }`) }) protection, _, err := client.Repositories.UpdateBranchProtection(context.Background(), "o", "r", "b", input) @@ -604,19 +679,20 @@ func TestRepositoriesService_UpdateBranchProtection(t *testing.T) { DismissStaleReviews: true, DismissalRestrictions: DismissalRestrictions{ Users: []*User{ - {Login: String("uu"), ID: Int(3)}, + {Login: String("uu"), ID: Int64(3)}, }, Teams: []*Team{ - {Slug: String("tt"), ID: Int(4)}, + {Slug: String("tt"), ID: Int64(4)}, }, }, + RequireCodeOwnerReviews: true, }, Restrictions: &BranchRestrictions{ Users: []*User{ - {Login: String("u"), ID: Int(1)}, + {Login: String("u"), ID: Int64(1)}, }, Teams: []*Team{ - {Slug: String("t"), ID: Int(2)}, + {Slug: String("t"), ID: Int64(2)}, }, }, } @@ -626,12 +702,13 @@ func TestRepositoriesService_UpdateBranchProtection(t *testing.T) { } func TestRepositoriesService_RemoveBranchProtection(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) w.WriteHeader(http.StatusNoContent) }) @@ -642,12 +719,15 @@ func TestRepositoriesService_RemoveBranchProtection(t *testing.T) { } func TestRepositoriesService_ListLanguages_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := 
client.Repositories.ListLanguages(context.Background(), "%", "%") testURLParseError(t, err) } func TestRepositoriesService_License(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/license", func(w http.ResponseWriter, r *http.Request) { @@ -678,7 +758,7 @@ func TestRepositoriesService_License(t *testing.T) { } func TestRepositoriesService_GetRequiredStatusChecks(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/required_status_checks", func(w http.ResponseWriter, r *http.Request) { @@ -686,7 +766,8 @@ func TestRepositoriesService_GetRequiredStatusChecks(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) fmt.Fprint(w, `{"strict": true,"contexts": ["x","y","z"]}`) }) @@ -704,8 +785,43 @@ func TestRepositoriesService_GetRequiredStatusChecks(t *testing.T) { } } +func TestRepositoriesService_UpdateRequiredStatusChecks(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &RequiredStatusChecksRequest{ + Strict: Bool(true), + Contexts: []string{"continuous-integration"}, + } + + mux.HandleFunc("/repos/o/r/branches/b/protection/required_status_checks", func(w http.ResponseWriter, r *http.Request) { + v := new(RequiredStatusChecksRequest) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PATCH") + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + testHeader(t, r, "Accept", mediaTypeV3) + fmt.Fprintf(w, `{"strict":true,"contexts":["continuous-integration"]}`) + }) + + statusChecks, _, err := client.Repositories.UpdateRequiredStatusChecks(context.Background(), "o", "r", "b", input) + if err != nil { + t.Errorf("Repositories.UpdateRequiredStatusChecks returned error: %v", err) + } + + want := &RequiredStatusChecks{ + Strict: true, + Contexts: []string{"continuous-integration"}, + } + if !reflect.DeepEqual(statusChecks, want) { + t.Errorf("Repositories.UpdateRequiredStatusChecks returned %+v, want %+v", statusChecks, want) + } +} + func TestRepositoriesService_ListRequiredStatusChecksContexts(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/required_status_checks/contexts", func(w http.ResponseWriter, r *http.Request) { @@ -713,7 +829,8 @@ func TestRepositoriesService_ListRequiredStatusChecksContexts(t *testing.T) { json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) fmt.Fprint(w, `["x", "y", "z"]`) }) @@ -729,13 +846,22 @@ func TestRepositoriesService_ListRequiredStatusChecksContexts(t *testing.T) { } func TestRepositoriesService_GetPullRequestReviewEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/required_pull_request_reviews", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) - fmt.Fprintf(w, 
`{"dismissal_restrictions":{"users":[{"id":1,"login":"u"}],"teams":[{"id":2,"slug":"t"}]},"dismiss_stale_reviews":true}`) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) + fmt.Fprintf(w, `{ + "dismissal_restrictions":{ + "users":[{"id":1,"login":"u"}], + "teams":[{"id":2,"slug":"t"}] + }, + "dismiss_stale_reviews":true, + "require_code_owner_reviews":true, + "required_approving_review_count":1 + }`) }) enforcement, _, err := client.Repositories.GetPullRequestReviewEnforcement(context.Background(), "o", "r", "b") @@ -747,12 +873,14 @@ func TestRepositoriesService_GetPullRequestReviewEnforcement(t *testing.T) { DismissStaleReviews: true, DismissalRestrictions: DismissalRestrictions{ Users: []*User{ - {Login: String("u"), ID: Int(1)}, + {Login: String("u"), ID: Int64(1)}, }, Teams: []*Team{ - {Slug: String("t"), ID: Int(2)}, + {Slug: String("t"), ID: Int64(2)}, }, }, + RequireCodeOwnerReviews: true, + RequiredApprovingReviewCount: 1, } if !reflect.DeepEqual(enforcement, want) { @@ -761,13 +889,13 @@ func TestRepositoriesService_GetPullRequestReviewEnforcement(t *testing.T) { } func TestRepositoriesService_UpdatePullRequestReviewEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &PullRequestReviewsEnforcementUpdate{ DismissalRestrictionsRequest: &DismissalRestrictionsRequest{ - Users: []string{"u"}, - Teams: []string{"t"}, + Users: &[]string{"u"}, + Teams: &[]string{"t"}, }, } @@ -779,8 +907,17 @@ func TestRepositoriesService_UpdatePullRequestReviewEnforcement(t *testing.T) { if !reflect.DeepEqual(v, input) { t.Errorf("Request body = %+v, want %+v", v, input) } - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) - fmt.Fprintf(w, `{"dismissal_restrictions":{"users":[{"id":1,"login":"u"}],"teams":[{"id":2,"slug":"t"}]},"dismiss_stale_reviews":true}`) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) + fmt.Fprintf(w, `{ + "dismissal_restrictions":{ + "users":[{"id":1,"login":"u"}], + "teams":[{"id":2,"slug":"t"}] + }, + "dismiss_stale_reviews":true, + "require_code_owner_reviews":true, + "required_approving_review_count":3 + }`) }) enforcement, _, err := client.Repositories.UpdatePullRequestReviewEnforcement(context.Background(), "o", "r", "b", input) @@ -792,12 +929,14 @@ func TestRepositoriesService_UpdatePullRequestReviewEnforcement(t *testing.T) { DismissStaleReviews: true, DismissalRestrictions: DismissalRestrictions{ Users: []*User{ - {Login: String("u"), ID: Int(1)}, + {Login: String("u"), ID: Int64(1)}, }, Teams: []*Team{ - {Slug: String("t"), ID: Int(2)}, + {Slug: String("t"), ID: Int64(2)}, }, }, + RequireCodeOwnerReviews: true, + RequiredApprovingReviewCount: 3, } if !reflect.DeepEqual(enforcement, want) { t.Errorf("Repositories.UpdatePullRequestReviewEnforcement returned %+v, want %+v", enforcement, want) @@ -805,14 +944,15 @@ func TestRepositoriesService_UpdatePullRequestReviewEnforcement(t *testing.T) { } func TestRepositoriesService_DisableDismissalRestrictions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/required_pull_request_reviews", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PATCH") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + // TODO: remove custom Accept header when this API fully launches + testHeader(t, r, 
"Accept", mediaTypeRequiredApprovingReviewsPreview) testBody(t, r, `{"dismissal_restrictions":[]}`+"\n") - fmt.Fprintf(w, `{"dismissal_restrictions":{"users":[],"teams":[]},"dismiss_stale_reviews":true}`) + fmt.Fprintf(w, `{"dismissal_restrictions":{"users":[],"teams":[]},"dismiss_stale_reviews":true,"require_code_owner_reviews":true,"required_approving_review_count":1}`) }) enforcement, _, err := client.Repositories.DisableDismissalRestrictions(context.Background(), "o", "r", "b") @@ -826,6 +966,8 @@ func TestRepositoriesService_DisableDismissalRestrictions(t *testing.T) { Users: []*User{}, Teams: []*Team{}, }, + RequireCodeOwnerReviews: true, + RequiredApprovingReviewCount: 1, } if !reflect.DeepEqual(enforcement, want) { t.Errorf("Repositories.DisableDismissalRestrictions returned %+v, want %+v", enforcement, want) @@ -833,12 +975,12 @@ func TestRepositoriesService_DisableDismissalRestrictions(t *testing.T) { } func TestRepositoriesService_RemovePullRequestReviewEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/required_pull_request_reviews", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) w.WriteHeader(http.StatusNoContent) }) @@ -849,12 +991,12 @@ func TestRepositoriesService_RemovePullRequestReviewEnforcement(t *testing.T) { } func TestRepositoriesService_GetAdminEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/enforce_admins", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) fmt.Fprintf(w, `{"url":"/repos/o/r/branches/b/protection/enforce_admins","enabled":true}`) }) @@ -874,12 +1016,12 @@ func TestRepositoriesService_GetAdminEnforcement(t *testing.T) { } func TestRepositoriesService_AddAdminEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/enforce_admins", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) fmt.Fprintf(w, `{"url":"/repos/o/r/branches/b/protection/enforce_admins","enabled":true}`) }) @@ -898,12 +1040,12 @@ func TestRepositoriesService_AddAdminEnforcement(t *testing.T) { } func TestRepositoriesService_RemoveAdminEnforcement(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/branches/b/protection/enforce_admins", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeProtectedBranchesPreview) + testHeader(t, r, "Accept", mediaTypeRequiredApprovingReviewsPreview) w.WriteHeader(http.StatusNoContent) }) @@ -913,16 +1055,250 @@ func TestRepositoriesService_RemoveAdminEnforcement(t *testing.T) { } } +func TestRepositoriesService_GetSignaturesProtectedBranch(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/branches/b/protection/required_signatures", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", 
mediaTypeSignaturePreview) + fmt.Fprintf(w, `{"url":"/repos/o/r/branches/b/protection/required_signatures","enabled":false}`) + }) + + signature, _, err := client.Repositories.GetSignaturesProtectedBranch(context.Background(), "o", "r", "b") + if err != nil { + t.Errorf("Repositories.GetSignaturesProtectedBranch returned error: %v", err) + } + + want := &SignaturesProtectedBranch{ + URL: String("/repos/o/r/branches/b/protection/required_signatures"), + Enabled: Bool(false), + } + + if !reflect.DeepEqual(signature, want) { + t.Errorf("Repositories.GetSignaturesProtectedBranch returned %+v, want %+v", signature, want) + } +} + +func TestRepositoriesService_RequireSignaturesOnProtectedBranch(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/branches/b/protection/required_signatures", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeSignaturePreview) + fmt.Fprintf(w, `{"url":"/repos/o/r/branches/b/protection/required_signatures","enabled":true}`) + }) + + signature, _, err := client.Repositories.RequireSignaturesOnProtectedBranch(context.Background(), "o", "r", "b") + if err != nil { + t.Errorf("Repositories.RequireSignaturesOnProtectedBranch returned error: %v", err) + } + + want := &SignaturesProtectedBranch{ + URL: String("/repos/o/r/branches/b/protection/required_signatures"), + Enabled: Bool(true), + } + + if !reflect.DeepEqual(signature, want) { + t.Errorf("Repositories.RequireSignaturesOnProtectedBranch returned %+v, want %+v", signature, want) + } +} + +func TestRepositoriesService_OptionalSignaturesOnProtectedBranch(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/branches/b/protection/required_signatures", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeSignaturePreview) + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Repositories.OptionalSignaturesOnProtectedBranch(context.Background(), "o", "r", "b") + if err != nil { + t.Errorf("Repositories.OptionalSignaturesOnProtectedBranch returned error: %v", err) + } +} + func TestPullRequestReviewsEnforcementRequest_MarshalJSON_nilDismissalRestirctions(t *testing.T) { req := PullRequestReviewsEnforcementRequest{} - json, err := json.Marshal(req) + got, err := json.Marshal(req) + if err != nil { + t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned error: %v", err) + } + + want := `{"dismiss_stale_reviews":false,"require_code_owner_reviews":false,"required_approving_review_count":0}` + if want != string(got) { + t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned %+v, want %+v", string(got), want) + } + + req = PullRequestReviewsEnforcementRequest{ + DismissalRestrictionsRequest: &DismissalRestrictionsRequest{}, + } + + got, err = json.Marshal(req) if err != nil { t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned error: %v", err) } - want := `{"dismissal_restrictions":[],"dismiss_stale_reviews":false}` - if want != string(json) { - t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned %+v, want %+v", string(json), want) + want = `{"dismissal_restrictions":{},"dismiss_stale_reviews":false,"require_code_owner_reviews":false,"required_approving_review_count":0}` + if want != string(got) { + t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned %+v, want %+v", string(got), want) + } + + req = 
PullRequestReviewsEnforcementRequest{ + DismissalRestrictionsRequest: &DismissalRestrictionsRequest{ + Users: &[]string{}, + Teams: &[]string{}, + }, + } + + got, err = json.Marshal(req) + if err != nil { + t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned error: %v", err) + } + + want = `{"dismissal_restrictions":{"users":[],"teams":[]},"dismiss_stale_reviews":false,"require_code_owner_reviews":false,"required_approving_review_count":0}` + if want != string(got) { + t.Errorf("PullRequestReviewsEnforcementRequest.MarshalJSON returned %+v, want %+v", string(got), want) + } +} + +func TestRepositoriesService_ListAllTopics(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/topics", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) + fmt.Fprint(w, `{"names":["go", "go-github", "github"]}`) + }) + + got, _, err := client.Repositories.ListAllTopics(context.Background(), "o", "r") + if err != nil { + t.Fatalf("Repositories.ListAllTopics returned error: %v", err) + } + + want := []string{"go", "go-github", "github"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.ListAllTopics returned %+v, want %+v", got, want) + } +} + +func TestRepositoriesService_ListAllTopics_emptyTopics(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/topics", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) + fmt.Fprint(w, `{"names":[]}`) + }) + + got, _, err := client.Repositories.ListAllTopics(context.Background(), "o", "r") + if err != nil { + t.Fatalf("Repositories.ListAllTopics returned error: %v", err) + } + + want := []string{} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.ListAllTopics returned %+v, want %+v", got, want) + } +} + +func TestRepositoriesService_ReplaceAllTopics(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/topics", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) + fmt.Fprint(w, `{"names":["go", "go-github", "github"]}`) + }) + + got, _, err := client.Repositories.ReplaceAllTopics(context.Background(), "o", "r", []string{"go", "go-github", "github"}) + if err != nil { + t.Fatalf("Repositories.ReplaceAllTopics returned error: %v", err) + } + + want := []string{"go", "go-github", "github"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.ReplaceAllTopics returned %+v, want %+v", got, want) + } +} + +func TestRepositoriesService_ReplaceAllTopics_nilSlice(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/topics", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) + testBody(t, r, `{"names":[]}`+"\n") + fmt.Fprint(w, `{"names":[]}`) + }) + + got, _, err := client.Repositories.ReplaceAllTopics(context.Background(), "o", "r", nil) + if err != nil { + t.Fatalf("Repositories.ReplaceAllTopics returned error: %v", err) + } + + want := []string{} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.ReplaceAllTopics returned %+v, want %+v", got, want) + } +} + +func TestRepositoriesService_ReplaceAllTopics_emptySlice(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/repos/o/r/topics", 
func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", mediaTypeTopicsPreview) + testBody(t, r, `{"names":[]}`+"\n") + fmt.Fprint(w, `{"names":[]}`) + }) + + got, _, err := client.Repositories.ReplaceAllTopics(context.Background(), "o", "r", []string{}) + if err != nil { + t.Fatalf("Repositories.ReplaceAllTopics returned error: %v", err) + } + + want := []string{} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.ReplaceAllTopics returned %+v, want %+v", got, want) + } +} + +func TestRepositoriesService_Transfer(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := TransferRequest{NewOwner: "a", TeamID: []int64{123}} + + mux.HandleFunc("/repos/o/r/transfer", func(w http.ResponseWriter, r *http.Request) { + var v TransferRequest + json.NewDecoder(r.Body).Decode(&v) + + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeRepositoryTransferPreview) + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"owner":{"login":"a"}}`) + }) + + got, _, err := client.Repositories.Transfer(context.Background(), "o", "r", input) + if err != nil { + t.Errorf("Repositories.Transfer returned error: %v", err) + } + + want := &Repository{Owner: &User{Login: String("a")}} + if !reflect.DeepEqual(got, want) { + t.Errorf("Repositories.Transfer returned %+v, want %+v", got, want) } } diff --git a/vendor/github.com/google/go-github/github/repos_traffic_test.go b/vendor/github.com/google/go-github/github/repos_traffic_test.go index c425f5c..0a2eb4f 100644 --- a/vendor/github.com/google/go-github/github/repos_traffic_test.go +++ b/vendor/github.com/google/go-github/github/repos_traffic_test.go @@ -15,7 +15,7 @@ import ( ) func TestRepositoriesService_ListTrafficReferrers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/traffic/popular/referrers", func(w http.ResponseWriter, r *http.Request) { @@ -43,7 +43,7 @@ func TestRepositoriesService_ListTrafficReferrers(t *testing.T) { } func TestRepositoriesService_ListTrafficPaths(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/traffic/popular/paths", func(w http.ResponseWriter, r *http.Request) { @@ -73,7 +73,7 @@ func TestRepositoriesService_ListTrafficPaths(t *testing.T) { } func TestRepositoriesService_ListTrafficViews(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/traffic/views", func(w http.ResponseWriter, r *http.Request) { @@ -109,7 +109,7 @@ func TestRepositoriesService_ListTrafficViews(t *testing.T) { } func TestRepositoriesService_ListTrafficClones(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/traffic/clones", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/search.go b/vendor/github.com/google/go-github/github/search.go index 7668b8b..24156f3 100644 --- a/vendor/github.com/google/go-github/github/search.go +++ b/vendor/github.com/google/go-github/github/search.go @@ -8,6 +8,7 @@ package github import ( "context" "fmt" + "strconv" qs "github.com/google/go-querystring/query" ) @@ -15,6 +16,14 @@ import ( // SearchService provides access to the search related functions // in the GitHub API. // +// Each method takes a query string defining the search keywords and any search qualifiers. 
+// For example, when searching issues, the query "gopher is:issue language:go" will search +// for issues containing the word "gopher" in Go repositories. The method call +// opts := &github.SearchOptions{Sort: "created", Order: "asc"} +// cl.Search.Issues(ctx, "gopher is:issue language:go", opts) +// will search for such issues, sorting by creation date in ascending order +// (i.e., oldest first). +// // GitHub API docs: https://developer.github.com/v3/search/ type SearchService service @@ -40,6 +49,12 @@ type SearchOptions struct { ListOptions } +// Common search parameters. +type searchParameters struct { + Query string + RepositoryID *int64 // Sent if non-nil. +} + // RepositoriesSearchResult represents the result of a repositories search. type RepositoriesSearchResult struct { Total *int `json:"total_count,omitempty"` @@ -52,7 +67,7 @@ type RepositoriesSearchResult struct { // GitHub API docs: https://developer.github.com/v3/search/#search-repositories func (s *SearchService) Repositories(ctx context.Context, query string, opt *SearchOptions) (*RepositoriesSearchResult, *Response, error) { result := new(RepositoriesSearchResult) - resp, err := s.search(ctx, "repositories", query, opt, result) + resp, err := s.search(ctx, "repositories", &searchParameters{Query: query}, opt, result) return result, resp, err } @@ -83,7 +98,7 @@ type CommitResult struct { // GitHub API docs: https://developer.github.com/v3/search/#search-commits func (s *SearchService) Commits(ctx context.Context, query string, opt *SearchOptions) (*CommitsSearchResult, *Response, error) { result := new(CommitsSearchResult) - resp, err := s.search(ctx, "commits", query, opt, result) + resp, err := s.search(ctx, "commits", &searchParameters{Query: query}, opt, result) return result, resp, err } @@ -99,7 +114,7 @@ type IssuesSearchResult struct { // GitHub API docs: https://developer.github.com/v3/search/#search-issues func (s *SearchService) Issues(ctx context.Context, query string, opt *SearchOptions) (*IssuesSearchResult, *Response, error) { result := new(IssuesSearchResult) - resp, err := s.search(ctx, "issues", query, opt, result) + resp, err := s.search(ctx, "issues", &searchParameters{Query: query}, opt, result) return result, resp, err } @@ -115,7 +130,7 @@ type UsersSearchResult struct { // GitHub API docs: https://developer.github.com/v3/search/#search-users func (s *SearchService) Users(ctx context.Context, query string, opt *SearchOptions) (*UsersSearchResult, *Response, error) { result := new(UsersSearchResult) - resp, err := s.search(ctx, "users", query, opt, result) + resp, err := s.search(ctx, "users", &searchParameters{Query: query}, opt, result) return result, resp, err } @@ -164,18 +179,55 @@ func (c CodeResult) String() string { // GitHub API docs: https://developer.github.com/v3/search/#search-code func (s *SearchService) Code(ctx context.Context, query string, opt *SearchOptions) (*CodeSearchResult, *Response, error) { result := new(CodeSearchResult) - resp, err := s.search(ctx, "code", query, opt, result) + resp, err := s.search(ctx, "code", &searchParameters{Query: query}, opt, result) + return result, resp, err +} + +// LabelsSearchResult represents the result of a code search. +type LabelsSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Labels []*LabelResult `json:"items,omitempty"` +} + +// LabelResult represents a single search result. 
+type LabelResult struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Color *string `json:"color,omitempty"` + Default *bool `json:"default,omitempty"` + Description *string `json:"description,omitempty"` + Score *float64 `json:"score,omitempty"` +} + +func (l LabelResult) String() string { + return Stringify(l) +} + +// Labels searches labels in the repository with ID repoID via various criteria. +// +// GitHub API docs: https://developer.github.com/v3/search/#search-labels +func (s *SearchService) Labels(ctx context.Context, repoID int64, query string, opt *SearchOptions) (*LabelsSearchResult, *Response, error) { + result := new(LabelsSearchResult) + resp, err := s.search(ctx, "labels", &searchParameters{RepositoryID: &repoID, Query: query}, opt, result) return result, resp, err } // Helper function that executes search queries against different -// GitHub search types (repositories, commits, code, issues, users) -func (s *SearchService) search(ctx context.Context, searchType string, query string, opt *SearchOptions, result interface{}) (*Response, error) { +// GitHub search types (repositories, commits, code, issues, users, labels) +// +// If searchParameters.Query includes multiple condition, it MUST NOT include "+" as condition separator. +// For example, querying with "language:c++" and "leveldb", then searchParameters.Query should be "language:c++ leveldb" but not "language:c+++leveldb". +func (s *SearchService) search(ctx context.Context, searchType string, parameters *searchParameters, opt *SearchOptions, result interface{}) (*Response, error) { params, err := qs.Values(opt) if err != nil { return nil, err } - params.Set("q", query) + if parameters.RepositoryID != nil { + params.Set("repository_id", strconv.FormatInt(*parameters.RepositoryID, 10)) + } + params.Set("q", parameters.Query) u := fmt.Sprintf("search/%s?%s", searchType, params.Encode()) req, err := s.client.NewRequest("GET", u, nil) @@ -188,6 +240,14 @@ func (s *SearchService) search(ctx context.Context, searchType string, query str // Accept header for search commits preview endpoint // TODO: remove custom Accept header when this API fully launches. req.Header.Set("Accept", mediaTypeCommitSearchPreview) + case searchType == "repositories": + // Accept header for search repositories based on topics preview endpoint + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTopicsPreview) + case searchType == "labels": + // Accept header for search labels based on label description preview endpoint. + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) case opt != nil && opt.TextMatch: // Accept header defaults to "application/vnd.github.v3+json" // We change it here to fetch back text-match metadata diff --git a/vendor/github.com/google/go-github/github/search_test.go b/vendor/github.com/google/go-github/github/search_test.go index ee2f69b..4b68688 100644 --- a/vendor/github.com/google/go-github/github/search_test.go +++ b/vendor/github.com/google/go-github/github/search_test.go @@ -15,7 +15,7 @@ import ( ) func TestSearchService_Repositories(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/repositories", func(w http.ResponseWriter, r *http.Request) { @@ -40,7 +40,7 @@ func TestSearchService_Repositories(t *testing.T) { want := &RepositoriesSearchResult{ Total: Int(4), IncompleteResults: Bool(false), - Repositories: []Repository{{ID: Int(1)}, {ID: Int(2)}}, + Repositories: []Repository{{ID: Int64(1)}, {ID: Int64(2)}}, } if !reflect.DeepEqual(result, want) { t.Errorf("Search.Repositories returned %+v, want %+v", result, want) @@ -48,7 +48,7 @@ func TestSearchService_Repositories(t *testing.T) { } func TestSearchService_Commits(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/commits", func(w http.ResponseWriter, r *http.Request) { @@ -79,7 +79,7 @@ func TestSearchService_Commits(t *testing.T) { } func TestSearchService_Issues(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/issues", func(w http.ResponseWriter, r *http.Request) { @@ -111,8 +111,83 @@ func TestSearchService_Issues(t *testing.T) { } } +func TestSearchService_Issues_withQualifiersNoOpts(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + const q = "gopher is:issue label:bug language:c++ pushed:>=2018-01-01 stars:>=200" + + var requestURI string + mux.HandleFunc("/search/issues", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{ + "q": q, + }) + requestURI = r.RequestURI + + fmt.Fprint(w, `{"total_count": 4, "incomplete_results": true, "items": [{"number":1},{"number":2}]}`) + }) + + opts := &SearchOptions{} + result, _, err := client.Search.Issues(context.Background(), q, opts) + if err != nil { + t.Errorf("Search.Issues returned error: %v", err) + } + + if want := "/api-v3/search/issues?q=gopher+is%3Aissue+label%3Abug+language%3Ac%2B%2B+pushed%3A%3E%3D2018-01-01+stars%3A%3E%3D200"; requestURI != want { + t.Fatalf("URI encoding failed: got %v, want %v", requestURI, want) + } + + want := &IssuesSearchResult{ + Total: Int(4), + IncompleteResults: Bool(true), + Issues: []Issue{{Number: Int(1)}, {Number: Int(2)}}, + } + if !reflect.DeepEqual(result, want) { + t.Errorf("Search.Issues returned %+v, want %+v", result, want) + } +} + +func TestSearchService_Issues_withQualifiersAndOpts(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + const q = "gopher is:issue label:bug language:c++ pushed:>=2018-01-01 stars:>=200" + + var requestURI string + mux.HandleFunc("/search/issues", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{ + "q": q, + "sort": "forks", + }) + requestURI = r.RequestURI + + fmt.Fprint(w, `{"total_count": 4, "incomplete_results": true, "items": [{"number":1},{"number":2}]}`) + }) + + opts := &SearchOptions{Sort: "forks"} + result, _, err := 
client.Search.Issues(context.Background(), q, opts) + if err != nil { + t.Errorf("Search.Issues returned error: %v", err) + } + + if want := "/api-v3/search/issues?q=gopher+is%3Aissue+label%3Abug+language%3Ac%2B%2B+pushed%3A%3E%3D2018-01-01+stars%3A%3E%3D200&sort=forks"; requestURI != want { + t.Fatalf("URI encoding failed: got %v, want %v", requestURI, want) + } + + want := &IssuesSearchResult{ + Total: Int(4), + IncompleteResults: Bool(true), + Issues: []Issue{{Number: Int(1)}, {Number: Int(2)}}, + } + if !reflect.DeepEqual(result, want) { + t.Errorf("Search.Issues returned %+v, want %+v", result, want) + } +} + func TestSearchService_Users(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/users", func(w http.ResponseWriter, r *http.Request) { @@ -137,7 +212,7 @@ func TestSearchService_Users(t *testing.T) { want := &UsersSearchResult{ Total: Int(4), IncompleteResults: Bool(false), - Users: []User{{ID: Int(1)}, {ID: Int(2)}}, + Users: []User{{ID: Int64(1)}, {ID: Int64(2)}}, } if !reflect.DeepEqual(result, want) { t.Errorf("Search.Users returned %+v, want %+v", result, want) @@ -145,7 +220,7 @@ func TestSearchService_Users(t *testing.T) { } func TestSearchService_Code(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/code", func(w http.ResponseWriter, r *http.Request) { @@ -178,7 +253,7 @@ func TestSearchService_Code(t *testing.T) { } func TestSearchService_CodeTextMatch(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/search/code", func(w http.ResponseWriter, r *http.Request) { @@ -237,3 +312,40 @@ func TestSearchService_CodeTextMatch(t *testing.T) { t.Errorf("Search.Code returned %+v, want %+v", result, want) } } + +func TestSearchService_Labels(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/search/labels", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{ + "repository_id": "1234", + "q": "blah", + "sort": "updated", + "order": "desc", + "page": "2", + "per_page": "2", + }) + + fmt.Fprint(w, `{"total_count": 4, "incomplete_results": false, "items": [{"id": 1234, "name":"bug", "description": "some text"},{"id": 4567, "name":"feature"}]}`) + }) + + opts := &SearchOptions{Sort: "updated", Order: "desc", ListOptions: ListOptions{Page: 2, PerPage: 2}} + result, _, err := client.Search.Labels(context.Background(), 1234, "blah", opts) + if err != nil { + t.Errorf("Search.Code returned error: %v", err) + } + + want := &LabelsSearchResult{ + Total: Int(4), + IncompleteResults: Bool(false), + Labels: []*LabelResult{ + {ID: Int64(1234), Name: String("bug"), Description: String("some text")}, + {ID: Int64(4567), Name: String("feature")}, + }, + } + if !reflect.DeepEqual(result, want) { + t.Errorf("Search.Labels returned %+v, want %+v", result, want) + } +} diff --git a/vendor/github.com/google/go-github/github/strings_test.go b/vendor/github.com/google/go-github/github/strings_test.go index 6af88ac..e16d4a5 100644 --- a/vendor/github.com/google/go-github/github/strings_test.go +++ b/vendor/github.com/google/go-github/github/strings_test.go @@ -54,19 +54,19 @@ func TestStringify(t *testing.T) { // actual GitHub structs { - Timestamp{time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC)}, + Timestamp{time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC)}, `github.Timestamp{2006-01-02 15:04:05 +0000 UTC}`, }, { - &Timestamp{time.Date(2006, 01, 
02, 15, 04, 05, 0, time.UTC)}, + &Timestamp{time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC)}, `github.Timestamp{2006-01-02 15:04:05 +0000 UTC}`, }, { - User{ID: Int(123), Name: String("n")}, + User{ID: Int64(123), Name: String("n")}, `github.User{ID:123, Name:"n"}`, }, { - Repository{Owner: &User{ID: Int(123)}}, + Repository{Owner: &User{ID: Int64(123)}}, `github.Repository{Owner:github.User{ID:123}}`, }, } @@ -96,37 +96,37 @@ func TestString(t *testing.T) { {CommitsComparison{TotalCommits: Int(1)}, `github.CommitsComparison{TotalCommits:1}`}, {Commit{SHA: String("s")}, `github.Commit{SHA:"s"}`}, {Event{ID: String("1")}, `github.Event{ID:"1"}`}, - {GistComment{ID: Int(1)}, `github.GistComment{ID:1}`}, + {GistComment{ID: Int64(1)}, `github.GistComment{ID:1}`}, {GistFile{Size: Int(1)}, `github.GistFile{Size:1}`}, {Gist{ID: String("1")}, `github.Gist{ID:"1", Files:map[]}`}, {GitObject{SHA: String("s")}, `github.GitObject{SHA:"s"}`}, {Gitignore{Name: String("n")}, `github.Gitignore{Name:"n"}`}, - {Hook{ID: Int(1)}, `github.Hook{Config:map[], ID:1}`}, - {IssueComment{ID: Int(1)}, `github.IssueComment{ID:1}`}, + {Hook{ID: Int64(1)}, `github.Hook{ID:1, Config:map[]}`}, + {IssueComment{ID: Int64(1)}, `github.IssueComment{ID:1}`}, {Issue{Number: Int(1)}, `github.Issue{Number:1}`}, - {Key{ID: Int(1)}, `github.Key{ID:1}`}, - {Label{ID: Int(1), Name: String("l")}, `github.Label{ID:1, Name:"l"}`}, - {Organization{ID: Int(1)}, `github.Organization{ID:1}`}, - {PullRequestComment{ID: Int(1)}, `github.PullRequestComment{ID:1}`}, + {Key{ID: Int64(1)}, `github.Key{ID:1}`}, + {Label{ID: Int64(1), Name: String("l")}, `github.Label{ID:1, Name:"l"}`}, + {Organization{ID: Int64(1)}, `github.Organization{ID:1}`}, + {PullRequestComment{ID: Int64(1)}, `github.PullRequestComment{ID:1}`}, {PullRequest{Number: Int(1)}, `github.PullRequest{Number:1}`}, - {PullRequestReview{ID: Int(1)}, `github.PullRequestReview{ID:1}`}, + {PullRequestReview{ID: Int64(1)}, `github.PullRequestReview{ID:1}`}, {DraftReviewComment{Position: Int(1)}, `github.DraftReviewComment{Position:1}`}, {PullRequestReviewRequest{Body: String("r")}, `github.PullRequestReviewRequest{Body:"r"}`}, {PullRequestReviewDismissalRequest{Message: String("r")}, `github.PullRequestReviewDismissalRequest{Message:"r"}`}, {PushEventCommit{SHA: String("s")}, `github.PushEventCommit{SHA:"s"}`}, - {PushEvent{PushID: Int(1)}, `github.PushEvent{PushID:1}`}, + {PushEvent{PushID: Int64(1)}, `github.PushEvent{PushID:1}`}, {Reference{Ref: String("r")}, `github.Reference{Ref:"r"}`}, - {ReleaseAsset{ID: Int(1)}, `github.ReleaseAsset{ID:1}`}, - {RepoStatus{ID: Int(1)}, `github.RepoStatus{ID:1}`}, - {RepositoryComment{ID: Int(1)}, `github.RepositoryComment{ID:1}`}, + {ReleaseAsset{ID: Int64(1)}, `github.ReleaseAsset{ID:1}`}, + {RepoStatus{ID: Int64(1)}, `github.RepoStatus{ID:1}`}, + {RepositoryComment{ID: Int64(1)}, `github.RepositoryComment{ID:1}`}, {RepositoryCommit{SHA: String("s")}, `github.RepositoryCommit{SHA:"s"}`}, {RepositoryContent{Name: String("n")}, `github.RepositoryContent{Name:"n"}`}, - {RepositoryRelease{ID: Int(1)}, `github.RepositoryRelease{ID:1}`}, - {Repository{ID: Int(1)}, `github.Repository{ID:1}`}, - {Team{ID: Int(1)}, `github.Team{ID:1}`}, + {RepositoryRelease{ID: Int64(1)}, `github.RepositoryRelease{ID:1}`}, + {Repository{ID: Int64(1)}, `github.Repository{ID:1}`}, + {Team{ID: Int64(1)}, `github.Team{ID:1}`}, {TreeEntry{SHA: String("s")}, `github.TreeEntry{SHA:"s"}`}, {Tree{SHA: String("s")}, `github.Tree{SHA:"s"}`}, - {User{ID: Int(1)}, 
`github.User{ID:1}`}, + {User{ID: Int64(1)}, `github.User{ID:1}`}, {WebHookAuthor{Name: String("n")}, `github.WebHookAuthor{Name:"n"}`}, {WebHookCommit{ID: String("1")}, `github.WebHookCommit{ID:"1"}`}, {WebHookPayload{Ref: String("r")}, `github.WebHookPayload{Ref:"r"}`}, diff --git a/vendor/github.com/google/go-github/github/teams.go b/vendor/github.com/google/go-github/github/teams.go new file mode 100644 index 0000000..97d038d --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams.go @@ -0,0 +1,457 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strings" + "time" +) + +// TeamsService provides access to the team-related functions +// in the GitHub API. +// +// GitHub API docs: https://developer.github.com/v3/teams/ +type TeamsService service + +// Team represents a team within a GitHub organization. Teams are used to +// manage access to an organization's repositories. +type Team struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + URL *string `json:"url,omitempty"` + Slug *string `json:"slug,omitempty"` + + // Permission specifies the default permission for repositories owned by the team. + Permission *string `json:"permission,omitempty"` + + // Privacy identifies the level of privacy this team should have. + // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + MembersCount *int `json:"members_count,omitempty"` + ReposCount *int `json:"repos_count,omitempty"` + Organization *Organization `json:"organization,omitempty"` + MembersURL *string `json:"members_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` + Parent *Team `json:"parent,omitempty"` + + // LDAPDN is only available in GitHub Enterprise and when the team + // membership is synchronized with LDAP. + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (t Team) String() string { + return Stringify(t) +} + +// Invitation represents a team member's invitation status. +type Invitation struct { + ID *int64 `json:"id,omitempty"` + Login *string `json:"login,omitempty"` + Email *string `json:"email,omitempty"` + // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. + Role *string `json:"role,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + Inviter *User `json:"inviter,omitempty"` + TeamCount *int `json:"team_count,omitempty"` + InvitationTeamURL *string `json:"invitation_team_url,omitempty"` +} + +func (i Invitation) String() string { + return Stringify(i) +} + +// ListTeams lists all of the teams for an organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-teams +func (s *TeamsService) ListTeams(ctx context.Context, org string, opt *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// GetTeam fetches a team by ID. +// +// GitHub API docs: https://developer.github.com/v3/teams/#get-team +func (s *TeamsService) GetTeam(ctx context.Context, team int64) (*Team, *Response, error) { + u := fmt.Sprintf("teams/%v", team) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// NewTeam represents a team to be created or modified. +type NewTeam struct { + Name string `json:"name"` // Name of the team. (Required.) + Description *string `json:"description,omitempty"` + Maintainers []string `json:"maintainers,omitempty"` + RepoNames []string `json:"repo_names,omitempty"` + ParentTeamID *int64 `json:"parent_team_id,omitempty"` + + // Deprecated: Permission is deprecated when creating or editing a team in an org + // using the new GitHub permission model. It no longer identifies the + // permission a team has on its repos, but only specifies the default + // permission a repo is initially added with. Avoid confusion by + // specifying a permission value when calling AddTeamRepo. + Permission *string `json:"permission,omitempty"` + + // Privacy identifies the level of privacy this team should have. + // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + // LDAPDN may be used in GitHub Enterprise when the team membership + // is synchronized with LDAP. + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (s NewTeam) String() string { + return Stringify(s) +} + +// CreateTeam creates a new team within an organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#create-team +func (s *TeamsService) CreateTeam(ctx context.Context, org string, team NewTeam) (*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + req, err := s.client.NewRequest("POST", u, team) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// EditTeam edits a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#edit-team +func (s *TeamsService) EditTeam(ctx context.Context, id int64, team NewTeam) (*Team, *Response, error) { + u := fmt.Sprintf("teams/%v", id) + req, err := s.client.NewRequest("PATCH", u, team) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// DeleteTeam deletes a team. 
+// +// GitHub API docs: https://developer.github.com/v3/teams/#delete-team +func (s *TeamsService) DeleteTeam(ctx context.Context, team int64) (*Response, error) { + u := fmt.Sprintf("teams/%v", team) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ListChildTeams lists child teams for a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-child-teams +func (s *TeamsService) ListChildTeams(ctx context.Context, teamID int64, opt *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("teams/%v/teams", teamID) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListTeamRepos lists the repositories that the specified team has access to. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-team-repos +func (s *TeamsService) ListTeamRepos(ctx context.Context, team int64, opt *ListOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("teams/%v/repos", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when topics API fully launches. + headers := []string{mediaTypeTopicsPreview, mediaTypeNestedTeamsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// IsTeamRepo checks if a team manages the specified repository. If the +// repository is managed by team, a Repository is returned which includes the +// permissions team has for that repo. +// +// GitHub API docs: https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository +func (s *TeamsService) IsTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Repository, *Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + headers := []string{mediaTypeOrgPermissionRepo, mediaTypeNestedTeamsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// TeamAddTeamRepoOptions specifies the optional parameters to the +// TeamsService.AddTeamRepo method. +type TeamAddTeamRepoOptions struct { + // Permission specifies the permission to grant the team on this repository. + // Possible values are: + // pull - team members can pull, but not push to or administer this repository + // push - team members can pull and push, but not administer this repository + // admin - team members can pull, push and administer this repository + // + // If not specified, the team's permission attribute will be used. + Permission string `json:"permission,omitempty"` +} + +// AddTeamRepo adds a repository to be managed by the specified team. 
The +// specified repository must be owned by the organization to which the team +// belongs, or a direct fork of a repository owned by the organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#add-team-repo +func (s *TeamsService) AddTeamRepo(ctx context.Context, team int64, owner string, repo string, opt *TeamAddTeamRepoOptions) (*Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamRepo removes a repository from being managed by the specified +// team. Note that this does not delete the repository, it just removes it +// from the team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#remove-team-repo +func (s *TeamsService) RemoveTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListUserTeams lists a user's teams +// GitHub API docs: https://developer.github.com/v3/teams/#list-user-teams +func (s *TeamsService) ListUserTeams(ctx context.Context, opt *ListOptions) ([]*Team, *Response, error) { + u := "user/teams" + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListTeamProjects lists the organization projects for a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-team-projects +func (s *TeamsService) ListTeamProjects(ctx context.Context, teamID int64) ([]*Project, *Response, error) { + u := fmt.Sprintf("teams/%v/projects", teamID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// ReviewTeamProjects checks whether a team has read, write, or admin +// permissions for an organization project. +// +// GitHub API docs: https://developer.github.com/v3/teams/#review-a-team-project +func (s *TeamsService) ReviewTeamProjects(ctx context.Context, teamID, projectID int64) (*Project, *Response, error) { + u := fmt.Sprintf("teams/%v/projects/%v", teamID, projectID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ acceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + projects := &Project{} + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// TeamProjectOptions specifies the optional parameters to the +// TeamsService.AddTeamProject method. +type TeamProjectOptions struct { + // Permission specifies the permission to grant to the team for this project. + // Possible values are: + // "read" - team members can read, but not write to or administer this project. + // "write" - team members can read and write, but not administer this project. + // "admin" - team members can read, write and administer this project. + // + Permission *string `json:"permission,omitempty"` +} + +// AddTeamProject adds an organization project to a team. To add a project to a team or +// update the team's permission on a project, the authenticated user must have admin +// permissions for the project. +// +// GitHub API docs: https://developer.github.com/v3/teams/#add-or-update-team-project +func (s *TeamsService) AddTeamProject(ctx context.Context, teamID, projectID int64, opt *TeamProjectOptions) (*Response, error) { + u := fmt.Sprintf("teams/%v/projects/%v", teamID, projectID) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamProject removes an organization project from a team. An organization owner or +// a team maintainer can remove any project from the team. To remove a project from a team +// as an organization member, the authenticated user must have "read" access to both the team +// and project, or "admin" access to the team or project. +// Note: This endpoint removes the project from the team, but does not delete it. +// +// GitHub API docs: https://developer.github.com/v3/teams/#remove-team-project +func (s *TeamsService) RemoveTeamProject(ctx context.Context, teamID int64, projectID int64) (*Response, error) { + u := fmt.Sprintf("teams/%v/projects/%v", teamID, projectID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/github/teams_discussion_comments.go new file mode 100644 index 0000000..a0206b9 --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_discussion_comments.go @@ -0,0 +1,155 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// DiscussionComment represents a GitHub dicussion in a team. 
+type DiscussionComment struct { + Author *User `json:"author,omitempty"` + Body *string `json:"body,omitempty"` + BodyHTML *string `json:"body_html,omitempty"` + BodyVersion *string `json:"body_version,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` + DiscussionURL *string `json:"discussion_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Number *int `json:"number,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` +} + +func (c DiscussionComment) String() string { + return Stringify(c) +} + +// DiscussionCommentListOptions specifies optional parameters to the +// TeamServices.ListComments method. +type DiscussionCommentListOptions struct { + // Sorts the discussion comments by the date they were created. + // Accepted values are asc and desc. Default is desc. + Direction string `url:"direction,omitempty"` +} + +// ListComments lists all comments on a team discussion. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussion_comments/#list-comments +func (s *TeamsService) ListComments(ctx context.Context, teamID int64, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments", teamID, discussionNumber) + u, err := addOptions(u, options) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + var comments []*DiscussionComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// GetComment gets a specific comment on a team discussion. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment +func (s *TeamsService) GetComment(ctx context.Context, teamID int64, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v", teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// CreateComment creates a new discussion post on a team discussion. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussion_comments/#create-a-comment +func (s *TeamsService) CreateComment(ctx context.Context, teamID int64, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments", teamID, discsusionNumber) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// EditComment edits the body text of a discussion comment. +// Authenticated user must grant write:discussion scope. +// User is allowed to edit body of a comment only. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment +func (s *TeamsService) EditComment(ctx context.Context, teamID int64, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v", teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// DeleteComment deletes a comment on a team discussion. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment +func (s *TeamsService) DeleteComment(ctx context.Context, teamID int64, discussionNumber, commentNumber int) (*Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v", teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/teams_discussion_comments_test.go b/vendor/github.com/google/go-github/github/teams_discussion_comments_test.go new file mode 100644 index 0000000..f4c9f96 --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_discussion_comments_test.go @@ -0,0 +1,203 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" + "time" +) + +func TestTeamsService_ListComments(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions/3/comments", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + testFormValues(t, r, values{ + "direction": "desc", + }) + fmt.Fprintf(w, + `[ + { + "author": { + "login": "author", + "id": 0, + "avatar_url": "https://avatars1.githubusercontent.com/u/0?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/author", + "html_url": "https://github.com/author", + "followers_url": "https://api.github.com/users/author/followers", + "following_url": "https://api.github.com/users/author/following{/other_user}", + "gists_url": "https://api.github.com/users/author/gists{/gist_id}", + "starred_url": "https://api.github.com/users/author/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/author/subscriptions", + "organizations_url": "https://api.github.com/users/author/orgs", + "repos_url": "https://api.github.com/users/author/repos", + "events_url": "https://api.github.com/users/author/events{/privacy}", + "received_events_url": "https://api.github.com/users/author/received_events", + "type": "User", + "site_admin": false + }, + "body": "comment", + "body_html": "

<p>comment</p>
", + "body_version": "version", + "created_at": "2018-01-01T00:00:00Z", + "last_edited_at": null, + "discussion_url": "https://api.github.com/teams/2/discussions/3", + "html_url": "https://github.com/orgs/1/teams/2/discussions/3/comments/4", + "node_id": "node", + "number": 4, + "updated_at": "2018-01-01T00:00:00Z", + "url": "https://api.github.com/teams/2/discussions/3/comments/4" + } + ]`) + }) + + comments, _, err := client.Teams.ListComments(context.Background(), 2, 3, &DiscussionCommentListOptions{"desc"}) + if err != nil { + t.Errorf("Teams.ListComments returned error: %v", err) + } + + want := []*DiscussionComment{ + { + Author: &User{ + Login: String("author"), + ID: Int64(0), + AvatarURL: String("https://avatars1.githubusercontent.com/u/0?v=4"), + GravatarID: String(""), + URL: String("https://api.github.com/users/author"), + HTMLURL: String("https://github.com/author"), + FollowersURL: String("https://api.github.com/users/author/followers"), + FollowingURL: String("https://api.github.com/users/author/following{/other_user}"), + GistsURL: String("https://api.github.com/users/author/gists{/gist_id}"), + StarredURL: String("https://api.github.com/users/author/starred{/owner}{/repo}"), + SubscriptionsURL: String("https://api.github.com/users/author/subscriptions"), + OrganizationsURL: String("https://api.github.com/users/author/orgs"), + ReposURL: String("https://api.github.com/users/author/repos"), + EventsURL: String("https://api.github.com/users/author/events{/privacy}"), + ReceivedEventsURL: String("https://api.github.com/users/author/received_events"), + Type: String("User"), + SiteAdmin: Bool(false), + }, + Body: String("comment"), + BodyHTML: String("

<p>comment</p>
"), + BodyVersion: String("version"), + CreatedAt: &Timestamp{time.Date(2018, time.January, 1, 0, 0, 0, 0, time.UTC)}, + LastEditedAt: nil, + DiscussionURL: String("https://api.github.com/teams/2/discussions/3"), + HTMLURL: String("https://github.com/orgs/1/teams/2/discussions/3/comments/4"), + NodeID: String("node"), + Number: Int(4), + UpdatedAt: &Timestamp{time.Date(2018, time.January, 1, 0, 0, 0, 0, time.UTC)}, + URL: String("https://api.github.com/teams/2/discussions/3/comments/4"), + }, + } + if !reflect.DeepEqual(comments, want) { + t.Errorf("Teams.ListComments returned %+v, want %+v", comments, want) + } +} + +func TestTeamsService_GetComment(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions/3/comments/4", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + fmt.Fprint(w, `{"number":4}`) + }) + + comment, _, err := client.Teams.GetComment(context.Background(), 2, 3, 4) + if err != nil { + t.Errorf("Teams.GetComment returned error: %v", err) + } + + want := &DiscussionComment{Number: Int(4)} + if !reflect.DeepEqual(comment, want) { + t.Errorf("Teams.GetComment returned %+v, want %+v", comment, want) + } +} + +func TestTeamsService_CreateComment(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := DiscussionComment{Body: String("c")} + + mux.HandleFunc("/teams/2/discussions/3/comments", func(w http.ResponseWriter, r *http.Request) { + v := new(DiscussionComment) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"number":4}`) + }) + + comment, _, err := client.Teams.CreateComment(context.Background(), 2, 3, input) + if err != nil { + t.Errorf("Teams.CreateComment returned error: %v", err) + } + + want := &DiscussionComment{Number: Int(4)} + if !reflect.DeepEqual(comment, want) { + t.Errorf("Teams.CreateComment returned %+v, want %+v", comment, want) + } +} + +func TestTeamsService_EditComment(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := DiscussionComment{Body: String("e")} + + mux.HandleFunc("/teams/2/discussions/3/comments/4", func(w http.ResponseWriter, r *http.Request) { + v := new(DiscussionComment) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"number":4}`) + }) + + comment, _, err := client.Teams.EditComment(context.Background(), 2, 3, 4, input) + if err != nil { + t.Errorf("Teams.EditComment returned error: %v", err) + } + + want := &DiscussionComment{Number: Int(4)} + if !reflect.DeepEqual(comment, want) { + t.Errorf("Teams.EditComment returned %+v, want %+v", comment, want) + } +} + +func TestTeamsService_DeleteComment(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions/3/comments/4", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + }) + + _, err := client.Teams.DeleteComment(context.Background(), 2, 3, 4) + if err != nil { + t.Errorf("Teams.DeleteComment returned error: %v", err) + } +} diff --git 
a/vendor/github.com/google/go-github/github/teams_discussions.go b/vendor/github.com/google/go-github/github/teams_discussions.go new file mode 100644 index 0000000..f491c9d --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_discussions.go @@ -0,0 +1,160 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TeamDiscussion represents a GitHub dicussion in a team. +type TeamDiscussion struct { + Author *User `json:"author,omitempty"` + Body *string `json:"body,omitempty"` + BodyHTML *string `json:"body_html,omitempty"` + BodyVersion *string `json:"body_version,omitempty"` + CommentsCount *int `json:"comments_count,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Number *int `json:"number,omitempty"` + Pinned *bool `json:"pinned,omitempty"` + Private *bool `json:"private,omitempty"` + TeamURL *string `json:"team_url,omitempty"` + Title *string `json:"title,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` +} + +func (d TeamDiscussion) String() string { + return Stringify(d) +} + +// DiscussionListOptions specifies optional parameters to the +// TeamServices.ListDiscussions method. +type DiscussionListOptions struct { + // Sorts the discussion by the date they were created. + // Accepted values are asc and desc. Default is desc. + Direction string `url:"direction,omitempty"` +} + +// ListDiscussions lists all discussions on team's page. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussions/#list-discussions +func (s *TeamsService) ListDiscussions(ctx context.Context, teamID int64, options *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions", teamID) + u, err := addOptions(u, options) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + var teamDiscussions []*TeamDiscussion + resp, err := s.client.Do(ctx, req, &teamDiscussions) + if err != nil { + return nil, resp, err + } + + return teamDiscussions, resp, nil +} + +// GetDiscussion gets a specific discussion on a team's page. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussions/#get-a-single-discussion +func (s *TeamsService) GetDiscussion(ctx context.Context, teamID int64, discussionNumber int) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v", teamID, discussionNumber) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// CreateDiscussion creates a new discussion post on a team's page. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussions/#create-a-discussion +func (s *TeamsService) CreateDiscussion(ctx context.Context, teamID int64, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions", teamID) + req, err := s.client.NewRequest("POST", u, discussion) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// EditDiscussion edits the title and body text of a discussion post. +// Authenticated user must grant write:discussion scope. +// User is allowed to change Title and Body of a discussion only. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussions/#edit-a-discussion +func (s *TeamsService) EditDiscussion(ctx context.Context, teamID int64, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v", teamID, discussionNumber) + req, err := s.client.NewRequest("PATCH", u, discussion) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// DeleteDiscussion deletes a discussion from team's page. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://developer.github.com/v3/teams/discussions/#delete-a-discussion +func (s *TeamsService) DeleteDiscussion(ctx context.Context, teamID int64, discussionNumber int) (*Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v", teamID, discussionNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTeamDiscussionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/teams_discussions_test.go b/vendor/github.com/google/go-github/github/teams_discussions_test.go new file mode 100644 index 0000000..c588c22 --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_discussions_test.go @@ -0,0 +1,212 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" + "time" +) + +func TestTeamsService_ListDiscussions(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + testFormValues(t, r, values{ + "direction": "desc", + }) + fmt.Fprintf(w, + `[ + { + "author": { + "login": "author", + "id": 0, + "avatar_url": "https://avatars1.githubusercontent.com/u/0?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/author", + "html_url": "https://github.com/author", + "followers_url": "https://api.github.com/users/author/followers", + "following_url": "https://api.github.com/users/author/following{/other_user}", + "gists_url": "https://api.github.com/users/author/gists{/gist_id}", + "starred_url": "https://api.github.com/users/author/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/author/subscriptions", + "organizations_url": "https://api.github.com/users/author/orgs", + "repos_url": "https://api.github.com/users/author/repos", + "events_url": "https://api.github.com/users/author/events{/privacy}", + "received_events_url": "https://api.github.com/users/author/received_events", + "type": "User", + "site_admin": false + }, + "body": "test", + "body_html": "

<p>test</p>
", + "body_version": "version", + "comments_count": 1, + "comments_url": "https://api.github.com/teams/2/discussions/3/comments", + "created_at": "2018-01-01T00:00:00Z", + "last_edited_at": null, + "html_url": "https://github.com/orgs/1/teams/2/discussions/3", + "node_id": "node", + "number": 3, + "pinned": false, + "private": false, + "team_url": "https://api.github.com/teams/2", + "title": "test", + "updated_at": "2018-01-01T00:00:00Z", + "url": "https://api.github.com/teams/2/discussions/3" + } + ]`) + }) + discussions, _, err := client.Teams.ListDiscussions(context.Background(), 2, &DiscussionListOptions{"desc"}) + if err != nil { + t.Errorf("Teams.ListDiscussions returned error: %v", err) + } + + want := []*TeamDiscussion{ + { + Author: &User{ + Login: String("author"), + ID: Int64(0), + AvatarURL: String("https://avatars1.githubusercontent.com/u/0?v=4"), + GravatarID: String(""), + URL: String("https://api.github.com/users/author"), + HTMLURL: String("https://github.com/author"), + FollowersURL: String("https://api.github.com/users/author/followers"), + FollowingURL: String("https://api.github.com/users/author/following{/other_user}"), + GistsURL: String("https://api.github.com/users/author/gists{/gist_id}"), + StarredURL: String("https://api.github.com/users/author/starred{/owner}{/repo}"), + SubscriptionsURL: String("https://api.github.com/users/author/subscriptions"), + OrganizationsURL: String("https://api.github.com/users/author/orgs"), + ReposURL: String("https://api.github.com/users/author/repos"), + EventsURL: String("https://api.github.com/users/author/events{/privacy}"), + ReceivedEventsURL: String("https://api.github.com/users/author/received_events"), + Type: String("User"), + SiteAdmin: Bool(false), + }, + Body: String("test"), + BodyHTML: String("

<p>test</p>
"), + BodyVersion: String("version"), + CommentsCount: Int(1), + CommentsURL: String("https://api.github.com/teams/2/discussions/3/comments"), + CreatedAt: &Timestamp{time.Date(2018, time.January, 1, 0, 0, 0, 0, time.UTC)}, + LastEditedAt: nil, + HTMLURL: String("https://github.com/orgs/1/teams/2/discussions/3"), + NodeID: String("node"), + Number: Int(3), + Pinned: Bool(false), + Private: Bool(false), + TeamURL: String("https://api.github.com/teams/2"), + Title: String("test"), + UpdatedAt: &Timestamp{time.Date(2018, time.January, 1, 0, 0, 0, 0, time.UTC)}, + URL: String("https://api.github.com/teams/2/discussions/3"), + }, + } + if !reflect.DeepEqual(discussions, want) { + t.Errorf("Teams.ListDiscussions returned %+v, want %+v", discussions, want) + } +} + +func TestTeamsService_GetDiscussion(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions/3", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + fmt.Fprint(w, `{"number":3}`) + }) + + discussion, _, err := client.Teams.GetDiscussion(context.Background(), 2, 3) + if err != nil { + t.Errorf("Teams.GetDiscussion returned error: %v", err) + } + + want := &TeamDiscussion{Number: Int(3)} + if !reflect.DeepEqual(discussion, want) { + t.Errorf("Teams.GetDiscussion returned %+v, want %+v", discussion, want) + } +} + +func TestTeamsService_CreateDiscussion(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := TeamDiscussion{Title: String("c_t"), Body: String("c_b")} + + mux.HandleFunc("/teams/2/discussions", func(w http.ResponseWriter, r *http.Request) { + v := new(TeamDiscussion) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"number":3}`) + }) + + comment, _, err := client.Teams.CreateDiscussion(context.Background(), 2, input) + if err != nil { + t.Errorf("Teams.CreateDiscussion returned error: %v", err) + } + + want := &TeamDiscussion{Number: Int(3)} + if !reflect.DeepEqual(comment, want) { + t.Errorf("Teams.CreateDiscussion returned %+v, want %+v", comment, want) + } +} + +func TestTeamsService_EditDiscussion(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := TeamDiscussion{Title: String("e_t"), Body: String("e_b")} + + mux.HandleFunc("/teams/2/discussions/3", func(w http.ResponseWriter, r *http.Request) { + v := new(TeamDiscussion) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PATCH") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"number":3}`) + }) + + comment, _, err := client.Teams.EditDiscussion(context.Background(), 2, 3, input) + if err != nil { + t.Errorf("Teams.EditDiscussion returned error: %v", err) + } + + want := &TeamDiscussion{Number: Int(3)} + if !reflect.DeepEqual(comment, want) { + t.Errorf("Teams.EditDiscussion returned %+v, want %+v", comment, want) + } +} + +func TestTeamsService_DeleteDiscussion(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/2/discussions/3", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeTeamDiscussionsPreview) + }) + + _, err := 
client.Teams.DeleteDiscussion(context.Background(), 2, 3) + if err != nil { + t.Errorf("Teams.DeleteDiscussion returned error: %v", err) + } +} diff --git a/vendor/github.com/google/go-github/github/teams_members.go b/vendor/github.com/google/go-github/github/teams_members.go new file mode 100644 index 0000000..d5cfa0d --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_members.go @@ -0,0 +1,174 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TeamListTeamMembersOptions specifies the optional parameters to the +// TeamsService.ListTeamMembers method. +type TeamListTeamMembersOptions struct { + // Role filters members returned by their role in the team. Possible + // values are "all", "member", "maintainer". Default is "all". + Role string `url:"role,omitempty"` + + ListOptions +} + +// ListTeamMembers lists all of the users who are members of the specified +// team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#list-team-members +func (s *TeamsService) ListTeamMembers(ctx context.Context, team int64, opt *TeamListTeamMembersOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("teams/%v/members", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var members []*User + resp, err := s.client.Do(ctx, req, &members) + if err != nil { + return nil, resp, err + } + + return members, resp, nil +} + +// IsTeamMember checks if a user is a member of the specified team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#get-team-member +// +// Deprecated: This API has been marked as deprecated in the Github API docs, +// TeamsService.GetTeamMembership method should be used instead. +func (s *TeamsService) IsTeamMember(ctx context.Context, team int64, user string) (bool, *Response, error) { + u := fmt.Sprintf("teams/%v/members/%v", team, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + member, err := parseBoolResponse(err) + return member, resp, err +} + +// GetTeamMembership returns the membership status for a user in a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#get-team-membership +func (s *TeamsService) GetTeamMembership(ctx context.Context, team int64, user string) (*Membership, *Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// TeamAddTeamMembershipOptions specifies the optional +// parameters to the TeamsService.AddTeamMembership method. +type TeamAddTeamMembershipOptions struct { + // Role specifies the role the user should have in the team. Possible + // values are: + // member - a normal member of the team + // maintainer - a team maintainer. 
Able to add/remove other team + // members, promote other team members to team + // maintainer, and edit the team’s name and description + // + // Default value is "member". + Role string `json:"role,omitempty"` +} + +// AddTeamMembership adds or invites a user to a team. +// +// In order to add a membership between a user and a team, the authenticated +// user must have 'admin' permissions to the team or be an owner of the +// organization that the team is associated with. +// +// If the user is already a part of the team's organization (meaning they're on +// at least one other team in the organization), this endpoint will add the +// user to the team. +// +// If the user is completely unaffiliated with the team's organization (meaning +// they're on none of the organization's teams), this endpoint will send an +// invitation to the user via email. This newly-created membership will be in +// the "pending" state until the user accepts the invitation, at which point +// the membership will transition to the "active" state and the user will be +// added as a member of the team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#add-or-update-team-membership +func (s *TeamsService) AddTeamMembership(ctx context.Context, team int64, user string, opt *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// RemoveTeamMembership removes a user from a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#remove-team-membership +func (s *TeamsService) RemoveTeamMembership(ctx context.Context, team int64, user string) (*Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListPendingTeamInvitations get pending invitaion list in team. +// Warning: The API may change without advance notice during the preview period. +// Preview features are not supported for production use. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#list-pending-team-invitations +func (s *TeamsService) ListPendingTeamInvitations(ctx context.Context, team int64, opt *ListOptions) ([]*Invitation, *Response, error) { + u := fmt.Sprintf("teams/%v/invitations", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pendingInvitations []*Invitation + resp, err := s.client.Do(ctx, req, &pendingInvitations) + if err != nil { + return nil, resp, err + } + + return pendingInvitations, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/teams_members_test.go b/vendor/github.com/google/go-github/github/teams_members_test.go new file mode 100644 index 0000000..2489da0 --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_members_test.go @@ -0,0 +1,167 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
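// Editorial aside (not part of the upstream diff): a minimal usage sketch of the
// team-membership methods added in teams_members.go above. It assumes an already
// authenticated *github.Client and go-github's generated Get* accessors; the team
// ID 42 and the login "octocat" are placeholders.
package teamsexample

import (
	"context"
	"fmt"

	"github.com/google/go-github/github"
)

func listAndPromote(ctx context.Context, client *github.Client) error {
	// Team IDs are int64 in this version of the client.
	members, _, err := client.Teams.ListTeamMembers(ctx, 42,
		&github.TeamListTeamMembersOptions{Role: "member"})
	if err != nil {
		return err
	}
	for _, m := range members {
		fmt.Println(m.GetLogin())
	}

	// AddTeamMembership either updates an existing member or sends an invitation.
	membership, _, err := client.Teams.AddTeamMembership(ctx, 42, "octocat",
		&github.TeamAddTeamMembershipOptions{Role: "maintainer"})
	if err != nil {
		return err
	}
	fmt.Println(membership.GetState()) // "pending" until an invited user accepts
	return nil
}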
+ +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "testing" +) + +func TestTeamsService__ListTeamMembers(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/members", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + testFormValues(t, r, values{"role": "member", "page": "2"}) + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &TeamListTeamMembersOptions{Role: "member", ListOptions: ListOptions{Page: 2}} + members, _, err := client.Teams.ListTeamMembers(context.Background(), 1, opt) + if err != nil { + t.Errorf("Teams.ListTeamMembers returned error: %v", err) + } + + want := []*User{{ID: Int64(1)}} + if !reflect.DeepEqual(members, want) { + t.Errorf("Teams.ListTeamMembers returned %+v, want %+v", members, want) + } +} + +func TestTeamsService__IsTeamMember_true(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + }) + + member, _, err := client.Teams.IsTeamMember(context.Background(), 1, "u") + if err != nil { + t.Errorf("Teams.IsTeamMember returned error: %v", err) + } + if want := true; member != want { + t.Errorf("Teams.IsTeamMember returned %+v, want %+v", member, want) + } +} + +// ensure that a 404 response is interpreted as "false" and not an error +func TestTeamsService__IsTeamMember_false(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + w.WriteHeader(http.StatusNotFound) + }) + + member, _, err := client.Teams.IsTeamMember(context.Background(), 1, "u") + if err != nil { + t.Errorf("Teams.IsTeamMember returned error: %+v", err) + } + if want := false; member != want { + t.Errorf("Teams.IsTeamMember returned %+v, want %+v", member, want) + } +} + +// ensure that a 400 response is interpreted as an actual error, and not simply +// as "false" like the above case of a 404 +func TestTeamsService__IsTeamMember_error(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/members/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + http.Error(w, "BadRequest", http.StatusBadRequest) + }) + + member, _, err := client.Teams.IsTeamMember(context.Background(), 1, "u") + if err == nil { + t.Errorf("Expected HTTP 400 response") + } + if want := false; member != want { + t.Errorf("Teams.IsTeamMember returned %+v, want %+v", member, want) + } +} + +func TestTeamsService__IsTeamMember_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Teams.IsTeamMember(context.Background(), 1, "%") + testURLParseError(t, err) +} + +func TestTeamsService__GetTeamMembership(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + fmt.Fprint(w, `{"url":"u", "state":"active"}`) + }) + + membership, _, err := client.Teams.GetTeamMembership(context.Background(), 1, "u") + if err != nil { + t.Errorf("Teams.GetTeamMembership returned error: %v", err) + } + + want := &Membership{URL: String("u"), State: String("active")} + if !reflect.DeepEqual(membership, want) { + 
t.Errorf("Teams.GetTeamMembership returned %+v, want %+v", membership, want) + } +} + +func TestTeamsService__AddTeamMembership(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + opt := &TeamAddTeamMembershipOptions{Role: "maintainer"} + + mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { + v := new(TeamAddTeamMembershipOptions) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PUT") + if !reflect.DeepEqual(v, opt) { + t.Errorf("Request body = %+v, want %+v", v, opt) + } + + fmt.Fprint(w, `{"url":"u", "state":"pending"}`) + }) + + membership, _, err := client.Teams.AddTeamMembership(context.Background(), 1, "u", opt) + if err != nil { + t.Errorf("Teams.AddTeamMembership returned error: %v", err) + } + + want := &Membership{URL: String("u"), State: String("pending")} + if !reflect.DeepEqual(membership, want) { + t.Errorf("Teams.AddTeamMembership returned %+v, want %+v", membership, want) + } +} + +func TestTeamsService__RemoveTeamMembership(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/memberships/u", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Teams.RemoveTeamMembership(context.Background(), 1, "u") + if err != nil { + t.Errorf("Teams.RemoveTeamMembership returned error: %v", err) + } +} diff --git a/vendor/github.com/google/go-github/github/teams_test.go b/vendor/github.com/google/go-github/github/teams_test.go new file mode 100644 index 0000000..fbd8130 --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_test.go @@ -0,0 +1,554 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "reflect" + "strings" + "testing" + "time" +) + +func TestTeamsService_ListTeams(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/orgs/o/teams", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + testFormValues(t, r, values{"page": "2"}) + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 2} + teams, _, err := client.Teams.ListTeams(context.Background(), "o", opt) + if err != nil { + t.Errorf("Teams.ListTeams returned error: %v", err) + } + + want := []*Team{{ID: Int64(1)}} + if !reflect.DeepEqual(teams, want) { + t.Errorf("Teams.ListTeams returned %+v, want %+v", teams, want) + } +} + +func TestTeamsService_ListTeams_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Teams.ListTeams(context.Background(), "%", nil) + testURLParseError(t, err) +} + +func TestTeamsService_GetTeam(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + fmt.Fprint(w, `{"id":1, "name":"n", "description": "d", "url":"u", "slug": "s", "permission":"p", "ldap_dn":"cn=n,ou=groups,dc=example,dc=com", "parent":null}`) + }) + + team, _, err := client.Teams.GetTeam(context.Background(), 1) + if err != nil { + t.Errorf("Teams.GetTeam returned error: %v", err) + } + + want := &Team{ID: Int64(1), Name: String("n"), Description: String("d"), URL: String("u"), Slug: String("s"), Permission: String("p"), LDAPDN: String("cn=n,ou=groups,dc=example,dc=com")} + if !reflect.DeepEqual(team, want) { + t.Errorf("Teams.GetTeam returned %+v, want %+v", team, want) + } +} + +func TestTeamsService_GetTeam_nestedTeams(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + fmt.Fprint(w, `{"id":1, "name":"n", "description": "d", "url":"u", "slug": "s", "permission":"p", + "parent": {"id":2, "name":"n", "description": "d", "parent": null}}`) + }) + + team, _, err := client.Teams.GetTeam(context.Background(), 1) + if err != nil { + t.Errorf("Teams.GetTeam returned error: %v", err) + } + + want := &Team{ID: Int64(1), Name: String("n"), Description: String("d"), URL: String("u"), Slug: String("s"), Permission: String("p"), + Parent: &Team{ID: Int64(2), Name: String("n"), Description: String("d")}, + } + if !reflect.DeepEqual(team, want) { + t.Errorf("Teams.GetTeam returned %+v, want %+v", team, want) + } +} + +func TestTeamsService_CreateTeam(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := NewTeam{Name: "n", Privacy: String("closed"), RepoNames: []string{"r"}} + + mux.HandleFunc("/orgs/o/teams", func(w http.ResponseWriter, r *http.Request) { + v := new(NewTeam) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "POST") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"id":1}`) + }) + + team, _, err := client.Teams.CreateTeam(context.Background(), "o", input) + if err != nil { + t.Errorf("Teams.CreateTeam returned error: %v", err) + } + + want := 
&Team{ID: Int64(1)} + if !reflect.DeepEqual(team, want) { + t.Errorf("Teams.CreateTeam returned %+v, want %+v", team, want) + } +} + +func TestTeamsService_CreateTeam_invalidOrg(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Teams.CreateTeam(context.Background(), "%", NewTeam{}) + testURLParseError(t, err) +} + +func TestTeamsService_EditTeam(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := NewTeam{Name: "n", Privacy: String("closed")} + + mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { + v := new(NewTeam) + json.NewDecoder(r.Body).Decode(v) + + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + testMethod(t, r, "PATCH") + if !reflect.DeepEqual(v, &input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + fmt.Fprint(w, `{"id":1}`) + }) + + team, _, err := client.Teams.EditTeam(context.Background(), 1, input) + if err != nil { + t.Errorf("Teams.EditTeam returned error: %v", err) + } + + want := &Team{ID: Int64(1)} + if !reflect.DeepEqual(team, want) { + t.Errorf("Teams.EditTeam returned %+v, want %+v", team, want) + } +} + +func TestTeamsService_DeleteTeam(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + }) + + _, err := client.Teams.DeleteTeam(context.Background(), 1) + if err != nil { + t.Errorf("Teams.DeleteTeam returned error: %v", err) + } +} + +func TestTeamsService_ListChildTeams(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/teams", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + testFormValues(t, r, values{"page": "2"}) + fmt.Fprint(w, `[{"id":2}]`) + }) + + opt := &ListOptions{Page: 2} + teams, _, err := client.Teams.ListChildTeams(context.Background(), 1, opt) + if err != nil { + t.Errorf("Teams.ListTeams returned error: %v", err) + } + + want := []*Team{{ID: Int64(2)}} + if !reflect.DeepEqual(teams, want) { + t.Errorf("Teams.ListTeams returned %+v, want %+v", teams, want) + } +} + +func TestTeamsService_ListTeamRepos(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + wantAcceptHeaders := []string{mediaTypeTopicsPreview, mediaTypeNestedTeamsPreview} + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + testFormValues(t, r, values{"page": "2"}) + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 2} + members, _, err := client.Teams.ListTeamRepos(context.Background(), 1, opt) + if err != nil { + t.Errorf("Teams.ListTeamRepos returned error: %v", err) + } + + want := []*Repository{{ID: Int64(1)}} + if !reflect.DeepEqual(members, want) { + t.Errorf("Teams.ListTeamRepos returned %+v, want %+v", members, want) + } +} + +func TestTeamsService_IsTeamRepo_true(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + wantAcceptHeaders := []string{mediaTypeOrgPermissionRepo, mediaTypeNestedTeamsPreview} + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + fmt.Fprint(w, `{"id":1}`) + }) + + repo, _, err := 
client.Teams.IsTeamRepo(context.Background(), 1, "o", "r") + if err != nil { + t.Errorf("Teams.IsTeamRepo returned error: %v", err) + } + + want := &Repository{ID: Int64(1)} + if !reflect.DeepEqual(repo, want) { + t.Errorf("Teams.IsTeamRepo returned %+v, want %+v", repo, want) + } +} + +func TestTeamsService_IsTeamRepo_false(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + w.WriteHeader(http.StatusNotFound) + }) + + repo, resp, err := client.Teams.IsTeamRepo(context.Background(), 1, "o", "r") + if err == nil { + t.Errorf("Expected HTTP 404 response") + } + if got, want := resp.Response.StatusCode, http.StatusNotFound; got != want { + t.Errorf("Teams.IsTeamRepo returned status %d, want %d", got, want) + } + if repo != nil { + t.Errorf("Teams.IsTeamRepo returned %+v, want nil", repo) + } +} + +func TestTeamsService_IsTeamRepo_error(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + http.Error(w, "BadRequest", http.StatusBadRequest) + }) + + repo, resp, err := client.Teams.IsTeamRepo(context.Background(), 1, "o", "r") + if err == nil { + t.Errorf("Expected HTTP 400 response") + } + if got, want := resp.Response.StatusCode, http.StatusBadRequest; got != want { + t.Errorf("Teams.IsTeamRepo returned status %d, want %d", got, want) + } + if repo != nil { + t.Errorf("Teams.IsTeamRepo returned %+v, want nil", repo) + } +} + +func TestTeamsService_IsTeamRepo_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, _, err := client.Teams.IsTeamRepo(context.Background(), 1, "%", "r") + testURLParseError(t, err) +} + +func TestTeamsService_AddTeamRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + opt := &TeamAddTeamRepoOptions{Permission: "admin"} + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + v := new(TeamAddTeamRepoOptions) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PUT") + if !reflect.DeepEqual(v, opt) { + t.Errorf("Request body = %+v, want %+v", v, opt) + } + + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Teams.AddTeamRepo(context.Background(), 1, "o", "r", opt) + if err != nil { + t.Errorf("Teams.AddTeamRepo returned error: %v", err) + } +} + +func TestTeamsService_AddTeamRepo_noAccess(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + w.WriteHeader(http.StatusUnprocessableEntity) + }) + + _, err := client.Teams.AddTeamRepo(context.Background(), 1, "o", "r", nil) + if err == nil { + t.Errorf("Expcted error to be returned") + } +} + +func TestTeamsService_AddTeamRepo_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, err := client.Teams.AddTeamRepo(context.Background(), 1, "%", "r", nil) + testURLParseError(t, err) +} + +func TestTeamsService_RemoveTeamRepo(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/repos/o/r", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Teams.RemoveTeamRepo(context.Background(), 1, "o", "r") + if err != nil { + t.Errorf("Teams.RemoveTeamRepo 
returned error: %v", err) + } +} + +func TestTeamsService_RemoveTeamRepo_invalidOwner(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + + _, err := client.Teams.RemoveTeamRepo(context.Background(), 1, "%", "r") + testURLParseError(t, err) +} + +func TestTeamsService_ListUserTeams(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/user/teams", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeNestedTeamsPreview) + testFormValues(t, r, values{"page": "1"}) + fmt.Fprint(w, `[{"id":1}]`) + }) + + opt := &ListOptions{Page: 1} + teams, _, err := client.Teams.ListUserTeams(context.Background(), opt) + if err != nil { + t.Errorf("Teams.ListUserTeams returned error: %v", err) + } + + want := []*Team{{ID: Int64(1)}} + if !reflect.DeepEqual(teams, want) { + t.Errorf("Teams.ListUserTeams returned %+v, want %+v", teams, want) + } +} + +func TestTeamsService_ListPendingTeamInvitations(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + mux.HandleFunc("/teams/1/invitations", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testFormValues(t, r, values{"page": "1"}) + fmt.Fprint(w, `[ + { + "id": 1, + "login": "monalisa", + "email": "octocat@github.com", + "role": "direct_member", + "created_at": "2017-01-21T00:00:00Z", + "inviter": { + "login": "other_user", + "id": 1, + "avatar_url": "https://github.com/images/error/other_user_happy.gif", + "gravatar_id": "", + "url": "https://api.github.com/users/other_user", + "html_url": "https://github.com/other_user", + "followers_url": "https://api.github.com/users/other_user/followers", + "following_url": "https://api.github.com/users/other_user/following/other_user", + "gists_url": "https://api.github.com/users/other_user/gists/gist_id", + "starred_url": "https://api.github.com/users/other_user/starred/owner/repo", + "subscriptions_url": "https://api.github.com/users/other_user/subscriptions", + "organizations_url": "https://api.github.com/users/other_user/orgs", + "repos_url": "https://api.github.com/users/other_user/repos", + "events_url": "https://api.github.com/users/other_user/events/privacy", + "received_events_url": "https://api.github.com/users/other_user/received_events/privacy", + "type": "User", + "site_admin": false + } + } + ]`) + }) + + opt := &ListOptions{Page: 1} + invitations, _, err := client.Teams.ListPendingTeamInvitations(context.Background(), 1, opt) + if err != nil { + t.Errorf("Teams.ListPendingTeamInvitations returned error: %v", err) + } + + createdAt := time.Date(2017, time.January, 21, 0, 0, 0, 0, time.UTC) + want := []*Invitation{ + { + ID: Int64(1), + Login: String("monalisa"), + Email: String("octocat@github.com"), + Role: String("direct_member"), + CreatedAt: &createdAt, + Inviter: &User{ + Login: String("other_user"), + ID: Int64(1), + AvatarURL: String("https://github.com/images/error/other_user_happy.gif"), + GravatarID: String(""), + URL: String("https://api.github.com/users/other_user"), + HTMLURL: String("https://github.com/other_user"), + FollowersURL: String("https://api.github.com/users/other_user/followers"), + FollowingURL: String("https://api.github.com/users/other_user/following/other_user"), + GistsURL: String("https://api.github.com/users/other_user/gists/gist_id"), + StarredURL: String("https://api.github.com/users/other_user/starred/owner/repo"), + SubscriptionsURL: String("https://api.github.com/users/other_user/subscriptions"), + 
OrganizationsURL: String("https://api.github.com/users/other_user/orgs"), + ReposURL: String("https://api.github.com/users/other_user/repos"), + EventsURL: String("https://api.github.com/users/other_user/events/privacy"), + ReceivedEventsURL: String("https://api.github.com/users/other_user/received_events/privacy"), + Type: String("User"), + SiteAdmin: Bool(false), + }, + }} + + if !reflect.DeepEqual(invitations, want) { + t.Errorf("Teams.ListPendingTeamInvitations returned %+v, want %+v", invitations, want) + } +} + +func TestTeamsService_ListProjects(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + wantAcceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + mux.HandleFunc("/teams/1/projects", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + fmt.Fprint(w, `[{"id":1}]`) + }) + + projects, _, err := client.Teams.ListTeamProjects(context.Background(), 1) + if err != nil { + t.Errorf("Teams.ListTeamProjects returned error: %v", err) + } + + want := []*Project{{ID: Int64(1)}} + if !reflect.DeepEqual(projects, want) { + t.Errorf("Teams.ListTeamProjects returned %+v, want %+v", projects, want) + } +} + +func TestTeamsService_ReviewProjects(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + wantAcceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + mux.HandleFunc("/teams/1/projects/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + fmt.Fprint(w, `{"id":1}`) + }) + + project, _, err := client.Teams.ReviewTeamProjects(context.Background(), 1, 1) + if err != nil { + t.Errorf("Teams.ReviewTeamProjects returned error: %v", err) + } + + want := &Project{ID: Int64(1)} + if !reflect.DeepEqual(project, want) { + t.Errorf("Teams.ReviewTeamProjects returned %+v, want %+v", project, want) + } +} + +func TestTeamsService_AddTeamProject(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + opt := &TeamProjectOptions{ + Permission: String("admin"), + } + + wantAcceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + mux.HandleFunc("/teams/1/projects/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "PUT") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + + v := &TeamProjectOptions{} + json.NewDecoder(r.Body).Decode(v) + if !reflect.DeepEqual(v, opt) { + t.Errorf("Request body = %+v, want %+v", v, opt) + } + + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Teams.AddTeamProject(context.Background(), 1, 1, opt) + if err != nil { + t.Errorf("Teams.AddTeamProject returned error: %v", err) + } +} + +func TestTeamsService_RemoveTeamProject(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + wantAcceptHeaders := []string{mediaTypeNestedTeamsPreview, mediaTypeProjectsPreview} + mux.HandleFunc("/teams/1/projects/1", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "DELETE") + testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Teams.RemoveTeamProject(context.Background(), 1, 1) + if err != nil { + t.Errorf("Teams.RemoveTeamProject returned error: %v", err) + } +} diff --git a/vendor/github.com/google/go-github/github/timestamp.go b/vendor/github.com/google/go-github/github/timestamp.go index a1c1554..90929d5 
100644 --- a/vendor/github.com/google/go-github/github/timestamp.go +++ b/vendor/github.com/google/go-github/github/timestamp.go @@ -28,9 +28,9 @@ func (t *Timestamp) UnmarshalJSON(data []byte) (err error) { str := string(data) i, err := strconv.ParseInt(str, 10, 64) if err == nil { - (*t).Time = time.Unix(i, 0) + t.Time = time.Unix(i, 0) } else { - (*t).Time, err = time.Parse(`"`+time.RFC3339+`"`, str) + t.Time, err = time.Parse(`"`+time.RFC3339+`"`, str) } return } diff --git a/vendor/github.com/google/go-github/github/timestamp_test.go b/vendor/github.com/google/go-github/github/timestamp_test.go index 997b43d..dc09eaf 100644 --- a/vendor/github.com/google/go-github/github/timestamp_test.go +++ b/vendor/github.com/google/go-github/github/timestamp_test.go @@ -13,13 +13,14 @@ import ( ) const ( - emptyTimeStr = `"0001-01-01T00:00:00Z"` - referenceTimeStr = `"2006-01-02T15:04:05Z"` - referenceUnixTimeStr = `1136214245` + emptyTimeStr = `"0001-01-01T00:00:00Z"` + referenceTimeStr = `"2006-01-02T15:04:05Z"` + referenceTimeStrFractional = `"2006-01-02T15:04:05.000Z"` // This format was returned by the Projects API before October 1, 2017. + referenceUnixTimeStr = `1136214245` ) var ( - referenceTime = time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC) + referenceTime = time.Date(2006, time.January, 02, 15, 04, 05, 0, time.UTC) unixOrigin = time.Unix(0, 0).In(time.UTC) ) @@ -57,7 +58,8 @@ func TestTimestamp_Unmarshal(t *testing.T) { equal bool }{ {"Reference", referenceTimeStr, Timestamp{referenceTime}, false, true}, - {"ReferenceUnix", `1136214245`, Timestamp{referenceTime}, false, true}, + {"ReferenceUnix", referenceUnixTimeStr, Timestamp{referenceTime}, false, true}, + {"ReferenceFractional", referenceTimeStrFractional, Timestamp{referenceTime}, false, true}, {"Empty", emptyTimeStr, Timestamp{}, false, true}, {"UnixStart", `0`, Timestamp{unixOrigin}, false, true}, {"Mismatch", referenceTimeStr, Timestamp{}, false, false}, diff --git a/vendor/github.com/google/go-github/github/users.go b/vendor/github.com/google/go-github/github/users.go index 83cfb45..87cfa7f 100644 --- a/vendor/github.com/google/go-github/github/users.go +++ b/vendor/github.com/google/go-github/github/users.go @@ -18,33 +18,35 @@ type UsersService service // User represents a GitHub user. 
type User struct { - Login *string `json:"login,omitempty"` - ID *int `json:"id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - Hireable *bool `json:"hireable,omitempty"` - Bio *string `json:"bio,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SuspendedAt *Timestamp `json:"suspended_at,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - TotalPrivateRepos *int `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - Plan *Plan `json:"plan,omitempty"` + Login *string `json:"login,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + GravatarID *string `json:"gravatar_id,omitempty"` + Name *string `json:"name,omitempty"` + Company *string `json:"company,omitempty"` + Blog *string `json:"blog,omitempty"` + Location *string `json:"location,omitempty"` + Email *string `json:"email,omitempty"` + Hireable *bool `json:"hireable,omitempty"` + Bio *string `json:"bio,omitempty"` + PublicRepos *int `json:"public_repos,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` + Followers *int `json:"followers,omitempty"` + Following *int `json:"following,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + SuspendedAt *Timestamp `json:"suspended_at,omitempty"` + Type *string `json:"type,omitempty"` + SiteAdmin *bool `json:"site_admin,omitempty"` + TotalPrivateRepos *int `json:"total_private_repos,omitempty"` + OwnedPrivateRepos *int `json:"owned_private_repos,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + DiskUsage *int `json:"disk_usage,omitempty"` + Collaborators *int `json:"collaborators,omitempty"` + TwoFactorAuthentication *bool `json:"two_factor_authentication,omitempty"` + Plan *Plan `json:"plan,omitempty"` // API URLs URL *string `json:"url,omitempty"` @@ -75,6 +77,7 @@ func (u User) String() string { // user. // // GitHub API docs: https://developer.github.com/v3/users/#get-a-single-user +// and: https://developer.github.com/v3/users/#get-the-authenticated-user func (s *UsersService) Get(ctx context.Context, user string) (*User, *Response, error) { var u string if user != "" { @@ -99,7 +102,7 @@ func (s *UsersService) Get(ctx context.Context, user string) (*User, *Response, // GetByID fetches a user. // // Note: GetByID uses the undocumented GitHub API endpoint /user/:id. 
-func (s *UsersService) GetByID(ctx context.Context, id int) (*User, *Response, error) { +func (s *UsersService) GetByID(ctx context.Context, id int64) (*User, *Response, error) { u := fmt.Sprintf("user/%d", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { @@ -134,12 +137,65 @@ func (s *UsersService) Edit(ctx context.Context, user *User) (*User, *Response, return uResp, resp, nil } +// HovercardOptions specifies optional parameters to the UsersService.GetHovercard +// method. +type HovercardOptions struct { + // SubjectType specifies the additional information to be received about the hovercard. + // Possible values are: organization, repository, issue, pull_request. (Required when using subject_id.) + SubjectType string `url:"subject_type"` + + // SubjectID specifies the ID for the SubjectType. (Required when using subject_type.) + SubjectID string `url:"subject_id"` +} + +// Hovercard represents hovercard information about a user. +type Hovercard struct { + Contexts []*UserContext `json:"contexts,omitempty"` +} + +// UserContext represents the contextual information about user. +type UserContext struct { + Message *string `json:"message,omitempty"` + Octicon *string `json:"octicon,omitempty"` +} + +// GetHovercard fetches contextual information about user. It requires authentication +// via Basic Auth or via OAuth with the repo scope. +// +// GitHub API docs: https://developer.github.com/v3/users/#get-contextual-information-about-a-user +func (s *UsersService) GetHovercard(ctx context.Context, user string, opt *HovercardOptions) (*Hovercard, *Response, error) { + u := fmt.Sprintf("users/%v/hovercard", user) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeHovercardPreview) + + hc := new(Hovercard) + resp, err := s.client.Do(ctx, req, hc) + if err != nil { + return nil, resp, err + } + + return hc, resp, nil +} + // UserListOptions specifies optional parameters to the UsersService.ListAll // method. type UserListOptions struct { // ID of the last user seen - Since int `url:"since,omitempty"` + Since int64 `url:"since,omitempty"` + // Note: Pagination is powered exclusively by the Since parameter, + // ListOptions.Page has no effect. + // ListOptions.PerPage controls an undocumented GitHub API parameter. ListOptions } @@ -183,9 +239,6 @@ func (s *UsersService) ListInvitations(ctx context.Context, opt *ListOptions) ([ return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - invites := []*RepositoryInvitation{} resp, err := s.client.Do(ctx, req, &invites) if err != nil { @@ -199,16 +252,13 @@ func (s *UsersService) ListInvitations(ctx context.Context, opt *ListOptions) ([ // authenticated user. // // GitHub API docs: https://developer.github.com/v3/repos/invitations/#accept-a-repository-invitation -func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int) (*Response, error) { +func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int64) (*Response, error) { u := fmt.Sprintf("user/repository_invitations/%v", invitationID) req, err := s.client.NewRequest("PATCH", u, nil) if err != nil { return nil, err } - // TODO: remove custom Accept header when this API fully launches. 
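// Editorial aside (not part of the upstream diff): a hedged sketch of the int64
// GetByID signature and the new hovercard endpoint shown above. The numeric IDs are
// placeholders and the generated Get* accessors are assumed.
package usersexample

import (
	"context"
	"fmt"

	"github.com/google/go-github/github"
)

func showHovercard(ctx context.Context, client *github.Client) error {
	// User IDs are int64 throughout this version of the client.
	u, _, err := client.Users.GetByID(ctx, int64(1))
	if err != nil {
		return err
	}

	// SubjectType and SubjectID scope the hovercard to, e.g., a repository.
	hc, _, err := client.Users.GetHovercard(ctx, u.GetLogin(),
		&github.HovercardOptions{SubjectType: "repository", SubjectID: "1300192"})
	if err != nil {
		return err
	}
	for _, c := range hc.Contexts {
		fmt.Println(c.GetMessage())
	}
	return nil
}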
- req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - return s.client.Do(ctx, req, nil) } @@ -216,15 +266,12 @@ func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int) ( // authenticated user. // // GitHub API docs: https://developer.github.com/v3/repos/invitations/#decline-a-repository-invitation -func (s *UsersService) DeclineInvitation(ctx context.Context, invitationID int) (*Response, error) { +func (s *UsersService) DeclineInvitation(ctx context.Context, invitationID int64) (*Response, error) { u := fmt.Sprintf("user/repository_invitations/%v", invitationID) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeRepositoryInvitationsPreview) - return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/users_administration.go b/vendor/github.com/google/go-github/github/users_administration.go index e042398..1c483a7 100644 --- a/vendor/github.com/google/go-github/github/users_administration.go +++ b/vendor/github.com/google/go-github/github/users_administration.go @@ -12,7 +12,7 @@ import ( // PromoteSiteAdmin promotes a user to a site administrator of a GitHub Enterprise instance. // -// GitHub API docs: https://developer.github.com/v3/users/administration/#promote-an-ordinary-user-to-a-site-administrator +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#promote-an-ordinary-user-to-a-site-administrator func (s *UsersService) PromoteSiteAdmin(ctx context.Context, user string) (*Response, error) { u := fmt.Sprintf("users/%v/site_admin", user) @@ -26,7 +26,7 @@ func (s *UsersService) PromoteSiteAdmin(ctx context.Context, user string) (*Resp // DemoteSiteAdmin demotes a user from site administrator of a GitHub Enterprise instance. // -// GitHub API docs: https://developer.github.com/v3/users/administration/#demote-a-site-administrator-to-an-ordinary-user +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#demote-a-site-administrator-to-an-ordinary-user func (s *UsersService) DemoteSiteAdmin(ctx context.Context, user string) (*Response, error) { u := fmt.Sprintf("users/%v/site_admin", user) @@ -38,13 +38,18 @@ func (s *UsersService) DemoteSiteAdmin(ctx context.Context, user string) (*Respo return s.client.Do(ctx, req, nil) } +// UserSuspendOptions represents the reason a user is being suspended. +type UserSuspendOptions struct { + Reason *string `json:"reason,omitempty"` +} + // Suspend a user on a GitHub Enterprise instance. // -// GitHub API docs: https://developer.github.com/v3/users/administration/#suspend-a-user -func (s *UsersService) Suspend(ctx context.Context, user string) (*Response, error) { +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#suspend-a-user +func (s *UsersService) Suspend(ctx context.Context, user string, opt *UserSuspendOptions) (*Response, error) { u := fmt.Sprintf("users/%v/suspended", user) - req, err := s.client.NewRequest("PUT", u, nil) + req, err := s.client.NewRequest("PUT", u, opt) if err != nil { return nil, err } @@ -54,7 +59,7 @@ func (s *UsersService) Suspend(ctx context.Context, user string) (*Response, err // Unsuspend a user on a GitHub Enterprise instance. 
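// Editorial aside (not part of the upstream diff): the new UserSuspendOptions shown
// above lets a GitHub Enterprise admin record why an account was suspended. A short
// hedged sketch; the reason string is a placeholder and client is an authenticated
// *github.Client.
package adminexample

import (
	"context"

	"github.com/google/go-github/github"
)

func suspendWithReason(ctx context.Context, client *github.Client, user string) error {
	// Passing nil instead of opt keeps the old behaviour of suspending without a reason.
	opt := &github.UserSuspendOptions{Reason: github.String("internal policy violation")}
	_, err := client.Users.Suspend(ctx, user, opt)
	return err
}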
// -// GitHub API docs: https://developer.github.com/v3/users/administration/#unsuspend-a-user +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#unsuspend-a-user func (s *UsersService) Unsuspend(ctx context.Context, user string) (*Response, error) { u := fmt.Sprintf("users/%v/suspended", user) diff --git a/vendor/github.com/google/go-github/github/users_administration_test.go b/vendor/github.com/google/go-github/github/users_administration_test.go index 8f02b88..c1d9f97 100644 --- a/vendor/github.com/google/go-github/github/users_administration_test.go +++ b/vendor/github.com/google/go-github/github/users_administration_test.go @@ -7,12 +7,14 @@ package github import ( "context" + "encoding/json" "net/http" + "reflect" "testing" ) func TestUsersService_PromoteSiteAdmin(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/site_admin", func(w http.ResponseWriter, r *http.Request) { @@ -27,7 +29,7 @@ func TestUsersService_PromoteSiteAdmin(t *testing.T) { } func TestUsersService_DemoteSiteAdmin(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/site_admin", func(w http.ResponseWriter, r *http.Request) { @@ -42,7 +44,7 @@ func TestUsersService_DemoteSiteAdmin(t *testing.T) { } func TestUsersService_Suspend(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/suspended", func(w http.ResponseWriter, r *http.Request) { @@ -50,14 +52,38 @@ func TestUsersService_Suspend(t *testing.T) { w.WriteHeader(http.StatusNoContent) }) - _, err := client.Users.Suspend(context.Background(), "u") + _, err := client.Users.Suspend(context.Background(), "u", nil) + if err != nil { + t.Errorf("Users.Suspend returned error: %v", err) + } +} + +func TestUsersServiceReason_Suspend(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + input := &UserSuspendOptions{Reason: String("test")} + + mux.HandleFunc("/users/u/suspended", func(w http.ResponseWriter, r *http.Request) { + v := new(UserSuspendOptions) + json.NewDecoder(r.Body).Decode(v) + + testMethod(t, r, "PUT") + if !reflect.DeepEqual(v, input) { + t.Errorf("Request body = %+v, want %+v", v, input) + } + + w.WriteHeader(http.StatusNoContent) + }) + + _, err := client.Users.Suspend(context.Background(), "u", input) if err != nil { t.Errorf("Users.Suspend returned error: %v", err) } } func TestUsersService_Unsuspend(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/suspended", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/users_blocking_test.go b/vendor/github.com/google/go-github/github/users_blocking_test.go index ea7a8ab..83e8e30 100644 --- a/vendor/github.com/google/go-github/github/users_blocking_test.go +++ b/vendor/github.com/google/go-github/github/users_blocking_test.go @@ -14,7 +14,7 @@ import ( ) func TestUsersService_ListBlockedUsers(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/blocks", func(w http.ResponseWriter, r *http.Request) { @@ -39,7 +39,7 @@ func TestUsersService_ListBlockedUsers(t *testing.T) { } func TestUsersService_IsBlocked(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/blocks/u", func(w http.ResponseWriter, r *http.Request) { @@ -58,7 +58,7 @@ func TestUsersService_IsBlocked(t *testing.T) { } func 
TestUsersService_BlockUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/blocks/u", func(w http.ResponseWriter, r *http.Request) { @@ -74,7 +74,7 @@ func TestUsersService_BlockUser(t *testing.T) { } func TestUsersService_UnblockUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/blocks/u", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/users_emails_test.go b/vendor/github.com/google/go-github/github/users_emails_test.go index 3f8176a..3f72b40 100644 --- a/vendor/github.com/google/go-github/github/users_emails_test.go +++ b/vendor/github.com/google/go-github/github/users_emails_test.go @@ -15,7 +15,7 @@ import ( ) func TestUsersService_ListEmails(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/emails", func(w http.ResponseWriter, r *http.Request) { @@ -41,7 +41,7 @@ func TestUsersService_ListEmails(t *testing.T) { } func TestUsersService_AddEmails(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []string{"new@example.com"} @@ -73,7 +73,7 @@ func TestUsersService_AddEmails(t *testing.T) { } func TestUsersService_DeleteEmails(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := []string{"user@example.com"} diff --git a/vendor/github.com/google/go-github/github/users_followers_test.go b/vendor/github.com/google/go-github/github/users_followers_test.go index 688af2c..777fe80 100644 --- a/vendor/github.com/google/go-github/github/users_followers_test.go +++ b/vendor/github.com/google/go-github/github/users_followers_test.go @@ -14,7 +14,7 @@ import ( ) func TestUsersService_ListFollowers_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/followers", func(w http.ResponseWriter, r *http.Request) { @@ -29,14 +29,14 @@ func TestUsersService_ListFollowers_authenticatedUser(t *testing.T) { t.Errorf("Users.ListFollowers returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(users, want) { t.Errorf("Users.ListFollowers returned %+v, want %+v", users, want) } } func TestUsersService_ListFollowers_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/followers", func(w http.ResponseWriter, r *http.Request) { @@ -49,19 +49,22 @@ func TestUsersService_ListFollowers_specifiedUser(t *testing.T) { t.Errorf("Users.ListFollowers returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(users, want) { t.Errorf("Users.ListFollowers returned %+v, want %+v", users, want) } } func TestUsersService_ListFollowers_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.ListFollowers(context.Background(), "%", nil) testURLParseError(t, err) } func TestUsersService_ListFollowing_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/following", func(w http.ResponseWriter, r *http.Request) { @@ -76,14 +79,14 @@ func TestUsersService_ListFollowing_authenticatedUser(t *testing.T) { t.Errorf("Users.ListFollowing returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(users, want) { 
t.Errorf("Users.ListFollowing returned %+v, want %+v", users, want) } } func TestUsersService_ListFollowing_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/following", func(w http.ResponseWriter, r *http.Request) { @@ -96,19 +99,22 @@ func TestUsersService_ListFollowing_specifiedUser(t *testing.T) { t.Errorf("Users.ListFollowing returned error: %v", err) } - want := []*User{{ID: Int(1)}} + want := []*User{{ID: Int64(1)}} if !reflect.DeepEqual(users, want) { t.Errorf("Users.ListFollowing returned %+v, want %+v", users, want) } } func TestUsersService_ListFollowing_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.ListFollowing(context.Background(), "%", nil) testURLParseError(t, err) } func TestUsersService_IsFollowing_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/following/t", func(w http.ResponseWriter, r *http.Request) { @@ -126,7 +132,7 @@ func TestUsersService_IsFollowing_authenticatedUser(t *testing.T) { } func TestUsersService_IsFollowing_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/following/t", func(w http.ResponseWriter, r *http.Request) { @@ -144,7 +150,7 @@ func TestUsersService_IsFollowing_specifiedUser(t *testing.T) { } func TestUsersService_IsFollowing_false(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/following/t", func(w http.ResponseWriter, r *http.Request) { @@ -162,7 +168,7 @@ func TestUsersService_IsFollowing_false(t *testing.T) { } func TestUsersService_IsFollowing_error(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/following/t", func(w http.ResponseWriter, r *http.Request) { @@ -180,12 +186,15 @@ func TestUsersService_IsFollowing_error(t *testing.T) { } func TestUsersService_IsFollowing_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.IsFollowing(context.Background(), "%", "%") testURLParseError(t, err) } func TestUsersService_Follow(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/following/u", func(w http.ResponseWriter, r *http.Request) { @@ -199,12 +208,15 @@ func TestUsersService_Follow(t *testing.T) { } func TestUsersService_Follow_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Users.Follow(context.Background(), "%") testURLParseError(t, err) } func TestUsersService_Unfollow(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/following/u", func(w http.ResponseWriter, r *http.Request) { @@ -218,6 +230,9 @@ func TestUsersService_Unfollow(t *testing.T) { } func TestUsersService_Unfollow_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, err := client.Users.Unfollow(context.Background(), "%") testURLParseError(t, err) } diff --git a/vendor/github.com/google/go-github/github/users_gpg_keys.go b/vendor/github.com/google/go-github/github/users_gpg_keys.go index 3e95fb4..07ed38d 100644 --- a/vendor/github.com/google/go-github/github/users_gpg_keys.go +++ b/vendor/github.com/google/go-github/github/users_gpg_keys.go @@ -15,8 +15,8 @@ import ( // // https://developer.github.com/changes/2016-04-04-git-signing-api-preview/ 
type GPGKey struct { - ID *int `json:"id,omitempty"` - PrimaryKeyID *int `json:"primary_key_id,omitempty"` + ID *int64 `json:"id,omitempty"` + PrimaryKeyID *int64 `json:"primary_key_id,omitempty"` KeyID *string `json:"key_id,omitempty"` PublicKey *string `json:"public_key,omitempty"` Emails []GPGEmail `json:"emails,omitempty"` @@ -62,9 +62,6 @@ func (s *UsersService) ListGPGKeys(ctx context.Context, user string, opt *ListOp return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGitSigningPreview) - var keys []*GPGKey resp, err := s.client.Do(ctx, req, &keys) if err != nil { @@ -78,16 +75,13 @@ func (s *UsersService) ListGPGKeys(ctx context.Context, user string, opt *ListOp // via Basic Auth or via OAuth with at least read:gpg_key scope. // // GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#get-a-single-gpg-key -func (s *UsersService) GetGPGKey(ctx context.Context, id int) (*GPGKey, *Response, error) { +func (s *UsersService) GetGPGKey(ctx context.Context, id int64) (*GPGKey, *Response, error) { u := fmt.Sprintf("user/gpg_keys/%v", id) req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGitSigningPreview) - key := &GPGKey{} resp, err := s.client.Do(ctx, req, key) if err != nil { @@ -110,9 +104,6 @@ func (s *UsersService) CreateGPGKey(ctx context.Context, armoredPublicKey string return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGitSigningPreview) - key := &GPGKey{} resp, err := s.client.Do(ctx, req, key) if err != nil { @@ -126,15 +117,12 @@ func (s *UsersService) CreateGPGKey(ctx context.Context, armoredPublicKey string // via OAuth with at least admin:gpg_key scope. // // GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#delete-a-gpg-key -func (s *UsersService) DeleteGPGKey(ctx context.Context, id int) (*Response, error) { +func (s *UsersService) DeleteGPGKey(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("user/gpg_keys/%v", id) req, err := s.client.NewRequest("DELETE", u, nil) if err != nil { return nil, err } - // TODO: remove custom Accept header when this API fully launches. 
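// Editorial aside (not part of the upstream diff): GPG key IDs move from int to int64
// in this change. A hedged sketch of replacing the authenticated user's keys; it
// assumes go-github's usual "empty user string means the authenticated user"
// convention and the generated GetID accessor.
package gpgexample

import (
	"context"

	"github.com/google/go-github/github"
)

func replaceGPGKeys(ctx context.Context, client *github.Client, armoredPublicKey string) error {
	keys, _, err := client.Users.ListGPGKeys(ctx, "", nil)
	if err != nil {
		return err
	}
	for _, k := range keys {
		// DeleteGPGKey now takes an int64 ID, matching the GPGKey field changes above.
		if _, err := client.Users.DeleteGPGKey(ctx, k.GetID()); err != nil {
			return err
		}
	}
	_, _, err = client.Users.CreateGPGKey(ctx, armoredPublicKey)
	return err
}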
- req.Header.Set("Accept", mediaTypeGitSigningPreview) - return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/users_gpg_keys_test.go b/vendor/github.com/google/go-github/github/users_gpg_keys_test.go index f4180df..dfabab2 100644 --- a/vendor/github.com/google/go-github/github/users_gpg_keys_test.go +++ b/vendor/github.com/google/go-github/github/users_gpg_keys_test.go @@ -15,12 +15,11 @@ import ( ) func TestUsersService_ListGPGKeys_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/gpg_keys", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"id":1,"primary_key_id":2}]`) }) @@ -31,19 +30,18 @@ func TestUsersService_ListGPGKeys_authenticatedUser(t *testing.T) { t.Errorf("Users.ListGPGKeys returned error: %v", err) } - want := []*GPGKey{{ID: Int(1), PrimaryKeyID: Int(2)}} + want := []*GPGKey{{ID: Int64(1), PrimaryKeyID: Int64(2)}} if !reflect.DeepEqual(keys, want) { t.Errorf("Users.ListGPGKeys = %+v, want %+v", keys, want) } } func TestUsersService_ListGPGKeys_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/gpg_keys", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) fmt.Fprint(w, `[{"id":1,"primary_key_id":2}]`) }) @@ -52,24 +50,26 @@ func TestUsersService_ListGPGKeys_specifiedUser(t *testing.T) { t.Errorf("Users.ListGPGKeys returned error: %v", err) } - want := []*GPGKey{{ID: Int(1), PrimaryKeyID: Int(2)}} + want := []*GPGKey{{ID: Int64(1), PrimaryKeyID: Int64(2)}} if !reflect.DeepEqual(keys, want) { t.Errorf("Users.ListGPGKeys = %+v, want %+v", keys, want) } } func TestUsersService_ListGPGKeys_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.ListGPGKeys(context.Background(), "%", nil) testURLParseError(t, err) } func TestUsersService_GetGPGKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/gpg_keys/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) fmt.Fprint(w, `{"id":1}`) }) @@ -78,14 +78,14 @@ func TestUsersService_GetGPGKey(t *testing.T) { t.Errorf("Users.GetGPGKey returned error: %v", err) } - want := &GPGKey{ID: Int(1)} + want := &GPGKey{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Users.GetGPGKey = %+v, want %+v", key, want) } } func TestUsersService_CreateGPGKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := ` @@ -104,7 +104,6 @@ mQINBFcEd9kBEACo54TDbGhKlXKWMvJgecEUKPPcv7XdnpKdGb3LRw5MvFwT0V0f json.NewDecoder(r.Body).Decode(&gpgKey) testMethod(t, r, "POST") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) if gpgKey.ArmoredPublicKey == nil || *gpgKey.ArmoredPublicKey != input { t.Errorf("gpgKey = %+v, want %q", gpgKey, input) } @@ -117,19 +116,18 @@ mQINBFcEd9kBEACo54TDbGhKlXKWMvJgecEUKPPcv7XdnpKdGb3LRw5MvFwT0V0f t.Errorf("Users.GetGPGKey returned error: %v", err) } - want := &GPGKey{ID: Int(1)} + want := &GPGKey{ID: Int64(1)} if !reflect.DeepEqual(gpgKey, want) { t.Errorf("Users.GetGPGKey = %+v, want %+v", gpgKey, want) } } func TestUsersService_DeleteGPGKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer 
teardown() mux.HandleFunc("/user/gpg_keys/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeGitSigningPreview) }) _, err := client.Users.DeleteGPGKey(context.Background(), 1) diff --git a/vendor/github.com/google/go-github/github/users_keys.go b/vendor/github.com/google/go-github/github/users_keys.go index 97ed4b8..ddc832a 100644 --- a/vendor/github.com/google/go-github/github/users_keys.go +++ b/vendor/github.com/google/go-github/github/users_keys.go @@ -12,7 +12,7 @@ import ( // Key represents a public SSH key used to authenticate a user or deploy script. type Key struct { - ID *int `json:"id,omitempty"` + ID *int64 `json:"id,omitempty"` Key *string `json:"key,omitempty"` URL *string `json:"url,omitempty"` Title *string `json:"title,omitempty"` @@ -56,7 +56,7 @@ func (s *UsersService) ListKeys(ctx context.Context, user string, opt *ListOptio // GetKey fetches a single public key. // // GitHub API docs: https://developer.github.com/v3/users/keys/#get-a-single-public-key -func (s *UsersService) GetKey(ctx context.Context, id int) (*Key, *Response, error) { +func (s *UsersService) GetKey(ctx context.Context, id int64) (*Key, *Response, error) { u := fmt.Sprintf("user/keys/%v", id) req, err := s.client.NewRequest("GET", u, nil) @@ -96,7 +96,7 @@ func (s *UsersService) CreateKey(ctx context.Context, key *Key) (*Key, *Response // DeleteKey deletes a public key. // // GitHub API docs: https://developer.github.com/v3/users/keys/#delete-a-public-key -func (s *UsersService) DeleteKey(ctx context.Context, id int) (*Response, error) { +func (s *UsersService) DeleteKey(ctx context.Context, id int64) (*Response, error) { u := fmt.Sprintf("user/keys/%v", id) req, err := s.client.NewRequest("DELETE", u, nil) diff --git a/vendor/github.com/google/go-github/github/users_keys_test.go b/vendor/github.com/google/go-github/github/users_keys_test.go index 8554c50..3115fdb 100644 --- a/vendor/github.com/google/go-github/github/users_keys_test.go +++ b/vendor/github.com/google/go-github/github/users_keys_test.go @@ -15,7 +15,7 @@ import ( ) func TestUsersService_ListKeys_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/keys", func(w http.ResponseWriter, r *http.Request) { @@ -30,14 +30,14 @@ func TestUsersService_ListKeys_authenticatedUser(t *testing.T) { t.Errorf("Users.ListKeys returned error: %v", err) } - want := []*Key{{ID: Int(1)}} + want := []*Key{{ID: Int64(1)}} if !reflect.DeepEqual(keys, want) { t.Errorf("Users.ListKeys returned %+v, want %+v", keys, want) } } func TestUsersService_ListKeys_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u/keys", func(w http.ResponseWriter, r *http.Request) { @@ -50,19 +50,22 @@ func TestUsersService_ListKeys_specifiedUser(t *testing.T) { t.Errorf("Users.ListKeys returned error: %v", err) } - want := []*Key{{ID: Int(1)}} + want := []*Key{{ID: Int64(1)}} if !reflect.DeepEqual(keys, want) { t.Errorf("Users.ListKeys returned %+v, want %+v", keys, want) } } func TestUsersService_ListKeys_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.ListKeys(context.Background(), "%", nil) testURLParseError(t, err) } func TestUsersService_GetKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/keys/1", func(w http.ResponseWriter, r *http.Request) { @@ -75,14 +78,14 @@ 
func TestUsersService_GetKey(t *testing.T) { t.Errorf("Users.GetKey returned error: %v", err) } - want := &Key{ID: Int(1)} + want := &Key{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Users.GetKey returned %+v, want %+v", key, want) } } func TestUsersService_CreateKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &Key{Key: String("k"), Title: String("t")} @@ -104,14 +107,14 @@ func TestUsersService_CreateKey(t *testing.T) { t.Errorf("Users.GetKey returned error: %v", err) } - want := &Key{ID: Int(1)} + want := &Key{ID: Int64(1)} if !reflect.DeepEqual(key, want) { t.Errorf("Users.GetKey returned %+v, want %+v", key, want) } } func TestUsersService_DeleteKey(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/keys/1", func(w http.ResponseWriter, r *http.Request) { diff --git a/vendor/github.com/google/go-github/github/users_test.go b/vendor/github.com/google/go-github/github/users_test.go index 34c239a..8fa95a2 100644 --- a/vendor/github.com/google/go-github/github/users_test.go +++ b/vendor/github.com/google/go-github/github/users_test.go @@ -19,7 +19,7 @@ func TestUser_marshall(t *testing.T) { u := &User{ Login: String("l"), - ID: Int(1), + ID: Int64(1), URL: String("u"), AvatarURL: String("a"), GravatarID: String("g"), @@ -57,7 +57,7 @@ func TestUser_marshall(t *testing.T) { } func TestUsersService_Get_authenticatedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { @@ -70,14 +70,14 @@ func TestUsersService_Get_authenticatedUser(t *testing.T) { t.Errorf("Users.Get returned error: %v", err) } - want := &User{ID: Int(1)} + want := &User{ID: Int64(1)} if !reflect.DeepEqual(user, want) { t.Errorf("Users.Get returned %+v, want %+v", user, want) } } func TestUsersService_Get_specifiedUser(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users/u", func(w http.ResponseWriter, r *http.Request) { @@ -90,19 +90,22 @@ func TestUsersService_Get_specifiedUser(t *testing.T) { t.Errorf("Users.Get returned error: %v", err) } - want := &User{ID: Int(1)} + want := &User{ID: Int64(1)} if !reflect.DeepEqual(user, want) { t.Errorf("Users.Get returned %+v, want %+v", user, want) } } func TestUsersService_Get_invalidUser(t *testing.T) { + client, _, _, teardown := setup() + defer teardown() + _, _, err := client.Users.Get(context.Background(), "%") testURLParseError(t, err) } func TestUsersService_GetByID(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/1", func(w http.ResponseWriter, r *http.Request) { @@ -115,14 +118,14 @@ func TestUsersService_GetByID(t *testing.T) { t.Fatalf("Users.GetByID returned error: %v", err) } - want := &User{ID: Int(1)} + want := &User{ID: Int64(1)} if !reflect.DeepEqual(user, want) { t.Errorf("Users.GetByID returned %+v, want %+v", user, want) } } func TestUsersService_Edit(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() input := &User{Name: String("n")} @@ -144,14 +147,37 @@ func TestUsersService_Edit(t *testing.T) { t.Errorf("Users.Edit returned error: %v", err) } - want := &User{ID: Int(1)} + want := &User{ID: Int64(1)} if !reflect.DeepEqual(user, want) { t.Errorf("Users.Edit returned %+v, want %+v", user, want) } } +func TestUsersService_GetHovercard(t *testing.T) { + client, mux, _, teardown := setup() + defer teardown() + + 
mux.HandleFunc("/users/u/hovercard", func(w http.ResponseWriter, r *http.Request) { + testMethod(t, r, "GET") + testHeader(t, r, "Accept", mediaTypeHovercardPreview) + testFormValues(t, r, values{"subject_type": "repository", "subject_id": "20180408"}) + fmt.Fprint(w, `{"contexts": [{"message":"Owns this repository", "octicon": "repo"}]}`) + }) + + opt := &HovercardOptions{SubjectType: "repository", SubjectID: "20180408"} + hovercard, _, err := client.Users.GetHovercard(context.Background(), "u", opt) + if err != nil { + t.Errorf("Users.GetHovercard returned error: %v", err) + } + + want := &Hovercard{Contexts: []*UserContext{{Message: String("Owns this repository"), Octicon: String("repo")}}} + if !reflect.DeepEqual(hovercard, want) { + t.Errorf("Users.GetHovercard returned %+v, want %+v", hovercard, want) + } +} + func TestUsersService_ListAll(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) { @@ -166,19 +192,18 @@ func TestUsersService_ListAll(t *testing.T) { t.Errorf("Users.Get returned error: %v", err) } - want := []*User{{ID: Int(2)}} + want := []*User{{ID: Int64(2)}} if !reflect.DeepEqual(users, want) { t.Errorf("Users.ListAll returned %+v, want %+v", users, want) } } func TestUsersService_ListInvitations(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/repository_invitations", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) }) @@ -187,14 +212,14 @@ func TestUsersService_ListInvitations(t *testing.T) { t.Errorf("Users.ListInvitations returned error: %v", err) } - want := []*RepositoryInvitation{{ID: Int(1)}, {ID: Int(2)}} + want := []*RepositoryInvitation{{ID: Int64(1)}, {ID: Int64(2)}} if !reflect.DeepEqual(got, want) { t.Errorf("Users.ListInvitations = %+v, want %+v", got, want) } } func TestUsersService_ListInvitations_withOptions(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/repository_invitations", func(w http.ResponseWriter, r *http.Request) { @@ -202,7 +227,6 @@ func TestUsersService_ListInvitations_withOptions(t *testing.T) { testFormValues(t, r, values{ "page": "2", }) - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) fmt.Fprintf(w, `[{"id":1}, {"id":2}]`) }) @@ -212,12 +236,11 @@ func TestUsersService_ListInvitations_withOptions(t *testing.T) { } } func TestUsersService_AcceptInvitation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/repository_invitations/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PATCH") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) w.WriteHeader(http.StatusNoContent) }) @@ -227,12 +250,11 @@ func TestUsersService_AcceptInvitation(t *testing.T) { } func TestUsersService_DeclineInvitation(t *testing.T) { - setup() + client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/user/repository_invitations/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "DELETE") - testHeader(t, r, "Accept", mediaTypeRepositoryInvitationsPreview) w.WriteHeader(http.StatusNoContent) }) diff --git a/vendor/github.com/google/go-github/github/with_appengine.go b/vendor/github.com/google/go-github/github/with_appengine.go index 957c4d3..59ce26b 100644 --- 
a/vendor/github.com/google/go-github/github/with_appengine.go +++ b/vendor/github.com/google/go-github/github/with_appengine.go @@ -6,21 +6,15 @@ // +build appengine // This file provides glue for making github work on App Engine. -// In order to get the entire github package to compile with -// Go 1.6, you will need to rewrite all the import "context" lines. -// Fortunately, this is easy with "gofmt": -// -// gofmt -w -r '"context" -> "golang.org/x/net/context"' *.go package github import ( "context" "net/http" - - "google.golang.org/appengine" ) -func withContext(ctx context.Context, req *http.Request) (context.Context, *http.Request) { - return appengine.WithContext(ctx, req), req +func withContext(ctx context.Context, req *http.Request) *http.Request { + // No-op because App Engine adds context to a request differently. + return req } diff --git a/vendor/github.com/google/go-github/github/without_appengine.go b/vendor/github.com/google/go-github/github/without_appengine.go index b0edc04..6f8fdac 100644 --- a/vendor/github.com/google/go-github/github/without_appengine.go +++ b/vendor/github.com/google/go-github/github/without_appengine.go @@ -14,6 +14,6 @@ import ( "net/http" ) -func withContext(ctx context.Context, req *http.Request) (context.Context, *http.Request) { - return ctx, req.WithContext(ctx) +func withContext(ctx context.Context, req *http.Request) *http.Request { + return req.WithContext(ctx) } diff --git a/vendor/github.com/google/go-github/go.mod b/vendor/github.com/google/go-github/go.mod new file mode 100644 index 0000000..eb127af --- /dev/null +++ b/vendor/github.com/google/go-github/go.mod @@ -0,0 +1,12 @@ +module github.com/google/go-github/v24 + +require ( + github.com/golang/protobuf v1.2.0 // indirect + github.com/google/go-github v17.0.0+incompatible + github.com/google/go-querystring v1.0.0 + golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 + golang.org/x/net v0.0.0-20190311183353-d8887717615a // indirect + golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be + golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 // indirect + google.golang.org/appengine v1.1.0 +) diff --git a/vendor/github.com/google/go-github/go.sum b/vendor/github.com/google/go-github/go.sum new file mode 100644 index 0000000..317188b --- /dev/null +++ b/vendor/github.com/google/go-github/go.sum @@ -0,0 +1,19 @@ +github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY= +github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= +github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= diff --git a/vendor/github.com/google/go-github/tests/README.md b/vendor/github.com/google/go-github/test/README.md similarity index 94% rename from vendor/github.com/google/go-github/tests/README.md rename to vendor/github.com/google/go-github/test/README.md index f21d079..455795a 100644 --- a/vendor/github.com/google/go-github/tests/README.md +++ b/vendor/github.com/google/go-github/test/README.md @@ -37,7 +37,7 @@ These tests require a GitHub user (username and password), and also that a GitHub Application (with attendant Client ID and Client Secret) be available. Then, to execute just the Authorization tests: - GITHUB_USERNAME='' GITHUB_PASSWORD='' GITHUB_CLIENT_ID='' GITHUB_CLIENT_SECRET='' go test -v -tags=integration --run=Authorizations ./integration + GITHUB_USERNAME='' GITHUB_PASSWORD='' GITHUB_CLIENT_ID='' GITHUB_CLIENT_SECRET='' go test -v -tags=integration -run=Authorizations ./integration If some or all of these environment variables are not available, certain of the Authorization integration tests will be skipped. @@ -54,7 +54,7 @@ This test sends real network traffic to the GitHub API and will exhaust the default unregistered rate limit (60 requests per hour) very quickly. Additionally, some data is only returned for authenticated API calls. Unlike the integration tests above, these tests only read data, so it's less -imperitive that these be run using a dedicated test account (though you still +imperative that these be run using a dedicated test account (though you still really should).
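Reviewer's aside: the README hunk above notes that Authorization tests are skipped when credentials are not exported. A minimal Go sketch of such a guard, assuming a hypothetical helper name (requireAuthEnv is not part of this patch):

package integration

import (
    "os"
    "testing"
)

// requireAuthEnv skips the calling test when any of the credentials
// described in the README are not exported in the environment.
func requireAuthEnv(t *testing.T) {
    t.Helper()
    for _, v := range []string{"GITHUB_USERNAME", "GITHUB_PASSWORD", "GITHUB_CLIENT_ID", "GITHUB_CLIENT_SECRET"} {
        if os.Getenv(v) == "" {
            t.Skipf("%s is not set; skipping Authorization integration test", v)
        }
    }
}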
Run the fields tool using: diff --git a/vendor/github.com/google/go-github/tests/fields/fields.go b/vendor/github.com/google/go-github/test/fields/fields.go similarity index 98% rename from vendor/github.com/google/go-github/tests/fields/fields.go rename to vendor/github.com/google/go-github/test/fields/fields.go index 7ee1f6e..5df051e 100644 --- a/vendor/github.com/google/go-github/tests/fields/fields.go +++ b/vendor/github.com/google/go-github/test/fields/fields.go @@ -25,7 +25,7 @@ import ( "reflect" "strings" - "github.com/google/go-github/github" + "github.com/google/go-github/v24/github" "golang.org/x/oauth2" ) diff --git a/vendor/github.com/google/go-github/tests/integration/activity_test.go b/vendor/github.com/google/go-github/test/integration/activity_test.go similarity index 74% rename from vendor/github.com/google/go-github/tests/integration/activity_test.go rename to vendor/github.com/google/go-github/test/integration/activity_test.go index d6b31d3..df6c34d 100644 --- a/vendor/github.com/google/go-github/tests/integration/activity_test.go +++ b/vendor/github.com/google/go-github/test/integration/activity_test.go @@ -5,9 +5,10 @@ // +build integration -package tests +package integration import ( + "context" "testing" "github.com/google/go-github/github" @@ -19,7 +20,7 @@ const ( ) func TestActivity_Starring(t *testing.T) { - stargazers, _, err := client.Activity.ListStargazers(owner, repo, nil) + stargazers, _, err := client.Activity.ListStargazers(context.Background(), owner, repo, nil) if err != nil { t.Fatalf("Activity.ListStargazers returned error: %v", err) } @@ -34,7 +35,7 @@ func TestActivity_Starring(t *testing.T) { } // first, check if already starred the target repository - star, _, err := client.Activity.IsStarred(owner, repo) + star, _, err := client.Activity.IsStarred(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.IsStarred returned error: %v", err) } @@ -43,13 +44,13 @@ func TestActivity_Starring(t *testing.T) { } // star the target repository - _, err = client.Activity.Star(owner, repo) + _, err = client.Activity.Star(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.Star returned error: %v", err) } // check again and verify starred - star, _, err = client.Activity.IsStarred(owner, repo) + star, _, err = client.Activity.IsStarred(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.IsStarred returned error: %v", err) } @@ -58,13 +59,13 @@ func TestActivity_Starring(t *testing.T) { } // unstar - _, err = client.Activity.Unstar(owner, repo) + _, err = client.Activity.Unstar(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.Unstar returned error: %v", err) } // check again and verify not watching - star, _, err = client.Activity.IsStarred(owner, repo) + star, _, err = client.Activity.IsStarred(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.IsStarred returned error: %v", err) } @@ -75,13 +76,13 @@ func TestActivity_Starring(t *testing.T) { func deleteSubscription(t *testing.T) { // delete subscription - _, err := client.Activity.DeleteRepositorySubscription(owner, repo) + _, err := client.Activity.DeleteRepositorySubscription(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.DeleteRepositorySubscription returned error: %v", err) } // check again and verify not watching - sub, _, err := client.Activity.GetRepositorySubscription(owner, repo) + sub, _, err := client.Activity.GetRepositorySubscription(context.Background(), owner, repo) if err != nil { 
t.Fatalf("Activity.GetRepositorySubscription returned error: %v", err) } @@ -93,13 +94,13 @@ func deleteSubscription(t *testing.T) { func createSubscription(t *testing.T) { // watch the target repository sub := &github.Subscription{Subscribed: github.Bool(true)} - _, _, err := client.Activity.SetRepositorySubscription(owner, repo, sub) + _, _, err := client.Activity.SetRepositorySubscription(context.Background(), owner, repo, sub) if err != nil { t.Fatalf("Activity.SetRepositorySubscription returned error: %v", err) } // check again and verify watching - sub, _, err = client.Activity.GetRepositorySubscription(owner, repo) + sub, _, err = client.Activity.GetRepositorySubscription(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.GetRepositorySubscription returned error: %v", err) } @@ -109,7 +110,7 @@ func createSubscription(t *testing.T) { } func TestActivity_Watching(t *testing.T) { - watchers, _, err := client.Activity.ListWatchers(owner, repo, nil) + watchers, _, err := client.Activity.ListWatchers(context.Background(), owner, repo, nil) if err != nil { t.Fatalf("Activity.ListWatchers returned error: %v", err) } @@ -124,7 +125,7 @@ func TestActivity_Watching(t *testing.T) { } // first, check if already watching the target repository - sub, _, err := client.Activity.GetRepositorySubscription(owner, repo) + sub, _, err := client.Activity.GetRepositorySubscription(context.Background(), owner, repo) if err != nil { t.Fatalf("Activity.GetRepositorySubscription returned error: %v", err) } diff --git a/vendor/github.com/google/go-github/tests/integration/authorizations_test.go b/vendor/github.com/google/go-github/test/integration/authorizations_test.go similarity index 84% rename from vendor/github.com/google/go-github/tests/integration/authorizations_test.go rename to vendor/github.com/google/go-github/test/integration/authorizations_test.go index 7e953bf..532ac76 100644 --- a/vendor/github.com/google/go-github/tests/integration/authorizations_test.go +++ b/vendor/github.com/google/go-github/test/integration/authorizations_test.go @@ -5,9 +5,10 @@ // +build integration -package tests +package integration import ( + "context" "math/rand" "os" "strconv" @@ -31,7 +32,7 @@ func TestAuthorizationsBasicOperations(t *testing.T) { client := getUserPassClient(t) - auths, resp, err := client.Authorizations.List(nil) + auths, resp, err := client.Authorizations.List(context.Background(), nil) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -39,7 +40,7 @@ func TestAuthorizationsBasicOperations(t *testing.T) { authReq := generatePersonalAuthTokenRequest() - createdAuth, resp, err := client.Authorizations.Create(authReq) + createdAuth, resp, err := client.Authorizations.Create(context.Background(), authReq) failOnError(t, err) failIfNotStatusCode(t, resp, 201) @@ -47,7 +48,7 @@ func TestAuthorizationsBasicOperations(t *testing.T) { t.Fatal("Returned Authorization does not match the requested Authorization.") } - auths, resp, err = client.Authorizations.List(nil) + auths, resp, err = client.Authorizations.List(context.Background(), nil) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -59,7 +60,7 @@ func TestAuthorizationsBasicOperations(t *testing.T) { authUpdate := new(github.AuthorizationUpdateRequest) authUpdate.Note = github.String("Updated note: " + randString()) - updatedAuth, resp, err := client.Authorizations.Edit(*createdAuth.ID, authUpdate) + updatedAuth, resp, err := client.Authorizations.Edit(context.Background(), *createdAuth.ID, authUpdate) failOnError(t, 
err) failIfNotStatusCode(t, resp, 200) @@ -68,7 +69,7 @@ func TestAuthorizationsBasicOperations(t *testing.T) { } // Verify that the Get operation also reflects the update - retrievedAuth, resp, err := client.Authorizations.Get(*createdAuth.ID) + retrievedAuth, resp, err := client.Authorizations.Get(context.Background(), *createdAuth.ID) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -77,19 +78,19 @@ func TestAuthorizationsBasicOperations(t *testing.T) { } // Now, let's delete... - resp, err = client.Authorizations.Delete(*createdAuth.ID) + resp, err = client.Authorizations.Delete(context.Background(), *createdAuth.ID) failOnError(t, err) failIfNotStatusCode(t, resp, 204) // Verify that we can no longer retrieve the auth - retrievedAuth, resp, err = client.Authorizations.Get(*createdAuth.ID) + retrievedAuth, resp, err = client.Authorizations.Get(context.Background(), *createdAuth.ID) if err == nil { t.Fatal("Should have failed due to 404") } failIfNotStatusCode(t, resp, 404) // Verify that our count reset back to the initial value - auths, resp, err = client.Authorizations.List(nil) + auths, resp, err = client.Authorizations.List(context.Background(), nil) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -114,7 +115,7 @@ func TestAuthorizationsAppOperations(t *testing.T) { authRequest := generateAppAuthTokenRequest(clientID, clientSecret) - createdAuth, resp, err := userAuthenticatedClient.Authorizations.GetOrCreateForApp(clientID, authRequest) + createdAuth, resp, err := userAuthenticatedClient.Authorizations.GetOrCreateForApp(context.Background(), clientID, authRequest) failOnError(t, err) failIfNotStatusCode(t, resp, 201) @@ -125,7 +126,7 @@ func TestAuthorizationsAppOperations(t *testing.T) { // Let's try the same request again, this time it should return the same // auth instead of creating a new one - secondAuth, resp, err := userAuthenticatedClient.Authorizations.GetOrCreateForApp(clientID, authRequest) + secondAuth, resp, err := userAuthenticatedClient.Authorizations.GetOrCreateForApp(context.Background(), clientID, authRequest) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -135,7 +136,7 @@ func TestAuthorizationsAppOperations(t *testing.T) { } // Verify the token - appAuth, resp, err := appAuthenticatedClient.Authorizations.Check(clientID, *createdAuth.Token) + appAuth, resp, err := appAuthenticatedClient.Authorizations.Check(context.Background(), clientID, *createdAuth.Token) failOnError(t, err) failIfNotStatusCode(t, resp, 200) @@ -145,19 +146,19 @@ func TestAuthorizationsAppOperations(t *testing.T) { } // Let's verify that we get a 404 for a non-existent token - _, resp, err = appAuthenticatedClient.Authorizations.Check(clientID, InvalidTokenValue) + _, resp, err = appAuthenticatedClient.Authorizations.Check(context.Background(), clientID, InvalidTokenValue) if err == nil { t.Fatal("An error should have been returned because of the invalid token.") } failIfNotStatusCode(t, resp, 404) // Let's reset the token - resetAuth, resp, err := appAuthenticatedClient.Authorizations.Reset(clientID, *createdAuth.Token) + resetAuth, resp, err := appAuthenticatedClient.Authorizations.Reset(context.Background(), clientID, *createdAuth.Token) failOnError(t, err) failIfNotStatusCode(t, resp, 200) // Let's verify that we get a 404 for a non-existent token - _, resp, err = appAuthenticatedClient.Authorizations.Reset(clientID, InvalidTokenValue) + _, resp, err = appAuthenticatedClient.Authorizations.Reset(context.Background(), clientID, InvalidTokenValue) if err == nil 
{ t.Fatal("An error should have been returned because of the invalid token.") } @@ -174,19 +175,19 @@ func TestAuthorizationsAppOperations(t *testing.T) { } // Verify that the original token is now invalid - _, resp, err = appAuthenticatedClient.Authorizations.Check(clientID, *createdAuth.Token) + _, resp, err = appAuthenticatedClient.Authorizations.Check(context.Background(), clientID, *createdAuth.Token) if err == nil { t.Fatal("The original token should be invalid.") } failIfNotStatusCode(t, resp, 404) // Check that the reset token is valid - _, resp, err = appAuthenticatedClient.Authorizations.Check(clientID, *resetAuth.Token) + _, resp, err = appAuthenticatedClient.Authorizations.Check(context.Background(), clientID, *resetAuth.Token) failOnError(t, err) failIfNotStatusCode(t, resp, 200) // Let's revoke the token - resp, err = appAuthenticatedClient.Authorizations.Revoke(clientID, *resetAuth.Token) + resp, err = appAuthenticatedClient.Authorizations.Revoke(context.Background(), clientID, *resetAuth.Token) failOnError(t, err) failIfNotStatusCode(t, resp, 204) @@ -195,7 +196,7 @@ func TestAuthorizationsAppOperations(t *testing.T) { time.Sleep(time.Second * 2) // Now, the reset token should also be invalid - _, resp, err = appAuthenticatedClient.Authorizations.Check(clientID, *resetAuth.Token) + _, resp, err = appAuthenticatedClient.Authorizations.Check(context.Background(), clientID, *resetAuth.Token) if err == nil { t.Fatal("The reset token should be invalid.") } diff --git a/vendor/github.com/google/go-github/tests/integration/doc.go b/vendor/github.com/google/go-github/test/integration/doc.go similarity index 78% rename from vendor/github.com/google/go-github/tests/integration/doc.go rename to vendor/github.com/google/go-github/test/integration/doc.go index 9d13112..5ee470c 100644 --- a/vendor/github.com/google/go-github/tests/integration/doc.go +++ b/vendor/github.com/google/go-github/test/integration/doc.go @@ -3,9 +3,9 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package tests contains integration tests. +// Package integration contains integration tests. // // These tests call the live GitHub API, and therefore require a little more -// setup to run. See https://github.com/google/go-github/tree/master/tests#integration +// setup to run. See https://github.com/google/go-github/tree/master/test#integration // for more information. 
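The recurring pattern in the test changes above, adding context.Background() as the first argument, reflects the context-aware method signatures in this go-github version. A minimal caller-side sketch, assuming the unversioned vendor import path used elsewhere in this tree:

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/google/go-github/github"
)

func main() {
    client := github.NewClient(nil)

    // Every service method now takes a context first, so a call can carry
    // a deadline or be cancelled by the caller.
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    user, _, err := client.Users.Get(ctx, "octocat")
    if err != nil {
        fmt.Println("error:", err)
        return
    }
    fmt.Println(user.GetLogin())
}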
-package tests +package integration diff --git a/vendor/github.com/google/go-github/tests/integration/github_test.go b/vendor/github.com/google/go-github/test/integration/github_test.go similarity index 87% rename from vendor/github.com/google/go-github/tests/integration/github_test.go rename to vendor/github.com/google/go-github/test/integration/github_test.go index 5a63f05..489d38a 100644 --- a/vendor/github.com/google/go-github/tests/integration/github_test.go +++ b/vendor/github.com/google/go-github/test/integration/github_test.go @@ -5,7 +5,7 @@ // +build integration -package tests +package integration import ( "context" @@ -63,7 +63,7 @@ func createRandomTestRepository(owner string, autoinit bool) (*github.Repository var repoName string for { repoName = fmt.Sprintf("test-%d", rand.Int()) - _, resp, err := client.Repositories.Get(owner, repoName) + _, resp, err := client.Repositories.Get(context.Background(), owner, repoName) if err != nil { if resp.StatusCode == http.StatusNotFound { // found a non-existent repo, perfect @@ -75,7 +75,7 @@ func createRandomTestRepository(owner string, autoinit bool) (*github.Repository } // create the repository - repo, _, err := client.Repositories.Create("", &github.Repository{Name: github.String(repoName), AutoInit: github.Bool(autoinit)}) + repo, _, err := client.Repositories.Create(context.Background(), "", &github.Repository{Name: github.String(repoName), AutoInit: github.Bool(autoinit)}) if err != nil { return nil, err } diff --git a/vendor/github.com/google/go-github/tests/integration/issues_test.go b/vendor/github.com/google/go-github/test/integration/issues_test.go similarity index 68% rename from vendor/github.com/google/go-github/tests/integration/issues_test.go rename to vendor/github.com/google/go-github/test/integration/issues_test.go index 12c549d..a5e4890 100644 --- a/vendor/github.com/google/go-github/tests/integration/issues_test.go +++ b/vendor/github.com/google/go-github/test/integration/issues_test.go @@ -5,12 +5,15 @@ // +build integration -package tests +package integration -import "testing" +import ( + "context" + "testing" +) func TestIssueEvents(t *testing.T) { - events, _, err := client.Issues.ListRepositoryEvents("google", "go-github", nil) + events, _, err := client.Issues.ListRepositoryEvents(context.Background(), "google", "go-github", nil) if err != nil { t.Fatalf("Issues.ListRepositoryEvents returned error: %v", err) } @@ -19,7 +22,7 @@ func TestIssueEvents(t *testing.T) { t.Errorf("ListRepositoryEvents returned no events") } - events, _, err = client.Issues.ListIssueEvents("google", "go-github", 1, nil) + events, _, err = client.Issues.ListIssueEvents(context.Background(), "google", "go-github", 1, nil) if err != nil { t.Fatalf("Issues.ListIssueEvents returned error: %v", err) } @@ -28,7 +31,7 @@ func TestIssueEvents(t *testing.T) { t.Errorf("ListIssueEvents returned no events") } - event, _, err := client.Issues.GetEvent("google", "go-github", *events[0].ID) + event, _, err := client.Issues.GetEvent(context.Background(), "google", "go-github", *events[0].ID) if err != nil { t.Fatalf("Issues.GetEvent returned error: %v", err) } diff --git a/vendor/github.com/google/go-github/tests/integration/misc_test.go b/vendor/github.com/google/go-github/test/integration/misc_test.go similarity index 74% rename from vendor/github.com/google/go-github/tests/integration/misc_test.go rename to vendor/github.com/google/go-github/test/integration/misc_test.go index bf98df1..1595cf0 100644 --- 
a/vendor/github.com/google/go-github/tests/integration/misc_test.go +++ b/vendor/github.com/google/go-github/test/integration/misc_test.go @@ -5,15 +5,16 @@ // +build integration -package tests +package integration import ( + "context" "testing" "time" ) func TestEmojis(t *testing.T) { - emoji, _, err := client.ListEmojis() + emoji, _, err := client.ListEmojis(context.Background()) if err != nil { t.Fatalf("ListEmojis returned error: %v", err) } @@ -28,7 +29,7 @@ func TestEmojis(t *testing.T) { } func TestAPIMeta(t *testing.T) { - meta, _, err := client.APIMeta() + meta, _, err := client.APIMeta(context.Background()) if err != nil { t.Fatalf("APIMeta returned error: %v", err) } @@ -47,7 +48,7 @@ func TestAPIMeta(t *testing.T) { } func TestRateLimits(t *testing.T) { - limits, _, err := client.RateLimits() + limits, _, err := client.RateLimits(context.Background()) if err != nil { t.Fatalf("RateLimits returned error: %v", err) } @@ -65,3 +66,14 @@ func TestRateLimits(t *testing.T) { t.Errorf("Core.Reset is more than 1 minute in the past; that doesn't seem right.") } } + +func TestListServiceHooks(t *testing.T) { + hooks, _, err := client.ListServiceHooks(context.Background()) + if err != nil { + t.Fatalf("ListServiceHooks returned error: %v", err) + } + + if len(hooks) == 0 { + t.Fatalf("ListServiceHooks returned no hooks") + } +} diff --git a/vendor/github.com/google/go-github/tests/integration/pulls_test.go b/vendor/github.com/google/go-github/test/integration/pulls_test.go similarity index 79% rename from vendor/github.com/google/go-github/tests/integration/pulls_test.go rename to vendor/github.com/google/go-github/test/integration/pulls_test.go index fcfd8e0..4aadfbd 100644 --- a/vendor/github.com/google/go-github/tests/integration/pulls_test.go +++ b/vendor/github.com/google/go-github/test/integration/pulls_test.go @@ -5,12 +5,15 @@ // +build integration -package tests +package integration -import "testing" +import ( + "context" + "testing" +) func TestPullRequests_ListCommits(t *testing.T) { - commits, _, err := client.PullRequests.ListCommits("google", "go-github", 2, nil) + commits, _, err := client.PullRequests.ListCommits(context.Background(), "google", "go-github", 2, nil) if err != nil { t.Fatalf("PullRequests.ListCommits() returned error: %v", err) } diff --git a/vendor/github.com/google/go-github/tests/integration/repos_test.go b/vendor/github.com/google/go-github/test/integration/repos_test.go similarity index 67% rename from vendor/github.com/google/go-github/tests/integration/repos_test.go rename to vendor/github.com/google/go-github/test/integration/repos_test.go index d12802f..58aa182 100644 --- a/vendor/github.com/google/go-github/tests/integration/repos_test.go +++ b/vendor/github.com/google/go-github/test/integration/repos_test.go @@ -5,9 +5,10 @@ // +build integration -package tests +package integration import ( + "context" "net/http" "reflect" "testing" @@ -21,7 +22,7 @@ func TestRepositories_CRUD(t *testing.T) { } // get authenticated user - me, _, err := client.Users.Get("") + me, _, err := client.Users.Get(context.Background(), "") if err != nil { t.Fatalf("Users.Get('') returned error: %v", err) } @@ -34,19 +35,19 @@ func TestRepositories_CRUD(t *testing.T) { // update the repository description repo.Description = github.String("description") repo.DefaultBranch = nil // FIXME: this shouldn't be necessary - _, _, err = client.Repositories.Edit(*repo.Owner.Login, *repo.Name, repo) + _, _, err = client.Repositories.Edit(context.Background(), *repo.Owner.Login, 
*repo.Name, repo) if err != nil { t.Fatalf("Repositories.Edit() returned error: %v", err) } // delete the repository - _, err = client.Repositories.Delete(*repo.Owner.Login, *repo.Name) + _, err = client.Repositories.Delete(context.Background(), *repo.Owner.Login, *repo.Name) if err != nil { t.Fatalf("Repositories.Delete() returned error: %v", err) } // verify that the repository was deleted - _, resp, err := client.Repositories.Get(*repo.Owner.Login, *repo.Name) + _, resp, err := client.Repositories.Get(context.Background(), *repo.Owner.Login, *repo.Name) if err == nil { t.Fatalf("Test repository still exists after deleting it.") } @@ -57,7 +58,7 @@ func TestRepositories_CRUD(t *testing.T) { func TestRepositories_BranchesTags(t *testing.T) { // branches - branches, _, err := client.Repositories.ListBranches("git", "git", nil) + branches, _, err := client.Repositories.ListBranches(context.Background(), "git", "git", nil) if err != nil { t.Fatalf("Repositories.ListBranches() returned error: %v", err) } @@ -66,13 +67,13 @@ func TestRepositories_BranchesTags(t *testing.T) { t.Fatalf("Repositories.ListBranches('git', 'git') returned no branches") } - _, _, err = client.Repositories.GetBranch("git", "git", *branches[0].Name) + _, _, err = client.Repositories.GetBranch(context.Background(), "git", "git", *branches[0].Name) if err != nil { t.Fatalf("Repositories.GetBranch() returned error: %v", err) } // tags - tags, _, err := client.Repositories.ListTags("git", "git", nil) + tags, _, err := client.Repositories.ListTags(context.Background(), "git", "git", nil) if err != nil { t.Fatalf("Repositories.ListTags() returned error: %v", err) } @@ -82,24 +83,13 @@ func TestRepositories_BranchesTags(t *testing.T) { } } -func TestRepositories_ServiceHooks(t *testing.T) { - hooks, _, err := client.Repositories.ListServiceHooks() - if err != nil { - t.Fatalf("Repositories.ListServiceHooks() returned error: %v", err) - } - - if len(hooks) == 0 { - t.Fatalf("Repositories.ListServiceHooks() returned no hooks") - } -} - func TestRepositories_EditBranches(t *testing.T) { if !checkAuth("TestRepositories_EditBranches") { return } // get authenticated user - me, _, err := client.Users.Get("") + me, _, err := client.Users.Get(context.Background(), "") if err != nil { t.Fatalf("Users.Get('') returned error: %v", err) } @@ -109,7 +99,7 @@ func TestRepositories_EditBranches(t *testing.T) { t.Fatalf("createRandomTestRepository returned error: %v", err) } - branch, _, err := client.Repositories.GetBranch(*repo.Owner.Login, *repo.Name, "master") + branch, _, err := client.Repositories.GetBranch(context.Background(), *repo.Owner.Login, *repo.Name, "master") if err != nil { t.Fatalf("Repositories.GetBranch() returned error: %v", err) } @@ -120,32 +110,36 @@ func TestRepositories_EditBranches(t *testing.T) { protectionRequest := &github.ProtectionRequest{ RequiredStatusChecks: &github.RequiredStatusChecks{ - IncludeAdmins: true, - Strict: true, - Contexts: []string{"continuous-integration"}, + Strict: true, + Contexts: []string{"continuous-integration"}, }, - RequiredPullRequestReviews: &github.RequiredPullRequestReviews{ - IncludeAdmins: true, + RequiredPullRequestReviews: &github.PullRequestReviewsEnforcementRequest{ + DismissStaleReviews: true, }, + EnforceAdmins: true, // TODO: Only organization repositories can have users and team restrictions. // In order to be able to test these Restrictions, need to add support // for creating temporary organization repositories. 
Restrictions: nil, } - protection, _, err := client.Repositories.UpdateBranchProtection(*repo.Owner.Login, *repo.Name, "master", protectionRequest) + protection, _, err := client.Repositories.UpdateBranchProtection(context.Background(), *repo.Owner.Login, *repo.Name, "master", protectionRequest) if err != nil { t.Fatalf("Repositories.UpdateBranchProtection() returned error: %v", err) } want := &github.Protection{ RequiredStatusChecks: &github.RequiredStatusChecks{ - IncludeAdmins: true, - Strict: true, - Contexts: []string{"continuous-integration"}, + Strict: true, + Contexts: []string{"continuous-integration"}, + }, + RequiredPullRequestReviews: &github.PullRequestReviewsEnforcement{ + DismissStaleReviews: true, + RequiredApprovingReviewCount: 0, }, - RequiredPullRequestReviews: &github.RequiredPullRequestReviews{ - IncludeAdmins: true, + EnforceAdmins: &github.AdminEnforcement{ + URL: github.String("https://api.github.com/repos/" + *repo.Owner.Login + "/" + *repo.Name + "/branches/master/protection/enforce_admins"), + Enabled: true, }, Restrictions: nil, } @@ -153,7 +147,7 @@ func TestRepositories_EditBranches(t *testing.T) { t.Errorf("Repositories.UpdateBranchProtection() returned %+v, want %+v", protection, want) } - _, err = client.Repositories.Delete(*repo.Owner.Login, *repo.Name) + _, err = client.Repositories.Delete(context.Background(), *repo.Owner.Login, *repo.Name) if err != nil { t.Fatalf("Repositories.Delete() returned error: %v", err) } @@ -164,18 +158,18 @@ func TestRepositories_List(t *testing.T) { return } - _, _, err := client.Repositories.List("", nil) + _, _, err := client.Repositories.List(context.Background(), "", nil) if err != nil { t.Fatalf("Repositories.List('') returned error: %v", err) } - _, _, err = client.Repositories.List("google", nil) + _, _, err = client.Repositories.List(context.Background(), "google", nil) if err != nil { t.Fatalf("Repositories.List('google') returned error: %v", err) } opt := github.RepositoryListOptions{Sort: "created"} - repos, _, err := client.Repositories.List("google", &opt) + repos, _, err := client.Repositories.List(context.Background(), "google", &opt) if err != nil { t.Fatalf("Repositories.List('google') with Sort opt returned error: %v", err) } diff --git a/vendor/github.com/google/go-github/tests/integration/users_test.go b/vendor/github.com/google/go-github/test/integration/users_test.go similarity index 80% rename from vendor/github.com/google/go-github/tests/integration/users_test.go rename to vendor/github.com/google/go-github/test/integration/users_test.go index 6618a55..3c47c36 100644 --- a/vendor/github.com/google/go-github/tests/integration/users_test.go +++ b/vendor/github.com/google/go-github/test/integration/users_test.go @@ -5,9 +5,10 @@ // +build integration -package tests +package integration import ( + "context" "fmt" "math/rand" "testing" @@ -17,7 +18,7 @@ import ( func TestUsers_Get(t *testing.T) { // list all users - users, _, err := client.Users.ListAll(nil) + users, _, err := client.Users.ListAll(context.Background(), nil) if err != nil { t.Fatalf("Users.ListAll returned error: %v", err) } @@ -32,7 +33,7 @@ func TestUsers_Get(t *testing.T) { } // get individual user - u, _, err := client.Users.Get("octocat") + u, _, err := client.Users.Get(context.Background(), "octocat") if err != nil { t.Fatalf("Users.Get('octocat') returned error: %v", err) } @@ -50,7 +51,7 @@ func TestUsers_Update(t *testing.T) { return } - u, _, err := client.Users.Get("") + u, _, err := client.Users.Get(context.Background(), "") if 
err != nil { t.Fatalf("Users.Get('') returned error: %v", err) } @@ -69,13 +70,13 @@ func TestUsers_Update(t *testing.T) { testLoc := fmt.Sprintf("test-%d", rand.Int()) u.Location = &testLoc - _, _, err = client.Users.Edit(u) + _, _, err = client.Users.Edit(context.Background(), u) if err != nil { t.Fatalf("Users.Update returned error: %v", err) } // refetch user and check location value - u, _, err = client.Users.Get("") + u, _, err = client.Users.Get(context.Background(), "") if err != nil { t.Fatalf("Users.Get('') returned error: %v", err) } @@ -86,7 +87,7 @@ func TestUsers_Update(t *testing.T) { // set location back to the original value u.Location = &location - _, _, err = client.Users.Edit(u) + _, _, err = client.Users.Edit(context.Background(), u) if err != nil { t.Fatalf("Users.Edit returned error: %v", err) } @@ -97,7 +98,7 @@ func TestUsers_Emails(t *testing.T) { return } - emails, _, err := client.Users.ListEmails(nil) + emails, _, err := client.Users.ListEmails(context.Background(), nil) if err != nil { t.Fatalf("Users.ListEmails() returned error: %v", err) } @@ -116,13 +117,13 @@ EmailLoop: } // Add new address - _, _, err = client.Users.AddEmails([]string{email}) + _, _, err = client.Users.AddEmails(context.Background(), []string{email}) if err != nil { t.Fatalf("Users.AddEmails() returned error: %v", err) } // List emails again and verify new email is present - emails, _, err = client.Users.ListEmails(nil) + emails, _, err = client.Users.ListEmails(context.Background(), nil) if err != nil { t.Fatalf("Users.ListEmails() returned error: %v", err) } @@ -140,13 +141,13 @@ EmailLoop: } // Remove new address - _, err = client.Users.DeleteEmails([]string{email}) + _, err = client.Users.DeleteEmails(context.Background(), []string{email}) if err != nil { t.Fatalf("Users.DeleteEmails() returned error: %v", err) } // List emails again and verify new email was removed - emails, _, err = client.Users.ListEmails(nil) + emails, _, err = client.Users.ListEmails(context.Background(), nil) if err != nil { t.Fatalf("Users.ListEmails() returned error: %v", err) } @@ -159,7 +160,7 @@ EmailLoop: } func TestUsers_Keys(t *testing.T) { - keys, _, err := client.Users.ListKeys("willnorris", nil) + keys, _, err := client.Users.ListKeys(context.Background(), "willnorris", nil) if err != nil { t.Fatalf("Users.ListKeys('willnorris') returned error: %v", err) } @@ -174,7 +175,7 @@ func TestUsers_Keys(t *testing.T) { } // TODO: make this integration test work for any authenticated user. 
- keys, _, err = client.Users.ListKeys("", nil) + keys, _, err = client.Users.ListKeys(context.Background(), "", nil) if err != nil { t.Fatalf("Users.ListKeys('') returned error: %v", err) } @@ -188,7 +189,7 @@ func TestUsers_Keys(t *testing.T) { } // Add new key - _, _, err = client.Users.CreateKey(&github.Key{ + _, _, err = client.Users.CreateKey(context.Background(), &github.Key{ Title: github.String("go-github test key"), Key: github.String(key), }) @@ -197,12 +198,12 @@ func TestUsers_Keys(t *testing.T) { } // List keys again and verify new key is present - keys, _, err = client.Users.ListKeys("", nil) + keys, _, err = client.Users.ListKeys(context.Background(), "", nil) if err != nil { t.Fatalf("Users.ListKeys('') returned error: %v", err) } - var id int + var id int64 for _, k := range keys { if k.Key != nil && *k.Key == key { id = *k.ID @@ -215,7 +216,7 @@ func TestUsers_Keys(t *testing.T) { } // Verify that fetching individual key works - k, _, err := client.Users.GetKey(id) + k, _, err := client.Users.GetKey(context.Background(), id) if err != nil { t.Fatalf("Users.GetKey(%q) returned error: %v", id, err) } @@ -224,13 +225,13 @@ func TestUsers_Keys(t *testing.T) { } // Remove test key - _, err = client.Users.DeleteKey(id) + _, err = client.Users.DeleteKey(context.Background(), id) if err != nil { t.Fatalf("Users.DeleteKey(%d) returned error: %v", id, err) } // List keys again and verify test key was removed - keys, _, err = client.Users.ListKeys("", nil) + keys, _, err = client.Users.ListKeys(context.Background(), "", nil) if err != nil { t.Fatalf("Users.ListKeys('') returned error: %v", err) } diff --git a/vendor/github.com/google/go-querystring/.travis.yml b/vendor/github.com/google/go-querystring/.travis.yml new file mode 100644 index 0000000..881d087 --- /dev/null +++ b/vendor/github.com/google/go-querystring/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - 1.x + - 1.11.x diff --git a/vendor/github.com/google/go-querystring/README.md b/vendor/github.com/google/go-querystring/README.md index 03e9370..0e600be 100644 --- a/vendor/github.com/google/go-querystring/README.md +++ b/vendor/github.com/google/go-querystring/README.md @@ -1,10 +1,8 @@ # go-querystring # -go-querystring is Go library for encoding structs into URL query parameters. - +[![GoDoc](https://godoc.org/github.com/google/go-querystring/query?status.svg)](https://godoc.org/github.com/google/go-querystring/query) [![Build Status](https://travis-ci.org/google/go-querystring.svg?branch=master)](https://travis-ci.org/google/go-querystring) -**Documentation:** -**Build Status:** [![Build Status](https://drone.io/github.com/google/go-querystring/status.png)](https://drone.io/github.com/google/go-querystring/latest) +go-querystring is Go library for encoding structs into URL query parameters. 
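For reviewers unfamiliar with this dependency, a small sketch of what go-querystring does via its query.Values entry point (the ListOptions struct and values below are illustrative, not from this patch):

package main

import (
    "fmt"

    "github.com/google/go-querystring/query"
)

// ListOptions is an illustrative struct; the `url` tags drive the encoding.
type ListOptions struct {
    Page int    `url:"page,omitempty"`
    Sort string `url:"sort,omitempty"`
}

func main() {
    v, err := query.Values(ListOptions{Page: 2, Sort: "created"})
    if err != nil {
        panic(err)
    }
    fmt.Println(v.Encode()) // page=2&sort=created
}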
## Usage ## diff --git a/vendor/github.com/google/go-querystring/go.mod b/vendor/github.com/google/go-querystring/go.mod new file mode 100644 index 0000000..45dca2d --- /dev/null +++ b/vendor/github.com/google/go-querystring/go.mod @@ -0,0 +1 @@ +module github.com/google/go-querystring diff --git a/vendor/github.com/google/go-querystring/query/encode_test.go b/vendor/github.com/google/go-querystring/query/encode_test.go index 0f26a77..77bea5a 100644 --- a/vendor/github.com/google/go-querystring/query/encode_test.go +++ b/vendor/github.com/google/go-querystring/query/encode_test.go @@ -191,7 +191,7 @@ func TestValues_omitEmpty(t *testing.T) { v, err := Values(s) if err != nil { - t.Errorf("Values(%q) returned error: %v", s, err) + t.Errorf("Values(%v) returned error: %v", s, err) } want := url.Values{ @@ -200,7 +200,7 @@ func TestValues_omitEmpty(t *testing.T) { "E": {""}, // E is included because the pointer is not empty, even though the string being pointed to is } if !reflect.DeepEqual(want, v) { - t.Errorf("Values(%q) returned %v, want %v", s, v, want) + t.Errorf("Values(%v) returned %v, want %v", s, v, want) } } @@ -298,12 +298,12 @@ func TestValues_MarshalerWithNilPointer(t *testing.T) { }{} v, err := Values(s) if err != nil { - t.Errorf("Values(%q) returned error: %v", s, err) + t.Errorf("Values(%v) returned error: %v", s, err) } want := url.Values{} if !reflect.DeepEqual(want, v) { - t.Errorf("Values(%q) returned %v, want %v", s, v, want) + t.Errorf("Values(%v) returned %v, want %v", s, v, want) } } diff --git a/vendor/github.com/hashicorp/hcl/.travis.yml b/vendor/github.com/hashicorp/hcl/.travis.yml index 3f83d90..cb63a32 100644 --- a/vendor/github.com/hashicorp/hcl/.travis.yml +++ b/vendor/github.com/hashicorp/hcl/.travis.yml @@ -3,7 +3,8 @@ sudo: false language: go go: - - 1.8 + - 1.x + - tip branches: only: diff --git a/vendor/github.com/hashicorp/hcl/decoder.go b/vendor/github.com/hashicorp/hcl/decoder.go index 6e75ece..dc9d45a 100644 --- a/vendor/github.com/hashicorp/hcl/decoder.go +++ b/vendor/github.com/hashicorp/hcl/decoder.go @@ -117,10 +117,17 @@ func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) error { switch n := node.(type) { case *ast.LiteralType: - if n.Token.Type == token.BOOL { - v, err := strconv.ParseBool(n.Token.Text) - if err != nil { - return err + switch n.Token.Type { + case token.BOOL, token.STRING, token.NUMBER: + var v bool + s := strings.ToLower(strings.Replace(n.Token.Text, "\"", "", -1)) + switch s { + case "1", "true": + v = true + case "0", "false": + v = false + default: + return fmt.Errorf("decodeBool: Unknown value for boolean: %s", n.Token.Text) } result.Set(reflect.ValueOf(v)) @@ -137,7 +144,7 @@ func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) e func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error { switch n := node.(type) { case *ast.LiteralType: - if n.Token.Type == token.FLOAT { + if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER { v, err := strconv.ParseFloat(n.Token.Text, 64) if err != nil { return err @@ -573,7 +580,11 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) // Compile the list of all the fields that we're going to be decoding // from all the structs. 
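The decodeBool change shown above lets the HCL decoder coerce string and numeric forms into booleans instead of accepting only BOOL tokens. A small sketch of the patched behavior (the Settings struct is illustrative):

package main

import (
    "fmt"

    "github.com/hashicorp/hcl"
)

// Settings is an illustrative struct; with the patched decoder a quoted
// "true"/"false" or a bare 1/0 is coerced into the bool field.
type Settings struct {
    Enabled bool `hcl:"enabled"`
}

func main() {
    var s Settings
    if err := hcl.Decode(&s, `enabled = "true"`); err != nil {
        panic(err)
    }
    fmt.Println(s.Enabled) // true
}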
- fields := make(map[*reflect.StructField]reflect.Value) + type field struct { + field reflect.StructField + val reflect.Value + } + fields := []field{} for len(structs) > 0 { structVal := structs[0] structs = structs[1:] @@ -616,7 +627,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) } // Normal struct field, store it away - fields[&fieldType] = structVal.Field(i) + fields = append(fields, field{fieldType, structVal.Field(i)}) } } @@ -624,26 +635,27 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) decodedFields := make([]string, 0, len(fields)) decodedFieldsVal := make([]reflect.Value, 0) unusedKeysVal := make([]reflect.Value, 0) - for fieldType, field := range fields { - if !field.IsValid() { + for _, f := range fields { + field, fieldValue := f.field, f.val + if !fieldValue.IsValid() { // This should never happen panic("field is not valid") } // If we can't set the field, then it is unexported or something, // and we just continue onwards. - if !field.CanSet() { + if !fieldValue.CanSet() { continue } - fieldName := fieldType.Name + fieldName := field.Name - tagValue := fieldType.Tag.Get(tagName) + tagValue := field.Tag.Get(tagName) tagParts := strings.SplitN(tagValue, ",", 2) if len(tagParts) >= 2 { switch tagParts[1] { case "decodedFields": - decodedFieldsVal = append(decodedFieldsVal, field) + decodedFieldsVal = append(decodedFieldsVal, fieldValue) continue case "key": if item == nil { @@ -654,10 +666,10 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) } } - field.SetString(item.Keys[0].Token.Value().(string)) + fieldValue.SetString(item.Keys[0].Token.Value().(string)) continue case "unusedKeys": - unusedKeysVal = append(unusedKeysVal, field) + unusedKeysVal = append(unusedKeysVal, fieldValue) continue } } @@ -684,7 +696,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) // because we actually want the value. 
fieldName = fmt.Sprintf("%s.%s", name, fieldName) if len(prefixMatches.Items) > 0 { - if err := d.decode(fieldName, prefixMatches, field); err != nil { + if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil { return err } } @@ -694,12 +706,12 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) decodeNode = &ast.ObjectList{Items: ot.List.Items} } - if err := d.decode(fieldName, decodeNode, field); err != nil { + if err := d.decode(fieldName, decodeNode, fieldValue); err != nil { return err } } - decodedFields = append(decodedFields, fieldType.Name) + decodedFields = append(decodedFields, field.Name) } if len(decodedFieldsVal) > 0 { diff --git a/vendor/github.com/hashicorp/hcl/decoder_test.go b/vendor/github.com/hashicorp/hcl/decoder_test.go index 38363ad..3ffdcfd 100644 --- a/vendor/github.com/hashicorp/hcl/decoder_test.go +++ b/vendor/github.com/hashicorp/hcl/decoder_test.go @@ -73,6 +73,7 @@ func TestDecode_interface(t *testing.T) { false, map[string]interface{}{ "a": 1.02, + "b": 2, }, }, { @@ -793,6 +794,51 @@ func TestDecode_interfaceNonPointer(t *testing.T) { } } +func TestDecode_boolString(t *testing.T) { + var value struct { + Boolean bool + } + + err := Decode(&value, testReadFile(t, "basic_bool_string.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if value.Boolean != true { + t.Fatalf("bad: %#v", value.Boolean) + } +} + +func TestDecode_boolInt(t *testing.T) { + var value struct { + Boolean bool + } + + err := Decode(&value, testReadFile(t, "basic_bool_int.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if value.Boolean != true { + t.Fatalf("bad: %#v", value.Boolean) + } +} + +func TestDecode_bool(t *testing.T) { + var value struct { + Boolean bool + } + + err := Decode(&value, testReadFile(t, "basic_bool.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if value.Boolean != true { + t.Fatalf("bad: %#v", value.Boolean) + } +} + func TestDecode_intString(t *testing.T) { var value struct { Count int @@ -811,6 +857,7 @@ func TestDecode_intString(t *testing.T) { func TestDecode_float32(t *testing.T) { var value struct { A float32 `hcl:"a"` + B float32 `hcl:"b"` } err := Decode(&value, testReadFile(t, "float.hcl")) @@ -821,11 +868,15 @@ func TestDecode_float32(t *testing.T) { if got, want := value.A, float32(1.02); got != want { t.Fatalf("wrong result %#v; want %#v", got, want) } + if got, want := value.B, float32(2); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } } func TestDecode_float64(t *testing.T) { var value struct { A float64 `hcl:"a"` + B float64 `hcl:"b"` } err := Decode(&value, testReadFile(t, "float.hcl")) @@ -836,6 +887,9 @@ func TestDecode_float64(t *testing.T) { if got, want := value.A, float64(1.02); got != want { t.Fatalf("wrong result %#v; want %#v", got, want) } + if got, want := value.B, float64(2); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } } func TestDecode_intStringAliased(t *testing.T) { diff --git a/vendor/github.com/hashicorp/hcl/go.mod b/vendor/github.com/hashicorp/hcl/go.mod new file mode 100644 index 0000000..4debbbe --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/go.mod @@ -0,0 +1,3 @@ +module github.com/hashicorp/hcl + +require github.com/davecgh/go-spew v1.1.1 diff --git a/vendor/github.com/hashicorp/hcl/go.sum b/vendor/github.com/hashicorp/hcl/go.sum new file mode 100644 index 0000000..b5e2922 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/go.sum @@ -0,0 +1,2 @@ +github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go index 942256c..d4364a1 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go @@ -137,7 +137,7 @@ func TestWalkEquality(t *testing.T) { } if len(newNode.Items) != 2 { - t.Error("expected newNode length 2, got: %d", len(newNode.Items)) + t.Errorf("expected newNode length 2, got: %d", len(newNode.Items)) } expected := []string{ @@ -147,7 +147,7 @@ func TestWalkEquality(t *testing.T) { for i, item := range newNode.Items { if len(item.Keys) != 1 { - t.Error("expected keys newNode length 1, got: %d", len(item.Keys)) + t.Errorf("expected keys newNode length 1, got: %d", len(item.Keys)) } if item.Keys[0].Token.Text != expected[i] { diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go index b952d76..66bed58 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go +++ b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go @@ -46,7 +46,7 @@ func TestIsValidFile(t *testing.T) { } if res := isValidFile(file, fixtureExtensions); res != tc.Expected { - t.Errorf("want: %b, got: %b", tc.Expected, res) + t.Errorf("want: %t, got: %t", tc.Expected, res) } } } @@ -86,7 +86,7 @@ func TestRunMultiplePaths(t *testing.T) { t.Errorf("unexpected error: %s", err) } if stdout.String() != expectedOut.String() { - t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut.String(), stdout.String()) } } @@ -132,7 +132,7 @@ func TestRunSubDirectories(t *testing.T) { t.Errorf("unexpected error: %s", err) } if stdout.String() != expectedOut.String() { - t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut.String(), stdout.String()) } } @@ -161,7 +161,7 @@ func TestRunStdin(t *testing.T) { t.Errorf("unexpected error: %s", err) } if !bytes.Equal(stdout.Bytes(), expectedOut.Bytes()) { - t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut.String(), stdout.String()) } } @@ -242,7 +242,7 @@ func TestRunNoOptions(t *testing.T) { t.Errorf("unexpected error: %s", err) } if stdout.String() != expectedOut.String() { - t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut.String(), stdout.String()) } } @@ -274,7 +274,7 @@ func TestRunList(t *testing.T) { t.Errorf("unexpected error: %s", err) } if stdout.String() != expectedOut.String() { - t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut.String(), stdout.String()) } } diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go index b488180..64c83bc 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go @@ -197,9 +197,18 @@ func (p *Parser) objectItem() (*ast.ObjectItem, error) { keyStr = append(keyStr, k.Token.Text) } - return nil, fmt.Errorf( - "key '%s' expected start of object ('{') or assignment ('=')", - strings.Join(keyStr, " ")) + return nil, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf( + "key '%s' expected start of object ('{') or 
assignment ('=')", + strings.Join(keyStr, " ")), + } + } + + // key=#comment + // val + if p.lineComment != nil { + o.LineComment, p.lineComment = p.lineComment, nil } // do a look-ahead for line comment @@ -319,7 +328,10 @@ func (p *Parser) objectType() (*ast.ObjectType, error) { // No error, scan and expect the ending to be a brace if tok := p.scan(); tok.Type != token.RBRACE { - return nil, fmt.Errorf("object expected closing RBRACE got: %s", tok.Type) + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type), + } } o.List = l diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go index c896d58..7c038d1 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go @@ -252,6 +252,14 @@ func (p *printer) objectItem(o *ast.ObjectItem) []byte { } } + // If key and val are on different lines, treat line comments like lead comments. + if o.LineComment != nil && o.Val.Pos().Line != o.Keys[0].Pos().Line { + for _, comment := range o.LineComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + for i, k := range o.Keys { buf.WriteString(k.Token.Text) buf.WriteByte(blank) @@ -265,7 +273,7 @@ func (p *printer) objectItem(o *ast.ObjectItem) []byte { buf.Write(p.output(o.Val)) - if o.Val.Pos().Line == o.Keys[0].Pos().Line && o.LineComment != nil { + if o.LineComment != nil && o.Val.Pos().Line == o.Keys[0].Pos().Line { buf.WriteByte(blank) for _, comment := range o.LineComment.List { buf.WriteString(comment.Text) @@ -509,8 +517,13 @@ func (p *printer) alignedItems(items []*ast.ObjectItem) []byte { // list returns the printable HCL form of an list type. func (p *printer) list(l *ast.ListType) []byte { + if p.isSingleLineList(l) { + return p.singleLineList(l) + } + var buf bytes.Buffer buf.WriteString("[") + buf.WriteByte(newline) var longestLine int for _, item := range l.List { @@ -523,115 +536,112 @@ func (p *printer) list(l *ast.ListType) []byte { } } - insertSpaceBeforeItem := false - lastHadLeadComment := false + haveEmptyLine := false for i, item := range l.List { - // Keep track of whether this item is a heredoc since that has - // unique behavior. - heredoc := false - if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { - heredoc = true - } - - if item.Pos().Line != l.Lbrack.Line { - // multiline list, add newline before we add each item - buf.WriteByte(newline) - insertSpaceBeforeItem = false - - // If we have a lead comment, then we want to write that first - leadComment := false - if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil { - leadComment = true - - // If this isn't the first item and the previous element - // didn't have a lead comment, then we need to add an extra - // newline to properly space things out. If it did have a - // lead comment previously then this would be done - // automatically. - if i > 0 && !lastHadLeadComment { - buf.WriteByte(newline) - } - - for _, comment := range lit.LeadComment.List { - buf.Write(p.indent([]byte(comment.Text))) - buf.WriteByte(newline) - } + // If we have a lead comment, then we want to write that first + leadComment := false + if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil { + leadComment = true + + // Ensure an empty line before every element with a + // lead comment (except the first item in a list). 
+ if !haveEmptyLine && i != 0 { + buf.WriteByte(newline) } - // also indent each line - val := p.output(item) - curLen := len(val) - buf.Write(p.indent(val)) - - // if this item is a heredoc, then we output the comma on - // the next line. This is the only case this happens. - comma := []byte{','} - if heredoc { + for _, comment := range lit.LeadComment.List { + buf.Write(p.indent([]byte(comment.Text))) buf.WriteByte(newline) - comma = p.indent(comma) } + } - buf.Write(comma) + // also indent each line + val := p.output(item) + curLen := len(val) + buf.Write(p.indent(val)) - if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { - // if the next item doesn't have any comments, do not align - buf.WriteByte(blank) // align one space - for i := 0; i < longestLine-curLen; i++ { - buf.WriteByte(blank) - } + // if this item is a heredoc, then we output the comma on + // the next line. This is the only case this happens. + comma := []byte{','} + if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { + buf.WriteByte(newline) + comma = p.indent(comma) + } - for _, comment := range lit.LineComment.List { - buf.WriteString(comment.Text) - } - } + buf.Write(comma) - lastItem := i == len(l.List)-1 - if lastItem { - buf.WriteByte(newline) + if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { + // if the next item doesn't have any comments, do not align + buf.WriteByte(blank) // align one space + for i := 0; i < longestLine-curLen; i++ { + buf.WriteByte(blank) } - if leadComment && !lastItem { - buf.WriteByte(newline) + for _, comment := range lit.LineComment.List { + buf.WriteString(comment.Text) } + } - lastHadLeadComment = leadComment - } else { - if insertSpaceBeforeItem { - buf.WriteByte(blank) - insertSpaceBeforeItem = false - } + buf.WriteByte(newline) - // Output the item itself - // also indent each line - val := p.output(item) - curLen := len(val) - buf.Write(val) + // Ensure an empty line after every element with a + // lead comment (except the first item in a list). + haveEmptyLine = leadComment && i != len(l.List)-1 + if haveEmptyLine { + buf.WriteByte(newline) + } + } - // If this is a heredoc item we always have to output a newline - // so that it parses properly. - if heredoc { - buf.WriteByte(newline) - } + buf.WriteString("]") + return buf.Bytes() +} - // If this isn't the last element, write a comma. - if i != len(l.List)-1 { - buf.WriteString(",") - insertSpaceBeforeItem = true - } +// isSingleLineList returns true if: +// * they were previously formatted entirely on one line +// * they consist entirely of literals +// * there are either no heredoc strings or the list has exactly one element +// * there are no line comments +func (printer) isSingleLineList(l *ast.ListType) bool { + for _, item := range l.List { + if item.Pos().Line != l.Lbrack.Line { + return false + } - if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { - // if the next item doesn't have any comments, do not align - buf.WriteByte(blank) // align one space - for i := 0; i < longestLine-curLen; i++ { - buf.WriteByte(blank) - } + lit, ok := item.(*ast.LiteralType) + if !ok { + return false + } - for _, comment := range lit.LineComment.List { - buf.WriteString(comment.Text) - } - } + if lit.Token.Type == token.HEREDOC && len(l.List) != 1 { + return false + } + + if lit.LineComment != nil { + return false + } + } + + return true +} + +// singleLineList prints a simple single line list. +// For a definition of "simple", see isSingleLineList above. 
+func (p *printer) singleLineList(l *ast.ListType) []byte { + buf := &bytes.Buffer{} + + buf.WriteString("[") + for i, item := range l.List { + if i != 0 { + buf.WriteString(", ") } + // Output the item itself + buf.Write(p.output(item)) + + // The heredoc marker needs to be at the end of line. + if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { + buf.WriteByte(newline) + } } buf.WriteString("]") diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go index 5248259..8ae747a 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go @@ -147,3 +147,30 @@ func lineAt(text []byte, offs int) []byte { } return text[offs:i] } + +// TestFormatParsable ensures that the output of Format() is can be parsed again. +func TestFormatValidOutput(t *testing.T) { + cases := []string{ + "#\x00", + "#\ue123t", + "x=//\n0y=<<_\n_\n", + "y=[1,//\n]", + "Y=<<4\n4/\n\n\n/4/@=4/\n\n\n/4000000004\r\r\n00004\n", + "x=<<_\n_\r\r\n_\n", + "X=<<-\n\r\r\n", + } + + for _, c := range cases { + f, err := Format([]byte(c)) + if err != nil { + // ignore these failures, not all inputs are valid HCL. + t.Logf("Format(%q) = %v", c, err) + continue + } + + if _, err := parser.Parse(f); err != nil { + t.Errorf("Format(%q) = %q; Parse(%q) = %v", c, f, f, err) + continue + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden index 9d4b072..192c26a 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden @@ -34,3 +34,6 @@ variable = { foo { bar = "fatih" // line comment 2 } // line comment 3 + +// comment +multiline = "assignment" diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input index 57c37ac..c4b29de 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input @@ -35,3 +35,5 @@ foo { bar = "fatih" // line comment 2 } // line comment 3 +multiline = // comment +"assignment" diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input index 5d27206..4955086 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input @@ -35,3 +35,5 @@ foo { bar = "fatih" // line comment 2 } // line comment 3 +multiline = // comment +"assignment" diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list.golden index 14c37ac..6894b44 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list.golden +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list.golden @@ -2,11 +2,14 @@ foo = ["fatih", "arslan"] foo = ["bar", "qaz"] -foo = ["zeynep", +foo = [ + "zeynep", "arslan", ] -foo = ["fatih", "zeynep", +foo = [ + "fatih", + "zeynep", "arslan", ] diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.golden index e5753c9..35a848f 100644 --- 
a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.golden +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.golden @@ -1,7 +1,13 @@ -foo = [1, # Hello +foo = [ + 1, # Hello 2, ] -foo = [1, # Hello +foo = [ + 1, # Hello 2, # World ] + +foo = [ + 1, # Hello +] diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.input index 1d636c8..c56aef2 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.input +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/list_comment.input @@ -4,3 +4,6 @@ foo = [1, # Hello foo = [1, # Hello 2, # World ] + +foo = [1, # Hello +] diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/object_with_heredoc.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/object_with_heredoc.golden index 7e92243..a271d28 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/object_with_heredoc.golden +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/object_with_heredoc.golden @@ -1,6 +1,7 @@ obj { foo = [< 0 { + if ch == '\x00' { s.err("unexpected null character (0x00)") return eof } + if ch == '\uE123' { + s.err("unicode code point U+E123 reserved for internal use") + return utf8.RuneError + } + // debug // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) return ch @@ -351,7 +352,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type { return token.NUMBER } -// scanMantissa scans the mantissa begining from the rune. It returns the next +// scanMantissa scans the mantissa beginning from the rune. It returns the next // non decimal rune. It's used to determine wheter it's a fraction or exponent. func (s *Scanner) scanMantissa(ch rune) rune { scanned := false @@ -432,16 +433,16 @@ func (s *Scanner) scanHeredoc() { // Read the identifier identBytes := s.src[offs : s.srcPos.Offset-s.lastCharLen] - if len(identBytes) == 0 { + if len(identBytes) == 0 || (len(identBytes) == 1 && identBytes[0] == '-') { s.err("zero-length heredoc anchor") return } var identRegexp *regexp.Regexp if identBytes[0] == '-' { - identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes[1:])) + identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes[1:])) } else { - identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes)) + identRegexp = regexp.MustCompile(fmt.Sprintf(`^[[:space:]]*%s\r*\z`, identBytes)) } // Read the actual string value @@ -551,7 +552,7 @@ func (s *Scanner) scanDigits(ch rune, base, n int) rune { s.err("illegal char escape") } - if n != start { + if n != start && ch != eof { // we scanned all digits, put the last non digit char back, // only if we read anything at all s.unread() diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go index 4f2c9cb..58d68f5 100644 --- a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go +++ b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go @@ -509,9 +509,12 @@ func TestScan_crlf(t *testing.T) { func TestError(t *testing.T) { testError(t, "\x80", "1:1", "illegal UTF-8 encoding", token.ILLEGAL) testError(t, "\xff", "1:1", "illegal UTF-8 encoding", token.ILLEGAL) + testError(t, "\uE123", "1:1", "unicode code point U+E123 reserved for internal use", token.ILLEGAL) testError(t, "ab\x80", "1:3", "illegal UTF-8 encoding", token.IDENT) testError(t, "abc\xff", "1:4", 
"illegal UTF-8 encoding", token.IDENT) + testError(t, "ab\x00", "1:3", "unexpected null character (0x00)", token.IDENT) + testError(t, "ab\x00\n", "1:3", "unexpected null character (0x00)", token.IDENT) testError(t, `"ab`+"\x80", "1:4", "illegal UTF-8 encoding", token.STRING) testError(t, `"abc`+"\xff", "1:5", "illegal UTF-8 encoding", token.STRING) @@ -528,6 +531,9 @@ func TestError(t *testing.T) { testError(t, `"${abc`+"\n", "2:1", "literal not terminated", token.STRING) testError(t, `/*/`, "1:4", "comment not terminated", token.COMMENT) testError(t, `/foo`, "1:1", "expected '/' for comment", token.COMMENT) + + testError(t, "<<\nfoo\n\n", "1:3", "zero-length heredoc anchor", token.HEREDOC) + testError(t, "<<-\nfoo\n\n", "1:4", "zero-length heredoc anchor", token.HEREDOC) } func testError(t *testing.T, src, pos, msg string, tok token.Type) { @@ -589,3 +595,48 @@ func countNewlines(s string) int { } return n } + +func TestScanDigitsUnread(t *testing.T) { + cases := []string{ + "M=0\"\\00", + "M=\"\\00", + "\"\\00", + "M=[\"\\00", + "U{\"\\00", + "\"\n{}#\n\"\\00", + "M=[[\"\\00", + "U{d=0\"\\U00", + "#\n\"\\x00", + "m=[[[\"\\00", + } + + for _, c := range cases { + s := New([]byte(c)) + + for { + tok := s.Scan() + if tok.Type == token.EOF { + break + } + t.Logf("s.Scan() = %s", tok) + } + } +} + +func TestScanHeredocRegexpCompile(t *testing.T) { + cases := []string{ + "0\xe1\n<<ȸ\nhello\nworld\nȸ", + } + + for _, c := range cases { + s := New([]byte(c)) + + for { + tok := s.Scan() + if tok.Type == token.EOF { + break + } + t.Logf("s.Scan() = %s", tok) + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go index dd5c72b..fe3f0f0 100644 --- a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go +++ b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go @@ -246,7 +246,7 @@ func (s *Scanner) scanNumber(ch rune) token.Type { return token.NUMBER } -// scanMantissa scans the mantissa begining from the rune. It returns the next +// scanMantissa scans the mantissa beginning from the rune. It returns the next // non decimal rune. It's used to determine wheter it's a fraction or exponent. 
func (s *Scanner) scanMantissa(ch rune) rune { scanned := false diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool.hcl new file mode 100644 index 0000000..024c06c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool.hcl @@ -0,0 +1 @@ +boolean = true diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_int.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_int.hcl new file mode 100644 index 0000000..9e1b45e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_int.hcl @@ -0,0 +1 @@ +boolean = 1 diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_string.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_string.hcl new file mode 100644 index 0000000..4b805de --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_bool_string.hcl @@ -0,0 +1 @@ +boolean = "trUe" diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl index eed44e5..edf355e 100644 --- a/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl @@ -1 +1,2 @@ a = 1.02 +b = 2 diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/float.json b/vendor/github.com/hashicorp/hcl/test-fixtures/float.json index a9d1ab4..5808680 100644 --- a/vendor/github.com/hashicorp/hcl/test-fixtures/float.json +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/float.json @@ -1,3 +1,4 @@ { - "a": 1.02 + "a": 1.02, + "b": 2 } diff --git a/vendor/github.com/magiconair/properties/.circleci/config.yml b/vendor/github.com/magiconair/properties/.circleci/config.yml new file mode 100644 index 0000000..f036e06 --- /dev/null +++ b/vendor/github.com/magiconair/properties/.circleci/config.yml @@ -0,0 +1,11 @@ +version: 2 +jobs: + build: + docker: + - image: circleci/golang:1.11.2 + + working_directory: /go/src/github.com/magiconair/properties + steps: + - checkout + - run: go get -v -t -d ./... + - run: go test -v ./... diff --git a/vendor/github.com/magiconair/properties/.travis.yml b/vendor/github.com/magiconair/properties/.travis.yml index 60436b2..d16d2ba 100644 --- a/vendor/github.com/magiconair/properties/.travis.yml +++ b/vendor/github.com/magiconair/properties/.travis.yml @@ -5,4 +5,7 @@ go: - 1.6.x - 1.7.x - 1.8.x + - 1.9.x + - "1.10.x" + - "1.11.x" - tip diff --git a/vendor/github.com/magiconair/properties/CHANGELOG.md b/vendor/github.com/magiconair/properties/CHANGELOG.md index 4905fec..f83adc2 100644 --- a/vendor/github.com/magiconair/properties/CHANGELOG.md +++ b/vendor/github.com/magiconair/properties/CHANGELOG.md @@ -1,57 +1,92 @@ ## Changelog -### [1.7.3](https://github.com/magiconair/properties/tags/v1.7.3) - 10 Jul 2017 +### [1.8](https://github.com/magiconair/properties/tree/v1.8) - 15 May 2018 + + * [PR #26](https://github.com/magiconair/properties/pull/26): Disable expansion during loading + + This adds the option to disable property expansion during loading. + + Thanks to [@kmala](https://github.com/kmala) for the patch. + +### [1.7.6](https://github.com/magiconair/properties/tree/v1.7.6) - 14 Feb 2018 + + * [PR #29](https://github.com/magiconair/properties/pull/29): Reworked expansion logic to handle more complex cases. + + See PR for an example. + + Thanks to [@yobert](https://github.com/yobert) for the fix. 
+ +### [1.7.5](https://github.com/magiconair/properties/tree/v1.7.5) - 13 Feb 2018 + + * [PR #28](https://github.com/magiconair/properties/pull/28): Support duplicate expansions in the same value + + Values which expand the same key multiple times (e.g. `key=${a} ${a}`) will no longer fail + with a `circular reference error`. + + Thanks to [@yobert](https://github.com/yobert) for the fix. + +### [1.7.4](https://github.com/magiconair/properties/tree/v1.7.4) - 31 Oct 2017 + + * [Issue #23](https://github.com/magiconair/properties/issues/23): Ignore blank lines with whitespaces + + * [PR #24](https://github.com/magiconair/properties/pull/24): Update keys when DisableExpansion is enabled + + Thanks to [@mgurov](https://github.com/mgurov) for the fix. + +### [1.7.3](https://github.com/magiconair/properties/tree/v1.7.3) - 10 Jul 2017 * [Issue #17](https://github.com/magiconair/properties/issues/17): Add [SetValue()](http://godoc.org/github.com/magiconair/properties#Properties.SetValue) method to set values generically * [Issue #22](https://github.com/magiconair/properties/issues/22): Add [LoadMap()](http://godoc.org/github.com/magiconair/properties#LoadMap) function to load properties from a string map -### [1.7.2](https://github.com/magiconair/properties/tags/v1.7.2) - 20 Mar 2017 +### [1.7.2](https://github.com/magiconair/properties/tree/v1.7.2) - 20 Mar 2017 * [Issue #15](https://github.com/magiconair/properties/issues/15): Drop gocheck dependency * [PR #21](https://github.com/magiconair/properties/pull/21): Add [Map()](http://godoc.org/github.com/magiconair/properties#Properties.Map) and [FilterFunc()](http://godoc.org/github.com/magiconair/properties#Properties.FilterFunc) -### [1.7.1](https://github.com/magiconair/properties/tags/v1.7.1) - 13 Jan 2017 +### [1.7.1](https://github.com/magiconair/properties/tree/v1.7.1) - 13 Jan 2017 + * [Issue #14](https://github.com/magiconair/properties/issues/14): Decouple TestLoadExpandedFile from `$USER` + * [PR #12](https://github.com/magiconair/properties/pull/12): Load from files and URLs * [PR #16](https://github.com/magiconair/properties/pull/16): Keep gofmt happy * [PR #18](https://github.com/magiconair/properties/pull/18): Fix Delete() function -### [1.7.0](https://github.com/magiconair/properties/tags/v1.7.0) - 20 Mar 2016 +### [1.7.0](https://github.com/magiconair/properties/tree/v1.7.0) - 20 Mar 2016 * [Issue #10](https://github.com/magiconair/properties/issues/10): Add [LoadURL,LoadURLs,MustLoadURL,MustLoadURLs](http://godoc.org/github.com/magiconair/properties#LoadURL) method to load properties from a URL. * [Issue #11](https://github.com/magiconair/properties/issues/11): Add [LoadString,MustLoadString](http://godoc.org/github.com/magiconair/properties#LoadString) method to load properties from an UTF8 string. * [PR #8](https://github.com/magiconair/properties/pull/8): Add [MustFlag](http://godoc.org/github.com/magiconair/properties#Properties.MustFlag) method to provide overrides via command line flags. (@pascaldekloe) -### [1.6.0](https://github.com/magiconair/properties/tags/v1.6.0) - 11 Dec 2015 +### [1.6.0](https://github.com/magiconair/properties/tree/v1.6.0) - 11 Dec 2015 * Add [Decode](http://godoc.org/github.com/magiconair/properties#Properties.Decode) method to populate struct from properties via tags. 
-### [1.5.6](https://github.com/magiconair/properties/tags/v1.5.6) - 18 Oct 2015 +### [1.5.6](https://github.com/magiconair/properties/tree/v1.5.6) - 18 Oct 2015 * Vendored in gopkg.in/check.v1 -### [1.5.5](https://github.com/magiconair/properties/tags/v1.5.5) - 31 Jul 2015 +### [1.5.5](https://github.com/magiconair/properties/tree/v1.5.5) - 31 Jul 2015 * [PR #6](https://github.com/magiconair/properties/pull/6): Add [Delete](http://godoc.org/github.com/magiconair/properties#Properties.Delete) method to remove keys including comments. (@gerbenjacobs) -### [1.5.4](https://github.com/magiconair/properties/tags/v1.5.4) - 23 Jun 2015 +### [1.5.4](https://github.com/magiconair/properties/tree/v1.5.4) - 23 Jun 2015 * [Issue #5](https://github.com/magiconair/properties/issues/5): Allow disabling of property expansion [DisableExpansion](http://godoc.org/github.com/magiconair/properties#Properties.DisableExpansion). When property expansion is disabled Properties become a simple key/value store and don't check for circular references. -### [1.5.3](https://github.com/magiconair/properties/tags/v1.5.3) - 02 Jun 2015 +### [1.5.3](https://github.com/magiconair/properties/tree/v1.5.3) - 02 Jun 2015 * [Issue #4](https://github.com/magiconair/properties/issues/4): Maintain key order in [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) and [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) -### [1.5.2](https://github.com/magiconair/properties/tags/v1.5.2) - 10 Apr 2015 +### [1.5.2](https://github.com/magiconair/properties/tree/v1.5.2) - 10 Apr 2015 * [Issue #3](https://github.com/magiconair/properties/issues/3): Don't print comments in [WriteComment()](http://godoc.org/github.com/magiconair/properties#Properties.WriteComment) if they are all empty * Add clickable links to README -### [1.5.1](https://github.com/magiconair/properties/tags/v1.5.1) - 08 Dec 2014 +### [1.5.1](https://github.com/magiconair/properties/tree/v1.5.1) - 08 Dec 2014 * Added [GetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.GetParsedDuration) and [MustGetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.MustGetParsedDuration) for values specified compatible with [time.ParseDuration()](http://golang.org/pkg/time/#ParseDuration). 
-### [1.5.0](https://github.com/magiconair/properties/tags/v1.5.0) - 18 Nov 2014 +### [1.5.0](https://github.com/magiconair/properties/tree/v1.5.0) - 18 Nov 2014 * Added support for single and multi-line comments (reading, writing and updating) * The order of keys is now preserved @@ -59,31 +94,31 @@ * Added a [MustSet()](http://godoc.org/github.com/magiconair/properties#Properties.MustSet) method * Migrated test library from launchpad.net/gocheck to [gopkg.in/check.v1](http://gopkg.in/check.v1) -### [1.4.2](https://github.com/magiconair/properties/tags/v1.4.2) - 15 Nov 2014 +### [1.4.2](https://github.com/magiconair/properties/tree/v1.4.2) - 15 Nov 2014 * [Issue #2](https://github.com/magiconair/properties/issues/2): Fixed goroutine leak in parser which created two lexers but cleaned up only one -### [1.4.1](https://github.com/magiconair/properties/tags/v1.4.1) - 13 Nov 2014 +### [1.4.1](https://github.com/magiconair/properties/tree/v1.4.1) - 13 Nov 2014 * [Issue #1](https://github.com/magiconair/properties/issues/1): Fixed bug in Keys() method which returned an empty string -### [1.4.0](https://github.com/magiconair/properties/tags/v1.4.0) - 23 Sep 2014 +### [1.4.0](https://github.com/magiconair/properties/tree/v1.4.0) - 23 Sep 2014 * Added [Keys()](http://godoc.org/github.com/magiconair/properties#Properties.Keys) to get the keys * Added [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) and [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) to get a subset of the properties -### [1.3.0](https://github.com/magiconair/properties/tags/v1.3.0) - 18 Mar 2014 +### [1.3.0](https://github.com/magiconair/properties/tree/v1.3.0) - 18 Mar 2014 * Added support for time.Duration -* Made MustXXX() failure beha[ior configurable (log.Fatal, panic](https://github.com/magiconair/properties/tags/vior configurable (log.Fatal, panic) - custom) +* Made MustXXX() failure beha[ior configurable (log.Fatal, panic](https://github.com/magiconair/properties/tree/vior configurable (log.Fatal, panic) - custom) * Changed default of MustXXX() failure from panic to log.Fatal -### [1.2.0](https://github.com/magiconair/properties/tags/v1.2.0) - 05 Mar 2014 +### [1.2.0](https://github.com/magiconair/properties/tree/v1.2.0) - 05 Mar 2014 * Added MustGet... functions * Added support for int and uint with range checks on 32 bit platforms -### [1.1.0](https://github.com/magiconair/properties/tags/v1.1.0) - 20 Jan 2014 +### [1.1.0](https://github.com/magiconair/properties/tree/v1.1.0) - 20 Jan 2014 * Renamed from goproperties to properties * Added support for expansion of environment vars in @@ -91,6 +126,6 @@ * Fixed bug where value expressions were not at the start of the string -### [1.0.0](https://github.com/magiconair/properties/tags/v1.0.0) - 7 Jan 2014 +### [1.0.0](https://github.com/magiconair/properties/tree/v1.0.0) - 7 Jan 2014 * Initial release diff --git a/vendor/github.com/magiconair/properties/LICENSE b/vendor/github.com/magiconair/properties/LICENSE index 7eab43b..b387087 100644 --- a/vendor/github.com/magiconair/properties/LICENSE +++ b/vendor/github.com/magiconair/properties/LICENSE @@ -1,6 +1,6 @@ goproperties - properties file decoder for Go -Copyright (c) 2013-2014 - Frank Schroeder +Copyright (c) 2013-2018 - Frank Schroeder All rights reserved. 
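The properties changelog entries above describe two behavioural changes to value expansion: a value may now expand the same key more than once without tripping the circular-reference check (PR #28), and expansion can be disabled while loading (PR #26). A minimal sketch of both, assuming the `github.com/magiconair/properties` import path used by this repository and the `Loader` type added to `load.go` later in this diff:

```go
package main

import (
	"fmt"

	"github.com/magiconair/properties"
)

func main() {
	// PR #28: duplicate expansions of the same key in one value are fine
	// and no longer reported as a circular reference.
	p := properties.MustLoadString("a = hello\ngreeting = ${a} ${a}\n")
	fmt.Println(p.MustGet("greeting")) // "hello hello"

	// PR #26: expansion can be switched off at load time, turning the
	// Properties object into a plain key/value store.
	l := &properties.Loader{Encoding: properties.UTF8, DisableExpansion: true}
	raw, err := l.LoadBytes([]byte("a = 1\nb = ${a}\n"))
	if err != nil {
		panic(err)
	}
	fmt.Println(raw.MustGet("b")) // literal "${a}", left unexpanded
}
```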
diff --git a/vendor/github.com/magiconair/properties/README.md b/vendor/github.com/magiconair/properties/README.md index eb3b8c4..42ed5c3 100644 --- a/vendor/github.com/magiconair/properties/README.md +++ b/vendor/github.com/magiconair/properties/README.md @@ -1,7 +1,12 @@ -Overview [![Build Status](https://travis-ci.org/magiconair/properties.svg?branch=master)](https://travis-ci.org/magiconair/properties) -======== +[![](https://img.shields.io/github/tag/magiconair/properties.svg?style=flat-square&label=release)](https://github.com/magiconair/properties/releases) +[![Travis CI Status](https://img.shields.io/travis/magiconair/properties.svg?branch=master&style=flat-square&label=travis)](https://travis-ci.org/magiconair/properties) +[![CircleCI Status](https://img.shields.io/circleci/project/github/magiconair/properties.svg?label=circle+ci&style=flat-square)](https://circleci.com/gh/magiconair/properties) +[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg?style=flat-square)](https://raw.githubusercontent.com/magiconair/properties/master/LICENSE) +[![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) -#### Current version: 1.7.3 +# Overview + +#### Please run `git pull --tags` to update the tags. See [below](#updated-git-tags) why. properties is a Go library for reading and writing properties files. @@ -25,10 +30,9 @@ changed from `panic` to `log.Fatal` but this is configurable and custom error handling functions can be provided. See the package documentation for details. -Read the full documentation on [GoDoc](https://godoc.org/github.com/magiconair/properties) [![GoDoc](https://godoc.org/github.com/magiconair/properties?status.png)](https://godoc.org/github.com/magiconair/properties) +Read the full documentation on [![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) -Getting Started ---------------- +## Getting Started ```go import ( @@ -83,18 +87,43 @@ func main() { ``` -Installation and Upgrade ------------------------- +## Installation and Upgrade ``` $ go get -u github.com/magiconair/properties ``` -License -------- +## License 2 clause BSD license. See [LICENSE](https://github.com/magiconair/properties/blob/master/LICENSE) file for details. -ToDo ----- +## ToDo + * Dump contents with passwords and secrets obscured + +## Updated Git tags + +#### 13 Feb 2018 + +I realized that all of the git tags I had pushed before v1.7.5 were lightweight tags +and I've only recently learned that this doesn't play well with `git describe` 😞 + +I have replaced all lightweight tags with signed tags using this script which should +retain the commit date, name and email address. Please run `git pull --tags` to update them. + +Worst case you have to reclone the repo. + +```shell +#!/bin/bash +tag=$1 +echo "Updating $tag" +date=$(git show ${tag}^0 --format=%aD | head -1) +email=$(git show ${tag}^0 --format=%aE | head -1) +name=$(git show ${tag}^0 --format=%aN | head -1) +GIT_COMMITTER_DATE="$date" GIT_COMMITTER_NAME="$name" GIT_COMMITTER_EMAIL="$email" git tag -s -f ${tag} ${tag}^0 -m ${tag} +``` + +I apologize for the inconvenience. 
+ +Frank + diff --git a/vendor/github.com/magiconair/properties/assert/assert.go b/vendor/github.com/magiconair/properties/assert/assert.go index cb1097b..d0f2704 100644 --- a/vendor/github.com/magiconair/properties/assert/assert.go +++ b/vendor/github.com/magiconair/properties/assert/assert.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/assert/assert_test.go b/vendor/github.com/magiconair/properties/assert/assert_test.go index dcef73d..ddb6bf0 100644 --- a/vendor/github.com/magiconair/properties/assert/assert_test.go +++ b/vendor/github.com/magiconair/properties/assert/assert_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/decode.go b/vendor/github.com/magiconair/properties/decode.go index 0a961bb..3ebf804 100644 --- a/vendor/github.com/magiconair/properties/decode.go +++ b/vendor/github.com/magiconair/properties/decode.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/decode_test.go b/vendor/github.com/magiconair/properties/decode_test.go index c829314..9431b83 100644 --- a/vendor/github.com/magiconair/properties/decode_test.go +++ b/vendor/github.com/magiconair/properties/decode_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/doc.go b/vendor/github.com/magiconair/properties/doc.go index 36c8368..f8822da 100644 --- a/vendor/github.com/magiconair/properties/doc.go +++ b/vendor/github.com/magiconair/properties/doc.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -73,7 +73,7 @@ // # refers to the users' home dir // home = ${HOME} // -// # local key takes precendence over env var: u = foo +// # local key takes precedence over env var: u = foo // USER = foo // u = ${USER} // @@ -102,7 +102,7 @@ // v = p.GetString("key", "def") // v = p.GetDuration("key", 999) // -// As an alterantive properties may be applied with the standard +// As an alternative properties may be applied with the standard // library's flag implementation at any time. // // # Standard configuration diff --git a/vendor/github.com/magiconair/properties/example_test.go b/vendor/github.com/magiconair/properties/example_test.go index 6f21dfb..4d0bbea 100644 --- a/vendor/github.com/magiconair/properties/example_test.go +++ b/vendor/github.com/magiconair/properties/example_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. 
// Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/go.mod b/vendor/github.com/magiconair/properties/go.mod new file mode 100644 index 0000000..02a6f86 --- /dev/null +++ b/vendor/github.com/magiconair/properties/go.mod @@ -0,0 +1 @@ +module github.com/magiconair/properties diff --git a/vendor/github.com/magiconair/properties/integrate.go b/vendor/github.com/magiconair/properties/integrate.go index 0d775e0..74d38dc 100644 --- a/vendor/github.com/magiconair/properties/integrate.go +++ b/vendor/github.com/magiconair/properties/integrate.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/integrate_test.go b/vendor/github.com/magiconair/properties/integrate_test.go index cbee181..5ed74e2 100644 --- a/vendor/github.com/magiconair/properties/integrate_test.go +++ b/vendor/github.com/magiconair/properties/integrate_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -52,7 +52,7 @@ func TestFlagOverride(t *testing.T) { t.Errorf("Got customized b=%d, want %d", *gotB, want) } if want := 4; *gotC != want { - t.Errorf("Got overriden c=%d, want %d", *gotC, want) + t.Errorf("Got overridden c=%d, want %d", *gotC, want) } } diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go index a3cba03..367166d 100644 --- a/vendor/github.com/magiconair/properties/lex.go +++ b/vendor/github.com/magiconair/properties/lex.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // @@ -196,9 +196,8 @@ func lexBeforeKey(l *lexer) stateFn { return lexComment case isWhitespace(r): - l.acceptRun(whitespace) l.ignore() - return lexKey + return lexBeforeKey default: l.backup() diff --git a/vendor/github.com/magiconair/properties/load.go b/vendor/github.com/magiconair/properties/load.go index 278cc2e..c8e1b58 100644 --- a/vendor/github.com/magiconair/properties/load.go +++ b/vendor/github.com/magiconair/properties/load.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -16,21 +16,157 @@ import ( type Encoding uint const ( + // utf8Default is a private placeholder for the zero value of Encoding to + // ensure that it has the correct meaning. UTF8 is the default encoding but + // was assigned a non-zero value which cannot be changed without breaking + // existing code. Clients should continue to use the public constants. + utf8Default Encoding = iota + // UTF8 interprets the input data as UTF-8. - UTF8 Encoding = 1 << iota + UTF8 // ISO_8859_1 interprets the input data as ISO-8859-1. ISO_8859_1 ) +type Loader struct { + // Encoding determines how the data from files and byte buffers + // is interpreted. For URLs the Content-Type header is used + // to determine the encoding of the data. 
+ Encoding Encoding + + // DisableExpansion configures the property expansion of the + // returned property object. When set to true, the property values + // will not be expanded and the Property object will not be checked + // for invalid expansion expressions. + DisableExpansion bool + + // IgnoreMissing configures whether missing files or URLs which return + // 404 are reported as errors. When set to true, missing files and 404 + // status codes are not reported as errors. + IgnoreMissing bool +} + +// Load reads a buffer into a Properties struct. +func (l *Loader) LoadBytes(buf []byte) (*Properties, error) { + return l.loadBytes(buf, l.Encoding) +} + +// LoadAll reads the content of multiple URLs or files in the given order into +// a Properties struct. If IgnoreMissing is true then a 404 status code or +// missing file will not be reported as error. Encoding sets the encoding for +// files. For the URLs see LoadURL for the Content-Type header and the +// encoding. +func (l *Loader) LoadAll(names []string) (*Properties, error) { + all := NewProperties() + for _, name := range names { + n, err := expandName(name) + if err != nil { + return nil, err + } + + var p *Properties + switch { + case strings.HasPrefix(n, "http://"): + p, err = l.LoadURL(n) + case strings.HasPrefix(n, "https://"): + p, err = l.LoadURL(n) + default: + p, err = l.LoadFile(n) + } + if err != nil { + return nil, err + } + all.Merge(p) + } + + all.DisableExpansion = l.DisableExpansion + if all.DisableExpansion { + return all, nil + } + return all, all.check() +} + +// LoadFile reads a file into a Properties struct. +// If IgnoreMissing is true then a missing file will not be +// reported as error. +func (l *Loader) LoadFile(filename string) (*Properties, error) { + data, err := ioutil.ReadFile(filename) + if err != nil { + if l.IgnoreMissing && os.IsNotExist(err) { + LogPrintf("properties: %s not found. skipping", filename) + return NewProperties(), nil + } + return nil, err + } + return l.loadBytes(data, l.Encoding) +} + +// LoadURL reads the content of the URL into a Properties struct. +// +// The encoding is determined via the Content-Type header which +// should be set to 'text/plain'. If the 'charset' parameter is +// missing, 'iso-8859-1' or 'latin1' the encoding is set to +// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the +// encoding is set to UTF-8. A missing content type header is +// interpreted as 'text/plain; charset=utf-8'. +func (l *Loader) LoadURL(url string) (*Properties, error) { + resp, err := http.Get(url) + if err != nil { + return nil, fmt.Errorf("properties: error fetching %q. %s", url, err) + } + + if resp.StatusCode == 404 && l.IgnoreMissing { + LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode) + return NewProperties(), nil + } + + if resp.StatusCode != 200 { + return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode) + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("properties: %s error reading response. 
%s", url, err) + } + defer resp.Body.Close() + + ct := resp.Header.Get("Content-Type") + var enc Encoding + switch strings.ToLower(ct) { + case "text/plain", "text/plain; charset=iso-8859-1", "text/plain; charset=latin1": + enc = ISO_8859_1 + case "", "text/plain; charset=utf-8": + enc = UTF8 + default: + return nil, fmt.Errorf("properties: invalid content type %s", ct) + } + + return l.loadBytes(body, enc) +} + +func (l *Loader) loadBytes(buf []byte, enc Encoding) (*Properties, error) { + p, err := parse(convert(buf, enc)) + if err != nil { + return nil, err + } + p.DisableExpansion = l.DisableExpansion + if p.DisableExpansion { + return p, nil + } + return p, p.check() +} + // Load reads a buffer into a Properties struct. func Load(buf []byte, enc Encoding) (*Properties, error) { - return loadBuf(buf, enc) + l := &Loader{Encoding: enc} + return l.LoadBytes(buf) } // LoadString reads an UTF8 string into a properties struct. func LoadString(s string) (*Properties, error) { - return loadBuf([]byte(s), UTF8) + l := &Loader{Encoding: UTF8} + return l.LoadBytes([]byte(s)) } // LoadMap creates a new Properties struct from a string map. @@ -44,34 +180,32 @@ func LoadMap(m map[string]string) *Properties { // LoadFile reads a file into a Properties struct. func LoadFile(filename string, enc Encoding) (*Properties, error) { - return loadAll([]string{filename}, enc, false) + l := &Loader{Encoding: enc} + return l.LoadAll([]string{filename}) } // LoadFiles reads multiple files in the given order into // a Properties struct. If 'ignoreMissing' is true then // non-existent files will not be reported as error. func LoadFiles(filenames []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - return loadAll(filenames, enc, ignoreMissing) + l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} + return l.LoadAll(filenames) } // LoadURL reads the content of the URL into a Properties struct. -// -// The encoding is determined via the Content-Type header which -// should be set to 'text/plain'. If the 'charset' parameter is -// missing, 'iso-8859-1' or 'latin1' the encoding is set to -// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the -// encoding is set to UTF-8. A missing content type header is -// interpreted as 'text/plain; charset=utf-8'. +// See Loader#LoadURL for details. func LoadURL(url string) (*Properties, error) { - return loadAll([]string{url}, UTF8, false) + l := &Loader{Encoding: UTF8} + return l.LoadAll([]string{url}) } // LoadURLs reads the content of multiple URLs in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code will -// not be reported as error. See LoadURL for the Content-Type header +// Properties struct. If IgnoreMissing is true then a 404 status code will +// not be reported as error. See Loader#LoadURL for the Content-Type header // and the encoding. func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) { - return loadAll(urls, UTF8, ignoreMissing) + l := &Loader{Encoding: UTF8, IgnoreMissing: ignoreMissing} + return l.LoadAll(urls) } // LoadAll reads the content of multiple URLs or files in the given order into a @@ -79,7 +213,8 @@ func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) { // not be reported as error. Encoding sets the encoding for files. For the URLs please see // LoadURL for the Content-Type header and the encoding. 
func LoadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - return loadAll(names, enc, ignoreMissing) + l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} + return l.LoadAll(names) } // MustLoadString reads an UTF8 string into a Properties struct and @@ -122,90 +257,6 @@ func MustLoadAll(names []string, enc Encoding, ignoreMissing bool) *Properties { return must(LoadAll(names, enc, ignoreMissing)) } -func loadBuf(buf []byte, enc Encoding) (*Properties, error) { - p, err := parse(convert(buf, enc)) - if err != nil { - return nil, err - } - return p, p.check() -} - -func loadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - result := NewProperties() - for _, name := range names { - n, err := expandName(name) - if err != nil { - return nil, err - } - var p *Properties - if strings.HasPrefix(n, "http://") || strings.HasPrefix(n, "https://") { - p, err = loadURL(n, ignoreMissing) - } else { - p, err = loadFile(n, enc, ignoreMissing) - } - if err != nil { - return nil, err - } - result.Merge(p) - - } - return result, result.check() -} - -func loadFile(filename string, enc Encoding, ignoreMissing bool) (*Properties, error) { - data, err := ioutil.ReadFile(filename) - if err != nil { - if ignoreMissing && os.IsNotExist(err) { - LogPrintf("properties: %s not found. skipping", filename) - return NewProperties(), nil - } - return nil, err - } - p, err := parse(convert(data, enc)) - if err != nil { - return nil, err - } - return p, nil -} - -func loadURL(url string, ignoreMissing bool) (*Properties, error) { - resp, err := http.Get(url) - if err != nil { - return nil, fmt.Errorf("properties: error fetching %q. %s", url, err) - } - if resp.StatusCode == 404 && ignoreMissing { - LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode) - return NewProperties(), nil - } - if resp.StatusCode != 200 { - return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode) - } - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) - } - if err = resp.Body.Close(); err != nil { - return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) - } - - ct := resp.Header.Get("Content-Type") - var enc Encoding - switch strings.ToLower(ct) { - case "text/plain", "text/plain; charset=iso-8859-1", "text/plain; charset=latin1": - enc = ISO_8859_1 - case "", "text/plain; charset=utf-8": - enc = UTF8 - default: - return nil, fmt.Errorf("properties: invalid content type %s", ct) - } - - p, err := parse(convert(body, enc)) - if err != nil { - return nil, err - } - return p, nil -} - func must(p *Properties, err error) *Properties { if err != nil { ErrorHandler(err) @@ -218,7 +269,7 @@ func must(p *Properties, err error) *Properties { // with an empty string. Malformed expressions like "${ENV_VAR" will // be reported as error. func expandName(name string) (string, error) { - return expand(name, make(map[string]bool), "${", "}", make(map[string]string)) + return expand(name, []string{}, "${", "}", make(map[string]string)) } // Interprets a byte buffer either as an ISO-8859-1 or UTF-8 encoded string. @@ -226,7 +277,7 @@ func expandName(name string) (string, error) { // first 256 unicode code points cover ISO-8859-1. 
func convert(buf []byte, enc Encoding) string { switch enc { - case UTF8: + case utf8Default, UTF8: return string(buf) case ISO_8859_1: runes := make([]rune, len(buf)) diff --git a/vendor/github.com/magiconair/properties/load_test.go b/vendor/github.com/magiconair/properties/load_test.go index d8770c8..db28b99 100644 --- a/vendor/github.com/magiconair/properties/load_test.go +++ b/vendor/github.com/magiconair/properties/load_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -16,6 +16,18 @@ import ( "github.com/magiconair/properties/assert" ) +func TestEncoding(t *testing.T) { + if got, want := utf8Default, Encoding(0); got != want { + t.Fatalf("got encoding %d want %d", got, want) + } + if got, want := UTF8, Encoding(1); got != want { + t.Fatalf("got encoding %d want %d", got, want) + } + if got, want := ISO_8859_1, Encoding(2); got != want { + t.Fatalf("got encoding %d want %d", got, want) + } +} + func TestLoadFailsWithNotExistingFile(t *testing.T) { _, err := LoadFile("doesnotexist.properties", ISO_8859_1) assert.Equal(t, err != nil, true, "") diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go index 90f555c..cdc4a80 100644 --- a/vendor/github.com/magiconair/properties/parser.go +++ b/vendor/github.com/magiconair/properties/parser.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go index 4f3d5a4..cb3d1a3 100644 --- a/vendor/github.com/magiconair/properties/properties.go +++ b/vendor/github.com/magiconair/properties/properties.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -19,6 +19,8 @@ import ( "unicode/utf8" ) +const maxExpansionDepth = 64 + // ErrorHandlerFunc defines the type of function which handles failures // of the MustXXX() functions. An error handler function must exit // the application after handling the error. @@ -81,6 +83,17 @@ func NewProperties() *Properties { } } +// Load reads a buffer into the given Properties struct. +func (p *Properties) Load(buf []byte, enc Encoding) error { + l := &Loader{Encoding: enc, DisableExpansion: p.DisableExpansion} + newProperties, err := l.LoadBytes(buf) + if err != nil { + return err + } + p.Merge(newProperties) + return nil +} + // Get returns the expanded value for the given key if exists. // Otherwise, ok is false. 
func (p *Properties) Get(key string) (value string, ok bool) { @@ -92,7 +105,7 @@ func (p *Properties) Get(key string) (value string, ok bool) { return "", false } - expanded, err := p.expand(v) + expanded, err := p.expand(key, v) // we guarantee that the expanded value is free of // circular references and malformed expressions @@ -511,6 +524,9 @@ func (p *Properties) Set(key, value string) (prev string, ok bool, err error) { if p.DisableExpansion { prev, ok = p.Get(key) p.m[key] = value + if !ok { + p.k = append(p.k, key) + } return prev, ok, nil } @@ -522,7 +538,7 @@ func (p *Properties) Set(key, value string) (prev string, ok bool, err error) { p.m[key] = value // now check for a circular reference - _, err = p.expand(value) + _, err = p.expand(key, value) if err != nil { // revert to the previous state @@ -693,56 +709,65 @@ outer: // check expands all values and returns an error if a circular reference or // a malformed expression was found. func (p *Properties) check() error { - for _, value := range p.m { - if _, err := p.expand(value); err != nil { + for key, value := range p.m { + if _, err := p.expand(key, value); err != nil { return err } } return nil } -func (p *Properties) expand(input string) (string, error) { +func (p *Properties) expand(key, input string) (string, error) { // no pre/postfix -> nothing to expand if p.Prefix == "" && p.Postfix == "" { return input, nil } - return expand(input, make(map[string]bool), p.Prefix, p.Postfix, p.m) + return expand(input, []string{key}, p.Prefix, p.Postfix, p.m) } // expand recursively expands expressions of '(prefix)key(postfix)' to their corresponding values. // The function keeps track of the keys that were already expanded and stops if it // detects a circular reference or a malformed expression of the form '(prefix)key'. -func expand(s string, keys map[string]bool, prefix, postfix string, values map[string]string) (string, error) { - start := strings.Index(s, prefix) - if start == -1 { - return s, nil +func expand(s string, keys []string, prefix, postfix string, values map[string]string) (string, error) { + if len(keys) > maxExpansionDepth { + return "", fmt.Errorf("expansion too deep") } - keyStart := start + len(prefix) - keyLen := strings.Index(s[keyStart:], postfix) - if keyLen == -1 { - return "", fmt.Errorf("malformed expression") - } - - end := keyStart + keyLen + len(postfix) - 1 - key := s[keyStart : keyStart+keyLen] + for { + start := strings.Index(s, prefix) + if start == -1 { + return s, nil + } - // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." + postfix, start, end, keyStart, keyLen, key) + keyStart := start + len(prefix) + keyLen := strings.Index(s[keyStart:], postfix) + if keyLen == -1 { + return "", fmt.Errorf("malformed expression") + } - if _, ok := keys[key]; ok { - return "", fmt.Errorf("circular reference") - } + end := keyStart + keyLen + len(postfix) - 1 + key := s[keyStart : keyStart+keyLen] - val, ok := values[key] - if !ok { - val = os.Getenv(key) - } + // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." 
+ postfix, start, end, keyStart, keyLen, key) - // remember that we've seen the key - keys[key] = true + for _, k := range keys { + if key == k { + return "", fmt.Errorf("circular reference") + } + } - return expand(s[:start]+val+s[end+1:], keys, prefix, postfix, values) + val, ok := values[key] + if !ok { + val = os.Getenv(key) + } + new_val, err := expand(val, append(keys, key), prefix, postfix, values) + if err != nil { + return "", err + } + s = s[:start] + new_val + s[end+1:] + } + return s, nil } // encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters. diff --git a/vendor/github.com/magiconair/properties/properties_test.go b/vendor/github.com/magiconair/properties/properties_test.go index 0eac1f4..5db64c3 100644 --- a/vendor/github.com/magiconair/properties/properties_test.go +++ b/vendor/github.com/magiconair/properties/properties_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -44,6 +44,8 @@ var complexTests = [][]string{ {"\nkey=value\n", "key", "value"}, {"\rkey=value\r", "key", "value"}, {"\r\nkey=value\r\n", "key", "value"}, + {"\nkey=value\n \nkey2=value2", "key", "value", "key2", "value2"}, + {"\nkey=value\n\t\nkey2=value2", "key", "value", "key2", "value2"}, // escaped chars in key {"k\\ ey = value", "k ey", "value"}, @@ -88,6 +90,10 @@ var complexTests = [][]string{ {"key=value\nkey2=${key}bb", "key", "value", "key2", "valuebb"}, {"key=value\nkey2=aa${key}bb", "key", "value", "key2", "aavaluebb"}, {"key=value\nkey2=${key}\nkey3=${key2}", "key", "value", "key2", "value", "key3", "value"}, + {"key=value\nkey2=${key}${key}", "key", "value", "key2", "valuevalue"}, + {"key=value\nkey2=${key}${key}${key}${key}", "key", "value", "key2", "valuevaluevaluevalue"}, + {"key=value\nkey2=${key}${key3}\nkey3=${key}", "key", "value", "key2", "valuevalue", "key3", "value"}, + {"key=value\nkey2=${key3}${key}${key4}\nkey3=${key}\nkey4=${key}", "key", "value", "key2", "valuevaluevalue", "key3", "value", "key4", "value"}, {"key=${USER}", "key", os.Getenv("USER")}, {"key=${USER}\nUSER=value", "key", "value", "USER", "value"}, } @@ -444,6 +450,30 @@ func TestErrors(t *testing.T) { } } +func TestVeryDeep(t *testing.T) { + input := "key0=value\n" + prefix := "${" + postfix := "}" + i := 0 + for i = 0; i < maxExpansionDepth-1; i++ { + input += fmt.Sprintf("key%d=%skey%d%s\n", i+1, prefix, i, postfix) + } + + p, err := Load([]byte(input), ISO_8859_1) + assert.Equal(t, err, nil) + p.Prefix = prefix + p.Postfix = postfix + + assert.Equal(t, p.MustGet(fmt.Sprintf("key%d", i)), "value") + + // Nudge input over the edge + input += fmt.Sprintf("key%d=%skey%d%s\n", i+1, prefix, i, postfix) + + _, err = Load([]byte(input), ISO_8859_1) + assert.Equal(t, err != nil, true, "want error") + assert.Equal(t, strings.Contains(err.Error(), "expansion too deep"), true) +} + func TestDisableExpansion(t *testing.T) { input := "key=value\nkey2=${key}" p := mustParse(t, input) @@ -458,6 +488,19 @@ func TestDisableExpansion(t *testing.T) { assert.Equal(t, p.MustGet("keyB"), "${keyA}") } +func TestDisableExpansionStillUpdatesKeys(t *testing.T) { + p := NewProperties() + p.MustSet("p1", "a") + assert.Equal(t, p.Keys(), []string{"p1"}) + assert.Equal(t, p.String(), "p1 = a\n") + + p.DisableExpansion = true + p.MustSet("p2", "b") + + assert.Equal(t, p.Keys(), []string{"p1", "p2"}) + assert.Equal(t, p.String(), "p1 
= a\np2 = b\n") +} + func TestMustGet(t *testing.T) { input := "key = value\nkey2 = ghi" p := mustParse(t, input) @@ -860,6 +903,14 @@ func TestFilterFunc(t *testing.T) { assert.Equal(t, pp.Map(), m) } +func TestLoad(t *testing.T) { + x := "key=${value}\nvalue=${key}" + p := NewProperties() + p.DisableExpansion = true + err := p.Load([]byte(x), UTF8) + assert.Equal(t, err, nil) +} + // ---------------------------------------------------------------------------- // tests all combinations of delimiters, leading and/or trailing whitespace and newlines. diff --git a/vendor/github.com/magiconair/properties/rangecheck.go b/vendor/github.com/magiconair/properties/rangecheck.go index 2e907d5..b013a2e 100644 --- a/vendor/github.com/magiconair/properties/rangecheck.go +++ b/vendor/github.com/magiconair/properties/rangecheck.go @@ -1,4 +1,4 @@ -// Copyright 2017 Frank Schroeder. All rights reserved. +// Copyright 2018 Frank Schroeder. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/vendor/github.com/mitchellh/mapstructure/.travis.yml b/vendor/github.com/mitchellh/mapstructure/.travis.yml index 5c14c13..1689c7d 100644 --- a/vendor/github.com/mitchellh/mapstructure/.travis.yml +++ b/vendor/github.com/mitchellh/mapstructure/.travis.yml @@ -1,7 +1,8 @@ -language: go +language: go + +go: + - "1.11.x" + - tip -go: - - 1.8.1 - script: - - go test + - go test diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md new file mode 100644 index 0000000..3b3cb72 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md @@ -0,0 +1,21 @@ +## 1.1.2 + +* Fix error when decode hook decodes interface implementation into interface + type. [GH-140] + +## 1.1.1 + +* Fix panic that can happen in `decodePtr` + +## 1.1.0 + +* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133] +* Support struct to struct decoding [GH-137] +* If source map value is nil, then destination map value is nil (instead of empty) +* If source slice value is nil, then destination slice value is nil (instead of empty) +* If source pointer is nil, then destination pointer is set to nil (instead of + allocated zero value of type) + +## 1.0.0 + +* Initial tagged stable release. diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md index 659d688..0018dc7 100644 --- a/vendor/github.com/mitchellh/mapstructure/README.md +++ b/vendor/github.com/mitchellh/mapstructure/README.md @@ -1,4 +1,4 @@ -# mapstructure +# mapstructure [![Godoc](https://godoc.org/github.com/mitchellh/mapstructure?status.svg)](https://godoc.org/github.com/mitchellh/mapstructure) mapstructure is a Go library for decoding generic map values to structures and vice versa, while providing helpful error handling. 
diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go index afcfd5e..1f0abc6 100644 --- a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go +++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go @@ -2,6 +2,8 @@ package mapstructure import ( "errors" + "fmt" + "net" "reflect" "strconv" "strings" @@ -115,6 +117,69 @@ func StringToTimeDurationHookFunc() DecodeHookFunc { } } +// StringToIPHookFunc returns a DecodeHookFunc that converts +// strings to net.IP +func StringToIPHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(net.IP{}) { + return data, nil + } + + // Convert it by parsing + ip := net.ParseIP(data.(string)) + if ip == nil { + return net.IP{}, fmt.Errorf("failed parsing ip %v", data) + } + + return ip, nil + } +} + +// StringToIPNetHookFunc returns a DecodeHookFunc that converts +// strings to net.IPNet +func StringToIPNetHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(net.IPNet{}) { + return data, nil + } + + // Convert it by parsing + _, net, err := net.ParseCIDR(data.(string)) + return net, err + } +} + +// StringToTimeHookFunc returns a DecodeHookFunc that converts +// strings to time.Time. +func StringToTimeHookFunc(layout string) DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(time.Time{}) { + return data, nil + } + + // Convert it by parsing + return time.Parse(layout, data.(string)) + } +} + // WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to // the decoder. 
// diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks_test.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks_test.go index 53289af..028afbd 100644 --- a/vendor/github.com/mitchellh/mapstructure/decode_hooks_test.go +++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks_test.go @@ -2,6 +2,7 @@ package mapstructure import ( "errors" + "net" "reflect" "testing" "time" @@ -153,6 +154,99 @@ func TestStringToTimeDurationHookFunc(t *testing.T) { } } +func TestStringToTimeHookFunc(t *testing.T) { + strType := reflect.TypeOf("") + timeType := reflect.TypeOf(time.Time{}) + cases := []struct { + f, t reflect.Type + layout string + data interface{} + result interface{} + err bool + }{ + {strType, timeType, time.RFC3339, "2006-01-02T15:04:05Z", + time.Date(2006, 1, 2, 15, 4, 5, 0, time.UTC), false}, + {strType, timeType, time.RFC3339, "5", time.Time{}, true}, + {strType, strType, time.RFC3339, "5", "5", false}, + } + + for i, tc := range cases { + f := StringToTimeHookFunc(tc.layout) + actual, err := DecodeHookExec(f, tc.f, tc.t, tc.data) + if tc.err != (err != nil) { + t.Fatalf("case %d: expected err %#v", i, tc.err) + } + if !reflect.DeepEqual(actual, tc.result) { + t.Fatalf( + "case %d: expected %#v, got %#v", + i, tc.result, actual) + } + } +} + +func TestStringToIPHookFunc(t *testing.T) { + strType := reflect.TypeOf("") + ipType := reflect.TypeOf(net.IP{}) + cases := []struct { + f, t reflect.Type + data interface{} + result interface{} + err bool + }{ + {strType, ipType, "1.2.3.4", + net.IPv4(0x01, 0x02, 0x03, 0x04), false}, + {strType, ipType, "5", net.IP{}, true}, + {strType, strType, "5", "5", false}, + } + + for i, tc := range cases { + f := StringToIPHookFunc() + actual, err := DecodeHookExec(f, tc.f, tc.t, tc.data) + if tc.err != (err != nil) { + t.Fatalf("case %d: expected err %#v", i, tc.err) + } + if !reflect.DeepEqual(actual, tc.result) { + t.Fatalf( + "case %d: expected %#v, got %#v", + i, tc.result, actual) + } + } +} + +func TestStringToIPNetHookFunc(t *testing.T) { + strType := reflect.TypeOf("") + ipNetType := reflect.TypeOf(net.IPNet{}) + var nilNet *net.IPNet = nil + + cases := []struct { + f, t reflect.Type + data interface{} + result interface{} + err bool + }{ + {strType, ipNetType, "1.2.3.4/24", + &net.IPNet{ + IP: net.IP{0x01, 0x02, 0x03, 0x00}, + Mask: net.IPv4Mask(0xff, 0xff, 0xff, 0x00), + }, false}, + {strType, ipNetType, "5", nilNet, true}, + {strType, strType, "5", "5", false}, + } + + for i, tc := range cases { + f := StringToIPNetHookFunc() + actual, err := DecodeHookExec(f, tc.f, tc.t, tc.data) + if tc.err != (err != nil) { + t.Fatalf("case %d: expected err %#v", i, tc.err) + } + if !reflect.DeepEqual(actual, tc.result) { + t.Fatalf( + "case %d: expected %#v, got %#v", + i, tc.result, actual) + } + } +} + func TestWeaklyTypedHook(t *testing.T) { var f DecodeHookFunc = WeaklyTypedHook diff --git a/vendor/github.com/mitchellh/mapstructure/go.mod b/vendor/github.com/mitchellh/mapstructure/go.mod new file mode 100644 index 0000000..d2a7125 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/go.mod @@ -0,0 +1 @@ +module github.com/mitchellh/mapstructure diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go index 6ec5c33..256ee63 100644 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure.go +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go @@ -114,12 +114,12 @@ type Metadata struct { Unused []string } -// Decode takes a map and uses 
reflection to convert it into the -// given Go native structure. val must be a pointer to a struct. -func Decode(m interface{}, rawVal interface{}) error { +// Decode takes an input structure and uses reflection to translate it to +// the output structure. output must be a pointer to a map or struct. +func Decode(input interface{}, output interface{}) error { config := &DecoderConfig{ Metadata: nil, - Result: rawVal, + Result: output, } decoder, err := NewDecoder(config) @@ -127,7 +127,7 @@ func Decode(m interface{}, rawVal interface{}) error { return err } - return decoder.Decode(m) + return decoder.Decode(input) } // WeakDecode is the same as Decode but is shorthand to enable @@ -147,6 +147,40 @@ func WeakDecode(input, output interface{}) error { return decoder.Decode(input) } +// DecodeMetadata is the same as Decode, but is shorthand to +// enable metadata collection. See DecoderConfig for more info. +func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// WeakDecodeMetadata is the same as Decode, but is shorthand to +// enable both WeaklyTypedInput and metadata collection. See +// DecoderConfig for more info. +func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + WeaklyTypedInput: true, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + // NewDecoder returns a new decoder for the given configuration. Once // a decoder has been returned, the same configuration must not be used // again. @@ -184,68 +218,91 @@ func NewDecoder(config *DecoderConfig) (*Decoder, error) { // Decode decodes the given raw interface to the target pointer specified // by the configuration. -func (d *Decoder) Decode(raw interface{}) error { - return d.decode("", raw, reflect.ValueOf(d.config.Result).Elem()) +func (d *Decoder) Decode(input interface{}) error { + return d.decode("", input, reflect.ValueOf(d.config.Result).Elem()) } // Decodes an unknown data type into a specific reflection value. -func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error { - if data == nil { - // If the data is nil, then we don't set anything. +func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { + var inputVal reflect.Value + if input != nil { + inputVal = reflect.ValueOf(input) + + // We need to check here if input is a typed nil. Typed nils won't + // match the "input == nil" below so we check that here. + if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() { + input = nil + } + } + + if input == nil { + // If the data is nil, then we don't set anything, unless ZeroFields is set + // to true. + if d.config.ZeroFields { + outVal.Set(reflect.Zero(outVal.Type())) + + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + } return nil } - dataVal := reflect.ValueOf(data) - if !dataVal.IsValid() { - // If the data value is invalid, then we just set the value + if !inputVal.IsValid() { + // If the input value is invalid, then we just set the value // to be the zero value. 
- val.Set(reflect.Zero(val.Type())) + outVal.Set(reflect.Zero(outVal.Type())) + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } return nil } if d.config.DecodeHook != nil { - // We have a DecodeHook, so let's pre-process the data. + // We have a DecodeHook, so let's pre-process the input. var err error - data, err = DecodeHookExec( + input, err = DecodeHookExec( d.config.DecodeHook, - dataVal.Type(), val.Type(), data) + inputVal.Type(), outVal.Type(), input) if err != nil { return fmt.Errorf("error decoding '%s': %s", name, err) } } var err error - dataKind := getKind(val) - switch dataKind { + outputKind := getKind(outVal) + switch outputKind { case reflect.Bool: - err = d.decodeBool(name, data, val) + err = d.decodeBool(name, input, outVal) case reflect.Interface: - err = d.decodeBasic(name, data, val) + err = d.decodeBasic(name, input, outVal) case reflect.String: - err = d.decodeString(name, data, val) + err = d.decodeString(name, input, outVal) case reflect.Int: - err = d.decodeInt(name, data, val) + err = d.decodeInt(name, input, outVal) case reflect.Uint: - err = d.decodeUint(name, data, val) + err = d.decodeUint(name, input, outVal) case reflect.Float32: - err = d.decodeFloat(name, data, val) + err = d.decodeFloat(name, input, outVal) case reflect.Struct: - err = d.decodeStruct(name, data, val) + err = d.decodeStruct(name, input, outVal) case reflect.Map: - err = d.decodeMap(name, data, val) + err = d.decodeMap(name, input, outVal) case reflect.Ptr: - err = d.decodePtr(name, data, val) + err = d.decodePtr(name, input, outVal) case reflect.Slice: - err = d.decodeSlice(name, data, val) + err = d.decodeSlice(name, input, outVal) + case reflect.Array: + err = d.decodeArray(name, input, outVal) case reflect.Func: - err = d.decodeFunc(name, data, val) + err = d.decodeFunc(name, input, outVal) default: // If we reached this point then we weren't able to decode it - return fmt.Errorf("%s: unsupported type: %s", name, dataKind) + return fmt.Errorf("%s: unsupported type: %s", name, outputKind) } // If we reached here, then we successfully decoded SOMETHING, so - // mark the key as used if we're tracking metadata. + // mark the key as used if we're tracking metainput. if d.config.Metadata != nil && name != "" { d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) } @@ -256,7 +313,19 @@ func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error // This decodes a basic type (bool, int, string, etc.) and sets the // value to "data" of that type. func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { + if val.IsValid() && val.Elem().IsValid() { + return d.decode(name, data, val.Elem()) + } + dataVal := reflect.ValueOf(data) + + // If the input data is a pointer, and the assigned type is the dereference + // of that exact pointer, then indirect it so that we can assign it. 
+ // Example: *string to string + if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() { + dataVal = reflect.Indirect(dataVal) + } + if !dataVal.IsValid() { dataVal = reflect.Zero(val.Type()) } @@ -273,7 +342,7 @@ func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) } func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) + dataVal := reflect.Indirect(reflect.ValueOf(data)) dataKind := getKind(dataVal) converted := true @@ -292,12 +361,22 @@ func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) - case dataKind == reflect.Slice && d.config.WeaklyTypedInput: + case dataKind == reflect.Slice && d.config.WeaklyTypedInput, + dataKind == reflect.Array && d.config.WeaklyTypedInput: dataType := dataVal.Type() elemKind := dataType.Elem().Kind() - switch { - case elemKind == reflect.Uint8: - val.SetString(string(dataVal.Interface().([]uint8))) + switch elemKind { + case reflect.Uint8: + var uints []uint8 + if dataKind == reflect.Array { + uints = make([]uint8, dataVal.Len(), dataVal.Len()) + for i := range uints { + uints[i] = dataVal.Index(i).Interface().(uint8) + } + } else { + uints = dataVal.Interface().([]uint8) + } + val.SetString(string(uints)) default: converted = false } @@ -315,7 +394,7 @@ func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) } func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) + dataVal := reflect.Indirect(reflect.ValueOf(data)) dataKind := getKind(dataVal) dataType := dataVal.Type() @@ -357,7 +436,7 @@ func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) er } func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) + dataVal := reflect.Indirect(reflect.ValueOf(data)) dataKind := getKind(dataVal) switch { @@ -400,7 +479,7 @@ func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) e } func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) + dataVal := reflect.Indirect(reflect.ValueOf(data)) dataKind := getKind(dataVal) switch { @@ -431,7 +510,7 @@ func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) e } func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.ValueOf(data) + dataVal := reflect.Indirect(reflect.ValueOf(data)) dataKind := getKind(dataVal) dataType := dataVal.Type() @@ -487,38 +566,68 @@ func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) er valMap = reflect.MakeMap(mapType) } - // Check input type + // Check input type and based on the input type jump to the proper func dataVal := reflect.Indirect(reflect.ValueOf(data)) - if dataVal.Kind() != reflect.Map { - // In weak mode, we accept a slice of maps as an input... 
- if d.config.WeaklyTypedInput { - switch dataVal.Kind() { - case reflect.Array, reflect.Slice: - // Special case for BC reasons (covered by tests) - if dataVal.Len() == 0 { - val.Set(valMap) - return nil - } + switch dataVal.Kind() { + case reflect.Map: + return d.decodeMapFromMap(name, dataVal, val, valMap) - for i := 0; i < dataVal.Len(); i++ { - err := d.decode( - fmt.Sprintf("%s[%d]", name, i), - dataVal.Index(i).Interface(), val) - if err != nil { - return err - } - } + case reflect.Struct: + return d.decodeMapFromStruct(name, dataVal, val, valMap) - return nil - } + case reflect.Array, reflect.Slice: + if d.config.WeaklyTypedInput { + return d.decodeMapFromSlice(name, dataVal, val, valMap) } + fallthrough + + default: return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) } +} + +func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + // Special case for BC reasons (covered by tests) + if dataVal.Len() == 0 { + val.Set(valMap) + return nil + } + + for i := 0; i < dataVal.Len(); i++ { + err := d.decode( + fmt.Sprintf("%s[%d]", name, i), + dataVal.Index(i).Interface(), val) + if err != nil { + return err + } + } + + return nil +} + +func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + valType := val.Type() + valKeyType := valType.Key() + valElemType := valType.Elem() // Accumulate errors errors := make([]string, 0) + // If the input data is empty, then we just match what the input data is. + if dataVal.Len() == 0 { + if dataVal.IsNil() { + if !val.IsNil() { + val.Set(dataVal) + } + } else { + // Set to empty allocated value + val.Set(valMap) + } + + return nil + } + for _, k := range dataVal.MapKeys() { fieldName := fmt.Sprintf("%s[%s]", name, k) @@ -551,22 +660,128 @@ func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) er return nil } +func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + typ := dataVal.Type() + for i := 0; i < typ.NumField(); i++ { + // Get the StructField first since this is a cheap operation. If the + // field is unexported, then ignore it. + f := typ.Field(i) + if f.PkgPath != "" { + continue + } + + // Next get the actual value of this field and verify it is assignable + // to the map value. + v := dataVal.Field(i) + if !v.Type().AssignableTo(valMap.Type().Elem()) { + return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem()) + } + + tagValue := f.Tag.Get(d.config.TagName) + tagParts := strings.Split(tagValue, ",") + + // Determine the name of the key in the map + keyName := f.Name + if tagParts[0] != "" { + if tagParts[0] == "-" { + continue + } + keyName = tagParts[0] + } + + // If "squash" is specified in the tag, we squash the field down. 
+ squash := false + for _, tag := range tagParts[1:] { + if tag == "squash" { + squash = true + break + } + } + if squash && v.Kind() != reflect.Struct { + return fmt.Errorf("cannot squash non-struct type '%s'", v.Type()) + } + + switch v.Kind() { + // this is an embedded struct, so handle it differently + case reflect.Struct: + x := reflect.New(v.Type()) + x.Elem().Set(v) + + vType := valMap.Type() + vKeyType := vType.Key() + vElemType := vType.Elem() + mType := reflect.MapOf(vKeyType, vElemType) + vMap := reflect.MakeMap(mType) + + err := d.decode(keyName, x.Interface(), vMap) + if err != nil { + return err + } + + if squash { + for _, k := range vMap.MapKeys() { + valMap.SetMapIndex(k, vMap.MapIndex(k)) + } + } else { + valMap.SetMapIndex(reflect.ValueOf(keyName), vMap) + } + + default: + valMap.SetMapIndex(reflect.ValueOf(keyName), v) + } + } + + if val.CanAddr() { + val.Set(valMap) + } + + return nil +} + func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) error { + // If the input data is nil, then we want to just set the output + // pointer to be nil as well. + isNil := data == nil + if !isNil { + switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() { + case reflect.Chan, + reflect.Func, + reflect.Interface, + reflect.Map, + reflect.Ptr, + reflect.Slice: + isNil = v.IsNil() + } + } + if isNil { + if !val.IsNil() && val.CanSet() { + nilValue := reflect.New(val.Type()).Elem() + val.Set(nilValue) + } + + return nil + } + // Create an element of the concrete (non pointer) type and decode // into that. Then set the value of the pointer to this type. valType := val.Type() valElemType := valType.Elem() + if val.CanSet() { + realVal := val + if realVal.IsNil() || d.config.ZeroFields { + realVal = reflect.New(valElemType) + } - realVal := val - if realVal.IsNil() || d.config.ZeroFields { - realVal = reflect.New(valElemType) - } + if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { + return err + } - if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { - return err + val.Set(realVal) + } else { + if err := d.decode(name, data, reflect.Indirect(val)); err != nil { + return err + } } - - val.Set(realVal) return nil } @@ -592,22 +807,101 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) valSlice := val if valSlice.IsNil() || d.config.ZeroFields { + if d.config.WeaklyTypedInput { + switch { + // Slice and array we use the normal logic + case dataValKind == reflect.Slice, dataValKind == reflect.Array: + break + + // Empty maps turn into empty slices + case dataValKind == reflect.Map: + if dataVal.Len() == 0 { + val.Set(reflect.MakeSlice(sliceType, 0, 0)) + return nil + } + // Create slice of maps of other sizes + return d.decodeSlice(name, []interface{}{data}, val) + + case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8: + return d.decodeSlice(name, []byte(dataVal.String()), val) + + // All other types we try to convert to the slice type + // and "lift" it into it. i.e. a string becomes a string slice. + default: + // Just re-try this function with data as a slice. 
+ return d.decodeSlice(name, []interface{}{data}, val) + } + } + + // Check input type + if dataValKind != reflect.Array && dataValKind != reflect.Slice { + return fmt.Errorf( + "'%s': source data must be an array or slice, got %s", name, dataValKind) + + } + + // If the input value is empty, then don't allocate since non-nil != nil + if dataVal.Len() == 0 { + return nil + } + + // Make a new slice to hold our result, same size as the original data. + valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) + } + + // Accumulate any errors + errors := make([]string, 0) + + for i := 0; i < dataVal.Len(); i++ { + currentData := dataVal.Index(i).Interface() + for valSlice.Len() <= i { + valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) + } + currentField := valSlice.Index(i) + + fieldName := fmt.Sprintf("%s[%d]", name, i) + if err := d.decode(fieldName, currentData, currentField); err != nil { + errors = appendErrors(errors, err) + } + } + + // Finally, set the value to the slice we built up + val.Set(valSlice) + + // If there were errors, we return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataValKind := dataVal.Kind() + valType := val.Type() + valElemType := valType.Elem() + arrayType := reflect.ArrayOf(valType.Len(), valElemType) + + valArray := val + + if valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields { // Check input type if dataValKind != reflect.Array && dataValKind != reflect.Slice { if d.config.WeaklyTypedInput { switch { - // Empty maps turn into empty slices + // Empty maps turn into empty arrays case dataValKind == reflect.Map: if dataVal.Len() == 0 { - val.Set(reflect.MakeSlice(sliceType, 0, 0)) + val.Set(reflect.Zero(arrayType)) return nil } - // All other types we try to convert to the slice type - // and "lift" it into it. i.e. a string becomes a string slice. + // All other types we try to convert to the array type + // and "lift" it into it. i.e. a string becomes a string array. default: // Just re-try this function with data as a slice. - return d.decodeSlice(name, []interface{}{data}, val) + return d.decodeArray(name, []interface{}{data}, val) } } @@ -615,9 +909,14 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) "'%s': source data must be an array or slice, got %s", name, dataValKind) } + if dataVal.Len() > arrayType.Len() { + return fmt.Errorf( + "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len()) - // Make a new slice to hold our result, same size as the original data. - valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) + } + + // Make a new array to hold our result, same size as the original data. 
+ valArray = reflect.New(arrayType).Elem() } // Accumulate any errors @@ -625,10 +924,7 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) for i := 0; i < dataVal.Len(); i++ { currentData := dataVal.Index(i).Interface() - for valSlice.Len() <= i { - valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) - } - currentField := valSlice.Index(i) + currentField := valArray.Index(i) fieldName := fmt.Sprintf("%s[%d]", name, i) if err := d.decode(fieldName, currentData, currentField); err != nil { @@ -636,8 +932,8 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) } } - // Finally, set the value to the slice we built up - val.Set(valSlice) + // Finally, set the value to the array we built up + val.Set(valArray) // If there were errors, we return those if len(errors) > 0 { @@ -658,10 +954,29 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) } dataValKind := dataVal.Kind() - if dataValKind != reflect.Map { - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataValKind) + switch dataValKind { + case reflect.Map: + return d.decodeStructFromMap(name, dataVal, val) + + case reflect.Struct: + // Not the most efficient way to do this but we can optimize later if + // we want to. To convert from struct to struct we go to map first + // as an intermediary. + m := make(map[string]interface{}) + mval := reflect.Indirect(reflect.ValueOf(&m)) + if err := d.decodeMapFromStruct(name, dataVal, mval, mval); err != nil { + return err + } + + result := d.decodeStructFromMap(name, mval, val) + return result + + default: + return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) } +} +func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error { dataValType := dataVal.Type() if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { return fmt.Errorf( @@ -686,7 +1001,11 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) // Compile the list of all the fields that we're going to be decoding // from all the structs. 
- fields := make(map[*reflect.StructField]reflect.Value) + type field struct { + field reflect.StructField + val reflect.Value + } + fields := []field{} for len(structs) > 0 { structVal := structs[0] structs = structs[1:] @@ -712,20 +1031,22 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) errors = appendErrors(errors, fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldKind)) } else { - structs = append(structs, val.FieldByName(fieldType.Name)) + structs = append(structs, structVal.FieldByName(fieldType.Name)) } continue } // Normal struct field, store it away - fields[&fieldType] = structVal.Field(i) + fields = append(fields, field{fieldType, structVal.Field(i)}) } } - for fieldType, field := range fields { - fieldName := fieldType.Name + // for fieldType, field := range fields { + for _, f := range fields { + field, fieldValue := f.field, f.val + fieldName := field.Name - tagValue := fieldType.Tag.Get(d.config.TagName) + tagValue := field.Tag.Get(d.config.TagName) tagValue = strings.SplitN(tagValue, ",", 2)[0] if tagValue != "" { fieldName = tagValue @@ -760,14 +1081,14 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) // Delete the key we're using from the unused map so we stop tracking delete(dataValKeysUnused, rawMapKey.Interface()) - if !field.IsValid() { + if !fieldValue.IsValid() { // This should never happen panic("field is not valid") } // If we can't set the field, then it is unexported or something, // and we just continue onwards. - if !field.CanSet() { + if !fieldValue.CanSet() { continue } @@ -777,7 +1098,7 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) fieldName = fmt.Sprintf("%s.%s", name, fieldName) } - if err := d.decode(fieldName, rawMapVal.Interface(), field); err != nil { + if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil { errors = appendErrors(errors, err) } } diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure_bugs_test.go b/vendor/github.com/mitchellh/mapstructure/mapstructure_bugs_test.go index 08e4956..ba6590a 100644 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure_bugs_test.go +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure_bugs_test.go @@ -1,48 +1,142 @@ package mapstructure -import "testing" +import ( + "reflect" + "testing" +) -// GH-1 +// GH-1, GH-10, GH-96 func TestDecode_NilValue(t *testing.T) { - input := map[string]interface{}{ - "vfoo": nil, - "vother": nil, - } + t.Parallel() - var result Map - err := Decode(input, &result) - if err != nil { - t.Fatalf("should not error: %s", err) + tests := []struct { + name string + in interface{} + target interface{} + out interface{} + metaKeys []string + metaUnused []string + }{ + { + "all nil", + &map[string]interface{}{ + "vfoo": nil, + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{}, + }, + { + "partial nil", + &map[string]interface{}{ + "vfoo": "baz", + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "baz", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{}, + }, + { + "partial decode", + &map[string]interface{}{ + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "foo", Vother: nil}, + []string{"Vother"}, + []string{}, + }, + { + "unused values", + &map[string]interface{}{ + "vbar": "bar", + "vfoo": nil, + "vother": 
nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{"vbar"}, + }, + { + "map interface all nil", + &map[interface{}]interface{}{ + "vfoo": nil, + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{}, + }, + { + "map interface partial nil", + &map[interface{}]interface{}{ + "vfoo": "baz", + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "baz", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{}, + }, + { + "map interface partial decode", + &map[interface{}]interface{}{ + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "foo", Vother: nil}, + []string{"Vother"}, + []string{}, + }, + { + "map interface unused values", + &map[interface{}]interface{}{ + "vbar": "bar", + "vfoo": nil, + "vother": nil, + }, + &Map{Vfoo: "foo", Vother: map[string]string{"foo": "bar"}}, + &Map{Vfoo: "", Vother: nil}, + []string{"Vfoo", "Vother"}, + []string{"vbar"}, + }, } - if result.Vfoo != "" { - t.Fatalf("value should be default: %s", result.Vfoo) - } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + config := &DecoderConfig{ + Metadata: new(Metadata), + Result: tc.target, + ZeroFields: true, + } - if result.Vother != nil { - t.Fatalf("Vother should be nil: %s", result.Vother) - } -} + decoder, err := NewDecoder(config) + if err != nil { + t.Fatalf("should not error: %s", err) + } -// GH-10 -func TestDecode_mapInterfaceInterface(t *testing.T) { - input := map[interface{}]interface{}{ - "vfoo": nil, - "vother": nil, - } + err = decoder.Decode(tc.in) + if err != nil { + t.Fatalf("should not error: %s", err) + } - var result Map - err := Decode(input, &result) - if err != nil { - t.Fatalf("should not error: %s", err) - } + if !reflect.DeepEqual(tc.out, tc.target) { + t.Fatalf("%q: TestDecode_NilValue() expected: %#v, got: %#v", tc.name, tc.out, tc.target) + } - if result.Vfoo != "" { - t.Fatalf("value should be default: %s", result.Vfoo) - } + if !reflect.DeepEqual(tc.metaKeys, config.Metadata.Keys) { + t.Fatalf("%q: Metadata.Keys mismatch expected: %#v, got: %#v", tc.name, tc.metaKeys, config.Metadata.Keys) + } - if result.Vother != nil { - t.Fatalf("Vother should be nil: %s", result.Vother) + if !reflect.DeepEqual(tc.metaUnused, config.Metadata.Unused) { + t.Fatalf("%q: Metadata.Unused mismatch expected: %#v, got: %#v", tc.name, tc.metaUnused, config.Metadata.Unused) + } + }) } } @@ -258,3 +352,109 @@ func TestDecodeSliceToEmptySliceWOZeroing(t *testing.T) { } } } + +// #70 +func TestNextSquashMapstructure(t *testing.T) { + data := &struct { + Level1 struct { + Level2 struct { + Foo string + } `mapstructure:",squash"` + } `mapstructure:",squash"` + }{} + err := Decode(map[interface{}]interface{}{"foo": "baz"}, &data) + if err != nil { + t.Fatalf("should not error: %s", err) + } + if data.Level1.Level2.Foo != "baz" { + t.Fatal("value should be baz") + } +} + +type ImplementsInterfacePointerReceiver struct { + Name string +} + +func (i *ImplementsInterfacePointerReceiver) DoStuff() {} + +type ImplementsInterfaceValueReceiver string + +func (i ImplementsInterfaceValueReceiver) DoStuff() {} + +// GH-140 Type error when using DecodeHook to decode into interface +func TestDecode_DecodeHookInterface(t *testing.T) { + t.Parallel() + + type Interface interface { + DoStuff() + } + type DecodeIntoInterface struct { + 
Test Interface + } + + testData := map[string]string{"test": "test"} + + stringToPointerInterfaceDecodeHook := func(from, to reflect.Type, data interface{}) (interface{}, error) { + if from.Kind() != reflect.String { + return data, nil + } + + if to != reflect.TypeOf((*Interface)(nil)).Elem() { + return data, nil + } + // Ensure interface is satisfied + var impl Interface = &ImplementsInterfacePointerReceiver{data.(string)} + return impl, nil + } + + stringToValueInterfaceDecodeHook := func(from, to reflect.Type, data interface{}) (interface{}, error) { + if from.Kind() != reflect.String { + return data, nil + } + + if to != reflect.TypeOf((*Interface)(nil)).Elem() { + return data, nil + } + // Ensure interface is satisfied + var impl Interface = ImplementsInterfaceValueReceiver(data.(string)) + return impl, nil + } + + { + decodeInto := new(DecodeIntoInterface) + + decoder, _ := NewDecoder(&DecoderConfig{ + DecodeHook: stringToPointerInterfaceDecodeHook, + Result: decodeInto, + }) + + err := decoder.Decode(testData) + if err != nil { + t.Fatalf("Decode returned error: %s", err) + } + + expected := &ImplementsInterfacePointerReceiver{"test"} + if !reflect.DeepEqual(decodeInto.Test, expected) { + t.Fatalf("expected: %#v (%T), got: %#v (%T)", decodeInto.Test, decodeInto.Test, expected, expected) + } + } + + { + decodeInto := new(DecodeIntoInterface) + + decoder, _ := NewDecoder(&DecoderConfig{ + DecodeHook: stringToValueInterfaceDecodeHook, + Result: decodeInto, + }) + + err := decoder.Decode(testData) + if err != nil { + t.Fatalf("Decode returned error: %s", err) + } + + expected := ImplementsInterfaceValueReceiver("test") + if !reflect.DeepEqual(decodeInto.Test, expected) { + t.Fatalf("expected: %#v (%T), got: %#v (%T)", decodeInto.Test, decodeInto.Test, expected, expected) + } + } +} diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure_test.go b/vendor/github.com/mitchellh/mapstructure/mapstructure_test.go index 547af73..27ac10e 100644 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure_test.go +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure_test.go @@ -23,6 +23,20 @@ type Basic struct { VjsonNumber json.Number } +type BasicPointer struct { + Vstring *string + Vint *int + Vuint *uint + Vbool *bool + Vfloat *float64 + Vextra *string + vsilent *bool + Vdata *interface{} + VjsonInt *int + VjsonFloat *float64 + VjsonNumber *json.Number +} + type BasicSquash struct { Test Basic `mapstructure:",squash"` } @@ -49,6 +63,13 @@ type EmbeddedSlice struct { Vunique string } +type ArrayAlias [2]string + +type EmbeddedArray struct { + ArrayAlias `mapstructure:"array_alias"` + Vunique string +} + type SquashOnNonStructType struct { InvalidSquashType int `mapstructure:",squash"` } @@ -76,15 +97,37 @@ type NilInterface struct { W io.Writer } +type NilPointer struct { + Value *string +} + type Slice struct { Vfoo string Vbar []string } +type SliceOfAlias struct { + Vfoo string + Vbar SliceAlias +} + type SliceOfStruct struct { Value []Basic } +type SlicePointer struct { + Vbar *[]string +} + +type Array struct { + Vfoo string + Vbar [2]string +} + +type ArrayOfStruct struct { + Value [2]Basic +} + type Func struct { Foo func() string } @@ -112,14 +155,20 @@ type TypeConversionResult struct { FloatToBool bool FloatToString string SliceUint8ToString string + StringToSliceUint8 []byte + ArrayUint8ToString string StringToInt int StringToUint uint StringToBool bool StringToFloat float32 StringToStrSlice []string StringToIntSlice []int + StringToStrArray [1]string + 
StringToIntArray [1]int SliceToMap map[string]interface{} MapToSlice []interface{} + ArrayToMap map[string]interface{} + MapToArray [1]interface{} } func TestBasicTypes(t *testing.T) { @@ -227,6 +276,32 @@ func TestBasic_Merge(t *testing.T) { } } +// Test for issue #46. +func TestBasic_Struct(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "vdata": map[string]interface{}{ + "vstring": "foo", + }, + } + + var result, inner Basic + result.Vdata = &inner + err := Decode(input, &result) + if err != nil { + t.Fatalf("got an err: %s", err) + } + expected := Basic{ + Vdata: &Basic{ + Vstring: "foo", + }, + } + if !reflect.DeepEqual(result, expected) { + t.Fatalf("bad: %#v", result) + } +} + func TestDecode_BasicSquash(t *testing.T) { t.Parallel() @@ -245,6 +320,36 @@ func TestDecode_BasicSquash(t *testing.T) { } } +func TestDecodeFrom_BasicSquash(t *testing.T) { + t.Parallel() + + var v interface{} + var ok bool + + input := BasicSquash{ + Test: Basic{ + Vstring: "foo", + }, + } + + var result map[string]interface{} + err := Decode(input, &result) + if err != nil { + t.Fatalf("got an err: %s", err.Error()) + } + + if _, ok = result["Test"]; ok { + t.Error("test should not be present in map") + } + + v, ok = result["Vstring"] + if !ok { + t.Error("vstring should be present in map") + } else if !reflect.DeepEqual(v, "foo") { + t.Errorf("vstring value should be 'foo': %#v", v) + } +} + func TestDecode_Embedded(t *testing.T) { t.Parallel() @@ -322,6 +427,29 @@ func TestDecode_EmbeddedSlice(t *testing.T) { } } +func TestDecode_EmbeddedArray(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "array_alias": [2]string{"foo", "bar"}, + "vunique": "bar", + } + + var result EmbeddedArray + err := Decode(input, &result) + if err != nil { + t.Fatalf("got an err: %s", err.Error()) + } + + if !reflect.DeepEqual(result.ArrayAlias, ArrayAlias([2]string{"foo", "bar"})) { + t.Errorf("array value: %#v", result.ArrayAlias) + } + + if result.Vunique != "bar" { + t.Errorf("vunique value should be 'bar': %#v", result.Vunique) + } +} + func TestDecode_EmbeddedSquash(t *testing.T) { t.Parallel() @@ -345,6 +473,44 @@ func TestDecode_EmbeddedSquash(t *testing.T) { } } +func TestDecodeFrom_EmbeddedSquash(t *testing.T) { + t.Parallel() + + var v interface{} + var ok bool + + input := EmbeddedSquash{ + Basic: Basic{ + Vstring: "foo", + }, + Vunique: "bar", + } + + var result map[string]interface{} + err := Decode(input, &result) + if err != nil { + t.Fatalf("got an err: %s", err.Error()) + } + + if _, ok = result["Basic"]; ok { + t.Error("basic should not be present in map") + } + + v, ok = result["Vstring"] + if !ok { + t.Error("vstring should be present in map") + } else if !reflect.DeepEqual(v, "foo") { + t.Errorf("vstring value should be 'foo': %#v", v) + } + + v, ok = result["Vunique"] + if !ok { + t.Error("vunique should be present in map") + } else if !reflect.DeepEqual(v, "bar") { + t.Errorf("vunique value should be 'bar': %#v", v) + } +} + func TestDecode_SquashOnNonStructType(t *testing.T) { t.Parallel() @@ -488,6 +654,43 @@ func TestDecode_NilInterfaceHook(t *testing.T) { } } +func TestDecode_NilPointerHook(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "value": "", + } + + decodeHook := func(f, t reflect.Type, v interface{}) (interface{}, error) { + if typed, ok := v.(string); ok { + if typed == "" { + return nil, nil + } + } + return v, nil + } + + var result NilPointer + config := &DecoderConfig{ + DecodeHook: decodeHook, + Result: &result, + } + + 
decoder, err := NewDecoder(config) + if err != nil { + t.Fatalf("err: %s", err) + } + + err = decoder.Decode(input) + if err != nil { + t.Fatalf("got an err: %s", err) + } + + if result.Value != nil { + t.Errorf("W should be nil: %#v", result.Value) + } +} + func TestDecode_FuncHook(t *testing.T) { t.Parallel() @@ -582,14 +785,20 @@ func TestDecode_TypeConversion(t *testing.T) { "FloatToBool": 42.42, "FloatToString": 42.42, "SliceUint8ToString": []uint8("foo"), + "StringToSliceUint8": "foo", + "ArrayUint8ToString": [3]uint8{'f', 'o', 'o'}, "StringToInt": "42", "StringToUint": "42", "StringToBool": "1", "StringToFloat": "42.42", "StringToStrSlice": "A", "StringToIntSlice": "42", + "StringToStrArray": "A", + "StringToIntArray": "42", "SliceToMap": []interface{}{}, "MapToSlice": map[string]interface{}{}, + "ArrayToMap": []interface{}{}, + "MapToArray": map[string]interface{}{}, } expectedResultStrict := TypeConversionResult{ @@ -622,14 +831,20 @@ func TestDecode_TypeConversion(t *testing.T) { FloatToBool: true, FloatToString: "42.42", SliceUint8ToString: "foo", + StringToSliceUint8: []byte("foo"), + ArrayUint8ToString: "foo", StringToInt: 42, StringToUint: 42, StringToBool: true, StringToFloat: 42.42, StringToStrSlice: []string{"A"}, StringToIntSlice: []int{42}, + StringToStrArray: [1]string{"A"}, + StringToIntArray: [1]int{42}, SliceToMap: map[string]interface{}{}, MapToSlice: []interface{}{}, + ArrayToMap: map[string]interface{}{}, + MapToArray: [1]interface{}{}, } // Test strict type conversion @@ -872,6 +1087,56 @@ func TestNestedTypePointer(t *testing.T) { } } +// Test for issue #46. +func TestNestedTypeInterface(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "vfoo": "foo", + "vbar": &map[string]interface{}{ + "vstring": "foo", + "vint": 42, + "vbool": true, + + "vdata": map[string]interface{}{ + "vstring": "bar", + }, + }, + } + + var result NestedPointer + result.Vbar = new(Basic) + result.Vbar.Vdata = new(Basic) + err := Decode(input, &result) + if err != nil { + t.Fatalf("got an err: %s", err.Error()) + } + + if result.Vfoo != "foo" { + t.Errorf("vfoo value should be 'foo': %#v", result.Vfoo) + } + + if result.Vbar.Vstring != "foo" { + t.Errorf("vstring value should be 'foo': %#v", result.Vbar.Vstring) + } + + if result.Vbar.Vint != 42 { + t.Errorf("vint value should be 42: %#v", result.Vbar.Vint) + } + + if result.Vbar.Vbool != true { + t.Errorf("vbool value should be true: %#v", result.Vbar.Vbool) + } + + if result.Vbar.Vextra != "" { + t.Errorf("vextra value should be empty: %#v", result.Vbar.Vextra) + } + + if result.Vbar.Vdata.(*Basic).Vstring != "bar" { + t.Errorf("vstring value should be 'bar': %#v", result.Vbar.Vdata.(*Basic).Vstring) + } +} + func TestSlice(t *testing.T) { t.Parallel() @@ -938,6 +1203,37 @@ func TestSliceOfStruct(t *testing.T) { } } +func TestSliceCornerCases(t *testing.T) { + t.Parallel() + + // Input with a map with zero values + input := map[string]interface{}{} + var resultWeak []Basic + + err := WeakDecode(input, &resultWeak) + if err != nil { + t.Fatalf("got unexpected error: %s", err) + } + + if len(resultWeak) != 0 { + t.Errorf("length should be 0") + } + // Input with more values + input = map[string]interface{}{ + "Vstring": "foo", + } + + resultWeak = nil + err = WeakDecode(input, &resultWeak) + if err != nil { + t.Fatalf("got unexpected error: %s", err) + } + + if resultWeak[0].Vstring != "foo" { + t.Errorf("value does not match") + } +} + func TestSliceToMap(t *testing.T) { t.Parallel() @@ -965,6 +1261,391 @@ func 
TestSliceToMap(t *testing.T) { } } +func TestArray(t *testing.T) { + t.Parallel() + + inputStringArray := map[string]interface{}{ + "vfoo": "foo", + "vbar": [2]string{"foo", "bar"}, + } + + inputStringArrayPointer := map[string]interface{}{ + "vfoo": "foo", + "vbar": &[2]string{"foo", "bar"}, + } + + outputStringArray := &Array{ + "foo", + [2]string{"foo", "bar"}, + } + + testArrayInput(t, inputStringArray, outputStringArray) + testArrayInput(t, inputStringArrayPointer, outputStringArray) +} + +func TestInvalidArray(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "vfoo": "foo", + "vbar": 42, + } + + result := Array{} + err := Decode(input, &result) + if err == nil { + t.Errorf("expected failure") + } +} + +func TestArrayOfStruct(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "value": []map[string]interface{}{ + {"vstring": "one"}, + {"vstring": "two"}, + }, + } + + var result ArrayOfStruct + err := Decode(input, &result) + if err != nil { + t.Fatalf("got unexpected error: %s", err) + } + + if len(result.Value) != 2 { + t.Fatalf("expected two values, got %d", len(result.Value)) + } + + if result.Value[0].Vstring != "one" { + t.Errorf("first value should be 'one', got: %s", result.Value[0].Vstring) + } + + if result.Value[1].Vstring != "two" { + t.Errorf("second value should be 'two', got: %s", result.Value[1].Vstring) + } +} + +func TestArrayToMap(t *testing.T) { + t.Parallel() + + input := []map[string]interface{}{ + { + "foo": "bar", + }, + { + "bar": "baz", + }, + } + + var result map[string]interface{} + err := WeakDecode(input, &result) + if err != nil { + t.Fatalf("got an error: %s", err) + } + + expected := map[string]interface{}{ + "foo": "bar", + "bar": "baz", + } + if !reflect.DeepEqual(result, expected) { + t.Errorf("bad: %#v", result) + } +} + +func TestDecodeTable(t *testing.T) { + t.Parallel() + + // We need to make new types so that we don't get the short-circuit + // copy functionality. We want to test the deep copying functionality. 
+ type BasicCopy Basic + type NestedPointerCopy NestedPointer + type MapCopy Map + + tests := []struct { + name string + in interface{} + target interface{} + out interface{} + wantErr bool + }{ + { + "basic struct input", + &Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + vsilent: true, + Vdata: []byte("data"), + }, + &map[string]interface{}{}, + &map[string]interface{}{ + "Vstring": "vstring", + "Vint": 2, + "Vuint": uint(3), + "Vbool": true, + "Vfloat": 4.56, + "Vextra": "vextra", + "Vdata": []byte("data"), + "VjsonInt": 0, + "VjsonFloat": 0.0, + "VjsonNumber": json.Number(""), + }, + false, + }, + { + "embedded struct input", + &Embedded{ + Vunique: "vunique", + Basic: Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + vsilent: true, + Vdata: []byte("data"), + }, + }, + &map[string]interface{}{}, + &map[string]interface{}{ + "Vunique": "vunique", + "Basic": map[string]interface{}{ + "Vstring": "vstring", + "Vint": 2, + "Vuint": uint(3), + "Vbool": true, + "Vfloat": 4.56, + "Vextra": "vextra", + "Vdata": []byte("data"), + "VjsonInt": 0, + "VjsonFloat": 0.0, + "VjsonNumber": json.Number(""), + }, + }, + false, + }, + { + "struct => struct", + &Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + Vdata: []byte("data"), + vsilent: true, + }, + &BasicCopy{}, + &BasicCopy{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + Vdata: []byte("data"), + }, + false, + }, + { + "struct => struct with pointers", + &NestedPointer{ + Vfoo: "hello", + Vbar: nil, + }, + &NestedPointerCopy{}, + &NestedPointerCopy{ + Vfoo: "hello", + }, + false, + }, + { + "basic pointer to non-pointer", + &BasicPointer{ + Vstring: stringPtr("vstring"), + Vint: intPtr(2), + Vuint: uintPtr(3), + Vbool: boolPtr(true), + Vfloat: floatPtr(4.56), + Vdata: interfacePtr([]byte("data")), + }, + &Basic{}, + &Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vdata: []byte("data"), + }, + false, + }, + { + "slice non-pointer to pointer", + &Slice{}, + &SlicePointer{}, + &SlicePointer{}, + false, + }, + { + "slice non-pointer to pointer, zero field", + &Slice{}, + &SlicePointer{ + Vbar: &[]string{"yo"}, + }, + &SlicePointer{}, + false, + }, + { + "slice to slice alias", + &Slice{}, + &SliceOfAlias{}, + &SliceOfAlias{}, + false, + }, + { + "nil map to map", + &Map{}, + &MapCopy{}, + &MapCopy{}, + false, + }, + { + "nil map to non-empty map", + &Map{}, + &MapCopy{Vother: map[string]string{"foo": "bar"}}, + &MapCopy{}, + false, + }, + + { + "slice input - should error", + []string{"foo", "bar"}, + &map[string]interface{}{}, + &map[string]interface{}{}, + true, + }, + { + "struct with slice property", + &Slice{ + Vfoo: "vfoo", + Vbar: []string{"foo", "bar"}, + }, + &map[string]interface{}{}, + &map[string]interface{}{ + "Vfoo": "vfoo", + "Vbar": []string{"foo", "bar"}, + }, + false, + }, + { + "struct with slice of struct property", + &SliceOfStruct{ + Value: []Basic{ + Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + vsilent: true, + Vdata: []byte("data"), + }, + }, + }, + &map[string]interface{}{}, + &map[string]interface{}{ + "Value": []Basic{ + Basic{ + Vstring: "vstring", + Vint: 2, + Vuint: 3, + Vbool: true, + Vfloat: 4.56, + Vextra: "vextra", + vsilent: true, + Vdata: []byte("data"), + }, + }, + }, + false, + }, + { + "struct with map 
property", + &Map{ + Vfoo: "vfoo", + Vother: map[string]string{"vother": "vother"}, + }, + &map[string]interface{}{}, + &map[string]interface{}{ + "Vfoo": "vfoo", + "Vother": map[string]string{ + "vother": "vother", + }}, + false, + }, + { + "tagged struct", + &Tagged{ + Extra: "extra", + Value: "value", + }, + &map[string]string{}, + &map[string]string{ + "bar": "extra", + "foo": "value", + }, + false, + }, + { + "omit tag struct", + &struct { + Value string `mapstructure:"value"` + Omit string `mapstructure:"-"` + }{ + Value: "value", + Omit: "omit", + }, + &map[string]string{}, + &map[string]string{ + "value": "value", + }, + false, + }, + { + "decode to wrong map type", + &struct { + Value string + }{ + Value: "string", + }, + &map[string]int{}, + &map[string]int{}, + true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := Decode(tt.in, tt.target); (err != nil) != tt.wantErr { + t.Fatalf("%q: TestMapOutputForStructuredInputs() unexpected error: %s", tt.name, err) + } + + if !reflect.DeepEqual(tt.out, tt.target) { + t.Fatalf("%q: TestMapOutputForStructuredInputs() expected: %#v, got: %#v", tt.name, tt.out, tt.target) + } + }) + } +} + func TestInvalidType(t *testing.T) { t.Parallel() @@ -1024,6 +1705,39 @@ func TestInvalidType(t *testing.T) { } } +func TestDecodeMetadata(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "vfoo": "foo", + "vbar": map[string]interface{}{ + "vstring": "foo", + "Vuint": 42, + "foo": "bar", + }, + "bar": "nil", + } + + var md Metadata + var result Nested + + err := DecodeMetadata(input, &result, &md) + if err != nil { + t.Fatalf("err: %s", err.Error()) + } + + expectedKeys := []string{"Vbar", "Vbar.Vstring", "Vbar.Vuint", "Vfoo"} + sort.Strings(md.Keys) + if !reflect.DeepEqual(md.Keys, expectedKeys) { + t.Fatalf("bad keys: %#v", md.Keys) + } + + expectedUnused := []string{"Vbar.foo", "bar"} + if !reflect.DeepEqual(md.Unused, expectedUnused) { + t.Fatalf("bad unused: %#v", md.Unused) + } +} + func TestMetadata(t *testing.T) { t.Parallel() @@ -1164,6 +1878,43 @@ func TestWeakDecode(t *testing.T) { } } +func TestWeakDecodeMetadata(t *testing.T) { + t.Parallel() + + input := map[string]interface{}{ + "foo": "4", + "bar": "value", + "unused": "value", + } + + var md Metadata + var result struct { + Foo int + Bar string + } + + if err := WeakDecodeMetadata(input, &result, &md); err != nil { + t.Fatalf("err: %s", err) + } + if result.Foo != 4 { + t.Fatalf("bad: %#v", result) + } + if result.Bar != "value" { + t.Fatalf("bad: %#v", result) + } + + expectedKeys := []string{"Bar", "Foo"} + sort.Strings(md.Keys) + if !reflect.DeepEqual(md.Keys, expectedKeys) { + t.Fatalf("bad keys: %#v", md.Keys) + } + + expectedUnused := []string{"unused"} + if !reflect.DeepEqual(md.Unused, expectedUnused) { + t.Fatalf("bad unused: %#v", md.Unused) + } +} + func testSliceInput(t *testing.T, input map[string]interface{}, expected *Slice) { var result Slice err := Decode(input, &result) @@ -1191,3 +1942,38 @@ func testSliceInput(t *testing.T, input map[string]interface{}, expected *Slice) } } } + +func testArrayInput(t *testing.T, input map[string]interface{}, expected *Array) { + var result Array + err := Decode(input, &result) + if err != nil { + t.Fatalf("got error: %s", err) + } + + if result.Vfoo != expected.Vfoo { + t.Errorf("Vfoo expected '%s', got '%s'", expected.Vfoo, result.Vfoo) + } + + if result.Vbar == [2]string{} { + t.Fatalf("Vbar a slice, got '%#v'", result.Vbar) + } + + if len(result.Vbar) != len(expected.Vbar) { 
+ t.Errorf("Vbar length should be %d, got %d", len(expected.Vbar), len(result.Vbar)) + } + + for i, v := range result.Vbar { + if v != expected.Vbar[i] { + t.Errorf( + "Vbar[%d] should be '%#v', got '%#v'", + i, expected.Vbar[i], v) + } + } +} + +func stringPtr(v string) *string { return &v } +func intPtr(v int) *int { return &v } +func uintPtr(v uint) *uint { return &v } +func boolPtr(v bool) *bool { return &v } +func floatPtr(v float64) *float64 { return &v } +func interfacePtr(v interface{}) *interface{} { return &v } diff --git a/vendor/github.com/pelletier/go-toml/.circleci/config.yml b/vendor/github.com/pelletier/go-toml/.circleci/config.yml new file mode 100644 index 0000000..a0ef2d7 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.circleci/config.yml @@ -0,0 +1,140 @@ +version: 2.1 + +executors: + golang: + parameters: + version: + type: string + docker: + - image: circleci/golang:<< parameters.version >> + +commands: + get_deps: + description: "Get go dependencies" + steps: + - run: go get github.com/jstemmer/go-junit-report + + run_test: + description: "Run unit tests for a go module" + parameters: + test_name: + type: string + module: + type: string + coverage: + default: false + type: boolean + allow_fail: + type: boolean + default: false + steps: + - run: + name: "Run tests for <>" + command: | + TEST_DIR="/tmp/test-results/<>" + mkdir -p ${TEST_DIR} + trap "go-junit-report ${TEST_DIR}/go-test-report.xml" EXIT + go test <> -race -v \ + <<# parameters.coverage >>-coverprofile=/tmp/workspace/coverage.txt -covermode=atomic<> \ + | tee /tmp/test-results/go-test.out <<# parameters.allow_fail >>|| true<> + +jobs: + go: + parameters: + version: + type: string + allow_fail: + type: boolean + default: false + executor: + name: golang + version: "<>" + working_directory: /go/src/github.com/pelletier/go-toml + environment: + GO111MODULE: "on" + steps: + - checkout + - run: mkdir -p /tmp/workspace + - run: go fmt ./... <<# parameters.allow_fail >>|| true<> + - get_deps + - run_test: + test_name: "go-toml" + module: "github.com/pelletier/go-toml" + coverage: true + allow_fail: <> + - run_test: + test_name: "tomljson" + module: "github.com/pelletier/go-toml/cmd/tomljson" + allow_fail: <> + - run_test: + test_name: "tomll" + module: "github.com/pelletier/go-toml/cmd/tomll" + allow_fail: <> + - run_test: + test_name: "query" + module: "github.com/pelletier/go-toml/query" + allow_fail: <> + - store_test_results: + path: /tmp/test-results + codecov: + docker: + - image: "circleci/golang:1.12" + steps: + - attach_workspace: + at: /tmp/workspace + - run: + name: "upload to codecov" + working_directory: /tmp/workspace + command: | + curl https://codecov.io/bash > codecov.sh + bash codecov.sh -v + +workflows: + version: 2.1 + build: + jobs: + - go: + name: "go1_11" + version: "1.11" + - go: + name: "go1_12" + version: "1.12" + post-steps: + - run: go tool cover -html=/tmp/workspace/coverage.txt -o coverage.html + - store_artifacts: + path: /tmp/workspace/coverage.txt + - store_artifacts: + path: coverage.html + - persist_to_workspace: + root: /tmp/workspace + paths: + - coverage.txt + - go: + name: "gotip" + version: "1.12" # use as base + allow_fail: true + pre-steps: + - restore_cache: + keys: + - go-tip-source + - run: + name: "Compile go tip" + command: | + if [ ! 
-d "/tmp/go" ]; then + git clone https://go.googlesource.com/go /tmp/go + fi + cd /tmp/go + git checkout master + git pull + cd src + ./make.bash + echo 'export PATH="/tmp/go/bin:$PATH"' >> $BASH_ENV + - run: go version + - save_cache: + key: go-tip-source + paths: + - "/tmp/go" + - codecov: + requires: + - go1_11 + - go1_12 diff --git a/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/bug_report.md b/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..815b604 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,22 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior. Including TOML files. + +**Expected behavior** +A clear and concise description of what you expected to happen, if other than "should work". + +**Versions** + - go-toml: version (git sha) + - go: version + - operating system: e.g. macOS, Windows, Linux + +**Additional context** +Add any other context about the problem here that you think may help to diagnose. diff --git a/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/feature_request.md b/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..066b2d9 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: Feature request +about: Suggest an idea for this project + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/vendor/github.com/pelletier/go-toml/.gitignore b/vendor/github.com/pelletier/go-toml/.gitignore index f1b6190..99e38bb 100644 --- a/vendor/github.com/pelletier/go-toml/.gitignore +++ b/vendor/github.com/pelletier/go-toml/.gitignore @@ -1 +1,2 @@ test_program/test_program_bin +fuzz/ diff --git a/vendor/github.com/pelletier/go-toml/.travis.yml b/vendor/github.com/pelletier/go-toml/.travis.yml index 1f8b41f..abb03e9 100644 --- a/vendor/github.com/pelletier/go-toml/.travis.yml +++ b/vendor/github.com/pelletier/go-toml/.travis.yml @@ -1,23 +1,22 @@ sudo: false language: go go: - - 1.6.4 - - 1.7.6 - - 1.8.3 + - 1.11.x + - 1.12.x - tip matrix: allow_failures: - go: tip fast_finish: true +env: + - GO111MODULE=on script: - if [ -n "$(go fmt ./...)" ]; then exit 1; fi - - ./test.sh + - go test github.com/pelletier/go-toml -race -coverprofile=coverage.txt -covermode=atomic + - go test github.com/pelletier/go-toml/cmd/tomljson + - go test github.com/pelletier/go-toml/cmd/tomll + - go test github.com/pelletier/go-toml/query - ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git -before_install: - - go get github.com/axw/gocov/gocov - - go get github.com/mattn/goveralls - - if ! 
go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi -branches: - only: [master] + after_success: - - $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md new file mode 100644 index 0000000..405c911 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md @@ -0,0 +1,132 @@ +## Contributing + +Thank you for your interest in go-toml! We appreciate you considering +contributing to go-toml! + +The main goal of the project is to provide an easy-to-use TOML +implementation for Go that gets the job done and gets out of your way – +dealing with TOML is probably not the central piece of your project. + +As the single maintainer of go-toml, time is scarce. All help, big or +small, is more than welcome! + +### Ask questions + +Any question you may have, somebody else might have it too. Always feel +free to ask them on the [issues tracker][issues-tracker]. We will try to +answer them as clearly and quickly as possible, time permitting. + +Asking questions also helps us identify areas where the documentation needs +improvement, or new features that weren't envisioned before. Sometimes, a +seemingly innocent question leads to the fix of a bug. Don't hesitate and +ask away! + +### Improve the documentation + +The best way to share your knowledge and experience with go-toml is to +improve the documentation. Fix a typo, clarify an interface, add an +example, anything goes! + +The documentation is present in the [README][readme] and throughout the +source code. On release, it gets updated on [GoDoc][godoc]. To make a +change to the documentation, create a pull request with your proposed +changes. For simple changes like that, the easiest way to go is probably +the "Fork this project and edit the file" button on Github, displayed at +the top right of the file. Unless it's a trivial change (for example a +typo), provide a little bit of context in your pull request description or +commit message. + +### Report a bug + +Found a bug! Sorry to hear that :(. Help us and others track it down and +fix it by reporting it. [File a new bug report][bug-report] on the [issues +tracker][issues-tracker]. The template should provide enough guidance on +what to include. When in doubt: add more details! By reducing ambiguity and +providing more information, it decreases back and forth and saves everyone +time. + +### Code changes + +Want to contribute a patch? Very happy to hear that! + +First, some high-level rules: + +* A short proposal with some POC code is better than a lengthy piece of + text with no code. Code speaks louder than words. +* No backward-incompatible patch will be accepted unless discussed. + Sometimes it's hard, and Go's lack of versioning by default does not + help, but we try not to break people's programs unless we absolutely have + to. +* If you are writing a new feature or extending an existing one, make sure + to write some documentation. +* Bug fixes need to be accompanied by regression tests. +* New code needs to be tested. +* Your commit messages need to explain why the change is needed, even if + already included in the PR description. + +It does sound like a lot, but those best practices are here to save time +overall and continuously improve the quality of the project, which is +something everyone benefits from.
+ +#### Get started + +The fairly standard code contribution process looks like this: + +1. [Fork the project][fork]. +2. Make your changes, commit on any branch you like. +3. [Open up a pull request][pull-request]. +4. Review; we may ask for changes. +5. Merge. You're in! + +Feel free to ask for help! You can create draft pull requests to gather +some early feedback! + +#### Run the tests + +You can run tests for go-toml using Go's test tool: `go test ./...`. +When creating a pull request, all tests will be run on Linux on a few Go +versions (Travis CI), and on Windows using the latest Go version +(AppVeyor). + +#### Style + +Try to look around and follow the same format and structure as the rest of +the code. We enforce using `go fmt` on the whole code base. + +--- + +### Maintainers-only + +#### Merge pull request + +Checklist: + +* Passing CI. +* Does not introduce backward-incompatible changes (unless discussed). +* Has relevant doc changes. +* Has relevant unit tests. + +1. Merge using "squash and merge". +2. Make sure to edit the commit message to keep all the useful information + nice and clean. +3. Make sure the commit title is clear and contains the PR number (#123). + +#### New release + +1. Go to [releases][releases]. Click on "X commits to master since this + release". +2. Make note of all the changes. Look for backward incompatible changes, + new features, and bug fixes. +3. Pick the new version using the above and semver. +4. Create a [new release][new-release]. +5. Follow the same format as [1.1.0][release-110]. + +[issues-tracker]: https://github.com/pelletier/go-toml/issues +[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md +[godoc]: https://godoc.org/github.com/pelletier/go-toml +[readme]: ./README.md +[fork]: https://help.github.com/articles/fork-a-repo +[pull-request]: https://help.github.com/en/articles/creating-a-pull-request +[releases]: https://github.com/pelletier/go-toml/releases +[new-release]: https://github.com/pelletier/go-toml/releases/new +[release-110]: https://github.com/pelletier/go-toml/releases/tag/v1.1.0 diff --git a/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md b/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..041cdc4 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,5 @@ +**Issue:** add link to pelletier/go-toml issue here + +Explanation of what this pull request does. + +More detailed description of the decisions being made and the reasons why (if the patch is non-trivial).
diff --git a/vendor/github.com/pelletier/go-toml/README.md b/vendor/github.com/pelletier/go-toml/README.md index 22da41a..031a27a 100644 --- a/vendor/github.com/pelletier/go-toml/README.md +++ b/vendor/github.com/pelletier/go-toml/README.md @@ -8,8 +8,10 @@ This library supports TOML version [![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml) [![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE) [![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml) -[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master) +[![Windows Build status](https://ci.appveyor.com/api/projects/status/4aepwwjori266hkt/branch/master?svg=true)](https://ci.appveyor.com/project/pelletier/go-toml/branch/master) +[![codecov](https://codecov.io/gh/pelletier/go-toml/branch/master/graph/badge.svg)](https://codecov.io/gh/pelletier/go-toml) [![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml) +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield) ## Features @@ -33,7 +35,7 @@ import "github.com/pelletier/go-toml" Read a TOML document: ```go -config, _ := toml.LoadString(` +config, _ := toml.Load(` [postgres] user = "pelletier" password = "mypassword"`) @@ -42,7 +44,7 @@ user := config.Get("postgres.user").(string) // or using an intermediate object postgresConfig := config.Get("postgres").(*toml.Tree) -password = postgresConfig.Get("password").(string) +password := postgresConfig.Get("password").(string) ``` Or use Unmarshal: @@ -57,12 +59,12 @@ type Config struct { } doc := []byte(` -[postgres] -user = "pelletier" -password = "mypassword"`) +[Postgres] +User = "pelletier" +Password = "mypassword"`) config := Config{} -Unmarshal(doc, &config) +toml.Unmarshal(doc, &config) fmt.Println("user=", config.Postgres.User) ``` @@ -70,7 +72,8 @@ Or use a query: ```go // use a query to gather elements without walking the tree -results, _ := config.Query("$..[user,password]") +q, _ := query.Compile("$..[user,password]") +results := q.Execute(config) for ii, item := range results.Values() { fmt.Println("Query result %d: %v", ii, item) } @@ -106,12 +109,19 @@ much appreciated! ### Run tests -You have to make sure two kind of tests run: +`go test ./...` + +### Fuzzing + +The script `./fuzz.sh` is available to +run [go-fuzz](https://github.com/dvyukov/go-fuzz) on go-toml. -1. The Go unit tests -2. The TOML examples base +## Versioning -You can run both of them using `./test.sh`. +Go-toml follows [Semantic Versioning](http://semver.org/). The supported version +of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of +this document. The last two major versions of Go are supported +(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)). 
## License diff --git a/vendor/github.com/pelletier/go-toml/appveyor.yml b/vendor/github.com/pelletier/go-toml/appveyor.yml new file mode 100644 index 0000000..40e8a41 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/appveyor.yml @@ -0,0 +1,34 @@ +version: "{build}" + +# Source Config +clone_folder: c:\gopath\src\github.com\pelletier\go-toml + +# Build host +environment: + GOPATH: c:\gopath + DEPTESTBYPASS501: 1 + GOVERSION: 1.12 + GO111MODULE: on + +init: + - git config --global core.autocrlf input + +# Build +install: + # Install the specific Go version. + - rmdir c:\go /s /q + - appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi + - msiexec /i go%GOVERSION%.windows-amd64.msi /q + - choco install bzr + - set Path=c:\go\bin;c:\gopath\bin;C:\Program Files (x86)\Bazaar\;C:\Program Files\Mercurial\%Path% + - go version + - go env + +build: false +deploy: false + +test_script: + - go test github.com/pelletier/go-toml + - go test github.com/pelletier/go-toml/cmd/tomljson + - go test github.com/pelletier/go-toml/cmd/tomll + - go test github.com/pelletier/go-toml/query diff --git a/vendor/github.com/pelletier/go-toml/cmd/test_program.go b/vendor/github.com/pelletier/go-toml/cmd/test_program.go deleted file mode 100644 index 73077f6..0000000 --- a/vendor/github.com/pelletier/go-toml/cmd/test_program.go +++ /dev/null @@ -1,91 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "log" - "os" - "time" - - "github.com/pelletier/go-toml" -) - -func main() { - bytes, err := ioutil.ReadAll(os.Stdin) - if err != nil { - log.Fatalf("Error during TOML read: %s", err) - os.Exit(2) - } - tree, err := toml.Load(string(bytes)) - if err != nil { - log.Fatalf("Error during TOML load: %s", err) - os.Exit(1) - } - - typedTree := translate(*tree) - - if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - os.Exit(3) - } - - os.Exit(0) -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case *toml.Tree: - return translate(*orig) - case toml.Tree: - keys := orig.Keys() - typed := make(map[string]interface{}, len(keys)) - for _, k := range keys { - typed[k] = translate(orig.GetPath([]string{k})) - } - return typed - case []*toml.Tree: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go 
b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go index b2d6fc6..322315b 100644 --- a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go +++ b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go @@ -17,13 +17,12 @@ import ( func main() { flag.Usage = func() { - fmt.Fprintln(os.Stderr, `tomljson can be used in two ways: -Writing to STDIN and reading from STDOUT: - cat file.toml | tomljson > file.json - -Reading from a file name: - tomljson file.toml -`) + fmt.Fprintln(os.Stderr, "tomljson can be used in two ways:") + fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:") + fmt.Fprintln(os.Stderr, " cat file.toml | tomljson > file.json") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, "Reading from a file name:") + fmt.Fprintln(os.Stderr, " tomljson file.toml") } flag.Parse() os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr)) diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go index 0b4bdbb..d515ee0 100644 --- a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go +++ b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go @@ -4,6 +4,7 @@ import ( "bytes" "io/ioutil" "os" + "runtime" "strings" "testing" ) @@ -76,7 +77,14 @@ func TestProcessMainReadFromFile(t *testing.T) { } func TestProcessMainReadFromMissingFile(t *testing.T) { - expectedError := `open /this/file/does/not/exist: no such file or directory + var expectedError string + if runtime.GOOS == "windows" { + expectedError = `open /this/file/does/not/exist: The system cannot find the path specified. ` + } else { + expectedError = `open /this/file/does/not/exist: no such file or directory +` + } + expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError) } diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go index 36c7e37..93ab0c9 100644 --- a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go +++ b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go @@ -17,15 +17,14 @@ import ( func main() { flag.Usage = func() { - fmt.Fprintln(os.Stderr, `tomll can be used in two ways: -Writing to STDIN and reading from STDOUT: - cat file.toml | tomll > file.toml - -Reading and updating a list of files: - tomll a.toml b.toml c.toml - -When given a list of files, tomll will modify all files in place without asking. -`) + fmt.Fprintln(os.Stderr, "tomll can be used in two ways:") + fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:") + fmt.Fprintln(os.Stderr, " cat file.toml | tomll > file.toml") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, "Reading and updating a list of files:") + fmt.Fprintln(os.Stderr, " tomll a.toml b.toml c.toml") + fmt.Fprintln(os.Stderr, "") + fmt.Fprintln(os.Stderr, "When given a list of files, tomll will modify all files in place without asking.") } flag.Parse() // read from stdin and print to stdout diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomltestgen/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomltestgen/main.go new file mode 100644 index 0000000..cd9bdd7 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/cmd/tomltestgen/main.go @@ -0,0 +1,219 @@ +// Tomltestgen is a program that retrieves a given version of +// https://github.com/BurntSushi/toml-test and generates go code for go-toml's unit tests +// based on the test files. 
+// +// Usage: go run github.com/pelletier/go-toml/cmd/tomltestgen > toml_testgen_test.go +package main + +import ( + "archive/zip" + "bytes" + "flag" + "fmt" + "go/format" + "io" + "io/ioutil" + "log" + "net/http" + "os" + "regexp" + "strconv" + "strings" + "text/template" + "time" +) + +type invalid struct { + Name string + Input string +} + +type valid struct { + Name string + Input string + JsonRef string +} + +type testsCollection struct { + Ref string + Timestamp string + Invalid []invalid + Valid []valid + Count int +} + +const srcTemplate = "// Generated by tomltestgen for toml-test ref {{.Ref}} on {{.Timestamp}}\n" + + "package toml\n" + + " import (\n" + + " \"testing\"\n" + + ")\n" + + + "{{range .Invalid}}\n" + + "func TestInvalid{{.Name}}(t *testing.T) {\n" + + " input := {{.Input|gostr}}\n" + + " testgenInvalid(t, input)\n" + + "}\n" + + "{{end}}\n" + + "\n" + + "{{range .Valid}}\n" + + "func TestValid{{.Name}}(t *testing.T) {\n" + + " input := {{.Input|gostr}}\n" + + " jsonRef := {{.JsonRef|gostr}}\n" + + " testgenValid(t, input, jsonRef)\n" + + "}\n" + + "{{end}}\n" + +func downloadTmpFile(url string) string { + log.Println("starting to download file from", url) + resp, err := http.Get(url) + if err != nil { + panic(err) + } + defer resp.Body.Close() + + tmpfile, err := ioutil.TempFile("", "toml-test-*.zip") + if err != nil { + panic(err) + } + defer tmpfile.Close() + + copiedLen, err := io.Copy(tmpfile, resp.Body) + if err != nil { + panic(err) + } + if resp.ContentLength > 0 && copiedLen != resp.ContentLength { + panic(fmt.Errorf("copied %d bytes, request body had %d", copiedLen, resp.ContentLength)) + } + return tmpfile.Name() +} + +func kebabToCamel(kebab string) string { + camel := "" + nextUpper := true + for _, c := range kebab { + if nextUpper { + camel += strings.ToUpper(string(c)) + nextUpper = false + } else if c == '-' { + nextUpper = true + } else { + camel += string(c) + } + } + return camel +} + +func readFileFromZip(f *zip.File) string { + reader, err := f.Open() + if err != nil { + panic(err) + } + defer reader.Close() + bytes, err := ioutil.ReadAll(reader) + if err != nil { + panic(err) + } + return string(bytes) +} + +func templateGoStr(input string) string { + if len(input) > 0 && input[len(input)-1] == '\n' { + input = input[0 : len(input)-1] + } + if strings.Contains(input, "`") { + lines := strings.Split(input, "\n") + for idx, line := range lines { + lines[idx] = strconv.Quote(line + "\n") + } + return strings.Join(lines, " + \n") + } + return "`" + input + "`" +} + +var ( + ref = flag.String("r", "master", "git reference") +) + +func usage() { + _, _ = fmt.Fprintf(os.Stderr, "usage: tomltestgen [flags]\n") + flag.PrintDefaults() +} + +func main() { + flag.Usage = usage + flag.Parse() + + url := "https://codeload.github.com/BurntSushi/toml-test/zip/" + *ref + resultFile := downloadTmpFile(url) + defer os.Remove(resultFile) + log.Println("file written to", resultFile) + + zipReader, err := zip.OpenReader(resultFile) + if err != nil { + panic(err) + } + defer zipReader.Close() + + collection := testsCollection{ + Ref: *ref, + Timestamp: time.Now().Format(time.RFC3339), + } + + zipFilesMap := map[string]*zip.File{} + + for _, f := range zipReader.File { + zipFilesMap[f.Name] = f + } + + testFileRegexp := regexp.MustCompile(`([^/]+/tests/(valid|invalid)/(.+))\.(toml)`) + for _, f := range zipReader.File { + groups := testFileRegexp.FindStringSubmatch(f.Name) + if len(groups) > 0 { + name := kebabToCamel(groups[3]) + testType := groups[2] + + 
log.Printf("> [%s] %s\n", testType, name) + + tomlContent := readFileFromZip(f) + + switch testType { + case "invalid": + collection.Invalid = append(collection.Invalid, invalid{ + Name: name, + Input: tomlContent, + }) + collection.Count++ + case "valid": + baseFilePath := groups[1] + jsonFilePath := baseFilePath + ".json" + jsonContent := readFileFromZip(zipFilesMap[jsonFilePath]) + + collection.Valid = append(collection.Valid, valid{ + Name: name, + Input: tomlContent, + JsonRef: jsonContent, + }) + collection.Count++ + default: + panic(fmt.Sprintf("unknown test type: %s", testType)) + } + } + } + + log.Printf("Collected %d tests from toml-test\n", collection.Count) + + funcMap := template.FuncMap{ + "gostr": templateGoStr, + } + t := template.Must(template.New("src").Funcs(funcMap).Parse(srcTemplate)) + buf := new(bytes.Buffer) + err = t.Execute(buf, collection) + if err != nil { + panic(err) + } + outputBytes, err := format.Source(buf.Bytes()) + if err != nil { + panic(err) + } + fmt.Println(string(outputBytes)) +} diff --git a/vendor/github.com/pelletier/go-toml/doc.go b/vendor/github.com/pelletier/go-toml/doc.go index 3c89619..d5fd98c 100644 --- a/vendor/github.com/pelletier/go-toml/doc.go +++ b/vendor/github.com/pelletier/go-toml/doc.go @@ -17,7 +17,7 @@ // JSONPath-like queries // // The package github.com/pelletier/go-toml/query implements a system -// similar to JSONPath to quickly retrive elements of a TOML document using a +// similar to JSONPath to quickly retrieve elements of a TOML document using a // single expression. See the package documentation for more information. // package toml diff --git a/vendor/github.com/pelletier/go-toml/doc_test.go b/vendor/github.com/pelletier/go-toml/doc_test.go index 9dd7738..3b8171b 100644 --- a/vendor/github.com/pelletier/go-toml/doc_test.go +++ b/vendor/github.com/pelletier/go-toml/doc_test.go @@ -1,13 +1,16 @@ // code examples for godoc -package toml +package toml_test import ( "fmt" + "log" + + toml "github.com/pelletier/go-toml" ) func Example_tree() { - config, err := LoadFile("config.toml") + config, err := toml.LoadFile("config.toml") if err != nil { fmt.Println("Error ", err.Error()) @@ -17,7 +20,7 @@ func Example_tree() { password := config.Get("postgres.password").(string) // or using an intermediate object - configTree := config.Get("postgres").(*Tree) + configTree := config.Get("postgres").(*toml.Tree) user = configTree.Get("user").(string) password = configTree.Get("password").(string) fmt.Println("User is", user, " and password is", password) @@ -48,6 +51,55 @@ func Example_unmarshal() { `) person := Person{} - Unmarshal(document, &person) + toml.Unmarshal(document, &person) fmt.Println(person.Name, "is", person.Age, "and works at", person.Employer.Name) + // Output: + // John is 30 and works at Company Inc. 
+} + +func ExampleMarshal() { + type Postgres struct { + User string `toml:"user"` + Password string `toml:"password"` + Database string `toml:"db" commented:"true" comment:"not used anymore"` + } + type Config struct { + Postgres Postgres `toml:"postgres" comment:"Postgres configuration"` + } + + config := Config{Postgres{User: "pelletier", Password: "mypassword", Database: "old_database"}} + b, err := toml.Marshal(config) + if err != nil { + log.Fatal(err) + } + fmt.Println(string(b)) + // Output: + // # Postgres configuration + // [postgres] + // + // # not used anymore + // # db = "old_database" + // password = "mypassword" + // user = "pelletier" +} + +func ExampleUnmarshal() { + type Postgres struct { + User string + Password string + } + type Config struct { + Postgres Postgres + } + + doc := []byte(` + [postgres] + user = "pelletier" + password = "mypassword"`) + + config := Config{} + toml.Unmarshal(doc, &config) + fmt.Println("user=", config.Postgres.User) + // Output: + // user= pelletier } diff --git a/vendor/github.com/pelletier/go-toml/fuzz.go b/vendor/github.com/pelletier/go-toml/fuzz.go new file mode 100644 index 0000000..14570c8 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/fuzz.go @@ -0,0 +1,31 @@ +// +build gofuzz + +package toml + +func Fuzz(data []byte) int { + tree, err := LoadBytes(data) + if err != nil { + if tree != nil { + panic("tree must be nil if there is an error") + } + return 0 + } + + str, err := tree.ToTomlString() + if err != nil { + if str != "" { + panic(`str must be "" if there is an error`) + } + panic(err) + } + + tree, err = Load(str) + if err != nil { + if tree != nil { + panic("tree must be nil if there is an error") + } + return 0 + } + + return 1 +} diff --git a/vendor/github.com/pelletier/go-toml/fuzz.sh b/vendor/github.com/pelletier/go-toml/fuzz.sh new file mode 100755 index 0000000..3204b4c --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/fuzz.sh @@ -0,0 +1,15 @@ +#! /bin/sh +set -eu + +go get github.com/dvyukov/go-fuzz/go-fuzz +go get github.com/dvyukov/go-fuzz/go-fuzz-build + +if [ ! 
-e toml-fuzz.zip ]; then + go-fuzz-build github.com/pelletier/go-toml +fi + +rm -fr fuzz +mkdir -p fuzz/corpus +cp *.toml fuzz/corpus + +go-fuzz -bin=toml-fuzz.zip -workdir=fuzz diff --git a/vendor/github.com/pelletier/go-toml/go.mod b/vendor/github.com/pelletier/go-toml/go.mod new file mode 100644 index 0000000..f4690e1 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/go.mod @@ -0,0 +1,9 @@ +module github.com/pelletier/go-toml + +go 1.12 + +require ( + github.com/BurntSushi/toml v0.3.1 + github.com/davecgh/go-spew v1.1.1 + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/pelletier/go-toml/go.sum b/vendor/github.com/pelletier/go-toml/go.sum new file mode 100644 index 0000000..8d91a47 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/go.sum @@ -0,0 +1,7 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/pelletier/go-toml/keysparsing.go b/vendor/github.com/pelletier/go-toml/keysparsing.go index d62ca5f..e923bc4 100644 --- a/vendor/github.com/pelletier/go-toml/keysparsing.go +++ b/vendor/github.com/pelletier/go-toml/keysparsing.go @@ -3,88 +3,107 @@ package toml import ( - "bytes" "errors" "fmt" "unicode" ) +// Convert the bare key group string to an array. +// The input supports double quotation and single quotation, +// but escape sequences are not supported. Lexers must unescape them beforehand. func parseKey(key string) ([]string, error) { - groups := []string{} - var buffer bytes.Buffer - inQuotes := false - wasInQuotes := false - escapeNext := false - ignoreSpace := true - expectDot := false + runes := []rune(key) + var groups []string - for _, char := range key { - if ignoreSpace { - if char == ' ' { - continue - } - ignoreSpace = false + if len(key) == 0 { + return nil, errors.New("empty key") + } + + idx := 0 + for idx < len(runes) { + for ; idx < len(runes) && isSpace(runes[idx]); idx++ { + // skip leading whitespace } - if escapeNext { - buffer.WriteRune(char) - escapeNext = false - continue + if idx >= len(runes) { + break } - switch char { - case '\\': - escapeNext = true - continue - case '"': - if inQuotes { - groups = append(groups, buffer.String()) - buffer.Reset() - wasInQuotes = true - } - inQuotes = !inQuotes - expectDot = false - case '.': - if inQuotes { - buffer.WriteRune(char) - } else { - if !wasInQuotes { - if buffer.Len() == 0 { - return nil, errors.New("empty table key") + r := runes[idx] + if isValidBareChar(r) { + // parse bare key + startIdx := idx + endIdx := -1 + idx++ + for idx < len(runes) { + r = runes[idx] + if isValidBareChar(r) { + idx++ + } else if r == '.' { + endIdx = idx + break + } else if isSpace(r) { + endIdx = idx + for ; idx < len(runes) && isSpace(runes[idx]); idx++ { + // skip trailing whitespace + } + if idx < len(runes) && runes[idx] != '.' 
{ + return nil, fmt.Errorf("invalid key character after whitespace: %c", runes[idx]) } - groups = append(groups, buffer.String()) - buffer.Reset() + break + } else { + return nil, fmt.Errorf("invalid bare key character: %c", r) + } + } + if endIdx == -1 { + endIdx = idx + } + groups = append(groups, string(runes[startIdx:endIdx])) + } else if r == '\'' { + // parse single quoted key + idx++ + startIdx := idx + for { + if idx >= len(runes) { + return nil, fmt.Errorf("unclosed single-quoted key") + } + r = runes[idx] + if r == '\'' { + groups = append(groups, string(runes[startIdx:idx])) + idx++ + break } - ignoreSpace = true - expectDot = false - wasInQuotes = false + idx++ } - case ' ': - if inQuotes { - buffer.WriteRune(char) - } else { - expectDot = true + } else if r == '"' { + // parse double quoted key + idx++ + startIdx := idx + for { + if idx >= len(runes) { + return nil, fmt.Errorf("unclosed double-quoted key") + } + r = runes[idx] + if r == '"' { + groups = append(groups, string(runes[startIdx:idx])) + idx++ + break + } + idx++ } - default: - if !inQuotes && !isValidBareChar(char) { - return nil, fmt.Errorf("invalid bare character: %c", char) + } else if r == '.' { + idx++ + if idx >= len(runes) { + return nil, fmt.Errorf("unexpected end of key") } - if !inQuotes && expectDot { - return nil, errors.New("what?") + r = runes[idx] + if !isValidBareChar(r) && r != '\'' && r != '"' && r != ' ' { + return nil, fmt.Errorf("expecting key part after dot") } - buffer.WriteRune(char) - expectDot = false + } else { + return nil, fmt.Errorf("invalid key character: %c", r) } } - if inQuotes { - return nil, errors.New("mismatched quotes") - } - if escapeNext { - return nil, errors.New("unfinished escape sequence") - } - if buffer.Len() > 0 { - groups = append(groups, buffer.String()) - } if len(groups) == 0 { - return nil, errors.New("empty key") + return nil, fmt.Errorf("empty key") } return groups, nil } diff --git a/vendor/github.com/pelletier/go-toml/keysparsing_test.go b/vendor/github.com/pelletier/go-toml/keysparsing_test.go index 1a9eccc..5a7530b 100644 --- a/vendor/github.com/pelletier/go-toml/keysparsing_test.go +++ b/vendor/github.com/pelletier/go-toml/keysparsing_test.go @@ -22,7 +22,10 @@ func testResult(t *testing.T, key string, expected []string) { } func testError(t *testing.T, key string, expectedError string) { - _, err := parseKey(key) + res, err := parseKey(key) + if err == nil { + t.Fatalf("Expected error, but succesfully parsed key %s", res) + } if fmt.Sprintf("%s", err) != expectedError { t.Fatalf("Expected error \"%s\", but got \"%s\".", expectedError, err) } @@ -41,16 +44,36 @@ func TestDottedKeyBasic(t *testing.T) { } func TestBaseKeyPound(t *testing.T) { - testError(t, "hello#world", "invalid bare character: #") + testError(t, "hello#world", "invalid bare key character: #") +} + +func TestUnclosedSingleQuotedKey(t *testing.T) { + testError(t, "'", "unclosed single-quoted key") +} + +func TestUnclosedDoubleQuotedKey(t *testing.T) { + testError(t, "\"", "unclosed double-quoted key") +} + +func TestInvalidStartKeyCharacter(t *testing.T) { + testError(t, "/", "invalid key character: /") +} + +func TestInvalidSpaceInKey(t *testing.T) { + testError(t, "invalid key", "invalid key character after whitespace: k") } func TestQuotedKeys(t *testing.T) { testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"}) testResult(t, `"hello!"`, []string{"hello!"}) + testResult(t, `foo."ba.r".baz`, []string{"foo", "ba.r", "baz"}) + + // escape sequences must not be converted + 
testResult(t, `"hello\tworld"`, []string{`hello\tworld`}) } func TestEmptyKey(t *testing.T) { - testError(t, "", "empty key") - testError(t, " ", "empty key") + testError(t, ``, "empty key") + testError(t, ` `, "empty key") testResult(t, `""`, []string{""}) } diff --git a/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/github.com/pelletier/go-toml/lexer.go index 1b6647d..6254d39 100644 --- a/vendor/github.com/pelletier/go-toml/lexer.go +++ b/vendor/github.com/pelletier/go-toml/lexer.go @@ -204,6 +204,14 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn { return l.lexFalse } + if l.follow("inf") { + return l.lexInf + } + + if l.follow("nan") { + return l.lexNan + } + if isSpace(next) { l.skip() continue @@ -265,6 +273,18 @@ func (l *tomlLexer) lexFalse() tomlLexStateFn { return l.lexRvalue } +func (l *tomlLexer) lexInf() tomlLexStateFn { + l.fastForward(3) + l.emit(tokenInf) + return l.lexRvalue +} + +func (l *tomlLexer) lexNan() tomlLexStateFn { + l.fastForward(3) + l.emit(tokenNan) + return l.lexRvalue +} + func (l *tomlLexer) lexEqual() tomlLexStateFn { l.next() l.emit(tokenEqual) @@ -277,6 +297,8 @@ func (l *tomlLexer) lexComma() tomlLexStateFn { return l.lexRvalue } +// Parse the key and emits its value without escape sequences. +// bare keys, basic string keys and literal string keys are supported. func (l *tomlLexer) lexKey() tomlLexStateFn { growingString := "" @@ -287,13 +309,24 @@ func (l *tomlLexer) lexKey() tomlLexStateFn { if err != nil { return l.errorf(err.Error()) } - growingString += `"` + str + `"` + growingString += "\"" + str + "\"" + l.next() + continue + } else if r == '\'' { + l.next() + str, err := l.lexLiteralStringAsString(`'`, false) + if err != nil { + return l.errorf(err.Error()) + } + growingString += "'" + str + "'" l.next() continue } else if r == '\n' { return l.errorf("keys cannot contain new lines") } else if isSpace(r) { break + } else if r == '.' { + // skip } else if !isValidBareChar(r) { return l.errorf("keys cannot contain %c character", r) } @@ -527,6 +560,7 @@ func (l *tomlLexer) lexTableKey() tomlLexStateFn { return l.lexInsideTableKey } +// Parse the key till "]]", but only bare keys are supported func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn { for r := l.peek(); r != eof; r = l.peek() { switch r { @@ -550,6 +584,7 @@ func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn { return l.errorf("unclosed table array key") } +// Parse the key till "]" but only bare keys are supported func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn { for r := l.peek(); r != eof; r = l.peek() { switch r { @@ -575,11 +610,77 @@ func (l *tomlLexer) lexRightBracket() tomlLexStateFn { return l.lexRvalue } +type validRuneFn func(r rune) bool + +func isValidHexRune(r rune) bool { + return r >= 'a' && r <= 'f' || + r >= 'A' && r <= 'F' || + r >= '0' && r <= '9' || + r == '_' +} + +func isValidOctalRune(r rune) bool { + return r >= '0' && r <= '7' || r == '_' +} + +func isValidBinaryRune(r rune) bool { + return r == '0' || r == '1' || r == '_' +} + func (l *tomlLexer) lexNumber() tomlLexStateFn { r := l.peek() + + if r == '0' { + follow := l.peekString(2) + if len(follow) == 2 { + var isValidRune validRuneFn + switch follow[1] { + case 'x': + isValidRune = isValidHexRune + case 'o': + isValidRune = isValidOctalRune + case 'b': + isValidRune = isValidBinaryRune + default: + if follow[1] >= 'a' && follow[1] <= 'z' || follow[1] >= 'A' && follow[1] <= 'Z' { + return l.errorf("unknown number base: %s. 
possible options are x (hex) o (octal) b (binary)", string(follow[1])) + } + } + + if isValidRune != nil { + l.next() + l.next() + digitSeen := false + for { + next := l.peek() + if !isValidRune(next) { + break + } + digitSeen = true + l.next() + } + + if !digitSeen { + return l.errorf("number needs at least one digit") + } + + l.emit(tokenInteger) + + return l.lexRvalue + } + } + } + if r == '+' || r == '-' { l.next() + if l.follow("inf") { + return l.lexInf + } + if l.follow("nan") { + return l.lexNan + } } + pointSeen := false expSeen := false digitSeen := false diff --git a/vendor/github.com/pelletier/go-toml/marshal.go b/vendor/github.com/pelletier/go-toml/marshal.go index 1a3176f..f17d049 100644 --- a/vendor/github.com/pelletier/go-toml/marshal.go +++ b/vendor/github.com/pelletier/go-toml/marshal.go @@ -4,15 +4,54 @@ import ( "bytes" "errors" "fmt" + "io" "reflect" + "strconv" "strings" "time" ) +const ( + tagFieldName = "toml" + tagFieldComment = "comment" + tagCommented = "commented" + tagMultiline = "multiline" + tagDefault = "default" +) + type tomlOpts struct { - name string - include bool - omitempty bool + name string + comment string + commented bool + multiline bool + include bool + omitempty bool + defaultValue string +} + +type encOpts struct { + quoteMapKeys bool + arraysOneElementPerLine bool +} + +var encOptsDefaults = encOpts{ + quoteMapKeys: false, +} + +type annotation struct { + tag string + comment string + commented string + multiline string + defaultValue string +} + +var annotationDefault = annotation{ + tag: tagFieldName, + comment: tagFieldComment, + commented: tagCommented, + multiline: tagMultiline, + defaultValue: tagDefault, } var timeType = reflect.TypeOf(time.Time{}) @@ -94,8 +133,15 @@ encoder, except that there is no concept of a Marshaler interface or MarshalTOML function for sub-structs, and currently only definite types can be marshaled (i.e. no `interface{}`). +The following struct annotations are supported: + + toml:"Field" Overrides the field's name to output. + omitempty When set, empty values and groups are not emitted. + comment:"comment" Emits a # comment on the same line. This supports new lines. + commented:"true" Emits the value as commented. + Note that pointers are automatically assigned the "omitempty" option, as TOML -explicity does not handle null values (saying instead the label should be +explicitly does not handle null values (saying instead the label should be dropped). Tree structural types and corresponding marshal types: @@ -115,59 +161,169 @@ Tree primitive types and corresponding marshal types: time.Time time.Time{}, pointers to same */ func Marshal(v interface{}) ([]byte, error) { + return NewEncoder(nil).marshal(v) +} + +// Encoder writes TOML values to an output stream. +type Encoder struct { + w io.Writer + encOpts + annotation +} + +// NewEncoder returns a new encoder that writes to w. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + encOpts: encOptsDefaults, + annotation: annotationDefault, + } +} + +// Encode writes the TOML encoding of v to the stream. +// +// See the documentation for Marshal for details. +func (e *Encoder) Encode(v interface{}) error { + b, err := e.marshal(v) + if err != nil { + return err + } + if _, err := e.w.Write(b); err != nil { + return err + } + return nil +} + +// QuoteMapKeys sets up the encoder to encode +// maps with string type keys with quoted TOML keys. +// +// This relieves the character limitations on map keys. 
+func (e *Encoder) QuoteMapKeys(v bool) *Encoder { + e.quoteMapKeys = v + return e +} + +// ArraysWithOneElementPerLine sets up the encoder to encode arrays +// with more than one element on multiple lines instead of one. +// +// For example: +// +// A = [1,2,3] +// +// Becomes +// +// A = [ +// 1, +// 2, +// 3, +// ] +func (e *Encoder) ArraysWithOneElementPerLine(v bool) *Encoder { + e.arraysOneElementPerLine = v + return e +} + +// SetTagName allows changing default tag "toml" +func (e *Encoder) SetTagName(v string) *Encoder { + e.tag = v + return e +} + +// SetTagComment allows changing default tag "comment" +func (e *Encoder) SetTagComment(v string) *Encoder { + e.comment = v + return e +} + +// SetTagCommented allows changing default tag "commented" +func (e *Encoder) SetTagCommented(v string) *Encoder { + e.commented = v + return e +} + +// SetTagMultiline allows changing default tag "multiline" +func (e *Encoder) SetTagMultiline(v string) *Encoder { + e.multiline = v + return e +} + +func (e *Encoder) marshal(v interface{}) ([]byte, error) { mtype := reflect.TypeOf(v) - if mtype.Kind() != reflect.Struct { - return []byte{}, errors.New("Only a struct can be marshaled to TOML") + + switch mtype.Kind() { + case reflect.Struct, reflect.Map: + case reflect.Ptr: + if mtype.Elem().Kind() != reflect.Struct { + return []byte{}, errors.New("Only pointer to struct can be marshaled to TOML") + } + default: + return []byte{}, errors.New("Only a struct or map can be marshaled to TOML") } + sval := reflect.ValueOf(v) if isCustomMarshaler(mtype) { return callCustomMarshaler(sval) } - t, err := valueToTree(mtype, sval) + t, err := e.valueToTree(mtype, sval) if err != nil { return []byte{}, err } - s, err := t.ToTomlString() - return []byte(s), err + + var buf bytes.Buffer + _, err = t.writeTo(&buf, "", "", 0, e.arraysOneElementPerLine) + + return buf.Bytes(), err } // Convert given marshal struct or map value to toml tree -func valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) { +func (e *Encoder) valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) { if mtype.Kind() == reflect.Ptr { - return valueToTree(mtype.Elem(), mval.Elem()) + return e.valueToTree(mtype.Elem(), mval.Elem()) } tval := newTree() switch mtype.Kind() { case reflect.Struct: for i := 0; i < mtype.NumField(); i++ { mtypef, mvalf := mtype.Field(i), mval.Field(i) - opts := tomlOptions(mtypef) + opts := tomlOptions(mtypef, e.annotation) if opts.include && (!opts.omitempty || !isZero(mvalf)) { - val, err := valueToToml(mtypef.Type, mvalf) + val, err := e.valueToToml(mtypef.Type, mvalf) if err != nil { return nil, err } - tval.Set(opts.name, val) + + tval.SetWithOptions(opts.name, SetOptions{ + Comment: opts.comment, + Commented: opts.commented, + Multiline: opts.multiline, + }, val) } } case reflect.Map: for _, key := range mval.MapKeys() { mvalf := mval.MapIndex(key) - val, err := valueToToml(mtype.Elem(), mvalf) + val, err := e.valueToToml(mtype.Elem(), mvalf) if err != nil { return nil, err } - tval.Set(key.String(), val) + if e.quoteMapKeys { + keyStr, err := tomlValueStringRepresentation(key.String(), "", e.arraysOneElementPerLine) + if err != nil { + return nil, err + } + tval.SetPath([]string{keyStr}, val) + } else { + tval.Set(key.String(), val) + } } } return tval, nil } // Convert given marshal slice to slice of Toml trees -func valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) { +func (e *Encoder) valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) { 
tval := make([]*Tree, mval.Len(), mval.Len()) for i := 0; i < mval.Len(); i++ { - val, err := valueToTree(mtype.Elem(), mval.Index(i)) + val, err := e.valueToTree(mtype.Elem(), mval.Index(i)) if err != nil { return nil, err } @@ -177,10 +333,10 @@ func valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) { } // Convert given marshal slice to slice of toml values -func valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) { +func (e *Encoder) valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) { tval := make([]interface{}, mval.Len(), mval.Len()) for i := 0; i < mval.Len(); i++ { - val, err := valueToToml(mtype.Elem(), mval.Index(i)) + val, err := e.valueToToml(mtype.Elem(), mval.Index(i)) if err != nil { return nil, err } @@ -190,24 +346,27 @@ func valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, err } // Convert given marshal value to toml value -func valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) { +func (e *Encoder) valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) { if mtype.Kind() == reflect.Ptr { - return valueToToml(mtype.Elem(), mval.Elem()) + return e.valueToToml(mtype.Elem(), mval.Elem()) } switch { case isCustomMarshaler(mtype): return callCustomMarshaler(mval) case isTree(mtype): - return valueToTree(mtype, mval) + return e.valueToTree(mtype, mval) case isTreeSlice(mtype): - return valueToTreeSlice(mtype, mval) + return e.valueToTreeSlice(mtype, mval) case isOtherSlice(mtype): - return valueToOtherSlice(mtype, mval) + return e.valueToOtherSlice(mtype, mval) default: switch mtype.Kind() { case reflect.Bool: return mval.Bool(), nil case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) { + return fmt.Sprint(mval), nil + } return mval.Int(), nil case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: return mval.Uint(), nil @@ -227,17 +386,16 @@ func valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) { // Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for // sub-structs, and only definite types can be unmarshaled. func (t *Tree) Unmarshal(v interface{}) error { - mtype := reflect.TypeOf(v) - if mtype.Kind() != reflect.Ptr || mtype.Elem().Kind() != reflect.Struct { - return errors.New("Only a pointer to struct can be unmarshaled from TOML") - } + d := Decoder{tval: t, tagName: tagFieldName} + return d.unmarshal(v) +} - sval, err := valueFromTree(mtype.Elem(), t) - if err != nil { - return err - } - reflect.ValueOf(v).Elem().Set(sval) - return nil +// Marshal returns the TOML encoding of Tree. +// See Marshal() documentation for types mapping table. +func (t *Tree) Marshal() ([]byte, error) { + var buf bytes.Buffer + err := NewEncoder(&buf).Encode(t) + return buf.Bytes(), err } // Unmarshal parses the TOML-encoded data and stores the result in the value @@ -246,6 +404,18 @@ func (t *Tree) Unmarshal(v interface{}) error { // sub-structs, and currently only definite types can be unmarshaled to (i.e. no // `interface{}`). // +// The following struct annotations are supported: +// +// toml:"Field" Overrides the field's name to map to. +// default:"foo" Provides a default value. +// +// For default values, only fields of the following types are supported: +// * string +// * bool +// * int +// * int64 +// * float64 +// // See Marshal() documentation for types mapping table. 
func Unmarshal(data []byte, v interface{}) error { t, err := LoadReader(bytes.NewReader(data)) @@ -255,10 +425,60 @@ func Unmarshal(data []byte, v interface{}) error { return t.Unmarshal(v) } +// Decoder reads and decodes TOML values from an input stream. +type Decoder struct { + r io.Reader + tval *Tree + encOpts + tagName string +} + +// NewDecoder returns a new decoder that reads from r. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + encOpts: encOptsDefaults, + tagName: tagFieldName, + } +} + +// Decode reads a TOML-encoded value from it's input +// and unmarshals it in the value pointed at by v. +// +// See the documentation for Marshal for details. +func (d *Decoder) Decode(v interface{}) error { + var err error + d.tval, err = LoadReader(d.r) + if err != nil { + return err + } + return d.unmarshal(v) +} + +// SetTagName allows changing default tag "toml" +func (d *Decoder) SetTagName(v string) *Decoder { + d.tagName = v + return d +} + +func (d *Decoder) unmarshal(v interface{}) error { + mtype := reflect.TypeOf(v) + if mtype.Kind() != reflect.Ptr || mtype.Elem().Kind() != reflect.Struct { + return errors.New("Only a pointer to struct can be unmarshaled from TOML") + } + + sval, err := d.valueFromTree(mtype.Elem(), d.tval) + if err != nil { + return err + } + reflect.ValueOf(v).Elem().Set(sval) + return nil +} + // Convert toml tree to marshal struct or map, using marshal type -func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) { +func (d *Decoder) valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) { if mtype.Kind() == reflect.Ptr { - return unwrapPointer(mtype, tval) + return d.unwrapPointer(mtype, tval) } var mval reflect.Value switch mtype.Kind() { @@ -266,30 +486,73 @@ func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) { mval = reflect.New(mtype).Elem() for i := 0; i < mtype.NumField(); i++ { mtypef := mtype.Field(i) - opts := tomlOptions(mtypef) + an := annotation{tag: d.tagName} + opts := tomlOptions(mtypef, an) if opts.include { baseKey := opts.name - keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)} + keysToTry := []string{ + baseKey, + strings.ToLower(baseKey), + strings.ToTitle(baseKey), + strings.ToLower(string(baseKey[0])) + baseKey[1:], + } + + found := false for _, key := range keysToTry { exists := tval.Has(key) if !exists { continue } val := tval.Get(key) - mvalf, err := valueFromToml(mtypef.Type, val) + mvalf, err := d.valueFromToml(mtypef.Type, val) if err != nil { return mval, formatError(err, tval.GetPosition(key)) } mval.Field(i).Set(mvalf) + found = true break } + + if !found && opts.defaultValue != "" { + mvalf := mval.Field(i) + var val interface{} = nil + var err error = nil + switch mvalf.Kind() { + case reflect.Bool: + val, err = strconv.ParseBool(opts.defaultValue) + if err != nil { + return mval.Field(i), err + } + case reflect.Int: + val, err = strconv.Atoi(opts.defaultValue) + if err != nil { + return mval.Field(i), err + } + case reflect.String: + val = opts.defaultValue + case reflect.Int64: + val, err = strconv.ParseInt(opts.defaultValue, 10, 64) + if err != nil { + return mval.Field(i), err + } + case reflect.Float64: + val, err = strconv.ParseFloat(opts.defaultValue, 64) + if err != nil { + return mval.Field(i), err + } + default: + return mval.Field(i), fmt.Errorf("unsuported field type for default option") + } + mval.Field(i).Set(reflect.ValueOf(val)) + } } } case reflect.Map: mval = reflect.MakeMap(mtype) for _, key := range 
tval.Keys() { - val := tval.Get(key) - mvalf, err := valueFromToml(mtype.Elem(), val) + // TODO: path splits key + val := tval.GetPath([]string{key}) + mvalf, err := d.valueFromToml(mtype.Elem(), val) if err != nil { return mval, formatError(err, tval.GetPosition(key)) } @@ -300,10 +563,10 @@ func valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) { } // Convert toml value to marshal struct/map slice, using marshal type -func valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) { +func (d *Decoder) valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) { mval := reflect.MakeSlice(mtype, len(tval), len(tval)) for i := 0; i < len(tval); i++ { - val, err := valueFromTree(mtype.Elem(), tval[i]) + val, err := d.valueFromTree(mtype.Elem(), tval[i]) if err != nil { return mval, err } @@ -313,10 +576,10 @@ func valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) } // Convert toml value to marshal primitive slice, using marshal type -func valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) { +func (d *Decoder) valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) { mval := reflect.MakeSlice(mtype, len(tval), len(tval)) for i := 0; i < len(tval); i++ { - val, err := valueFromToml(mtype.Elem(), tval[i]) + val, err := d.valueFromToml(mtype.Elem(), tval[i]) if err != nil { return mval, err } @@ -326,117 +589,94 @@ func valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, } // Convert toml value to marshal value, using marshal type -func valueFromToml(mtype reflect.Type, tval interface{}) (reflect.Value, error) { +func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}) (reflect.Value, error) { if mtype.Kind() == reflect.Ptr { - return unwrapPointer(mtype, tval) + return d.unwrapPointer(mtype, tval) } - switch { - case isTree(mtype): - return valueFromTree(mtype, tval.(*Tree)) - case isTreeSlice(mtype): - return valueFromTreeSlice(mtype, tval.([]*Tree)) - case isOtherSlice(mtype): - return valueFromOtherSlice(mtype, tval.([]interface{})) + + switch t := tval.(type) { + case *Tree: + if isTree(mtype) { + return d.valueFromTree(mtype, t) + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a tree", tval, tval) + case []*Tree: + if isTreeSlice(mtype) { + return d.valueFromTreeSlice(mtype, t) + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to trees", tval, tval) + case []interface{}: + if isOtherSlice(mtype) { + return d.valueFromOtherSlice(mtype, t) + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a slice", tval, tval) default: switch mtype.Kind() { - case reflect.Bool: - val, ok := tval.(bool) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to bool", tval, tval) - } - return reflect.ValueOf(val), nil - case reflect.Int: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to int", tval, tval) - } - return reflect.ValueOf(int(val)), nil - case reflect.Int8: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to int", tval, tval) - } - return reflect.ValueOf(int8(val)), nil - case reflect.Int16: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to int", tval, tval) - } - return reflect.ValueOf(int16(val)), nil - case reflect.Int32: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), 
fmt.Errorf("Can't convert %v(%T) to int", tval, tval) + case reflect.Bool, reflect.Struct: + val := reflect.ValueOf(tval) + // if this passes for when mtype is reflect.Struct, tval is a time.Time + if !val.Type().ConvertibleTo(mtype) { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - return reflect.ValueOf(int32(val)), nil - case reflect.Int64: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to int", tval, tval) - } - return reflect.ValueOf(val), nil - case reflect.Uint: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval) + + return val.Convert(mtype), nil + case reflect.String: + val := reflect.ValueOf(tval) + // stupidly, int64 is convertible to string. So special case this. + if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - return reflect.ValueOf(uint(val)), nil - case reflect.Uint8: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval) + + return val.Convert(mtype), nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + val := reflect.ValueOf(tval) + if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) && val.Kind() == reflect.String { + d, err := time.ParseDuration(val.String()) + if err != nil { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v. %s", tval, tval, mtype.String(), err) + } + return reflect.ValueOf(d), nil } - return reflect.ValueOf(uint8(val)), nil - case reflect.Uint16: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval) + if !val.Type().ConvertibleTo(mtype) { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - return reflect.ValueOf(uint16(val)), nil - case reflect.Uint32: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval) + if reflect.Indirect(reflect.New(mtype)).OverflowInt(val.Convert(mtype).Int()) { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) } - return reflect.ValueOf(uint32(val)), nil - case reflect.Uint64: - val, ok := tval.(int64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to uint", tval, tval) + + return val.Convert(mtype), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + val := reflect.ValueOf(tval) + if !val.Type().ConvertibleTo(mtype) { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - return reflect.ValueOf(uint64(val)), nil - case reflect.Float32: - val, ok := tval.(float64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to float", tval, tval) + + if val.Convert(reflect.TypeOf(int(1))).Int() < 0 { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String()) } - return reflect.ValueOf(float32(val)), nil - case reflect.Float64: - val, ok := tval.(float64) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to float", tval, tval) + if reflect.Indirect(reflect.New(mtype)).OverflowUint(uint64(val.Convert(mtype).Uint())) { + return 
reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) } - return reflect.ValueOf(val), nil - case reflect.String: - val, ok := tval.(string) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to string", tval, tval) + + return val.Convert(mtype), nil + case reflect.Float32, reflect.Float64: + val := reflect.ValueOf(tval) + if !val.Type().ConvertibleTo(mtype) { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - return reflect.ValueOf(val), nil - case reflect.Struct: - val, ok := tval.(time.Time) - if !ok { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to time", tval, tval) + if reflect.Indirect(reflect.New(mtype)).OverflowFloat(val.Convert(mtype).Float()) { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) } - return reflect.ValueOf(val), nil + + return val.Convert(mtype), nil default: - return reflect.ValueOf(nil), fmt.Errorf("Unmarshal can't handle %v(%v)", mtype, mtype.Kind()) + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind()) } } } -func unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) { - val, err := valueFromToml(mtype.Elem(), tval) +func (d *Decoder) unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) { + val, err := d.valueFromToml(mtype.Elem(), tval) if err != nil { return reflect.ValueOf(nil), err } @@ -445,10 +685,25 @@ func unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) return mval, nil } -func tomlOptions(vf reflect.StructField) tomlOpts { - tag := vf.Tag.Get("toml") +func tomlOptions(vf reflect.StructField, an annotation) tomlOpts { + tag := vf.Tag.Get(an.tag) parse := strings.Split(tag, ",") - result := tomlOpts{vf.Name, true, false} + var comment string + if c := vf.Tag.Get(an.comment); c != "" { + comment = c + } + commented, _ := strconv.ParseBool(vf.Tag.Get(an.commented)) + multiline, _ := strconv.ParseBool(vf.Tag.Get(an.multiline)) + defaultValue := vf.Tag.Get(tagDefault) + result := tomlOpts{ + name: vf.Name, + comment: comment, + commented: commented, + multiline: multiline, + include: true, + omitempty: false, + defaultValue: defaultValue, + } if parse[0] != "" { if parse[0] == "-" && len(parse) == 1 { result.include = false diff --git a/vendor/github.com/pelletier/go-toml/marshal_test.go b/vendor/github.com/pelletier/go-toml/marshal_test.go index a3fa128..9e5357d 100644 --- a/vendor/github.com/pelletier/go-toml/marshal_test.go +++ b/vendor/github.com/pelletier/go-toml/marshal_test.go @@ -6,6 +6,7 @@ import ( "fmt" "io/ioutil" "reflect" + "strings" "testing" "time" ) @@ -52,6 +53,17 @@ func TestBasicMarshal(t *testing.T) { } } +func TestBasicMarshalWithPointer(t *testing.T) { + result, err := Marshal(&basicTestData) + if err != nil { + t.Fatal(err) + } + expected := basicTestToml + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + func TestBasicUnmarshal(t *testing.T) { result := basicMarshalTestStruct{} err := Unmarshal(basicTestToml, &result) @@ -145,8 +157,8 @@ var docData = testDoc{ Second: &subdoc, }, SubDocList: []testSubDoc{ - testSubDoc{"List.First", 0}, - testSubDoc{"List.Second", 0}, + {"List.First", 0}, + {"List.Second", 0}, }, SubDocPtrs: []*testSubDoc{&subdoc}, } @@ -162,6 +174,17 @@ func TestDocMarshal(t *testing.T) { } } +func TestDocMarshalPointer(t 
*testing.T) { + result, err := Marshal(&docData) + if err != nil { + t.Fatal(err) + } + expected, _ := ioutil.ReadFile("marshal_test.toml") + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + func TestDocUnmarshal(t *testing.T) { result := testDoc{} tomlData, _ := ioutil.ReadFile("marshal_test.toml") @@ -177,25 +200,6 @@ func TestDocUnmarshal(t *testing.T) { } } -func ExampleUnmarshal() { - type Postgres struct { - User string - Password string - } - type Config struct { - Postgres Postgres - } - - doc := []byte(` - [postgres] - user = "pelletier" - password = "mypassword"`) - - config := Config{} - Unmarshal(doc, &config) - fmt.Println("user=", config.Postgres.User) -} - func TestDocPartialUnmarshal(t *testing.T) { result := testDocSubs{} @@ -527,6 +531,14 @@ func TestPointerUnmarshal(t *testing.T) { } } +func TestUnmarshalTypeMismatch(t *testing.T) { + result := pointerMarshalTestStruct{} + err := Unmarshal([]byte("List = 123"), &result) + if !strings.HasPrefix(err.Error(), "(1, 1): Can't convert 123(int64) to []string(slice)") { + t.Errorf("Type mismatch must be reported: got %v", err.Error()) + } +} + type nestedMarshalTestStruct struct { String [][]string //Struct [][]basicMarshalTestSubStruct @@ -540,7 +552,7 @@ var strPtr = []*string{&str1, &str2} var strPtr2 = []*[]*string{&strPtr} var nestedTestData = nestedMarshalTestStruct{ - String: [][]string{[]string{"Five", "Six"}, []string{"One", "Two"}}, + String: [][]string{{"Five", "Six"}, {"One", "Two"}}, StringPtr: &strPtr2, } @@ -617,3 +629,637 @@ func TestNestedCustomMarshaler(t *testing.T) { t.Errorf("Bad nested custom marshaler: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) } } + +var commentTestToml = []byte(` +# it's a comment on type +[postgres] + # isCommented = "dvalue" + noComment = "cvalue" + + # A comment on AttrB with a + # break line + password = "bvalue" + + # A comment on AttrA + user = "avalue" + + [[postgres.My]] + + # a comment on my on typeC + My = "Foo" + + [[postgres.My]] + + # a comment on my on typeC + My = "Baar" +`) + +func TestMarshalComment(t *testing.T) { + type TypeC struct { + My string `comment:"a comment on my on typeC"` + } + type TypeB struct { + AttrA string `toml:"user" comment:"A comment on AttrA"` + AttrB string `toml:"password" comment:"A comment on AttrB with a\n break line"` + AttrC string `toml:"noComment"` + AttrD string `toml:"isCommented" commented:"true"` + My []TypeC + } + type TypeA struct { + TypeB TypeB `toml:"postgres" comment:"it's a comment on type"` + } + + ta := []TypeC{{My: "Foo"}, {My: "Baar"}} + config := TypeA{TypeB{AttrA: "avalue", AttrB: "bvalue", AttrC: "cvalue", AttrD: "dvalue", My: ta}} + result, err := Marshal(config) + if err != nil { + t.Fatal(err) + } + expected := commentTestToml + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +type mapsTestStruct struct { + Simple map[string]string + Paths map[string]string + Other map[string]float64 + X struct { + Y struct { + Z map[string]bool + } + } +} + +var mapsTestData = mapsTestStruct{ + Simple: map[string]string{ + "one plus one": "two", + "next": "three", + }, + Paths: map[string]string{ + "/this/is/a/path": "/this/is/also/a/path", + "/heloo.txt": "/tmp/lololo.txt", + }, + Other: map[string]float64{ + "testing": 3.9999, + }, + X: struct{ Y struct{ Z map[string]bool } }{ + Y: struct{ Z map[string]bool }{ + Z: 
map[string]bool{ + "is.Nested": true, + }, + }, + }, +} +var mapsTestToml = []byte(` +[Other] + "testing" = 3.9999 + +[Paths] + "/heloo.txt" = "/tmp/lololo.txt" + "/this/is/a/path" = "/this/is/also/a/path" + +[Simple] + "next" = "three" + "one plus one" = "two" + +[X] + + [X.Y] + + [X.Y.Z] + "is.Nested" = true +`) + +func TestEncodeQuotedMapKeys(t *testing.T) { + var buf bytes.Buffer + if err := NewEncoder(&buf).QuoteMapKeys(true).Encode(mapsTestData); err != nil { + t.Fatal(err) + } + result := buf.Bytes() + expected := mapsTestToml + if !bytes.Equal(result, expected) { + t.Errorf("Bad maps marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +func TestDecodeQuotedMapKeys(t *testing.T) { + result := mapsTestStruct{} + err := NewDecoder(bytes.NewBuffer(mapsTestToml)).Decode(&result) + expected := mapsTestData + if err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(result, expected) { + t.Errorf("Bad maps unmarshal: expected %v, got %v", expected, result) + } +} + +type structArrayNoTag struct { + A struct { + B []int64 + C []int64 + } +} + +func TestMarshalArray(t *testing.T) { + expected := []byte(` +[A] + B = [1,2,3] + C = [1] +`) + + m := structArrayNoTag{ + A: struct { + B []int64 + C []int64 + }{ + B: []int64{1, 2, 3}, + C: []int64{1}, + }, + } + + b, err := Marshal(m) + + if err != nil { + t.Fatal(err) + } + + if !bytes.Equal(b, expected) { + t.Errorf("Bad arrays marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, b) + } +} + +func TestMarshalArrayOnePerLine(t *testing.T) { + expected := []byte(` +[A] + B = [ + 1, + 2, + 3, + ] + C = [1] +`) + + m := structArrayNoTag{ + A: struct { + B []int64 + C []int64 + }{ + B: []int64{1, 2, 3}, + C: []int64{1}, + }, + } + + var buf bytes.Buffer + encoder := NewEncoder(&buf).ArraysWithOneElementPerLine(true) + err := encoder.Encode(m) + + if err != nil { + t.Fatal(err) + } + + b := buf.Bytes() + + if !bytes.Equal(b, expected) { + t.Errorf("Bad arrays marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, b) + } +} + +var customTagTestToml = []byte(` +[postgres] + password = "bvalue" + user = "avalue" + + [[postgres.My]] + My = "Foo" + + [[postgres.My]] + My = "Baar" +`) + +func TestMarshalCustomTag(t *testing.T) { + type TypeC struct { + My string + } + type TypeB struct { + AttrA string `file:"user"` + AttrB string `file:"password"` + My []TypeC + } + type TypeA struct { + TypeB TypeB `file:"postgres"` + } + + ta := []TypeC{{My: "Foo"}, {My: "Baar"}} + config := TypeA{TypeB{AttrA: "avalue", AttrB: "bvalue", My: ta}} + var buf bytes.Buffer + err := NewEncoder(&buf).SetTagName("file").Encode(config) + if err != nil { + t.Fatal(err) + } + expected := customTagTestToml + result := buf.Bytes() + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +var customCommentTagTestToml = []byte(` +# db connection +[postgres] + + # db pass + password = "bvalue" + + # db user + user = "avalue" +`) + +func TestMarshalCustomComment(t *testing.T) { + type TypeB struct { + AttrA string `toml:"user" descr:"db user"` + AttrB string `toml:"password" descr:"db pass"` + } + type TypeA struct { + TypeB TypeB `toml:"postgres" descr:"db connection"` + } + + config := TypeA{TypeB{AttrA: "avalue", AttrB: "bvalue"}} + var buf bytes.Buffer + err := NewEncoder(&buf).SetTagComment("descr").Encode(config) + if err != nil { + t.Fatal(err) + } + expected := customCommentTagTestToml + result := buf.Bytes() + if 
!bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +var customCommentedTagTestToml = []byte(` +[postgres] + # password = "bvalue" + # user = "avalue" +`) + +func TestMarshalCustomCommented(t *testing.T) { + type TypeB struct { + AttrA string `toml:"user" disable:"true"` + AttrB string `toml:"password" disable:"true"` + } + type TypeA struct { + TypeB TypeB `toml:"postgres"` + } + + config := TypeA{TypeB{AttrA: "avalue", AttrB: "bvalue"}} + var buf bytes.Buffer + err := NewEncoder(&buf).SetTagCommented("disable").Encode(config) + if err != nil { + t.Fatal(err) + } + expected := customCommentedTagTestToml + result := buf.Bytes() + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +var customMultilineTagTestToml = []byte(`int_slice = [ + 1, + 2, + 3, +] +`) + +func TestMarshalCustomMultiline(t *testing.T) { + type TypeA struct { + AttrA []int `toml:"int_slice" mltln:"true"` + } + + config := TypeA{AttrA: []int{1, 2, 3}} + var buf bytes.Buffer + err := NewEncoder(&buf).ArraysWithOneElementPerLine(true).SetTagMultiline("mltln").Encode(config) + if err != nil { + t.Fatal(err) + } + expected := customMultilineTagTestToml + result := buf.Bytes() + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +var testDocBasicToml = []byte(` +[document] + bool_val = true + date_val = 1979-05-27T07:32:00Z + float_val = 123.4 + int_val = 5000 + string_val = "Bite me" + uint_val = 5001 +`) + +type testDocCustomTag struct { + Doc testDocBasicsCustomTag `file:"document"` +} +type testDocBasicsCustomTag struct { + Bool bool `file:"bool_val"` + Date time.Time `file:"date_val"` + Float float32 `file:"float_val"` + Int int `file:"int_val"` + Uint uint `file:"uint_val"` + String *string `file:"string_val"` + unexported int `file:"shouldntBeHere"` +} + +var testDocCustomTagData = testDocCustomTag{ + Doc: testDocBasicsCustomTag{ + Bool: true, + Date: time.Date(1979, 5, 27, 7, 32, 0, 0, time.UTC), + Float: 123.4, + Int: 5000, + Uint: 5001, + String: &biteMe, + unexported: 0, + }, +} + +func TestUnmarshalCustomTag(t *testing.T) { + buf := bytes.NewBuffer(testDocBasicToml) + + result := testDocCustomTag{} + err := NewDecoder(buf).SetTagName("file").Decode(&result) + if err != nil { + t.Fatal(err) + } + expected := testDocCustomTagData + if !reflect.DeepEqual(result, expected) { + resStr, _ := json.MarshalIndent(result, "", " ") + expStr, _ := json.MarshalIndent(expected, "", " ") + t.Errorf("Bad unmarshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expStr, resStr) + + } +} + +func TestUnmarshalMap(t *testing.T) { + m := make(map[string]int) + m["a"] = 1 + + err := Unmarshal(basicTestToml, m) + if err.Error() != "Only a pointer to struct can be unmarshaled from TOML" { + t.Fail() + } +} + +func TestMarshalSlice(t *testing.T) { + m := make([]int, 1) + m[0] = 1 + + var buf bytes.Buffer + err := NewEncoder(&buf).Encode(&m) + if err == nil { + t.Error("expected error, got nil") + return + } + if err.Error() != "Only pointer to struct can be marshaled to TOML" { + t.Fail() + } +} + +func TestMarshalSlicePointer(t *testing.T) { + m := make([]int, 1) + m[0] = 1 + + var buf bytes.Buffer + err := NewEncoder(&buf).Encode(m) + if err == nil { + t.Error("expected error, got nil") + return + } + if err.Error() != "Only a struct or map can be marshaled to TOML" 
{ + t.Fail() + } +} + +type testDuration struct { + Nanosec time.Duration `toml:"nanosec"` + Microsec1 time.Duration `toml:"microsec1"` + Microsec2 *time.Duration `toml:"microsec2"` + Millisec time.Duration `toml:"millisec"` + Sec time.Duration `toml:"sec"` + Min time.Duration `toml:"min"` + Hour time.Duration `toml:"hour"` + Mixed time.Duration `toml:"mixed"` + AString string `toml:"a_string"` +} + +var testDurationToml = []byte(` +nanosec = "1ns" +microsec1 = "1us" +microsec2 = "1µs" +millisec = "1ms" +sec = "1s" +min = "1m" +hour = "1h" +mixed = "1h1m1s1ms1µs1ns" +a_string = "15s" +`) + +func TestUnmarshalDuration(t *testing.T) { + buf := bytes.NewBuffer(testDurationToml) + + result := testDuration{} + err := NewDecoder(buf).Decode(&result) + if err != nil { + t.Fatal(err) + } + ms := time.Duration(1) * time.Microsecond + expected := testDuration{ + Nanosec: 1, + Microsec1: time.Microsecond, + Microsec2: &ms, + Millisec: time.Millisecond, + Sec: time.Second, + Min: time.Minute, + Hour: time.Hour, + Mixed: time.Hour + + time.Minute + + time.Second + + time.Millisecond + + time.Microsecond + + time.Nanosecond, + AString: "15s", + } + if !reflect.DeepEqual(result, expected) { + resStr, _ := json.MarshalIndent(result, "", " ") + expStr, _ := json.MarshalIndent(expected, "", " ") + t.Errorf("Bad unmarshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expStr, resStr) + + } +} + +var testDurationToml2 = []byte(`a_string = "15s" +hour = "1h0m0s" +microsec1 = "1µs" +microsec2 = "1µs" +millisec = "1ms" +min = "1m0s" +mixed = "1h1m1.001001001s" +nanosec = "1ns" +sec = "1s" +`) + +func TestMarshalDuration(t *testing.T) { + ms := time.Duration(1) * time.Microsecond + data := testDuration{ + Nanosec: 1, + Microsec1: time.Microsecond, + Microsec2: &ms, + Millisec: time.Millisecond, + Sec: time.Second, + Min: time.Minute, + Hour: time.Hour, + Mixed: time.Hour + + time.Minute + + time.Second + + time.Millisecond + + time.Microsecond + + time.Nanosecond, + AString: "15s", + } + + var buf bytes.Buffer + err := NewEncoder(&buf).Encode(data) + if err != nil { + t.Fatal(err) + } + expected := testDurationToml2 + result := buf.Bytes() + if !bytes.Equal(result, expected) { + t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) + } +} + +type testBadDuration struct { + Val time.Duration `toml:"val"` +} + +var testBadDurationToml = []byte(`val = "1z"`) + +func TestUnmarshalBadDuration(t *testing.T) { + buf := bytes.NewBuffer(testBadDurationToml) + + result := testBadDuration{} + err := NewDecoder(buf).Decode(&result) + if err == nil { + t.Fatal() + } + if err.Error() != "(1, 1): Can't convert 1z(string) to time.Duration. 
time: unknown unit z in duration 1z" { + t.Fatalf("unexpected error: %s", err) + } +} + +var testCamelCaseKeyToml = []byte(`fooBar = 10`) + +func TestUnmarshalCamelCaseKey(t *testing.T) { + var x struct { + FooBar int + B int + } + + if err := Unmarshal(testCamelCaseKeyToml, &x); err != nil { + t.Fatal(err) + } + + if x.FooBar != 10 { + t.Fatal("Did not set camelCase'd key") + } +} + +func TestUnmarshalDefault(t *testing.T) { + var doc struct { + StringField string `default:"a"` + BoolField bool `default:"true"` + IntField int `default:"1"` + Int64Field int64 `default:"2"` + Float64Field float64 `default:"3.1"` + } + + err := Unmarshal([]byte(``), &doc) + if err != nil { + t.Fatal(err) + } + if doc.BoolField != true { + t.Errorf("BoolField should be true, not %t", doc.BoolField) + } + if doc.StringField != "a" { + t.Errorf("StringField should be \"a\", not %s", doc.StringField) + } + if doc.IntField != 1 { + t.Errorf("IntField should be 1, not %d", doc.IntField) + } + if doc.Int64Field != 2 { + t.Errorf("Int64Field should be 2, not %d", doc.Int64Field) + } + if doc.Float64Field != 3.1 { + t.Errorf("Float64Field should be 3.1, not %f", doc.Float64Field) + } +} + +func TestUnmarshalDefaultFailureBool(t *testing.T) { + var doc struct { + Field bool `default:"blah"` + } + + err := Unmarshal([]byte(``), &doc) + if err == nil { + t.Fatal("should error") + } +} + +func TestUnmarshalDefaultFailureInt(t *testing.T) { + var doc struct { + Field int `default:"blah"` + } + + err := Unmarshal([]byte(``), &doc) + if err == nil { + t.Fatal("should error") + } +} + +func TestUnmarshalDefaultFailureInt64(t *testing.T) { + var doc struct { + Field int64 `default:"blah"` + } + + err := Unmarshal([]byte(``), &doc) + if err == nil { + t.Fatal("should error") + } +} + +func TestUnmarshalDefaultFailureFloat64(t *testing.T) { + var doc struct { + Field float64 `default:"blah"` + } + + err := Unmarshal([]byte(``), &doc) + if err == nil { + t.Fatal("should error") + } +} + +func TestUnmarshalDefaultFailureUnsupported(t *testing.T) { + var doc struct { + Field struct{} `default:"blah"` + } + + err := Unmarshal([]byte(``), &doc) + if err == nil { + t.Fatal("should error") + } +} diff --git a/vendor/github.com/pelletier/go-toml/parser.go b/vendor/github.com/pelletier/go-toml/parser.go index 8ee49cb..a7498e4 100644 --- a/vendor/github.com/pelletier/go-toml/parser.go +++ b/vendor/github.com/pelletier/go-toml/parser.go @@ -5,6 +5,7 @@ package toml import ( "errors" "fmt" + "math" "reflect" "regexp" "strconv" @@ -76,8 +77,10 @@ func (p *tomlParser) parseStart() tomlParserStateFn { return p.parseAssign case tokenEOF: return nil + case tokenError: + p.raiseError(tok, "parsing error: %s", tok.String()) default: - p.raiseError(tok, "unexpected token") + p.raiseError(tok, "unexpected token %s", tok.typ) } return nil } @@ -164,6 +167,11 @@ func (p *tomlParser) parseAssign() tomlParserStateFn { key := p.getToken() p.assume(tokenEqual) + parsedKey, err := parseKey(key.val) + if err != nil { + p.raiseError(key, "invalid key: %s", err.Error()) + } + value := p.parseRvalue() var tableKey []string if len(p.currentTable) > 0 { @@ -172,6 +180,9 @@ func (p *tomlParser) parseAssign() tomlParserStateFn { tableKey = []string{} } + prefixKey := parsedKey[0 : len(parsedKey)-1] + tableKey = append(tableKey, prefixKey...) 
+ // find the table to assign, looking out for arrays of tables var targetNode *Tree switch node := p.tree.GetPath(tableKey).(type) { @@ -179,20 +190,19 @@ func (p *tomlParser) parseAssign() tomlParserStateFn { targetNode = node[len(node)-1] case *Tree: targetNode = node + case nil: + // create intermediate + if err := p.tree.createSubTree(tableKey, key.Position); err != nil { + p.raiseError(key, "could not create intermediate group: %s", err) + } + targetNode = p.tree.GetPath(tableKey).(*Tree) default: p.raiseError(key, "Unknown table type for path: %s", strings.Join(tableKey, ".")) } // assign value to the found table - keyVals, err := parseKey(key.val) - if err != nil { - p.raiseError(key, "%s", err) - } - if len(keyVals) != 1 { - p.raiseError(key, "Invalid key") - } - keyVal := keyVals[0] + keyVal := parsedKey[len(parsedKey)-1] localKey := []string{keyVal} finalKey := append(tableKey, keyVal) if targetNode.GetPath(localKey) != nil { @@ -205,20 +215,32 @@ func (p *tomlParser) parseAssign() tomlParserStateFn { case *Tree, []*Tree: toInsert = value default: - toInsert = &tomlValue{value, key.Position} + toInsert = &tomlValue{value: value, position: key.Position} } targetNode.values[keyVal] = toInsert return p.parseStart } var numberUnderscoreInvalidRegexp *regexp.Regexp +var hexNumberUnderscoreInvalidRegexp *regexp.Regexp -func cleanupNumberToken(value string) (string, error) { +func numberContainsInvalidUnderscore(value string) error { if numberUnderscoreInvalidRegexp.MatchString(value) { - return "", errors.New("invalid use of _ in number") + return errors.New("invalid use of _ in number") } + return nil +} + +func hexNumberContainsInvalidUnderscore(value string) error { + if hexNumberUnderscoreInvalidRegexp.MatchString(value) { + return errors.New("invalid use of _ in hex number") + } + return nil +} + +func cleanupNumberToken(value string) string { cleanedVal := strings.Replace(value, "_", "", -1) - return cleanedVal, nil + return cleanedVal } func (p *tomlParser) parseRvalue() interface{} { @@ -234,21 +256,57 @@ func (p *tomlParser) parseRvalue() interface{} { return true case tokenFalse: return false + case tokenInf: + if tok.val[0] == '-' { + return math.Inf(-1) + } + return math.Inf(1) + case tokenNan: + return math.NaN() case tokenInteger: - cleanedVal, err := cleanupNumberToken(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) + cleanedVal := cleanupNumberToken(tok.val) + var err error + var val int64 + if len(cleanedVal) >= 3 && cleanedVal[0] == '0' { + switch cleanedVal[1] { + case 'x': + err = hexNumberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 16, 64) + case 'o': + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 8, 64) + case 'b': + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 2, 64) + default: + panic("invalid base") // the lexer should catch this first + } + } else { + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal, 10, 64) } - val, err := strconv.ParseInt(cleanedVal, 10, 64) if err != nil { p.raiseError(tok, "%s", err) } return val case tokenFloat: - cleanedVal, err := cleanupNumberToken(tok.val) + err := numberContainsInvalidUnderscore(tok.val) if err != nil { p.raiseError(tok, "%s", 
err) } + cleanedVal := cleanupNumberToken(tok.val) val, err := strconv.ParseFloat(cleanedVal, 64) if err != nil { p.raiseError(tok, "%s", err) @@ -292,7 +350,7 @@ Loop: case tokenRightCurlyBrace: p.getToken() break Loop - case tokenKey: + case tokenKey, tokenInteger, tokenString: if !tokenIsComma(previous) && previous != nil { p.raiseError(follow, "comma expected between fields in inline table") } @@ -309,7 +367,7 @@ Loop: } p.getToken() default: - p.raiseError(follow, "unexpected token type in inline table: %s", follow.typ.String()) + p.raiseError(follow, "unexpected token type in inline table: %s", follow.String()) } previous = follow } @@ -379,5 +437,6 @@ func parseToml(flow []token) *Tree { } func init() { - numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d]|_$|^_)`) + numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d])|_$|^_`) + hexNumberUnderscoreInvalidRegexp = regexp.MustCompile(`(^0x_)|([^\da-f]_|_[^\da-f])|_$|^_`) } diff --git a/vendor/github.com/pelletier/go-toml/parser_test.go b/vendor/github.com/pelletier/go-toml/parser_test.go index 508cb65..7834575 100644 --- a/vendor/github.com/pelletier/go-toml/parser_test.go +++ b/vendor/github.com/pelletier/go-toml/parser_test.go @@ -2,6 +2,7 @@ package toml import ( "fmt" + "math" "reflect" "testing" "time" @@ -72,6 +73,17 @@ func TestNumberInKey(t *testing.T) { }) } +func TestIncorrectKeyExtraSquareBracket(t *testing.T) { + _, err := Load(`[a]b] +zyx = 42`) + if err == nil { + t.Error("Error should have been returned.") + } + if err.Error() != "(1, 4): parsing error: keys cannot contain ] character" { + t.Error("Bad error message:", err.Error()) + } +} + func TestSimpleNumbers(t *testing.T) { tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1") assertTree(t, tree, err, map[string]interface{}{ @@ -82,6 +94,78 @@ func TestSimpleNumbers(t *testing.T) { }) } +func TestSpecialFloats(t *testing.T) { + tree, err := Load(` +normalinf = inf +plusinf = +inf +minusinf = -inf +normalnan = nan +plusnan = +nan +minusnan = -nan +`) + assertTree(t, tree, err, map[string]interface{}{ + "normalinf": math.Inf(1), + "plusinf": math.Inf(1), + "minusinf": math.Inf(-1), + "normalnan": math.NaN(), + "plusnan": math.NaN(), + "minusnan": math.NaN(), + }) +} + +func TestHexIntegers(t *testing.T) { + tree, err := Load(`a = 0xDEADBEEF`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)}) + + tree, err = Load(`a = 0xdeadbeef`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)}) + + tree, err = Load(`a = 0xdead_beef`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)}) + + _, err = Load(`a = 0x_1`) + if err.Error() != "(1, 5): invalid use of _ in hex number" { + t.Error("Bad error message:", err.Error()) + } +} + +func TestOctIntegers(t *testing.T) { + tree, err := Load(`a = 0o01234567`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(342391)}) + + tree, err = Load(`a = 0o755`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(493)}) + + _, err = Load(`a = 0o_1`) + if err.Error() != "(1, 5): invalid use of _ in number" { + t.Error("Bad error message:", err.Error()) + } +} + +func TestBinIntegers(t *testing.T) { + tree, err := Load(`a = 0b11010110`) + assertTree(t, tree, err, map[string]interface{}{"a": int64(214)}) + + _, err = Load(`a = 0b_1`) + if err.Error() != "(1, 5): invalid use of _ in number" { + t.Error("Bad error message:", err.Error()) + } +} + +func TestBadIntegerBase(t *testing.T) { + _, err := Load(`a = 0k1`) + if 
err.Error() != "(1, 5): unknown number base: k. possible options are x (hex) o (octal) b (binary)" { + t.Error("Error should have been returned.") + } +} + +func TestIntegerNoDigit(t *testing.T) { + _, err := Load(`a = 0b`) + if err.Error() != "(1, 5): number needs at least one digit" { + t.Error("Bad error message:", err.Error()) + } +} + func TestNumbersWithUnderscores(t *testing.T) { tree, err := Load("a = 1_000") assertTree(t, tree, err, map[string]interface{}{ @@ -155,6 +239,36 @@ func TestSpaceKey(t *testing.T) { }) } +func TestDoubleQuotedKey(t *testing.T) { + tree, err := Load(` + "key" = "a" + "\t" = "b" + "\U0001F914" = "c" + "\u2764" = "d" + `) + assertTree(t, tree, err, map[string]interface{}{ + "key": "a", + "\t": "b", + "\U0001F914": "c", + "\u2764": "d", + }) +} + +func TestSingleQuotedKey(t *testing.T) { + tree, err := Load(` + 'key' = "a" + '\t' = "b" + '\U0001F914' = "c" + '\u2764' = "d" + `) + assertTree(t, tree, err, map[string]interface{}{ + `key`: "a", + `\t`: "b", + `\U0001F914`: "c", + `\u2764`: "d", + }) +} + func TestStringEscapables(t *testing.T) { tree, err := Load("a = \"a \\n b\"") assertTree(t, tree, err, map[string]interface{}{ @@ -467,7 +581,7 @@ func TestDuplicateKeys(t *testing.T) { func TestEmptyIntermediateTable(t *testing.T) { _, err := Load("[foo..bar]") - if err.Error() != "(1, 2): invalid table array key: empty table key" { + if err.Error() != "(1, 2): invalid table array key: expecting key part after dot" { t.Error("Bad error message:", err.Error()) } } @@ -642,7 +756,7 @@ func TestTomlValueStringRepresentation(t *testing.T) { {int64(12345), "12345"}, {uint64(50), "50"}, {float64(123.45), "123.45"}, - {bool(true), "true"}, + {true, "true"}, {"hello world", "\"hello world\""}, {"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""}, {"\x05", "\"\\u0005\""}, @@ -652,7 +766,7 @@ func TestTomlValueStringRepresentation(t *testing.T) { "[\"gamma\",\"delta\"]"}, {nil, ""}, } { - result, err := tomlValueStringRepresentation(item.Value) + result, err := tomlValueStringRepresentation(item.Value, "", false) if err != nil { t.Errorf("Test %d - unexpected error: %s", idx, err) } @@ -783,3 +897,43 @@ func TestInvalidFloatParsing(t *testing.T) { t.Error("Bad error message:", err.Error()) } } + +func TestMapKeyIsNum(t *testing.T) { + _, err := Load("table={2018=1,2019=2}") + if err != nil { + t.Error("should be passed") + } + _, err = Load(`table={"2018"=1,"2019"=2}`) + if err != nil { + t.Error("should be passed") + } +} + +func TestDottedKeys(t *testing.T) { + tree, err := Load(` +name = "Orange" +physical.color = "orange" +physical.shape = "round" +site."google.com" = true`) + + assertTree(t, tree, err, map[string]interface{}{ + "name": "Orange", + "physical": map[string]interface{}{ + "color": "orange", + "shape": "round", + }, + "site": map[string]interface{}{ + "google.com": true, + }, + }) +} + +func TestInvalidDottedKeyEmptyGroup(t *testing.T) { + _, err := Load(`a..b = true`) + if err == nil { + t.Fatal("should return an error") + } + if err.Error() != "(1, 1): invalid key: expecting key part after dot" { + t.Fatalf("invalid error message: %s", err) + } +} diff --git a/vendor/github.com/pelletier/go-toml/query/doc.go b/vendor/github.com/pelletier/go-toml/query/doc.go index f999fc9..ed63c11 100644 --- a/vendor/github.com/pelletier/go-toml/query/doc.go +++ b/vendor/github.com/pelletier/go-toml/query/doc.go @@ -139,7 +139,7 @@ // Compiled Queries // // Queries may be executed directly on a Tree object, or compiled ahead -// of time and executed discretely. 
The former is more convienent, but has the +// of time and executed discretely. The former is more convenient, but has the // penalty of having to recompile the query expression each time. // // // basic query diff --git a/vendor/github.com/pelletier/go-toml/query/parser.go b/vendor/github.com/pelletier/go-toml/query/parser.go index e4f91b9..5f69b70 100644 --- a/vendor/github.com/pelletier/go-toml/query/parser.go +++ b/vendor/github.com/pelletier/go-toml/query/parser.go @@ -253,7 +253,7 @@ func (p *queryParser) parseFilterExpr() queryParserStateFn { } tok = p.getToken() if tok.typ != tokenKey && tok.typ != tokenString { - return p.parseError(tok, "expected key or string for filter funciton name") + return p.parseError(tok, "expected key or string for filter function name") } name := tok.val tok = p.getToken() diff --git a/vendor/github.com/pelletier/go-toml/query/parser_test.go b/vendor/github.com/pelletier/go-toml/query/parser_test.go index 473896a..312f51a 100644 --- a/vendor/github.com/pelletier/go-toml/query/parser_test.go +++ b/vendor/github.com/pelletier/go-toml/query/parser_test.go @@ -2,12 +2,13 @@ package query import ( "fmt" - "github.com/pelletier/go-toml" "io/ioutil" "sort" "strings" "testing" "time" + + "github.com/pelletier/go-toml" ) type queryTestNode struct { @@ -406,8 +407,7 @@ func TestQueryFilterFn(t *testing.T) { assertQueryPositions(t, string(buff), "$..[?(float)]", - []interface{}{ - // no float values in document + []interface{}{ // no float values in document }) tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z") diff --git a/vendor/github.com/pelletier/go-toml/test.sh b/vendor/github.com/pelletier/go-toml/test.sh deleted file mode 100755 index 91a8896..0000000 --- a/vendor/github.com/pelletier/go-toml/test.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash -# fail out of the script if anything here fails -set -e - -# set the path to the present working directory -export GOPATH=`pwd` - -function git_clone() { - path=$1 - branch=$2 - version=$3 - if [ ! -d "src/$path" ]; then - mkdir -p src/$path - git clone https://$path.git src/$path - fi - pushd src/$path - git checkout "$branch" - git reset --hard "$version" - popd -} - -# Remove potential previous runs -rm -rf src test_program_bin toml-test - -# Run go vet -go vet ./... 
- -go get github.com/pelletier/go-buffruneio -go get github.com/davecgh/go-spew/spew -go get gopkg.in/yaml.v2 -go get github.com/BurntSushi/toml - -# get code for BurntSushi TOML validation -# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize) -git_clone github.com/BurntSushi/toml master HEAD -git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD - -# build the BurntSushi test application -go build -o toml-test github.com/BurntSushi/toml-test - -# vendorize the current lib for testing -# NOTE: this basically mocks an install without having to go back out to github for code -mkdir -p src/github.com/pelletier/go-toml/cmd -mkdir -p src/github.com/pelletier/go-toml/query -cp *.go *.toml src/github.com/pelletier/go-toml -cp -R cmd/* src/github.com/pelletier/go-toml/cmd -cp -R query/* src/github.com/pelletier/go-toml/query -go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go - -# Run basic unit tests -go test github.com/pelletier/go-toml -covermode=count -coverprofile=coverage.out -go test github.com/pelletier/go-toml/cmd/tomljson -go test github.com/pelletier/go-toml/query - -# run the entire BurntSushi test suite -if [[ $# -eq 0 ]] ; then - echo "Running all BurntSushi tests" - ./toml-test ./test_program_bin | tee test_out -else - # run a specific test - test=$1 - test_path='src/github.com/BurntSushi/toml-test/tests' - valid_test="$test_path/valid/$test" - invalid_test="$test_path/invalid/$test" - - if [ -e "$valid_test.toml" ]; then - echo "Valid Test TOML for $test:" - echo "====" - cat "$valid_test.toml" - - echo "Valid Test JSON for $test:" - echo "====" - cat "$valid_test.json" - - echo "Go-TOML Output for $test:" - echo "====" - cat "$valid_test.toml" | ./test_program_bin - fi - - if [ -e "$invalid_test.toml" ]; then - echo "Invalid Test TOML for $test:" - echo "====" - cat "$invalid_test.toml" - - echo "Go-TOML Output for $test:" - echo "====" - echo "go-toml Output:" - cat "$invalid_test.toml" | ./test_program_bin - fi -fi diff --git a/vendor/github.com/pelletier/go-toml/token.go b/vendor/github.com/pelletier/go-toml/token.go index 5581fe0..1a90813 100644 --- a/vendor/github.com/pelletier/go-toml/token.go +++ b/vendor/github.com/pelletier/go-toml/token.go @@ -23,6 +23,8 @@ const ( tokenTrue tokenFalse tokenFloat + tokenInf + tokenNan tokenEqual tokenLeftBracket tokenRightBracket @@ -55,6 +57,8 @@ var tokenTypeNames = []string{ "True", "False", "Float", + "Inf", + "NaN", "=", "[", "]", diff --git a/vendor/github.com/pelletier/go-toml/toml.go b/vendor/github.com/pelletier/go-toml/toml.go index 64f19ed..41ad9b4 100644 --- a/vendor/github.com/pelletier/go-toml/toml.go +++ b/vendor/github.com/pelletier/go-toml/toml.go @@ -11,14 +11,19 @@ import ( ) type tomlValue struct { - value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list - position Position + value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list + comment string + commented bool + multiline bool + position Position } // Tree is the result of the parsing of a TOML file. type Tree struct { - values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree - position Position + values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree + comment string + commented bool + position Position } func newTree() *Tree { @@ -67,18 +72,15 @@ func (t *Tree) Keys() []string { } // Get the value at key in the Tree. -// Key is a dot-separated path (e.g. a.b.c). 
+// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings. +// If you need to retrieve non-bare keys, use GetPath. // Returns nil if the path does not exist in the tree. // If keys is of length zero, the current tree is returned. func (t *Tree) Get(key string) interface{} { if key == "" { return t } - comps, err := parseKey(key) - if err != nil { - return nil - } - return t.GetPath(comps) + return t.GetPath(strings.Split(key, ".")) } // GetPath returns the element in the tree indicated by 'keys'. @@ -174,17 +176,84 @@ func (t *Tree) GetDefault(key string, def interface{}) interface{} { return val } +// SetOptions arguments are supplied to the SetWithOptions and SetPathWithOptions functions to modify marshalling behaviour. +// The default values within the struct are valid default options. +type SetOptions struct { + Comment string + Commented bool + Multiline bool +} + +// SetWithOptions is the same as Set, but allows you to provide formatting +// instructions to the key, that will be used by Marshal(). +func (t *Tree) SetWithOptions(key string, opts SetOptions, value interface{}) { + t.SetPathWithOptions(strings.Split(key, "."), opts, value) +} + +// SetPathWithOptions is the same as SetPath, but allows you to provide +// formatting instructions to the key, that will be reused by Marshal(). +func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interface{}) { + subtree := t + for _, intermediateKey := range keys[:len(keys)-1] { + nextTree, exists := subtree.values[intermediateKey] + if !exists { + nextTree = newTree() + subtree.values[intermediateKey] = nextTree // add new element here + } + switch node := nextTree.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + // create element if it does not exist + subtree.values[intermediateKey] = append(node, newTree()) + } + subtree = node[len(node)-1] + } + } + + var toInsert interface{} + + switch v := value.(type) { + case *Tree: + v.comment = opts.Comment + toInsert = value + case []*Tree: + toInsert = value + case *tomlValue: + v.comment = opts.Comment + toInsert = v + default: + toInsert = &tomlValue{value: value, comment: opts.Comment, commented: opts.Commented, multiline: opts.Multiline} + } + + subtree.values[keys[len(keys)-1]] = toInsert +} + // Set an element in the tree. // Key is a dot-separated path (e.g. a.b.c). // Creates all necessary intermediate trees, if needed. func (t *Tree) Set(key string, value interface{}) { - t.SetPath(strings.Split(key, "."), value) + t.SetWithComment(key, "", false, value) +} + +// SetWithComment is the same as Set, but allows you to provide comment +// information to the key, that will be reused by Marshal(). +func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) { + t.SetPathWithComment(strings.Split(key, "."), comment, commented, value) } // SetPath sets an element in the tree. // Keys is an array of path elements (e.g. {"a","b","c"}). // Creates all necessary intermediate trees, if needed. func (t *Tree) SetPath(keys []string, value interface{}) { + t.SetPathWithComment(keys, "", false, value) +} + +// SetPathWithComment is the same as SetPath, but allows you to provide comment +// information to the key, that will be reused by Marshal(). 
+func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) { subtree := t for _, intermediateKey := range keys[:len(keys)-1] { nextTree, exists := subtree.values[intermediateKey] @@ -207,20 +276,50 @@ func (t *Tree) SetPath(keys []string, value interface{}) { var toInsert interface{} - switch value.(type) { + switch v := value.(type) { case *Tree: + v.comment = comment toInsert = value case []*Tree: toInsert = value case *tomlValue: - toInsert = value + v.comment = comment + toInsert = v default: - toInsert = &tomlValue{value: value} + toInsert = &tomlValue{value: value, comment: comment, commented: commented} } subtree.values[keys[len(keys)-1]] = toInsert } +// Delete removes a key from the tree. +// Key is a dot-separated path (e.g. a.b.c). +func (t *Tree) Delete(key string) error { + keys, err := parseKey(key) + if err != nil { + return err + } + return t.DeletePath(keys) +} + +// Delete removes a key from the tree. +// Keys is an array of path elements (e.g. {"a","b","c"}). +func (t *Tree) DeletePath(keys []string) error { + keyLen := len(keys) + if keyLen == 1 { + delete(t.values, keys[0]) + return nil + } + tree := t.GetPath(keys[:keyLen-1]) + item := keys[keyLen-1] + switch node := tree.(type) { + case *Tree: + delete(node.values, item) + return nil + } + return errors.New("no such key to delete") +} + // createSubTree takes a tree and a key and create the necessary intermediate // subtrees to create a subtree at that point. In-place. // @@ -262,10 +361,39 @@ func LoadBytes(b []byte) (tree *Tree, err error) { err = errors.New(r.(string)) } }() + + if len(b) >= 4 && (hasUTF32BigEndianBOM4(b) || hasUTF32LittleEndianBOM4(b)) { + b = b[4:] + } else if len(b) >= 3 && hasUTF8BOM3(b) { + b = b[3:] + } else if len(b) >= 2 && (hasUTF16BigEndianBOM2(b) || hasUTF16LittleEndianBOM2(b)) { + b = b[2:] + } + tree = parseToml(lexToml(b)) return } +func hasUTF16BigEndianBOM2(b []byte) bool { + return b[0] == 0xFE && b[1] == 0xFF +} + +func hasUTF16LittleEndianBOM2(b []byte) bool { + return b[0] == 0xFF && b[1] == 0xFE +} + +func hasUTF8BOM3(b []byte) bool { + return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF +} + +func hasUTF32BigEndianBOM4(b []byte) bool { + return b[0] == 0x00 && b[1] == 0x00 && b[2] == 0xFE && b[3] == 0xFF +} + +func hasUTF32LittleEndianBOM4(b []byte) bool { + return b[0] == 0xFF && b[1] == 0xFE && b[2] == 0x00 && b[3] == 0x00 +} + // LoadReader creates a Tree from any io.Reader. 
func LoadReader(reader io.Reader) (tree *Tree, err error) { inputBytes, err := ioutil.ReadAll(reader) diff --git a/vendor/github.com/pelletier/go-toml/toml_test.go b/vendor/github.com/pelletier/go-toml/toml_test.go index ab9c242..81c3954 100644 --- a/vendor/github.com/pelletier/go-toml/toml_test.go +++ b/vendor/github.com/pelletier/go-toml/toml_test.go @@ -69,6 +69,60 @@ func TestTomlHasPath(t *testing.T) { } } +func TestTomlDelete(t *testing.T) { + tree, _ := Load(` + key = "value" + `) + err := tree.Delete("key") + if err != nil { + t.Errorf("Delete - unexpected error while deleting key: %s", err.Error()) + } + + if tree.Get("key") != nil { + t.Errorf("Delete should have removed key but did not.") + } + +} + +func TestTomlDeleteUnparsableKey(t *testing.T) { + tree, _ := Load(` + key = "value" + `) + err := tree.Delete(".") + if err == nil { + t.Errorf("Delete should error") + } +} + +func TestTomlDeleteNestedKey(t *testing.T) { + tree, _ := Load(` + [foo] + [foo.bar] + key = "value" + `) + err := tree.Delete("foo.bar.key") + if err != nil { + t.Errorf("Error while deleting nested key: %s", err.Error()) + } + + if tree.Get("key") != nil { + t.Errorf("Delete should have removed nested key but did not.") + } + +} + +func TestTomlDeleteNonexistentNestedKey(t *testing.T) { + tree, _ := Load(` + [foo] + [foo.bar] + key = "value" + `) + err := tree.Delete("foo.not.there.key") + if err == nil { + t.Errorf("Delete should have thrown an error trying to delete key in nonexistent tree") + } +} + func TestTomlGetPath(t *testing.T) { node := newTree() //TODO: set other node data @@ -104,3 +158,23 @@ func TestTomlFromMap(t *testing.T) { t.Fatal("hello should be 42, not", tree.Get("hello")) } } + +func TestLoadBytesBOM(t *testing.T) { + payloads := [][]byte{ + []byte("\xFE\xFFhello=1"), + []byte("\xFF\xFEhello=1"), + []byte("\xEF\xBB\xBFhello=1"), + []byte("\x00\x00\xFE\xFFhello=1"), + []byte("\xFF\xFE\x00\x00hello=1"), + } + for _, data := range payloads { + tree, err := LoadBytes(data) + if err != nil { + t.Fatal("unexpected error:", err, "for:", data) + } + v := tree.Get("hello") + if v != int64(1) { + t.Fatal("hello should be 1, not", v) + } + } +} diff --git a/vendor/github.com/pelletier/go-toml/toml_testgen_support_test.go b/vendor/github.com/pelletier/go-toml/toml_testgen_support_test.go new file mode 100644 index 0000000..eef9b9f --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/toml_testgen_support_test.go @@ -0,0 +1,119 @@ +// This is a support file for toml_testgen_test.go +package toml + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "testing" + "time" + + "github.com/davecgh/go-spew/spew" +) + +func testgenInvalid(t *testing.T, input string) { + t.Logf("Input TOML:\n%s", input) + tree, err := Load(input) + if err != nil { + return + } + + typedTree := testgenTranslate(*tree) + + buf := new(bytes.Buffer) + if err := json.NewEncoder(buf).Encode(typedTree); err != nil { + return + } + + t.Fatalf("test did not fail. 
resulting tree:\n%s", buf.String()) +} + +func testgenValid(t *testing.T, input string, jsonRef string) { + t.Logf("Input TOML:\n%s", input) + tree, err := Load(input) + if err != nil { + t.Fatalf("failed parsing toml: %s", err) + } + + typedTree := testgenTranslate(*tree) + + buf := new(bytes.Buffer) + if err := json.NewEncoder(buf).Encode(typedTree); err != nil { + t.Fatalf("failed translating to JSON: %s", err) + } + + var jsonTest interface{} + if err := json.NewDecoder(buf).Decode(&jsonTest); err != nil { + t.Logf("translated JSON:\n%s", buf.String()) + t.Fatalf("failed decoding translated JSON: %s", err) + } + + var jsonExpected interface{} + if err := json.NewDecoder(bytes.NewBufferString(jsonRef)).Decode(&jsonExpected); err != nil { + t.Logf("reference JSON:\n%s", jsonRef) + t.Fatalf("failed decoding reference JSON: %s", err) + } + + if !reflect.DeepEqual(jsonExpected, jsonTest) { + t.Logf("Diff:\n%s", spew.Sdump(jsonExpected, jsonTest)) + t.Fatal("parsed TOML tree is different than expected structure") + } +} + +func testgenTranslate(tomlData interface{}) interface{} { + switch orig := tomlData.(type) { + case map[string]interface{}: + typed := make(map[string]interface{}, len(orig)) + for k, v := range orig { + typed[k] = testgenTranslate(v) + } + return typed + case *Tree: + return testgenTranslate(*orig) + case Tree: + keys := orig.Keys() + typed := make(map[string]interface{}, len(keys)) + for _, k := range keys { + typed[k] = testgenTranslate(orig.GetPath([]string{k})) + } + return typed + case []*Tree: + typed := make([]map[string]interface{}, len(orig)) + for i, v := range orig { + typed[i] = testgenTranslate(v).(map[string]interface{}) + } + return typed + case []map[string]interface{}: + typed := make([]map[string]interface{}, len(orig)) + for i, v := range orig { + typed[i] = testgenTranslate(v).(map[string]interface{}) + } + return typed + case []interface{}: + typed := make([]interface{}, len(orig)) + for i, v := range orig { + typed[i] = testgenTranslate(v) + } + return testgenTag("array", typed) + case time.Time: + return testgenTag("datetime", orig.Format("2006-01-02T15:04:05Z")) + case bool: + return testgenTag("bool", fmt.Sprintf("%v", orig)) + case int64: + return testgenTag("integer", fmt.Sprintf("%d", orig)) + case float64: + return testgenTag("float", fmt.Sprintf("%v", orig)) + case string: + return testgenTag("string", orig) + } + + panic(fmt.Sprintf("Unknown type: %T", tomlData)) +} + +func testgenTag(typeName string, data interface{}) map[string]interface{} { + return map[string]interface{}{ + "type": typeName, + "value": data, + } +} diff --git a/vendor/github.com/pelletier/go-toml/toml_testgen_test.go b/vendor/github.com/pelletier/go-toml/toml_testgen_test.go new file mode 100644 index 0000000..688ae51 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/toml_testgen_test.go @@ -0,0 +1,943 @@ +// Generated by tomltestgen for toml-test ref 39e37e6 on 2019-03-19T23:58:45-07:00 +package toml + +import ( + "testing" +) + +func TestInvalidArrayMixedTypesArraysAndInts(t *testing.T) { + input := `arrays-and-ints = [1, ["Arrays are not integers."]]` + testgenInvalid(t, input) +} + +func TestInvalidArrayMixedTypesIntsAndFloats(t *testing.T) { + input := `ints-and-floats = [1, 1.1]` + testgenInvalid(t, input) +} + +func TestInvalidArrayMixedTypesStringsAndInts(t *testing.T) { + input := `strings-and-ints = ["hi", 42]` + testgenInvalid(t, input) +} + +func TestInvalidDatetimeMalformedNoLeads(t *testing.T) { + input := `no-leads = 1987-7-05T17:45:00Z` + 
testgenInvalid(t, input) +} + +func TestInvalidDatetimeMalformedNoSecs(t *testing.T) { + input := `no-secs = 1987-07-05T17:45Z` + testgenInvalid(t, input) +} + +func TestInvalidDatetimeMalformedNoT(t *testing.T) { + input := `no-t = 1987-07-0517:45:00Z` + testgenInvalid(t, input) +} + +func TestInvalidDatetimeMalformedWithMilli(t *testing.T) { + input := `with-milli = 1987-07-5T17:45:00.12Z` + testgenInvalid(t, input) +} + +func TestInvalidDuplicateKeyTable(t *testing.T) { + input := `[fruit] +type = "apple" + +[fruit.type] +apple = "yes"` + testgenInvalid(t, input) +} + +func TestInvalidDuplicateKeys(t *testing.T) { + input := `dupe = false +dupe = true` + testgenInvalid(t, input) +} + +func TestInvalidDuplicateTables(t *testing.T) { + input := `[a] +[a]` + testgenInvalid(t, input) +} + +func TestInvalidEmptyImplicitTable(t *testing.T) { + input := `[naughty..naughty]` + testgenInvalid(t, input) +} + +func TestInvalidEmptyTable(t *testing.T) { + input := `[]` + testgenInvalid(t, input) +} + +func TestInvalidFloatNoLeadingZero(t *testing.T) { + input := `answer = .12345 +neganswer = -.12345` + testgenInvalid(t, input) +} + +func TestInvalidFloatNoTrailingDigits(t *testing.T) { + input := `answer = 1. +neganswer = -1.` + testgenInvalid(t, input) +} + +func TestInvalidKeyEmpty(t *testing.T) { + input := ` = 1` + testgenInvalid(t, input) +} + +func TestInvalidKeyHash(t *testing.T) { + input := `a# = 1` + testgenInvalid(t, input) +} + +func TestInvalidKeyNewline(t *testing.T) { + input := `a += 1` + testgenInvalid(t, input) +} + +func TestInvalidKeyOpenBracket(t *testing.T) { + input := `[abc = 1` + testgenInvalid(t, input) +} + +func TestInvalidKeySingleOpenBracket(t *testing.T) { + input := `[` + testgenInvalid(t, input) +} + +func TestInvalidKeySpace(t *testing.T) { + input := `a b = 1` + testgenInvalid(t, input) +} + +func TestInvalidKeyStartBracket(t *testing.T) { + input := `[a] +[xyz = 5 +[b]` + testgenInvalid(t, input) +} + +func TestInvalidKeyTwoEquals(t *testing.T) { + input := `key= = 1` + testgenInvalid(t, input) +} + +func TestInvalidStringBadByteEscape(t *testing.T) { + input := `naughty = "\xAg"` + testgenInvalid(t, input) +} + +func TestInvalidStringBadEscape(t *testing.T) { + input := `invalid-escape = "This string has a bad \a escape character."` + testgenInvalid(t, input) +} + +func TestInvalidStringByteEscapes(t *testing.T) { + input := `answer = "\x33"` + testgenInvalid(t, input) +} + +func TestInvalidStringNoClose(t *testing.T) { + input := `no-ending-quote = "One time, at band camp` + testgenInvalid(t, input) +} + +func TestInvalidTableArrayImplicit(t *testing.T) { + input := "# This test is a bit tricky. It should fail because the first use of\n" + + "# `[[albums.songs]]` without first declaring `albums` implies that `albums`\n" + + "# must be a table. The alternative would be quite weird. Namely, it wouldn't\n" + + "# comply with the TOML spec: \"Each double-bracketed sub-table will belong to \n" + + "# the most *recently* defined table element *above* it.\"\n" + + "#\n" + + "# This is in contrast to the *valid* test, table-array-implicit where\n" + + "# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared\n" + + "# later. 
(Although, `[albums]` could be.)\n" + + "[[albums.songs]]\n" + + "name = \"Glory Days\"\n" + + "\n" + + "[[albums]]\n" + + "name = \"Born in the USA\"\n" + testgenInvalid(t, input) +} + +func TestInvalidTableArrayMalformedBracket(t *testing.T) { + input := `[[albums] +name = "Born to Run"` + testgenInvalid(t, input) +} + +func TestInvalidTableArrayMalformedEmpty(t *testing.T) { + input := `[[]] +name = "Born to Run"` + testgenInvalid(t, input) +} + +func TestInvalidTableEmpty(t *testing.T) { + input := `[]` + testgenInvalid(t, input) +} + +func TestInvalidTableNestedBracketsClose(t *testing.T) { + input := `[a]b] +zyx = 42` + testgenInvalid(t, input) +} + +func TestInvalidTableNestedBracketsOpen(t *testing.T) { + input := `[a[b] +zyx = 42` + testgenInvalid(t, input) +} + +func TestInvalidTableWhitespace(t *testing.T) { + input := `[invalid key]` + testgenInvalid(t, input) +} + +func TestInvalidTableWithPound(t *testing.T) { + input := `[key#group] +answer = 42` + testgenInvalid(t, input) +} + +func TestInvalidTextAfterArrayEntries(t *testing.T) { + input := `array = [ + "Is there life after an array separator?", No + "Entry" +]` + testgenInvalid(t, input) +} + +func TestInvalidTextAfterInteger(t *testing.T) { + input := `answer = 42 the ultimate answer?` + testgenInvalid(t, input) +} + +func TestInvalidTextAfterString(t *testing.T) { + input := `string = "Is there life after strings?" No.` + testgenInvalid(t, input) +} + +func TestInvalidTextAfterTable(t *testing.T) { + input := `[error] this shouldn't be here` + testgenInvalid(t, input) +} + +func TestInvalidTextBeforeArraySeparator(t *testing.T) { + input := `array = [ + "Is there life before an array separator?" No, + "Entry" +]` + testgenInvalid(t, input) +} + +func TestInvalidTextInArray(t *testing.T) { + input := `array = [ + "Entry 1", + I don't belong, + "Entry 2", +]` + testgenInvalid(t, input) +} + +func TestValidArrayEmpty(t *testing.T) { + input := `thevoid = [[[[[]]]]]` + jsonRef := `{ + "thevoid": { "type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": []} + ]} + ]} + ]} + ]} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidArrayNospaces(t *testing.T) { + input := `ints = [1,2,3]` + jsonRef := `{ + "ints": { + "type": "array", + "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"}, + {"type": "integer", "value": "3"} + ] + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidArraysHetergeneous(t *testing.T) { + input := `mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]` + jsonRef := `{ + "mixed": { + "type": "array", + "value": [ + {"type": "array", "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"} + ]}, + {"type": "array", "value": [ + {"type": "string", "value": "a"}, + {"type": "string", "value": "b"} + ]}, + {"type": "array", "value": [ + {"type": "float", "value": "1.1"}, + {"type": "float", "value": "2.1"} + ]} + ] + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidArraysNested(t *testing.T) { + input := `nest = [["a"], ["b"]]` + jsonRef := `{ + "nest": { + "type": "array", + "value": [ + {"type": "array", "value": [ + {"type": "string", "value": "a"} + ]}, + {"type": "array", "value": [ + {"type": "string", "value": "b"} + ]} + ] + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidArrays(t *testing.T) { + input := `ints = [1, 2, 3] +floats = [1.1, 2.1, 3.1] +strings = ["a", "b", "c"] +dates = [ + 1987-07-05T17:45:00Z, + 
1979-05-27T07:32:00Z, + 2006-06-01T11:00:00Z, +]` + jsonRef := `{ + "ints": { + "type": "array", + "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"}, + {"type": "integer", "value": "3"} + ] + }, + "floats": { + "type": "array", + "value": [ + {"type": "float", "value": "1.1"}, + {"type": "float", "value": "2.1"}, + {"type": "float", "value": "3.1"} + ] + }, + "strings": { + "type": "array", + "value": [ + {"type": "string", "value": "a"}, + {"type": "string", "value": "b"}, + {"type": "string", "value": "c"} + ] + }, + "dates": { + "type": "array", + "value": [ + {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, + {"type": "datetime", "value": "1979-05-27T07:32:00Z"}, + {"type": "datetime", "value": "2006-06-01T11:00:00Z"} + ] + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidBool(t *testing.T) { + input := `t = true +f = false` + jsonRef := `{ + "f": {"type": "bool", "value": "false"}, + "t": {"type": "bool", "value": "true"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidCommentsEverywhere(t *testing.T) { + input := `# Top comment. + # Top comment. +# Top comment. + +# [no-extraneous-groups-please] + +[group] # Comment +answer = 42 # Comment +# no-extraneous-keys-please = 999 +# Inbetween comment. +more = [ # Comment + # What about multiple # comments? + # Can you handle it? + # + # Evil. +# Evil. + 42, 42, # Comments within arrays are fun. + # What about multiple # comments? + # Can you handle it? + # + # Evil. +# Evil. +# ] Did I fool you? +] # Hopefully not.` + jsonRef := `{ + "group": { + "answer": {"type": "integer", "value": "42"}, + "more": { + "type": "array", + "value": [ + {"type": "integer", "value": "42"}, + {"type": "integer", "value": "42"} + ] + } + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidDatetime(t *testing.T) { + input := `bestdayever = 1987-07-05T17:45:00Z` + jsonRef := `{ + "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidEmpty(t *testing.T) { + input := `` + jsonRef := `{}` + testgenValid(t, input, jsonRef) +} + +func TestValidExample(t *testing.T) { + input := `best-day-ever = 1987-07-05T17:45:00Z + +[numtheory] +boring = false +perfection = [6, 28, 496]` + jsonRef := `{ + "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, + "numtheory": { + "boring": {"type": "bool", "value": "false"}, + "perfection": { + "type": "array", + "value": [ + {"type": "integer", "value": "6"}, + {"type": "integer", "value": "28"}, + {"type": "integer", "value": "496"} + ] + } + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidFloat(t *testing.T) { + input := `pi = 3.14 +negpi = -3.14` + jsonRef := `{ + "pi": {"type": "float", "value": "3.14"}, + "negpi": {"type": "float", "value": "-3.14"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidImplicitAndExplicitAfter(t *testing.T) { + input := `[a.b.c] +answer = 42 + +[a] +better = 43` + jsonRef := `{ + "a": { + "better": {"type": "integer", "value": "43"}, + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidImplicitAndExplicitBefore(t *testing.T) { + input := `[a] +better = 43 + +[a.b.c] +answer = 42` + jsonRef := `{ + "a": { + "better": {"type": "integer", "value": "43"}, + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidImplicitGroups(t *testing.T) { + input := `[a.b.c] +answer 
= 42` + jsonRef := `{ + "a": { + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidInteger(t *testing.T) { + input := `answer = 42 +neganswer = -42` + jsonRef := `{ + "answer": {"type": "integer", "value": "42"}, + "neganswer": {"type": "integer", "value": "-42"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidKeyEqualsNospace(t *testing.T) { + input := `answer=42` + jsonRef := `{ + "answer": {"type": "integer", "value": "42"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidKeySpace(t *testing.T) { + input := `"a b" = 1` + jsonRef := `{ + "a b": {"type": "integer", "value": "1"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidKeySpecialChars(t *testing.T) { + input := "\"~!@$^&*()_+-`1234567890[]|/?><.,;:'\" = 1\n" + jsonRef := "{\n" + + " \"~!@$^&*()_+-`1234567890[]|/?><.,;:'\": {\n" + + " \"type\": \"integer\", \"value\": \"1\"\n" + + " }\n" + + "}\n" + testgenValid(t, input, jsonRef) +} + +func TestValidLongFloat(t *testing.T) { + input := `longpi = 3.141592653589793 +neglongpi = -3.141592653589793` + jsonRef := `{ + "longpi": {"type": "float", "value": "3.141592653589793"}, + "neglongpi": {"type": "float", "value": "-3.141592653589793"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidLongInteger(t *testing.T) { + input := `answer = 9223372036854775807 +neganswer = -9223372036854775808` + jsonRef := `{ + "answer": {"type": "integer", "value": "9223372036854775807"}, + "neganswer": {"type": "integer", "value": "-9223372036854775808"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidMultilineString(t *testing.T) { + input := `multiline_empty_one = """""" +multiline_empty_two = """ +""" +multiline_empty_three = """\ + """ +multiline_empty_four = """\ + \ + \ + """ + +equivalent_one = "The quick brown fox jumps over the lazy dog." +equivalent_two = """ +The quick brown \ + + + fox jumps over \ + the lazy dog.""" + +equivalent_three = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """` + jsonRef := `{ + "multiline_empty_one": { + "type": "string", + "value": "" + }, + "multiline_empty_two": { + "type": "string", + "value": "" + }, + "multiline_empty_three": { + "type": "string", + "value": "" + }, + "multiline_empty_four": { + "type": "string", + "value": "" + }, + "equivalent_one": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + }, + "equivalent_two": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + }, + "equivalent_three": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidRawMultilineString(t *testing.T) { + input := `oneline = '''This string has a ' quote character.''' +firstnl = ''' +This string has a ' quote character.''' +multiline = ''' +This string +has ' a quote character +and more than +one newline +in it.'''` + jsonRef := `{ + "oneline": { + "type": "string", + "value": "This string has a ' quote character." + }, + "firstnl": { + "type": "string", + "value": "This string has a ' quote character." + }, + "multiline": { + "type": "string", + "value": "This string\nhas ' a quote character\nand more than\none newline\nin it." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidRawString(t *testing.T) { + input := `backspace = 'This string has a \b backspace character.' +tab = 'This string has a \t tab character.' +newline = 'This string has a \n new line character.' 
+formfeed = 'This string has a \f form feed character.' +carriage = 'This string has a \r carriage return character.' +slash = 'This string has a \/ slash character.' +backslash = 'This string has a \\ backslash character.'` + jsonRef := `{ + "backspace": { + "type": "string", + "value": "This string has a \\b backspace character." + }, + "tab": { + "type": "string", + "value": "This string has a \\t tab character." + }, + "newline": { + "type": "string", + "value": "This string has a \\n new line character." + }, + "formfeed": { + "type": "string", + "value": "This string has a \\f form feed character." + }, + "carriage": { + "type": "string", + "value": "This string has a \\r carriage return character." + }, + "slash": { + "type": "string", + "value": "This string has a \\/ slash character." + }, + "backslash": { + "type": "string", + "value": "This string has a \\\\ backslash character." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidStringEmpty(t *testing.T) { + input := `answer = ""` + jsonRef := `{ + "answer": { + "type": "string", + "value": "" + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidStringEscapes(t *testing.T) { + input := `backspace = "This string has a \b backspace character." +tab = "This string has a \t tab character." +newline = "This string has a \n new line character." +formfeed = "This string has a \f form feed character." +carriage = "This string has a \r carriage return character." +quote = "This string has a \" quote character." +backslash = "This string has a \\ backslash character." +notunicode1 = "This string does not have a unicode \\u escape." +notunicode2 = "This string does not have a unicode \u005Cu escape." +notunicode3 = "This string does not have a unicode \\u0075 escape." +notunicode4 = "This string does not have a unicode \\\u0075 escape."` + jsonRef := `{ + "backspace": { + "type": "string", + "value": "This string has a \u0008 backspace character." + }, + "tab": { + "type": "string", + "value": "This string has a \u0009 tab character." + }, + "newline": { + "type": "string", + "value": "This string has a \u000A new line character." + }, + "formfeed": { + "type": "string", + "value": "This string has a \u000C form feed character." + }, + "carriage": { + "type": "string", + "value": "This string has a \u000D carriage return character." + }, + "quote": { + "type": "string", + "value": "This string has a \u0022 quote character." + }, + "backslash": { + "type": "string", + "value": "This string has a \u005C backslash character." + }, + "notunicode1": { + "type": "string", + "value": "This string does not have a unicode \\u escape." + }, + "notunicode2": { + "type": "string", + "value": "This string does not have a unicode \u005Cu escape." + }, + "notunicode3": { + "type": "string", + "value": "This string does not have a unicode \\u0075 escape." + }, + "notunicode4": { + "type": "string", + "value": "This string does not have a unicode \\\u0075 escape." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidStringSimple(t *testing.T) { + input := `answer = "You are not drinking enough whisky."` + jsonRef := `{ + "answer": { + "type": "string", + "value": "You are not drinking enough whisky." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidStringWithPound(t *testing.T) { + input := `pound = "We see no # comments here." +poundcomment = "But there are # some comments here." 
# Did I # mess you up?` + jsonRef := `{ + "pound": {"type": "string", "value": "We see no # comments here."}, + "poundcomment": { + "type": "string", + "value": "But there are # some comments here." + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableArrayImplicit(t *testing.T) { + input := `[[albums.songs]] +name = "Glory Days"` + jsonRef := `{ + "albums": { + "songs": [ + {"name": {"type": "string", "value": "Glory Days"}} + ] + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableArrayMany(t *testing.T) { + input := `[[people]] +first_name = "Bruce" +last_name = "Springsteen" + +[[people]] +first_name = "Eric" +last_name = "Clapton" + +[[people]] +first_name = "Bob" +last_name = "Seger"` + jsonRef := `{ + "people": [ + { + "first_name": {"type": "string", "value": "Bruce"}, + "last_name": {"type": "string", "value": "Springsteen"} + }, + { + "first_name": {"type": "string", "value": "Eric"}, + "last_name": {"type": "string", "value": "Clapton"} + }, + { + "first_name": {"type": "string", "value": "Bob"}, + "last_name": {"type": "string", "value": "Seger"} + } + ] +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableArrayNest(t *testing.T) { + input := `[[albums]] +name = "Born to Run" + + [[albums.songs]] + name = "Jungleland" + + [[albums.songs]] + name = "Meeting Across the River" + +[[albums]] +name = "Born in the USA" + + [[albums.songs]] + name = "Glory Days" + + [[albums.songs]] + name = "Dancing in the Dark"` + jsonRef := `{ + "albums": [ + { + "name": {"type": "string", "value": "Born to Run"}, + "songs": [ + {"name": {"type": "string", "value": "Jungleland"}}, + {"name": {"type": "string", "value": "Meeting Across the River"}} + ] + }, + { + "name": {"type": "string", "value": "Born in the USA"}, + "songs": [ + {"name": {"type": "string", "value": "Glory Days"}}, + {"name": {"type": "string", "value": "Dancing in the Dark"}} + ] + } + ] +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableArrayOne(t *testing.T) { + input := `[[people]] +first_name = "Bruce" +last_name = "Springsteen"` + jsonRef := `{ + "people": [ + { + "first_name": {"type": "string", "value": "Bruce"}, + "last_name": {"type": "string", "value": "Springsteen"} + } + ] +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableEmpty(t *testing.T) { + input := `[a]` + jsonRef := `{ + "a": {} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableSubEmpty(t *testing.T) { + input := `[a] +[a.b]` + jsonRef := `{ + "a": { "b": {} } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableWhitespace(t *testing.T) { + input := `["valid key"]` + jsonRef := `{ + "valid key": {} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidTableWithPound(t *testing.T) { + input := `["key#group"] +answer = 42` + jsonRef := `{ + "key#group": { + "answer": {"type": "integer", "value": "42"} + } +}` + testgenValid(t, input, jsonRef) +} + +func TestValidUnicodeEscape(t *testing.T) { + input := `answer4 = "\u03B4" +answer8 = "\U000003B4"` + jsonRef := `{ + "answer4": {"type": "string", "value": "\u03B4"}, + "answer8": {"type": "string", "value": "\u03B4"} +}` + testgenValid(t, input, jsonRef) +} + +func TestValidUnicodeLiteral(t *testing.T) { + input := `answer = "δ"` + jsonRef := `{ + "answer": {"type": "string", "value": "δ"} +}` + testgenValid(t, input, jsonRef) +} diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create.go b/vendor/github.com/pelletier/go-toml/tomltree_create.go index 19d1c0d..79610e9 100644 --- 
a/vendor/github.com/pelletier/go-toml/tomltree_create.go +++ b/vendor/github.com/pelletier/go-toml/tomltree_create.go @@ -104,7 +104,7 @@ func sliceToTree(object interface{}) (interface{}, error) { } arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue)) } - return &tomlValue{arrayValue.Interface(), Position{}}, nil + return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil } func toTree(object interface{}) (interface{}, error) { @@ -127,7 +127,7 @@ func toTree(object interface{}) (interface{}, error) { } values[key.String()] = newValue } - return &Tree{values, Position{}}, nil + return &Tree{values: values, position: Position{}}, nil } if value.Kind() == reflect.Array || value.Kind() == reflect.Slice { @@ -138,5 +138,5 @@ func toTree(object interface{}) (interface{}, error) { if err != nil { return nil, err } - return &tomlValue{simpleValue, Position{}}, nil + return &tomlValue{value: simpleValue, position: Position{}}, nil } diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create_test.go b/vendor/github.com/pelletier/go-toml/tomltree_create_test.go index 1ca108a..3465a10 100644 --- a/vendor/github.com/pelletier/go-toml/tomltree_create_test.go +++ b/vendor/github.com/pelletier/go-toml/tomltree_create_test.go @@ -60,7 +60,7 @@ func TestTreeCreateToTree(t *testing.T) { }, "array": []string{"a", "b", "c"}, "array_uint": []uint{uint(1), uint(2)}, - "array_table": []map[string]interface{}{map[string]interface{}{"sub_map": 52}}, + "array_table": []map[string]interface{}{{"sub_map": 52}}, "array_times": []time.Time{time.Now(), time.Now()}, "map_times": map[string]time.Time{"now": time.Now()}, "custom_string_map_key": map[customString]interface{}{customString("custom"): "custom"}, @@ -97,7 +97,7 @@ func TestTreeCreateToTreeInvalidArrayMemberType(t *testing.T) { } func TestTreeCreateToTreeInvalidTableGroupType(t *testing.T) { - _, err := TreeFromMap(map[string]interface{}{"foo": []map[string]interface{}{map[string]interface{}{"hello": t}}}) + _, err := TreeFromMap(map[string]interface{}{"foo": []map[string]interface{}{{"hello": t}}}) expected := "cannot convert type *testing.T to Tree" if err.Error() != expected { t.Fatalf("expected error %s, got %s", expected, err.Error()) diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write.go b/vendor/github.com/pelletier/go-toml/tomltree_write.go index ca763ed..e4049e2 100644 --- a/vendor/github.com/pelletier/go-toml/tomltree_write.go +++ b/vendor/github.com/pelletier/go-toml/tomltree_write.go @@ -12,7 +12,41 @@ import ( "time" ) -// encodes a string to a TOML-compliant string value +// Encodes a string to a TOML-compliant multi-line string value +// This function is a clone of the existing encodeTomlString function, except that whitespace characters +// are preserved. Quotation marks and backslashes are also not escaped. 
+func encodeMultilineTomlString(value string) string { + var b bytes.Buffer + + for _, rr := range value { + switch rr { + case '\b': + b.WriteString(`\b`) + case '\t': + b.WriteString("\t") + case '\n': + b.WriteString("\n") + case '\f': + b.WriteString(`\f`) + case '\r': + b.WriteString("\r") + case '"': + b.WriteString(`"`) + case '\\': + b.WriteString(`\`) + default: + intRr := uint16(rr) + if intRr < 0x001F { + b.WriteString(fmt.Sprintf("\\u%0.4X", intRr)) + } else { + b.WriteRune(rr) + } + } + } + return b.String() +} + +// Encodes a string to a TOML-compliant string value func encodeTomlString(value string) string { var b bytes.Buffer @@ -44,7 +78,16 @@ func encodeTomlString(value string) string { return b.String() } -func tomlValueStringRepresentation(v interface{}) (string, error) { +func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) { + // this interface check is added to dereference the change made in the writeTo function. + // That change was made to allow this function to see formatting options. + tv, ok := v.(*tomlValue) + if ok { + v = tv.value + } else { + tv = &tomlValue{} + } + switch value := v.(type) { case uint64: return strconv.FormatUint(value, 10), nil @@ -54,14 +97,17 @@ func tomlValueStringRepresentation(v interface{}) (string, error) { // Ensure a round float does contain a decimal point. Otherwise feeding // the output back to the parser would convert to an integer. if math.Trunc(value) == value { - return strconv.FormatFloat(value, 'f', 1, 32), nil + return strings.ToLower(strconv.FormatFloat(value, 'f', 1, 32)), nil } - return strconv.FormatFloat(value, 'f', -1, 32), nil + return strings.ToLower(strconv.FormatFloat(value, 'f', -1, 32)), nil case string: + if tv.multiline { + return "\"\"\"\n" + encodeMultilineTomlString(value) + "\"\"\"", nil + } return "\"" + encodeTomlString(value) + "\"", nil case []byte: b, _ := v.([]byte) - return tomlValueStringRepresentation(string(b)) + return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine) case bool: if value { return "true", nil @@ -76,21 +122,38 @@ func tomlValueStringRepresentation(v interface{}) (string, error) { rv := reflect.ValueOf(v) if rv.Kind() == reflect.Slice { - values := []string{} + var values []string for i := 0; i < rv.Len(); i++ { item := rv.Index(i).Interface() - itemRepr, err := tomlValueStringRepresentation(item) + itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine) if err != nil { return "", err } values = append(values, itemRepr) } + if arraysOneElementPerLine && len(values) > 1 { + stringBuffer := bytes.Buffer{} + valueIndent := indent + ` ` // TODO: move that to a shared encoder state + + stringBuffer.WriteString("[\n") + + for _, value := range values { + stringBuffer.WriteString(valueIndent) + stringBuffer.WriteString(value) + stringBuffer.WriteString(`,`) + stringBuffer.WriteString("\n") + } + + stringBuffer.WriteString(indent + "]") + + return stringBuffer.String(), nil + } return "[" + strings.Join(values, ",") + "]", nil } return "", fmt.Errorf("unsupported value type %T: %v", v, v) } -func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (int64, error) { +func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) { simpleValuesKeys := make([]string, 0) complexValuesKeys := make([]string, 0) @@ -113,12 +176,29 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, 
bytesCount int64) ( return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) } - repr, err := tomlValueStringRepresentation(v.value) + repr, err := tomlValueStringRepresentation(v, indent, arraysOneElementPerLine) if err != nil { return bytesCount, err } - writtenBytesCount, err := writeStrings(w, indent, k, " = ", repr, "\n") + if v.comment != "" { + comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1) + start := "# " + if strings.HasPrefix(comment, "#") { + start = "" + } + writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n") + bytesCount += int64(writtenBytesCountComment) + if errc != nil { + return bytesCount, errc + } + } + + var commented string + if v.commented { + commented = "# " + } + writtenBytesCount, err := writeStrings(w, indent, commented, k, " = ", repr, "\n") bytesCount += int64(writtenBytesCount) if err != nil { return bytesCount, err @@ -132,28 +212,48 @@ func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) ( if keyspace != "" { combinedKey = keyspace + "." + combinedKey } + var commented string + if t.commented { + commented = "# " + } switch node := v.(type) { // node has to be of those two types given how keys are sorted above case *Tree: - writtenBytesCount, err := writeStrings(w, "\n", indent, "[", combinedKey, "]\n") + tv, ok := t.values[k].(*Tree) + if !ok { + return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) + } + if tv.comment != "" { + comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1) + start := "# " + if strings.HasPrefix(comment, "#") { + start = "" + } + writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment) + bytesCount += int64(writtenBytesCountComment) + if errc != nil { + return bytesCount, errc + } + } + writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n") bytesCount += int64(writtenBytesCount) if err != nil { return bytesCount, err } - bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount) + bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine) if err != nil { return bytesCount, err } case []*Tree: for _, subTree := range node { - writtenBytesCount, err := writeStrings(w, "\n", indent, "[[", combinedKey, "]]\n") + writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n") bytesCount += int64(writtenBytesCount) if err != nil { return bytesCount, err } - bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount) + bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine) if err != nil { return bytesCount, err } @@ -179,7 +279,7 @@ func writeStrings(w io.Writer, s ...string) (int, error) { // WriteTo encode the Tree as Toml and writes it to the writer w. // Returns the number of bytes written in case of success, or an error if anything happened. func (t *Tree) WriteTo(w io.Writer) (int64, error) { - return t.writeTo(w, "", "", 0) + return t.writeTo(w, "", "", 0, false) } // ToTomlString generates a human-readable representation of the current tree. 
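The writer changes above (multiline string encoding, per-key comment emission, and the arraysOneElementPerLine plumbing) are easiest to follow with a concrete round trip. The following is a minimal sketch, not part of this patch, that relies only on the Load and ToTomlString entry points of the vendored go-toml package; the document keys and values are invented for illustration.

```go
package main

import (
	"fmt"
	"log"

	"github.com/pelletier/go-toml"
)

func main() {
	// Parse a small document, then serialise it again. Writing it back goes
	// through writeTo -> tomlValueStringRepresentation shown in the hunks above.
	tree, err := toml.Load(`[server]
host = "localhost"
ports = [8001, 8002]
ratio = 2.0
`)
	if err != nil {
		log.Fatal(err)
	}

	out, err := tree.ToTomlString()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(out)
}
```

Note that a round float comes back as `2.0` rather than `2`: tomlValueStringRepresentation deliberately keeps a decimal point so that the output re-parses as a float instead of an integer.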
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write_test.go b/vendor/github.com/pelletier/go-toml/tomltree_write_test.go index c2a1ce3..206203b 100644 --- a/vendor/github.com/pelletier/go-toml/tomltree_write_test.go +++ b/vendor/github.com/pelletier/go-toml/tomltree_write_test.go @@ -30,7 +30,7 @@ func (f *failingWriter) Write(p []byte) (n int, err error) { f.buffer.Write(p[:toWrite]) f.written = f.failAt - return toWrite, fmt.Errorf("failingWriter failed after writting %d bytes", f.written) + return toWrite, fmt.Errorf("failingWriter failed after writing %d bytes", f.written) } func assertErrorString(t *testing.T, expected string, err error) { @@ -161,13 +161,13 @@ func TestTreeWriteToInvalidTreeSimpleValue(t *testing.T) { } func TestTreeWriteToInvalidTreeTomlValue(t *testing.T) { - tree := Tree{values: map[string]interface{}{"foo": &tomlValue{int8(1), Position{}}}} + tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}} _, err := tree.ToTomlString() assertErrorString(t, "unsupported value type int8: 1", err) } func TestTreeWriteToInvalidTreeTomlValueArray(t *testing.T) { - tree := Tree{values: map[string]interface{}{"foo": &tomlValue{[]interface{}{int8(1)}, Position{}}}} + tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}} _, err := tree.ToTomlString() assertErrorString(t, "unsupported value type int8: 1", err) } @@ -176,7 +176,7 @@ func TestTreeWriteToFailingWriterInSimpleValue(t *testing.T) { toml, _ := Load(`a = 2`) writer := failingWriter{failAt: 0, written: 0} _, err := toml.WriteTo(&writer) - assertErrorString(t, "failingWriter failed after writting 0 bytes", err) + assertErrorString(t, "failingWriter failed after writing 0 bytes", err) } func TestTreeWriteToFailingWriterInTable(t *testing.T) { @@ -185,11 +185,11 @@ func TestTreeWriteToFailingWriterInTable(t *testing.T) { a = 2`) writer := failingWriter{failAt: 2, written: 0} _, err := toml.WriteTo(&writer) - assertErrorString(t, "failingWriter failed after writting 2 bytes", err) + assertErrorString(t, "failingWriter failed after writing 2 bytes", err) writer = failingWriter{failAt: 13, written: 0} _, err = toml.WriteTo(&writer) - assertErrorString(t, "failingWriter failed after writting 13 bytes", err) + assertErrorString(t, "failingWriter failed after writing 13 bytes", err) } func TestTreeWriteToFailingWriterInArray(t *testing.T) { @@ -198,11 +198,11 @@ func TestTreeWriteToFailingWriterInArray(t *testing.T) { a = 2`) writer := failingWriter{failAt: 2, written: 0} _, err := toml.WriteTo(&writer) - assertErrorString(t, "failingWriter failed after writting 2 bytes", err) + assertErrorString(t, "failingWriter failed after writing 2 bytes", err) writer = failingWriter{failAt: 15, written: 0} _, err = toml.WriteTo(&writer) - assertErrorString(t, "failingWriter failed after writting 15 bytes", err) + assertErrorString(t, "failingWriter failed after writing 15 bytes", err) } func TestTreeWriteToMapExampleFile(t *testing.T) { @@ -309,6 +309,24 @@ func TestTreeWriteToFloat(t *testing.T) { } } +func TestTreeWriteToSpecialFloat(t *testing.T) { + expected := `a = +inf +b = -inf +c = nan` + + tree, err := Load(expected) + if err != nil { + t.Fatal(err) + } + str, err := tree.ToTomlString() + if err != nil { + t.Fatal(err) + } + if strings.TrimSpace(str) != strings.TrimSpace(expected) { + t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str) + } +} + func BenchmarkTreeToTomlString(b *testing.B) { toml, err := 
Load(sampleHard) if err != nil { diff --git a/vendor/github.com/pkg/errors/.gitignore b/vendor/github.com/pkg/errors/.gitignore new file mode 100644 index 0000000..daf913b --- /dev/null +++ b/vendor/github.com/pkg/errors/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/pkg/errors/.travis.yml b/vendor/github.com/pkg/errors/.travis.yml new file mode 100644 index 0000000..d2dfad4 --- /dev/null +++ b/vendor/github.com/pkg/errors/.travis.yml @@ -0,0 +1,10 @@ +language: go +go_import_path: github.com/pkg/errors +go: + - 1.10.x + - 1.11.x + - 1.12.x + - tip + +script: + - make check diff --git a/vendor/github.com/pkg/errors/LICENSE b/vendor/github.com/pkg/errors/LICENSE new file mode 100644 index 0000000..835ba3e --- /dev/null +++ b/vendor/github.com/pkg/errors/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2015, Dave Cheney +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/pkg/errors/Makefile b/vendor/github.com/pkg/errors/Makefile new file mode 100644 index 0000000..ce9d7cd --- /dev/null +++ b/vendor/github.com/pkg/errors/Makefile @@ -0,0 +1,44 @@ +PKGS := github.com/pkg/errors +SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS)) +GO := go + +check: test vet gofmt misspell unconvert staticcheck ineffassign unparam + +test: + $(GO) test $(PKGS) + +vet: | test + $(GO) vet $(PKGS) + +staticcheck: + $(GO) get honnef.co/go/tools/cmd/staticcheck + staticcheck -checks all $(PKGS) + +misspell: + $(GO) get github.com/client9/misspell/cmd/misspell + misspell \ + -locale GB \ + -error \ + *.md *.go + +unconvert: + $(GO) get github.com/mdempsky/unconvert + unconvert -v $(PKGS) + +ineffassign: + $(GO) get github.com/gordonklaus/ineffassign + find $(SRCDIRS) -name '*.go' | xargs ineffassign + +pedantic: check errcheck + +unparam: + $(GO) get mvdan.cc/unparam + unparam ./... 
+ +errcheck: + $(GO) get github.com/kisielk/errcheck + errcheck $(PKGS) + +gofmt: + @echo Checking code is gofmted + @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)" diff --git a/vendor/github.com/pkg/errors/README.md b/vendor/github.com/pkg/errors/README.md new file mode 100644 index 0000000..cf771e7 --- /dev/null +++ b/vendor/github.com/pkg/errors/README.md @@ -0,0 +1,59 @@ +# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge) + +Package errors provides simple error handling primitives. + +`go get github.com/pkg/errors` + +The traditional error handling idiom in Go is roughly akin to +```go +if err != nil { + return err +} +``` +which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error. + +## Adding context to an error + +The errors.Wrap function returns a new error that adds context to the original error. For example +```go +_, err := ioutil.ReadAll(r) +if err != nil { + return errors.Wrap(err, "read failed") +} +``` +## Retrieving the cause of an error + +Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`. +```go +type causer interface { + Cause() error +} +``` +`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example: +```go +switch err := errors.Cause(err).(type) { +case *MyError: + // handle specifically +default: + // unknown error +} +``` + +[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors). + +## Roadmap + +With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows: + +- 0.9. Remove pre Go 1.9 support, address outstanding pull requests (if possible) +- 1.0. Final release. + +## Contributing + +Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports. + +Before sending a PR, please discuss your change by raising an issue. 
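To make the Wrap/Cause discussion in this vendored README concrete, here is a small hedged sketch; the ErrNotFound sentinel and the findUser helper are invented, and only errors.New, errors.Wrapf, and errors.Cause from this package are used.

```go
package main

import (
	"fmt"

	"github.com/pkg/errors"
)

// ErrNotFound is a hypothetical sentinel error used for illustration.
var ErrNotFound = errors.New("user not found")

// findUser pretends a lookup failed and annotates the sentinel with context.
func findUser(id int) error {
	return errors.Wrapf(ErrNotFound, "findUser(%d)", id)
}

func main() {
	err := findUser(42)
	fmt.Println(err) // findUser(42): user not found

	// Unwind the added context to decide how to handle the failure.
	if errors.Cause(err) == ErrNotFound {
		fmt.Println("original cause is ErrNotFound")
	}
}
```

Because Wrapf leaves the original error value intact, comparing errors.Cause(err) against the sentinel still works after annotation.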
+ +## License + +BSD-2-Clause diff --git a/vendor/github.com/pkg/errors/appveyor.yml b/vendor/github.com/pkg/errors/appveyor.yml new file mode 100644 index 0000000..a932ead --- /dev/null +++ b/vendor/github.com/pkg/errors/appveyor.yml @@ -0,0 +1,32 @@ +version: build-{build}.{branch} + +clone_folder: C:\gopath\src\github.com\pkg\errors +shallow_clone: true # for startup speed + +environment: + GOPATH: C:\gopath + +platform: + - x64 + +# http://www.appveyor.com/docs/installed-software +install: + # some helpful output for debugging builds + - go version + - go env + # pre-installed MinGW at C:\MinGW is 32bit only + # but MSYS2 at C:\msys64 has mingw64 + - set PATH=C:\msys64\mingw64\bin;%PATH% + - gcc --version + - g++ --version + +build_script: + - go install -v ./... + +test_script: + - set PATH=C:\gopath\bin;%PATH% + - go test -v ./... + +#artifacts: +# - path: '%GOPATH%\bin\*.exe' +deploy: off diff --git a/vendor/github.com/pkg/errors/bench_test.go b/vendor/github.com/pkg/errors/bench_test.go new file mode 100644 index 0000000..c906870 --- /dev/null +++ b/vendor/github.com/pkg/errors/bench_test.go @@ -0,0 +1,110 @@ +// +build go1.7 + +package errors + +import ( + "fmt" + "testing" + + stderrors "errors" +) + +func noErrors(at, depth int) error { + if at >= depth { + return stderrors.New("no error") + } + return noErrors(at+1, depth) +} + +func yesErrors(at, depth int) error { + if at >= depth { + return New("ye error") + } + return yesErrors(at+1, depth) +} + +// GlobalE is an exported global to store the result of benchmark results, +// preventing the compiler from optimising the benchmark functions away. +var GlobalE interface{} + +func BenchmarkErrors(b *testing.B) { + type run struct { + stack int + std bool + } + runs := []run{ + {10, false}, + {10, true}, + {100, false}, + {100, true}, + {1000, false}, + {1000, true}, + } + for _, r := range runs { + part := "pkg/errors" + if r.std { + part = "errors" + } + name := fmt.Sprintf("%s-stack-%d", part, r.stack) + b.Run(name, func(b *testing.B) { + var err error + f := yesErrors + if r.std { + f = noErrors + } + b.ReportAllocs() + for i := 0; i < b.N; i++ { + err = f(0, r.stack) + } + b.StopTimer() + GlobalE = err + }) + } +} + +func BenchmarkStackFormatting(b *testing.B) { + type run struct { + stack int + format string + } + runs := []run{ + {10, "%s"}, + {10, "%v"}, + {10, "%+v"}, + {30, "%s"}, + {30, "%v"}, + {30, "%+v"}, + {60, "%s"}, + {60, "%v"}, + {60, "%+v"}, + } + + var stackStr string + for _, r := range runs { + name := fmt.Sprintf("%s-stack-%d", r.format, r.stack) + b.Run(name, func(b *testing.B) { + err := yesErrors(0, r.stack) + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + stackStr = fmt.Sprintf(r.format, err) + } + b.StopTimer() + }) + } + + for _, r := range runs { + name := fmt.Sprintf("%s-stacktrace-%d", r.format, r.stack) + b.Run(name, func(b *testing.B) { + err := yesErrors(0, r.stack) + st := err.(*fundamental).stack.StackTrace() + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + stackStr = fmt.Sprintf(r.format, st) + } + b.StopTimer() + }) + } + GlobalE = stackStr +} diff --git a/vendor/github.com/pkg/errors/errors.go b/vendor/github.com/pkg/errors/errors.go new file mode 100644 index 0000000..8617bee --- /dev/null +++ b/vendor/github.com/pkg/errors/errors.go @@ -0,0 +1,282 @@ +// Package errors provides simple error handling primitives. 
+// +// The traditional error handling idiom in Go is roughly akin to +// +// if err != nil { +// return err +// } +// +// which when applied recursively up the call stack results in error reports +// without context or debugging information. The errors package allows +// programmers to add context to the failure path in their code in a way +// that does not destroy the original value of the error. +// +// Adding context to an error +// +// The errors.Wrap function returns a new error that adds context to the +// original error by recording a stack trace at the point Wrap is called, +// together with the supplied message. For example +// +// _, err := ioutil.ReadAll(r) +// if err != nil { +// return errors.Wrap(err, "read failed") +// } +// +// If additional control is required, the errors.WithStack and +// errors.WithMessage functions destructure errors.Wrap into its component +// operations: annotating an error with a stack trace and with a message, +// respectively. +// +// Retrieving the cause of an error +// +// Using errors.Wrap constructs a stack of errors, adding context to the +// preceding error. Depending on the nature of the error it may be necessary +// to reverse the operation of errors.Wrap to retrieve the original error +// for inspection. Any error value which implements this interface +// +// type causer interface { +// Cause() error +// } +// +// can be inspected by errors.Cause. errors.Cause will recursively retrieve +// the topmost error that does not implement causer, which is assumed to be +// the original cause. For example: +// +// switch err := errors.Cause(err).(type) { +// case *MyError: +// // handle specifically +// default: +// // unknown error +// } +// +// Although the causer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// Formatted printing of errors +// +// All error values returned from this package implement fmt.Formatter and can +// be formatted by the fmt package. The following verbs are supported: +// +// %s print the error. If the error has a Cause it will be +// printed recursively. +// %v see %s +// %+v extended format. Each Frame of the error's StackTrace will +// be printed in detail. +// +// Retrieving the stack trace of an error or wrapper +// +// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are +// invoked. This information can be retrieved with the following interface: +// +// type stackTracer interface { +// StackTrace() errors.StackTrace +// } +// +// The returned errors.StackTrace type is defined as +// +// type StackTrace []Frame +// +// The Frame type represents a call site in the stack trace. Frame supports +// the fmt.Formatter interface that can be used for printing information about +// the stack trace of this error. For example: +// +// if err, ok := err.(stackTracer); ok { +// for _, f := range err.StackTrace() { +// fmt.Printf("%+s:%d\n", f, f) +// } +// } +// +// Although the stackTracer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// See the documentation for Frame.Format for more details. +package errors + +import ( + "fmt" + "io" +) + +// New returns an error with the supplied message. +// New also records the stack trace at the point it was called. +func New(message string) error { + return &fundamental{ + msg: message, + stack: callers(), + } +} + +// Errorf formats according to a format specifier and returns the string +// as a value that satisfies error. 
+// Errorf also records the stack trace at the point it was called. +func Errorf(format string, args ...interface{}) error { + return &fundamental{ + msg: fmt.Sprintf(format, args...), + stack: callers(), + } +} + +// fundamental is an error that has a message and a stack, but no caller. +type fundamental struct { + msg string + *stack +} + +func (f *fundamental) Error() string { return f.msg } + +func (f *fundamental) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + io.WriteString(s, f.msg) + f.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, f.msg) + case 'q': + fmt.Fprintf(s, "%q", f.msg) + } +} + +// WithStack annotates err with a stack trace at the point WithStack was called. +// If err is nil, WithStack returns nil. +func WithStack(err error) error { + if err == nil { + return nil + } + return &withStack{ + err, + callers(), + } +} + +type withStack struct { + error + *stack +} + +func (w *withStack) Cause() error { return w.error } + +func (w *withStack) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v", w.Cause()) + w.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, w.Error()) + case 'q': + fmt.Fprintf(s, "%q", w.Error()) + } +} + +// Wrap returns an error annotating err with a stack trace +// at the point Wrap is called, and the supplied message. +// If err is nil, Wrap returns nil. +func Wrap(err error, message string) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: message, + } + return &withStack{ + err, + callers(), + } +} + +// Wrapf returns an error annotating err with a stack trace +// at the point Wrapf is called, and the format specifier. +// If err is nil, Wrapf returns nil. +func Wrapf(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } + return &withStack{ + err, + callers(), + } +} + +// WithMessage annotates err with a new message. +// If err is nil, WithMessage returns nil. +func WithMessage(err error, message string) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: message, + } +} + +// WithMessagef annotates err with the format specifier. +// If err is nil, WithMessagef returns nil. +func WithMessagef(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } +} + +type withMessage struct { + cause error + msg string +} + +func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() } +func (w *withMessage) Cause() error { return w.cause } + +func (w *withMessage) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v\n", w.Cause()) + io.WriteString(s, w.msg) + return + } + fallthrough + case 's', 'q': + io.WriteString(s, w.Error()) + } +} + +// Cause returns the underlying cause of the error, if possible. +// An error value has a cause if it implements the following +// interface: +// +// type causer interface { +// Cause() error +// } +// +// If the error does not implement Cause, the original error will +// be returned. If the error is nil, nil will be returned without further +// investigation. 
+func Cause(err error) error { + type causer interface { + Cause() error + } + + for err != nil { + cause, ok := err.(causer) + if !ok { + break + } + err = cause.Cause() + } + return err +} diff --git a/vendor/github.com/pkg/errors/errors_test.go b/vendor/github.com/pkg/errors/errors_test.go new file mode 100644 index 0000000..2089b2f --- /dev/null +++ b/vendor/github.com/pkg/errors/errors_test.go @@ -0,0 +1,251 @@ +package errors + +import ( + "errors" + "fmt" + "io" + "reflect" + "testing" +) + +func TestNew(t *testing.T) { + tests := []struct { + err string + want error + }{ + {"", fmt.Errorf("")}, + {"foo", fmt.Errorf("foo")}, + {"foo", New("foo")}, + {"string with format specifiers: %v", errors.New("string with format specifiers: %v")}, + } + + for _, tt := range tests { + got := New(tt.err) + if got.Error() != tt.want.Error() { + t.Errorf("New.Error(): got: %q, want %q", got, tt.want) + } + } +} + +func TestWrapNil(t *testing.T) { + got := Wrap(nil, "no error") + if got != nil { + t.Errorf("Wrap(nil, \"no error\"): got %#v, expected nil", got) + } +} + +func TestWrap(t *testing.T) { + tests := []struct { + err error + message string + want string + }{ + {io.EOF, "read error", "read error: EOF"}, + {Wrap(io.EOF, "read error"), "client error", "client error: read error: EOF"}, + } + + for _, tt := range tests { + got := Wrap(tt.err, tt.message).Error() + if got != tt.want { + t.Errorf("Wrap(%v, %q): got: %v, want %v", tt.err, tt.message, got, tt.want) + } + } +} + +type nilError struct{} + +func (nilError) Error() string { return "nil error" } + +func TestCause(t *testing.T) { + x := New("error") + tests := []struct { + err error + want error + }{{ + // nil error is nil + err: nil, + want: nil, + }, { + // explicit nil error is nil + err: (error)(nil), + want: nil, + }, { + // typed nil is nil + err: (*nilError)(nil), + want: (*nilError)(nil), + }, { + // uncaused error is unaffected + err: io.EOF, + want: io.EOF, + }, { + // caused error returns cause + err: Wrap(io.EOF, "ignored"), + want: io.EOF, + }, { + err: x, // return from errors.New + want: x, + }, { + WithMessage(nil, "whoops"), + nil, + }, { + WithMessage(io.EOF, "whoops"), + io.EOF, + }, { + WithStack(nil), + nil, + }, { + WithStack(io.EOF), + io.EOF, + }} + + for i, tt := range tests { + got := Cause(tt.err) + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("test %d: got %#v, want %#v", i+1, got, tt.want) + } + } +} + +func TestWrapfNil(t *testing.T) { + got := Wrapf(nil, "no error") + if got != nil { + t.Errorf("Wrapf(nil, \"no error\"): got %#v, expected nil", got) + } +} + +func TestWrapf(t *testing.T) { + tests := []struct { + err error + message string + want string + }{ + {io.EOF, "read error", "read error: EOF"}, + {Wrapf(io.EOF, "read error without format specifiers"), "client error", "client error: read error without format specifiers: EOF"}, + {Wrapf(io.EOF, "read error with %d format specifier", 1), "client error", "client error: read error with 1 format specifier: EOF"}, + } + + for _, tt := range tests { + got := Wrapf(tt.err, tt.message).Error() + if got != tt.want { + t.Errorf("Wrapf(%v, %q): got: %v, want %v", tt.err, tt.message, got, tt.want) + } + } +} + +func TestErrorf(t *testing.T) { + tests := []struct { + err error + want string + }{ + {Errorf("read error without format specifiers"), "read error without format specifiers"}, + {Errorf("read error with %d format specifier", 1), "read error with 1 format specifier"}, + } + + for _, tt := range tests { + got := tt.err.Error() + if got != tt.want { + 
t.Errorf("Errorf(%v): got: %q, want %q", tt.err, got, tt.want) + } + } +} + +func TestWithStackNil(t *testing.T) { + got := WithStack(nil) + if got != nil { + t.Errorf("WithStack(nil): got %#v, expected nil", got) + } +} + +func TestWithStack(t *testing.T) { + tests := []struct { + err error + want string + }{ + {io.EOF, "EOF"}, + {WithStack(io.EOF), "EOF"}, + } + + for _, tt := range tests { + got := WithStack(tt.err).Error() + if got != tt.want { + t.Errorf("WithStack(%v): got: %v, want %v", tt.err, got, tt.want) + } + } +} + +func TestWithMessageNil(t *testing.T) { + got := WithMessage(nil, "no error") + if got != nil { + t.Errorf("WithMessage(nil, \"no error\"): got %#v, expected nil", got) + } +} + +func TestWithMessage(t *testing.T) { + tests := []struct { + err error + message string + want string + }{ + {io.EOF, "read error", "read error: EOF"}, + {WithMessage(io.EOF, "read error"), "client error", "client error: read error: EOF"}, + } + + for _, tt := range tests { + got := WithMessage(tt.err, tt.message).Error() + if got != tt.want { + t.Errorf("WithMessage(%v, %q): got: %q, want %q", tt.err, tt.message, got, tt.want) + } + } +} + +func TestWithMessagefNil(t *testing.T) { + got := WithMessagef(nil, "no error") + if got != nil { + t.Errorf("WithMessage(nil, \"no error\"): got %#v, expected nil", got) + } +} + +func TestWithMessagef(t *testing.T) { + tests := []struct { + err error + message string + want string + }{ + {io.EOF, "read error", "read error: EOF"}, + {WithMessagef(io.EOF, "read error without format specifier"), "client error", "client error: read error without format specifier: EOF"}, + {WithMessagef(io.EOF, "read error with %d format specifier", 1), "client error", "client error: read error with 1 format specifier: EOF"}, + } + + for _, tt := range tests { + got := WithMessagef(tt.err, tt.message).Error() + if got != tt.want { + t.Errorf("WithMessage(%v, %q): got: %q, want %q", tt.err, tt.message, got, tt.want) + } + } +} + +// errors.New, etc values are not expected to be compared by value +// but the change in errors#27 made them incomparable. Assert that +// various kinds of errors have a functional equality operator, even +// if the result of that equality is always false. 
+func TestErrorEquality(t *testing.T) { + vals := []error{ + nil, + io.EOF, + errors.New("EOF"), + New("EOF"), + Errorf("EOF"), + Wrap(io.EOF, "EOF"), + Wrapf(io.EOF, "EOF%d", 2), + WithMessage(nil, "whoops"), + WithMessage(io.EOF, "whoops"), + WithStack(io.EOF), + WithStack(nil), + } + + for i := range vals { + for j := range vals { + _ = vals[i] == vals[j] // mustn't panic + } + } +} diff --git a/vendor/github.com/pkg/errors/example_test.go b/vendor/github.com/pkg/errors/example_test.go new file mode 100644 index 0000000..c1fc13e --- /dev/null +++ b/vendor/github.com/pkg/errors/example_test.go @@ -0,0 +1,205 @@ +package errors_test + +import ( + "fmt" + + "github.com/pkg/errors" +) + +func ExampleNew() { + err := errors.New("whoops") + fmt.Println(err) + + // Output: whoops +} + +func ExampleNew_printf() { + err := errors.New("whoops") + fmt.Printf("%+v", err) + + // Example output: + // whoops + // github.com/pkg/errors_test.ExampleNew_printf + // /home/dfc/src/github.com/pkg/errors/example_test.go:17 + // testing.runExample + // /home/dfc/go/src/testing/example.go:114 + // testing.RunExamples + // /home/dfc/go/src/testing/example.go:38 + // testing.(*M).Run + // /home/dfc/go/src/testing/testing.go:744 + // main.main + // /github.com/pkg/errors/_test/_testmain.go:106 + // runtime.main + // /home/dfc/go/src/runtime/proc.go:183 + // runtime.goexit + // /home/dfc/go/src/runtime/asm_amd64.s:2059 +} + +func ExampleWithMessage() { + cause := errors.New("whoops") + err := errors.WithMessage(cause, "oh noes") + fmt.Println(err) + + // Output: oh noes: whoops +} + +func ExampleWithStack() { + cause := errors.New("whoops") + err := errors.WithStack(cause) + fmt.Println(err) + + // Output: whoops +} + +func ExampleWithStack_printf() { + cause := errors.New("whoops") + err := errors.WithStack(cause) + fmt.Printf("%+v", err) + + // Example Output: + // whoops + // github.com/pkg/errors_test.ExampleWithStack_printf + // /home/fabstu/go/src/github.com/pkg/errors/example_test.go:55 + // testing.runExample + // /usr/lib/go/src/testing/example.go:114 + // testing.RunExamples + // /usr/lib/go/src/testing/example.go:38 + // testing.(*M).Run + // /usr/lib/go/src/testing/testing.go:744 + // main.main + // github.com/pkg/errors/_test/_testmain.go:106 + // runtime.main + // /usr/lib/go/src/runtime/proc.go:183 + // runtime.goexit + // /usr/lib/go/src/runtime/asm_amd64.s:2086 + // github.com/pkg/errors_test.ExampleWithStack_printf + // /home/fabstu/go/src/github.com/pkg/errors/example_test.go:56 + // testing.runExample + // /usr/lib/go/src/testing/example.go:114 + // testing.RunExamples + // /usr/lib/go/src/testing/example.go:38 + // testing.(*M).Run + // /usr/lib/go/src/testing/testing.go:744 + // main.main + // github.com/pkg/errors/_test/_testmain.go:106 + // runtime.main + // /usr/lib/go/src/runtime/proc.go:183 + // runtime.goexit + // /usr/lib/go/src/runtime/asm_amd64.s:2086 +} + +func ExampleWrap() { + cause := errors.New("whoops") + err := errors.Wrap(cause, "oh noes") + fmt.Println(err) + + // Output: oh noes: whoops +} + +func fn() error { + e1 := errors.New("error") + e2 := errors.Wrap(e1, "inner") + e3 := errors.Wrap(e2, "middle") + return errors.Wrap(e3, "outer") +} + +func ExampleCause() { + err := fn() + fmt.Println(err) + fmt.Println(errors.Cause(err)) + + // Output: outer: middle: inner: error + // error +} + +func ExampleWrap_extended() { + err := fn() + fmt.Printf("%+v\n", err) + + // Example output: + // error + // github.com/pkg/errors_test.fn + // 
/home/dfc/src/github.com/pkg/errors/example_test.go:47 + // github.com/pkg/errors_test.ExampleCause_printf + // /home/dfc/src/github.com/pkg/errors/example_test.go:63 + // testing.runExample + // /home/dfc/go/src/testing/example.go:114 + // testing.RunExamples + // /home/dfc/go/src/testing/example.go:38 + // testing.(*M).Run + // /home/dfc/go/src/testing/testing.go:744 + // main.main + // /github.com/pkg/errors/_test/_testmain.go:104 + // runtime.main + // /home/dfc/go/src/runtime/proc.go:183 + // runtime.goexit + // /home/dfc/go/src/runtime/asm_amd64.s:2059 + // github.com/pkg/errors_test.fn + // /home/dfc/src/github.com/pkg/errors/example_test.go:48: inner + // github.com/pkg/errors_test.fn + // /home/dfc/src/github.com/pkg/errors/example_test.go:49: middle + // github.com/pkg/errors_test.fn + // /home/dfc/src/github.com/pkg/errors/example_test.go:50: outer +} + +func ExampleWrapf() { + cause := errors.New("whoops") + err := errors.Wrapf(cause, "oh noes #%d", 2) + fmt.Println(err) + + // Output: oh noes #2: whoops +} + +func ExampleErrorf_extended() { + err := errors.Errorf("whoops: %s", "foo") + fmt.Printf("%+v", err) + + // Example output: + // whoops: foo + // github.com/pkg/errors_test.ExampleErrorf + // /home/dfc/src/github.com/pkg/errors/example_test.go:101 + // testing.runExample + // /home/dfc/go/src/testing/example.go:114 + // testing.RunExamples + // /home/dfc/go/src/testing/example.go:38 + // testing.(*M).Run + // /home/dfc/go/src/testing/testing.go:744 + // main.main + // /github.com/pkg/errors/_test/_testmain.go:102 + // runtime.main + // /home/dfc/go/src/runtime/proc.go:183 + // runtime.goexit + // /home/dfc/go/src/runtime/asm_amd64.s:2059 +} + +func Example_stackTrace() { + type stackTracer interface { + StackTrace() errors.StackTrace + } + + err, ok := errors.Cause(fn()).(stackTracer) + if !ok { + panic("oops, err does not implement stackTracer") + } + + st := err.StackTrace() + fmt.Printf("%+v", st[0:2]) // top two frames + + // Example output: + // github.com/pkg/errors_test.fn + // /home/dfc/src/github.com/pkg/errors/example_test.go:47 + // github.com/pkg/errors_test.Example_stackTrace + // /home/dfc/src/github.com/pkg/errors/example_test.go:127 +} + +func ExampleCause_printf() { + err := errors.Wrap(func() error { + return func() error { + return errors.Errorf("hello %s", fmt.Sprintf("world")) + }() + }(), "failed") + + fmt.Printf("%v", err) + + // Output: failed: hello world +} diff --git a/vendor/github.com/pkg/errors/format_test.go b/vendor/github.com/pkg/errors/format_test.go new file mode 100644 index 0000000..cb1df82 --- /dev/null +++ b/vendor/github.com/pkg/errors/format_test.go @@ -0,0 +1,560 @@ +package errors + +import ( + "errors" + "fmt" + "io" + "regexp" + "strings" + "testing" +) + +func TestFormatNew(t *testing.T) { + tests := []struct { + error + format string + want string + }{{ + New("error"), + "%s", + "error", + }, { + New("error"), + "%v", + "error", + }, { + New("error"), + "%+v", + "error\n" + + "github.com/pkg/errors.TestFormatNew\n" + + "\t.+/github.com/pkg/errors/format_test.go:26", + }, { + New("error"), + "%q", + `"error"`, + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.error, tt.format, tt.want) + } +} + +func TestFormatErrorf(t *testing.T) { + tests := []struct { + error + format string + want string + }{{ + Errorf("%s", "error"), + "%s", + "error", + }, { + Errorf("%s", "error"), + "%v", + "error", + }, { + Errorf("%s", "error"), + "%+v", + "error\n" + + "github.com/pkg/errors.TestFormatErrorf\n" + + 
"\t.+/github.com/pkg/errors/format_test.go:56", + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.error, tt.format, tt.want) + } +} + +func TestFormatWrap(t *testing.T) { + tests := []struct { + error + format string + want string + }{{ + Wrap(New("error"), "error2"), + "%s", + "error2: error", + }, { + Wrap(New("error"), "error2"), + "%v", + "error2: error", + }, { + Wrap(New("error"), "error2"), + "%+v", + "error\n" + + "github.com/pkg/errors.TestFormatWrap\n" + + "\t.+/github.com/pkg/errors/format_test.go:82", + }, { + Wrap(io.EOF, "error"), + "%s", + "error: EOF", + }, { + Wrap(io.EOF, "error"), + "%v", + "error: EOF", + }, { + Wrap(io.EOF, "error"), + "%+v", + "EOF\n" + + "error\n" + + "github.com/pkg/errors.TestFormatWrap\n" + + "\t.+/github.com/pkg/errors/format_test.go:96", + }, { + Wrap(Wrap(io.EOF, "error1"), "error2"), + "%+v", + "EOF\n" + + "error1\n" + + "github.com/pkg/errors.TestFormatWrap\n" + + "\t.+/github.com/pkg/errors/format_test.go:103\n", + }, { + Wrap(New("error with space"), "context"), + "%q", + `"context: error with space"`, + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.error, tt.format, tt.want) + } +} + +func TestFormatWrapf(t *testing.T) { + tests := []struct { + error + format string + want string + }{{ + Wrapf(io.EOF, "error%d", 2), + "%s", + "error2: EOF", + }, { + Wrapf(io.EOF, "error%d", 2), + "%v", + "error2: EOF", + }, { + Wrapf(io.EOF, "error%d", 2), + "%+v", + "EOF\n" + + "error2\n" + + "github.com/pkg/errors.TestFormatWrapf\n" + + "\t.+/github.com/pkg/errors/format_test.go:134", + }, { + Wrapf(New("error"), "error%d", 2), + "%s", + "error2: error", + }, { + Wrapf(New("error"), "error%d", 2), + "%v", + "error2: error", + }, { + Wrapf(New("error"), "error%d", 2), + "%+v", + "error\n" + + "github.com/pkg/errors.TestFormatWrapf\n" + + "\t.+/github.com/pkg/errors/format_test.go:149", + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.error, tt.format, tt.want) + } +} + +func TestFormatWithStack(t *testing.T) { + tests := []struct { + error + format string + want []string + }{{ + WithStack(io.EOF), + "%s", + []string{"EOF"}, + }, { + WithStack(io.EOF), + "%v", + []string{"EOF"}, + }, { + WithStack(io.EOF), + "%+v", + []string{"EOF", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:175"}, + }, { + WithStack(New("error")), + "%s", + []string{"error"}, + }, { + WithStack(New("error")), + "%v", + []string{"error"}, + }, { + WithStack(New("error")), + "%+v", + []string{"error", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:189", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:189"}, + }, { + WithStack(WithStack(io.EOF)), + "%+v", + []string{"EOF", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:197", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:197"}, + }, { + WithStack(WithStack(Wrapf(io.EOF, "message"))), + "%+v", + []string{"EOF", + "message", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:205", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:205", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:205"}, + }, { + WithStack(Errorf("error%d", 1)), + "%+v", + []string{"error1", + "github.com/pkg/errors.TestFormatWithStack\n" + + 
"\t.+/github.com/pkg/errors/format_test.go:216", + "github.com/pkg/errors.TestFormatWithStack\n" + + "\t.+/github.com/pkg/errors/format_test.go:216"}, + }} + + for i, tt := range tests { + testFormatCompleteCompare(t, i, tt.error, tt.format, tt.want, true) + } +} + +func TestFormatWithMessage(t *testing.T) { + tests := []struct { + error + format string + want []string + }{{ + WithMessage(New("error"), "error2"), + "%s", + []string{"error2: error"}, + }, { + WithMessage(New("error"), "error2"), + "%v", + []string{"error2: error"}, + }, { + WithMessage(New("error"), "error2"), + "%+v", + []string{ + "error", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:244", + "error2"}, + }, { + WithMessage(io.EOF, "addition1"), + "%s", + []string{"addition1: EOF"}, + }, { + WithMessage(io.EOF, "addition1"), + "%v", + []string{"addition1: EOF"}, + }, { + WithMessage(io.EOF, "addition1"), + "%+v", + []string{"EOF", "addition1"}, + }, { + WithMessage(WithMessage(io.EOF, "addition1"), "addition2"), + "%v", + []string{"addition2: addition1: EOF"}, + }, { + WithMessage(WithMessage(io.EOF, "addition1"), "addition2"), + "%+v", + []string{"EOF", "addition1", "addition2"}, + }, { + Wrap(WithMessage(io.EOF, "error1"), "error2"), + "%+v", + []string{"EOF", "error1", "error2", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:272"}, + }, { + WithMessage(Errorf("error%d", 1), "error2"), + "%+v", + []string{"error1", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:278", + "error2"}, + }, { + WithMessage(WithStack(io.EOF), "error"), + "%+v", + []string{ + "EOF", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:285", + "error"}, + }, { + WithMessage(Wrap(WithStack(io.EOF), "inside-error"), "outside-error"), + "%+v", + []string{ + "EOF", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:293", + "inside-error", + "github.com/pkg/errors.TestFormatWithMessage\n" + + "\t.+/github.com/pkg/errors/format_test.go:293", + "outside-error"}, + }} + + for i, tt := range tests { + testFormatCompleteCompare(t, i, tt.error, tt.format, tt.want, true) + } +} + +func TestFormatGeneric(t *testing.T) { + starts := []struct { + err error + want []string + }{ + {New("new-error"), []string{ + "new-error", + "github.com/pkg/errors.TestFormatGeneric\n" + + "\t.+/github.com/pkg/errors/format_test.go:315"}, + }, {Errorf("errorf-error"), []string{ + "errorf-error", + "github.com/pkg/errors.TestFormatGeneric\n" + + "\t.+/github.com/pkg/errors/format_test.go:319"}, + }, {errors.New("errors-new-error"), []string{ + "errors-new-error"}, + }, + } + + wrappers := []wrapper{ + { + func(err error) error { return WithMessage(err, "with-message") }, + []string{"with-message"}, + }, { + func(err error) error { return WithStack(err) }, + []string{ + "github.com/pkg/errors.(func·002|TestFormatGeneric.func2)\n\t" + + ".+/github.com/pkg/errors/format_test.go:333", + }, + }, { + func(err error) error { return Wrap(err, "wrap-error") }, + []string{ + "wrap-error", + "github.com/pkg/errors.(func·003|TestFormatGeneric.func3)\n\t" + + ".+/github.com/pkg/errors/format_test.go:339", + }, + }, { + func(err error) error { return Wrapf(err, "wrapf-error%d", 1) }, + []string{ + "wrapf-error1", + "github.com/pkg/errors.(func·004|TestFormatGeneric.func4)\n\t" + + ".+/github.com/pkg/errors/format_test.go:346", + }, + }, + } + + for s := 
range starts { + err := starts[s].err + want := starts[s].want + testFormatCompleteCompare(t, s, err, "%+v", want, false) + testGenericRecursive(t, err, want, wrappers, 3) + } +} + +func wrappedNew(message string) error { // This function will be mid-stack inlined in go 1.12+ + return New(message) +} + +func TestFormatWrappedNew(t *testing.T) { + tests := []struct { + error + format string + want string + }{{ + wrappedNew("error"), + "%+v", + "error\n" + + "github.com/pkg/errors.wrappedNew\n" + + "\t.+/github.com/pkg/errors/format_test.go:364\n" + + "github.com/pkg/errors.TestFormatWrappedNew\n" + + "\t.+/github.com/pkg/errors/format_test.go:373", + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.error, tt.format, tt.want) + } +} + +func testFormatRegexp(t *testing.T, n int, arg interface{}, format, want string) { + t.Helper() + got := fmt.Sprintf(format, arg) + gotLines := strings.SplitN(got, "\n", -1) + wantLines := strings.SplitN(want, "\n", -1) + + if len(wantLines) > len(gotLines) { + t.Errorf("test %d: wantLines(%d) > gotLines(%d):\n got: %q\nwant: %q", n+1, len(wantLines), len(gotLines), got, want) + return + } + + for i, w := range wantLines { + match, err := regexp.MatchString(w, gotLines[i]) + if err != nil { + t.Fatal(err) + } + if !match { + t.Errorf("test %d: line %d: fmt.Sprintf(%q, err):\n got: %q\nwant: %q", n+1, i+1, format, got, want) + } + } +} + +var stackLineR = regexp.MustCompile(`\.`) + +// parseBlocks parses input into a slice, where: +// - incase entry contains a newline, its a stacktrace +// - incase entry contains no newline, its a solo line. +// +// Detecting stack boundaries only works incase the WithStack-calls are +// to be found on the same line, thats why it is optionally here. +// +// Example use: +// +// for _, e := range blocks { +// if strings.ContainsAny(e, "\n") { +// // Match as stack +// } else { +// // Match as line +// } +// } +// +func parseBlocks(input string, detectStackboundaries bool) ([]string, error) { + var blocks []string + + stack := "" + wasStack := false + lines := map[string]bool{} // already found lines + + for _, l := range strings.Split(input, "\n") { + isStackLine := stackLineR.MatchString(l) + + switch { + case !isStackLine && wasStack: + blocks = append(blocks, stack, l) + stack = "" + lines = map[string]bool{} + case isStackLine: + if wasStack { + // Detecting two stacks after another, possible cause lines match in + // our tests due to WithStack(WithStack(io.EOF)) on same line. 
+ if detectStackboundaries { + if lines[l] { + if len(stack) == 0 { + return nil, errors.New("len of block must not be zero here") + } + + blocks = append(blocks, stack) + stack = l + lines = map[string]bool{l: true} + continue + } + } + + stack = stack + "\n" + l + } else { + stack = l + } + lines[l] = true + case !isStackLine && !wasStack: + blocks = append(blocks, l) + default: + return nil, errors.New("must not happen") + } + + wasStack = isStackLine + } + + // Use up stack + if stack != "" { + blocks = append(blocks, stack) + } + return blocks, nil +} + +func testFormatCompleteCompare(t *testing.T, n int, arg interface{}, format string, want []string, detectStackBoundaries bool) { + gotStr := fmt.Sprintf(format, arg) + + got, err := parseBlocks(gotStr, detectStackBoundaries) + if err != nil { + t.Fatal(err) + } + + if len(got) != len(want) { + t.Fatalf("test %d: fmt.Sprintf(%s, err) -> wrong number of blocks: got(%d) want(%d)\n got: %s\nwant: %s\ngotStr: %q", + n+1, format, len(got), len(want), prettyBlocks(got), prettyBlocks(want), gotStr) + } + + for i := range got { + if strings.ContainsAny(want[i], "\n") { + // Match as stack + match, err := regexp.MatchString(want[i], got[i]) + if err != nil { + t.Fatal(err) + } + if !match { + t.Fatalf("test %d: block %d: fmt.Sprintf(%q, err):\ngot:\n%q\nwant:\n%q\nall-got:\n%s\nall-want:\n%s\n", + n+1, i+1, format, got[i], want[i], prettyBlocks(got), prettyBlocks(want)) + } + } else { + // Match as message + if got[i] != want[i] { + t.Fatalf("test %d: fmt.Sprintf(%s, err) at block %d got != want:\n got: %q\nwant: %q", n+1, format, i+1, got[i], want[i]) + } + } + } +} + +type wrapper struct { + wrap func(err error) error + want []string +} + +func prettyBlocks(blocks []string) string { + var out []string + + for _, b := range blocks { + out = append(out, fmt.Sprintf("%v", b)) + } + + return " " + strings.Join(out, "\n ") +} + +func testGenericRecursive(t *testing.T, beforeErr error, beforeWant []string, list []wrapper, maxDepth int) { + if len(beforeWant) == 0 { + panic("beforeWant must not be empty") + } + for _, w := range list { + if len(w.want) == 0 { + panic("want must not be empty") + } + + err := w.wrap(beforeErr) + + // Copy required cause append(beforeWant, ..) modified beforeWant subtly. + beforeCopy := make([]string, len(beforeWant)) + copy(beforeCopy, beforeWant) + + beforeWant := beforeCopy + last := len(beforeWant) - 1 + var want []string + + // Merge two stacks behind each other. + if strings.ContainsAny(beforeWant[last], "\n") && strings.ContainsAny(w.want[0], "\n") { + want = append(beforeWant[:last], append([]string{beforeWant[last] + "((?s).*)" + w.want[0]}, w.want[1:]...)...) + } else { + want = append(beforeWant, w.want...) + } + + testFormatCompleteCompare(t, maxDepth, err, "%+v", want, false) + if maxDepth > 0 { + testGenericRecursive(t, err, want, list, maxDepth-1) + } + } +} diff --git a/vendor/github.com/pkg/errors/json_test.go b/vendor/github.com/pkg/errors/json_test.go new file mode 100644 index 0000000..ad1adec --- /dev/null +++ b/vendor/github.com/pkg/errors/json_test.go @@ -0,0 +1,51 @@ +package errors + +import ( + "encoding/json" + "regexp" + "testing" +) + +func TestFrameMarshalText(t *testing.T) { + var tests = []struct { + Frame + want string + }{{ + initpc, + `^github.com/pkg/errors\.init(\.ializers)? 
.+/github\.com/pkg/errors/stack_test.go:\d+$`, + }, { + 0, + `^unknown$`, + }} + for i, tt := range tests { + got, err := tt.Frame.MarshalText() + if err != nil { + t.Fatal(err) + } + if !regexp.MustCompile(tt.want).Match(got) { + t.Errorf("test %d: MarshalJSON:\n got %q\n want %q", i+1, string(got), tt.want) + } + } +} + +func TestFrameMarshalJSON(t *testing.T) { + var tests = []struct { + Frame + want string + }{{ + initpc, + `^"github\.com/pkg/errors\.init(\.ializers)? .+/github\.com/pkg/errors/stack_test.go:\d+"$`, + }, { + 0, + `^"unknown"$`, + }} + for i, tt := range tests { + got, err := json.Marshal(tt.Frame) + if err != nil { + t.Fatal(err) + } + if !regexp.MustCompile(tt.want).Match(got) { + t.Errorf("test %d: MarshalJSON:\n got %q\n want %q", i+1, string(got), tt.want) + } + } +} diff --git a/vendor/github.com/pkg/errors/stack.go b/vendor/github.com/pkg/errors/stack.go new file mode 100644 index 0000000..779a834 --- /dev/null +++ b/vendor/github.com/pkg/errors/stack.go @@ -0,0 +1,177 @@ +package errors + +import ( + "fmt" + "io" + "path" + "runtime" + "strconv" + "strings" +) + +// Frame represents a program counter inside a stack frame. +// For historical reasons if Frame is interpreted as a uintptr +// its value represents the program counter + 1. +type Frame uintptr + +// pc returns the program counter for this frame; +// multiple frames may have the same PC value. +func (f Frame) pc() uintptr { return uintptr(f) - 1 } + +// file returns the full path to the file that contains the +// function for this Frame's pc. +func (f Frame) file() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + file, _ := fn.FileLine(f.pc()) + return file +} + +// line returns the line number of source code of the +// function for this Frame's pc. +func (f Frame) line() int { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return 0 + } + _, line := fn.FileLine(f.pc()) + return line +} + +// name returns the name of this function, if known. +func (f Frame) name() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + return fn.Name() +} + +// Format formats the frame according to the fmt.Formatter interface. +// +// %s source file +// %d source line +// %n function name +// %v equivalent to %s:%d +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+s function name and path of source file relative to the compile time +// GOPATH separated by \n\t (\n\t) +// %+v equivalent to %+s:%d +func (f Frame) Format(s fmt.State, verb rune) { + switch verb { + case 's': + switch { + case s.Flag('+'): + io.WriteString(s, f.name()) + io.WriteString(s, "\n\t") + io.WriteString(s, f.file()) + default: + io.WriteString(s, path.Base(f.file())) + } + case 'd': + io.WriteString(s, strconv.Itoa(f.line())) + case 'n': + io.WriteString(s, funcname(f.name())) + case 'v': + f.Format(s, 's') + io.WriteString(s, ":") + f.Format(s, 'd') + } +} + +// MarshalText formats a stacktrace Frame as a text string. The output is the +// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs. +func (f Frame) MarshalText() ([]byte, error) { + name := f.name() + if name == "unknown" { + return []byte(name), nil + } + return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil +} + +// StackTrace is stack of Frames from innermost (newest) to outermost (oldest). +type StackTrace []Frame + +// Format formats the stack of Frames according to the fmt.Formatter interface. 
+// +// %s lists source files for each Frame in the stack +// %v lists the source file and line number for each Frame in the stack +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+v Prints filename, function, and line number for each Frame in the stack. +func (st StackTrace) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case s.Flag('+'): + for _, f := range st { + io.WriteString(s, "\n") + f.Format(s, verb) + } + case s.Flag('#'): + fmt.Fprintf(s, "%#v", []Frame(st)) + default: + st.formatSlice(s, verb) + } + case 's': + st.formatSlice(s, verb) + } +} + +// formatSlice will format this StackTrace into the given buffer as a slice of +// Frame, only valid when called with '%s' or '%v'. +func (st StackTrace) formatSlice(s fmt.State, verb rune) { + io.WriteString(s, "[") + for i, f := range st { + if i > 0 { + io.WriteString(s, " ") + } + f.Format(s, verb) + } + io.WriteString(s, "]") +} + +// stack represents a stack of program counters. +type stack []uintptr + +func (s *stack) Format(st fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case st.Flag('+'): + for _, pc := range *s { + f := Frame(pc) + fmt.Fprintf(st, "\n%+v", f) + } + } + } +} + +func (s *stack) StackTrace() StackTrace { + f := make([]Frame, len(*s)) + for i := 0; i < len(f); i++ { + f[i] = Frame((*s)[i]) + } + return f +} + +func callers() *stack { + const depth = 32 + var pcs [depth]uintptr + n := runtime.Callers(3, pcs[:]) + var st stack = pcs[0:n] + return &st +} + +// funcname removes the path prefix component of a function's name reported by func.Name(). +func funcname(name string) string { + i := strings.LastIndex(name, "/") + name = name[i+1:] + i = strings.Index(name, ".") + return name[i+1:] +} diff --git a/vendor/github.com/pkg/errors/stack_test.go b/vendor/github.com/pkg/errors/stack_test.go new file mode 100644 index 0000000..1acd719 --- /dev/null +++ b/vendor/github.com/pkg/errors/stack_test.go @@ -0,0 +1,250 @@ +package errors + +import ( + "fmt" + "runtime" + "testing" +) + +var initpc = caller() + +type X struct{} + +// val returns a Frame pointing to itself. +func (x X) val() Frame { + return caller() +} + +// ptr returns a Frame pointing to itself. 
+func (x *X) ptr() Frame { + return caller() +} + +func TestFrameFormat(t *testing.T) { + var tests = []struct { + Frame + format string + want string + }{{ + initpc, + "%s", + "stack_test.go", + }, { + initpc, + "%+s", + "github.com/pkg/errors.init\n" + + "\t.+/github.com/pkg/errors/stack_test.go", + }, { + 0, + "%s", + "unknown", + }, { + 0, + "%+s", + "unknown", + }, { + initpc, + "%d", + "9", + }, { + 0, + "%d", + "0", + }, { + initpc, + "%n", + "init", + }, { + func() Frame { + var x X + return x.ptr() + }(), + "%n", + `\(\*X\).ptr`, + }, { + func() Frame { + var x X + return x.val() + }(), + "%n", + "X.val", + }, { + 0, + "%n", + "", + }, { + initpc, + "%v", + "stack_test.go:9", + }, { + initpc, + "%+v", + "github.com/pkg/errors.init\n" + + "\t.+/github.com/pkg/errors/stack_test.go:9", + }, { + 0, + "%v", + "unknown:0", + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.Frame, tt.format, tt.want) + } +} + +func TestFuncname(t *testing.T) { + tests := []struct { + name, want string + }{ + {"", ""}, + {"runtime.main", "main"}, + {"github.com/pkg/errors.funcname", "funcname"}, + {"funcname", "funcname"}, + {"io.copyBuffer", "copyBuffer"}, + {"main.(*R).Write", "(*R).Write"}, + } + + for _, tt := range tests { + got := funcname(tt.name) + want := tt.want + if got != want { + t.Errorf("funcname(%q): want: %q, got %q", tt.name, want, got) + } + } +} + +func TestStackTrace(t *testing.T) { + tests := []struct { + err error + want []string + }{{ + New("ooh"), []string{ + "github.com/pkg/errors.TestStackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:121", + }, + }, { + Wrap(New("ooh"), "ahh"), []string{ + "github.com/pkg/errors.TestStackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:126", // this is the stack of Wrap, not New + }, + }, { + Cause(Wrap(New("ooh"), "ahh")), []string{ + "github.com/pkg/errors.TestStackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:131", // this is the stack of New + }, + }, { + func() error { return New("ooh") }(), []string{ + `github.com/pkg/errors.TestStackTrace.func1` + + "\n\t.+/github.com/pkg/errors/stack_test.go:136", // this is the stack of New + "github.com/pkg/errors.TestStackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:136", // this is the stack of New's caller + }, + }, { + Cause(func() error { + return func() error { + return Errorf("hello %s", fmt.Sprintf("world")) + }() + }()), []string{ + `github.com/pkg/errors.TestStackTrace.func2.1` + + "\n\t.+/github.com/pkg/errors/stack_test.go:145", // this is the stack of Errorf + `github.com/pkg/errors.TestStackTrace.func2` + + "\n\t.+/github.com/pkg/errors/stack_test.go:146", // this is the stack of Errorf's caller + "github.com/pkg/errors.TestStackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:147", // this is the stack of Errorf's caller's caller + }, + }} + for i, tt := range tests { + x, ok := tt.err.(interface { + StackTrace() StackTrace + }) + if !ok { + t.Errorf("expected %#v to implement StackTrace() StackTrace", tt.err) + continue + } + st := x.StackTrace() + for j, want := range tt.want { + testFormatRegexp(t, i, st[j], "%+v", want) + } + } +} + +func stackTrace() StackTrace { + const depth = 8 + var pcs [depth]uintptr + n := runtime.Callers(1, pcs[:]) + var st stack = pcs[0:n] + return st.StackTrace() +} + +func TestStackTraceFormat(t *testing.T) { + tests := []struct { + StackTrace + format string + want string + }{{ + nil, + "%s", + `\[\]`, + }, { + nil, + "%v", + `\[\]`, + }, { + nil, + "%+v", + "", + }, { + nil, + "%#v", + 
`\[\]errors.Frame\(nil\)`, + }, { + make(StackTrace, 0), + "%s", + `\[\]`, + }, { + make(StackTrace, 0), + "%v", + `\[\]`, + }, { + make(StackTrace, 0), + "%+v", + "", + }, { + make(StackTrace, 0), + "%#v", + `\[\]errors.Frame{}`, + }, { + stackTrace()[:2], + "%s", + `\[stack_test.go stack_test.go\]`, + }, { + stackTrace()[:2], + "%v", + `\[stack_test.go:174 stack_test.go:221\]`, + }, { + stackTrace()[:2], + "%+v", + "\n" + + "github.com/pkg/errors.stackTrace\n" + + "\t.+/github.com/pkg/errors/stack_test.go:174\n" + + "github.com/pkg/errors.TestStackTraceFormat\n" + + "\t.+/github.com/pkg/errors/stack_test.go:225", + }, { + stackTrace()[:2], + "%#v", + `\[\]errors.Frame{stack_test.go:174, stack_test.go:233}`, + }} + + for i, tt := range tests { + testFormatRegexp(t, i, tt.StackTrace, tt.format, tt.want) + } +} + +// a version of runtime.Caller that returns a Frame, not a uintptr. +func caller() Frame { + var pcs [3]uintptr + n := runtime.Callers(2, pcs[:]) + frames := runtime.CallersFrames(pcs[:n]) + frame, _ := frames.Next() + return Frame(frame.PC) +} diff --git a/vendor/github.com/spf13/afero/.travis.yml b/vendor/github.com/spf13/afero/.travis.yml index 6c296d2..0637db7 100644 --- a/vendor/github.com/spf13/afero/.travis.yml +++ b/vendor/github.com/spf13/afero/.travis.yml @@ -2,8 +2,8 @@ sudo: false language: go go: - - 1.7.5 - - 1.8 + - 1.9 + - "1.10" - tip os: @@ -16,5 +16,6 @@ matrix: fast_finish: true script: - - go test -v ./... - go build + - go test -race -v ./... + diff --git a/vendor/github.com/spf13/afero/README.md b/vendor/github.com/spf13/afero/README.md index d9e3327..0c9b04b 100644 --- a/vendor/github.com/spf13/afero/README.md +++ b/vendor/github.com/spf13/afero/README.md @@ -61,11 +61,11 @@ import "github.com/spf13/afero" First define a package variable and set it to a pointer to a filesystem. ```go -var AppFs afero.Fs = afero.NewMemMapFs() +var AppFs = afero.NewMemMapFs() or -var AppFs afero.Fs = afero.NewOsFs() +var AppFs = afero.NewOsFs() ``` It is important to note that if you repeat the composite literal you will be using a completely new and isolated filesystem. In the case of @@ -81,7 +81,10 @@ So if my application before had: ```go os.Open('/tmp/foo') ``` -We would replace it with a call to `AppFs.Open('/tmp/foo')`. +We would replace it with: +```go +AppFs.Open('/tmp/foo') +``` `AppFs` being the variable we defined above. 
@@ -166,8 +169,8 @@ f, err := afero.TempFile(fs,"", "ioutil-test") ### Calling via Afero ```go -fs := afero.NewMemMapFs -afs := &Afero{Fs: fs} +fs := afero.NewMemMapFs() +afs := &afero.Afero{Fs: fs} f, err := afs.TempFile("", "ioutil-test") ``` diff --git a/vendor/github.com/spf13/afero/afero_test.go b/vendor/github.com/spf13/afero/afero_test.go index 526afa9..e2c1362 100644 --- a/vendor/github.com/spf13/afero/afero_test.go +++ b/vendor/github.com/spf13/afero/afero_test.go @@ -570,6 +570,25 @@ func TestReaddir(t *testing.T) { } } +// https://github.com/spf13/afero/issues/169 +func TestReaddirRegularFile(t *testing.T) { + defer removeAllTestFiles(t) + for _, fs := range Fss { + f := tmpFile(fs) + defer f.Close() + + _, err := f.Readdirnames(-1) + if err == nil { + t.Fatal("Expected error") + } + + _, err = f.Readdir(-1) + if err == nil { + t.Fatal("Expected error") + } + } +} + type myFileInfo []os.FileInfo func (m myFileInfo) String() string { diff --git a/vendor/github.com/spf13/afero/appveyor.yml b/vendor/github.com/spf13/afero/appveyor.yml index 006f315..a633ad5 100644 --- a/vendor/github.com/spf13/afero/appveyor.yml +++ b/vendor/github.com/spf13/afero/appveyor.yml @@ -12,4 +12,4 @@ build_script: go build github.com/spf13/afero test_script: -- cmd: go test -v github.com/spf13/afero +- cmd: go test -race -v github.com/spf13/afero/... diff --git a/vendor/github.com/spf13/afero/basepath.go b/vendor/github.com/spf13/afero/basepath.go index 5e4fc2e..616ff8f 100644 --- a/vendor/github.com/spf13/afero/basepath.go +++ b/vendor/github.com/spf13/afero/basepath.go @@ -1,7 +1,6 @@ package afero import ( - "errors" "os" "path/filepath" "runtime" @@ -9,6 +8,8 @@ import ( "time" ) +var _ Lstater = (*BasePathFs)(nil) + // The BasePathFs restricts all operations to a given path within an Fs. // The given file name to the operations on this Fs will be prepended with // the base path before calling the base Fs. @@ -22,6 +23,16 @@ type BasePathFs struct { path string } +type BasePathFile struct { + File + path string +} + +func (f *BasePathFile) Name() string { + sourcename := f.File.Name() + return strings.TrimPrefix(sourcename, filepath.Clean(f.path)) +} + func NewBasePathFs(source Fs, path string) Fs { return &BasePathFs{source: source, path: path} } @@ -30,7 +41,7 @@ func NewBasePathFs(source Fs, path string) Fs { // else the given file with the base path prepended func (b *BasePathFs) RealPath(name string) (path string, err error) { if err := validateBasePathName(name); err != nil { - return "", err + return name, err } bpath := filepath.Clean(b.path) @@ -52,7 +63,7 @@ func validateBasePathName(name string) error { // On Windows a common mistake would be to provide an absolute OS path // We could strip out the base part, but that would not be very portable. 
if filepath.IsAbs(name) { - return &os.PathError{Op: "realPath", Path: name, Err: errors.New("got a real OS path instead of a virtual")} + return os.ErrNotExist } return nil @@ -111,14 +122,22 @@ func (b *BasePathFs) OpenFile(name string, flag int, mode os.FileMode) (f File, if name, err = b.RealPath(name); err != nil { return nil, &os.PathError{Op: "openfile", Path: name, Err: err} } - return b.source.OpenFile(name, flag, mode) + sourcef, err := b.source.OpenFile(name, flag, mode) + if err != nil { + return nil, err + } + return &BasePathFile{sourcef, b.path}, nil } func (b *BasePathFs) Open(name string) (f File, err error) { if name, err = b.RealPath(name); err != nil { return nil, &os.PathError{Op: "open", Path: name, Err: err} } - return b.source.Open(name) + sourcef, err := b.source.Open(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil } func (b *BasePathFs) Mkdir(name string, mode os.FileMode) (err error) { @@ -139,7 +158,23 @@ func (b *BasePathFs) Create(name string) (f File, err error) { if name, err = b.RealPath(name); err != nil { return nil, &os.PathError{Op: "create", Path: name, Err: err} } - return b.source.Create(name) + sourcef, err := b.source.Create(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil +} + +func (b *BasePathFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + name, err := b.RealPath(name) + if err != nil { + return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err} + } + if lstater, ok := b.source.(Lstater); ok { + return lstater.LstatIfPossible(name) + } + fi, err := b.source.Stat(name) + return fi, false, err } // vim: ts=4 sw=4 noexpandtab nolist syn=go diff --git a/vendor/github.com/spf13/afero/basepath_test.go b/vendor/github.com/spf13/afero/basepath_test.go index abc22b9..77aa7df 100644 --- a/vendor/github.com/spf13/afero/basepath_test.go +++ b/vendor/github.com/spf13/afero/basepath_test.go @@ -68,8 +68,8 @@ func TestRealPath(t *testing.T) { if runtime.GOOS == "windows" { _, err = bp.RealPath(anotherDir) - if err == nil { - t.Errorf("Expected error") + if err != os.ErrNotExist { + t.Errorf("Expected os.ErrNotExist") } } else { @@ -140,3 +140,51 @@ func TestNestedBasePaths(t *testing.T) { } } } + +func TestBasePathOpenFile(t *testing.T) { + baseFs := &MemMapFs{} + baseFs.MkdirAll("/base/path/tmp", 0777) + bp := NewBasePathFs(baseFs, "/base/path") + f, err := bp.OpenFile("/tmp/file.txt", os.O_CREATE, 0600) + if err != nil { + t.Fatalf("failed to open file: %v", err) + } + if filepath.Dir(f.Name()) != filepath.Clean("/tmp") { + t.Fatalf("realpath leaked: %s", f.Name()) + } +} + +func TestBasePathCreate(t *testing.T) { + baseFs := &MemMapFs{} + baseFs.MkdirAll("/base/path/tmp", 0777) + bp := NewBasePathFs(baseFs, "/base/path") + f, err := bp.Create("/tmp/file.txt") + if err != nil { + t.Fatalf("failed to create file: %v", err) + } + if filepath.Dir(f.Name()) != filepath.Clean("/tmp") { + t.Fatalf("realpath leaked: %s", f.Name()) + } +} + +func TestBasePathTempFile(t *testing.T) { + baseFs := &MemMapFs{} + baseFs.MkdirAll("/base/path/tmp", 0777) + bp := NewBasePathFs(baseFs, "/base/path") + + tDir, err := TempDir(bp, "/tmp", "") + if err != nil { + t.Fatalf("Failed to TempDir: %v", err) + } + if filepath.Dir(tDir) != filepath.Clean("/tmp") { + t.Fatalf("Tempdir realpath leaked: %s", tDir) + } + tempFile, err := TempFile(bp, tDir, "") + if err != nil { + t.Fatalf("Failed to TempFile: %v", err) + } + defer tempFile.Close() + if 
expected, actual := tDir, filepath.Dir(tempFile.Name()); expected != actual { + t.Fatalf("TempFile realpath leaked: expected %s, got %s", expected, actual) + } +} diff --git a/vendor/github.com/spf13/afero/cacheOnReadFs.go b/vendor/github.com/spf13/afero/cacheOnReadFs.go index e54a4f8..29a26c6 100644 --- a/vendor/github.com/spf13/afero/cacheOnReadFs.go +++ b/vendor/github.com/spf13/afero/cacheOnReadFs.go @@ -64,15 +64,10 @@ func (u *CacheOnReadFs) cacheStatus(name string) (state cacheState, fi os.FileIn return cacheHit, lfi, nil } - if err == syscall.ENOENT { + if err == syscall.ENOENT || os.IsNotExist(err) { return cacheMiss, nil, nil } - var ok bool - if err, ok = err.(*os.PathError); ok { - if err == os.ErrNotExist { - return cacheMiss, nil, nil - } - } + return cacheMiss, nil, err } @@ -210,7 +205,7 @@ func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File, bfi.Close() // oops, what if O_TRUNC was set and file opening in the layer failed...? return nil, err } - return &UnionFile{base: bfi, layer: lfi}, nil + return &UnionFile{Base: bfi, Layer: lfi}, nil } return u.layer.OpenFile(name, flag, perm) } @@ -256,7 +251,7 @@ func (u *CacheOnReadFs) Open(name string) (File, error) { if err != nil && bfile == nil { return nil, err } - return &UnionFile{base: bfile, layer: lfile}, nil + return &UnionFile{Base: bfile, Layer: lfile}, nil } func (u *CacheOnReadFs) Mkdir(name string, perm os.FileMode) error { @@ -291,5 +286,5 @@ func (u *CacheOnReadFs) Create(name string) (File, error) { bfh.Close() return nil, err } - return &UnionFile{base: bfh, layer: lfh}, nil + return &UnionFile{Base: bfh, Layer: lfh}, nil } diff --git a/vendor/github.com/spf13/afero/composite_test.go b/vendor/github.com/spf13/afero/composite_test.go index e8ac1a8..bc915c2 100644 --- a/vendor/github.com/spf13/afero/composite_test.go +++ b/vendor/github.com/spf13/afero/composite_test.go @@ -1,7 +1,9 @@ package afero import ( + "bytes" "fmt" + "io" "io/ioutil" "os" "testing" @@ -50,7 +52,6 @@ func CleanupTempDirs(t *testing.T) { func TestUnionCreateExisting(t *testing.T) { base := &MemMapFs{} roBase := &ReadOnlyFs{source: base} - ufs := NewCopyOnWriteFs(roBase, &MemMapFs{}) base.MkdirAll("/home/test", 0777) @@ -366,3 +367,141 @@ func TestUnionCacheExpire(t *testing.T) { t.Errorf("cache time failed: <%s>", data) } } + +func TestCacheOnReadFsNotInLayer(t *testing.T) { + base := NewMemMapFs() + layer := NewMemMapFs() + fs := NewCacheOnReadFs(base, layer, 0) + + fh, err := base.Create("/file.txt") + if err != nil { + t.Fatal("unable to create file: ", err) + } + + txt := []byte("This is a test") + fh.Write(txt) + fh.Close() + + fh, err = fs.Open("/file.txt") + if err != nil { + t.Fatal("could not open file: ", err) + } + + b, err := ReadAll(fh) + fh.Close() + + if err != nil { + t.Fatal("could not read file: ", err) + } else if !bytes.Equal(txt, b) { + t.Fatalf("wanted file text %q, got %q", txt, b) + } + + fh, err = layer.Open("/file.txt") + if err != nil { + t.Fatal("could not open file from layer: ", err) + } + fh.Close() +} + +// #194 +func TestUnionFileReaddirEmpty(t *testing.T) { + osFs := NewOsFs() + + base := NewMemMapFs() + overlay := NewMemMapFs() + ufs := &CopyOnWriteFs{base: base, layer: overlay} + mem := NewMemMapFs() + + // The OS file will return io.EOF on end of directory. 
+ for _, fs := range []Fs{osFs, ufs, mem} { + baseDir, err := TempDir(fs, "", "empty-dir") + if err != nil { + t.Fatal(err) + } + + f, err := fs.Open(baseDir) + if err != nil { + t.Fatal(err) + } + + names, err := f.Readdirnames(1) + if err != io.EOF { + t.Fatal(err) + } + + if len(names) != 0 { + t.Fatal("should be empty") + } + + f.Close() + + fs.RemoveAll(baseDir) + } +} + +// #197 +func TestUnionFileReaddirDuplicateEmpty(t *testing.T) { + base := NewMemMapFs() + dir, err := TempDir(base, "", "empty-dir") + if err != nil { + t.Fatal(err) + } + + // Overlay shares same empty directory as base + overlay := NewMemMapFs() + err = overlay.Mkdir(dir, 0700) + if err != nil { + t.Fatal(err) + } + + ufs := &CopyOnWriteFs{base: base, layer: overlay} + + f, err := ufs.Open(dir) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + names, err := f.Readdirnames(0) + + if err == io.EOF { + t.Errorf("unexpected io.EOF error") + } + + if len(names) != 0 { + t.Fatal("should be empty") + } +} + +func TestUnionFileReaddirAskForTooMany(t *testing.T) { + base := &MemMapFs{} + overlay := &MemMapFs{} + + for i := 0; i < 5; i++ { + WriteFile(base, fmt.Sprintf("file%d.txt", i), []byte("afero"), 0777) + } + + ufs := &CopyOnWriteFs{base: base, layer: overlay} + + f, err := ufs.Open("") + if err != nil { + t.Fatal(err) + } + + defer f.Close() + + names, err := f.Readdirnames(6) + if err != nil { + t.Fatal(err) + } + + if len(names) != 5 { + t.Fatal(names) + } + + // End of directory + _, err = f.Readdirnames(3) + if err != io.EOF { + t.Fatal(err) + } +} diff --git a/vendor/github.com/spf13/afero/copyOnWriteFs.go b/vendor/github.com/spf13/afero/copyOnWriteFs.go index ed692ae..e8108a8 100644 --- a/vendor/github.com/spf13/afero/copyOnWriteFs.go +++ b/vendor/github.com/spf13/afero/copyOnWriteFs.go @@ -8,6 +8,8 @@ import ( "time" ) +var _ Lstater = (*CopyOnWriteFs)(nil) + // The CopyOnWriteFs is a union filesystem: a read only base file system with // a possibly writeable layer on top. 
Changes to the file system will only // be made in the overlay: Changing an existing file in the base layer which @@ -76,18 +78,55 @@ func (u *CopyOnWriteFs) Chmod(name string, mode os.FileMode) error { func (u *CopyOnWriteFs) Stat(name string) (os.FileInfo, error) { fi, err := u.layer.Stat(name) if err != nil { - origErr := err - if e, ok := err.(*os.PathError); ok { - err = e.Err - } - if err == syscall.ENOENT || err == syscall.ENOTDIR { + isNotExist := u.isNotExist(err) + if isNotExist { return u.base.Stat(name) } - return nil, origErr + return nil, err } return fi, nil } +func (u *CopyOnWriteFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + llayer, ok1 := u.layer.(Lstater) + lbase, ok2 := u.base.(Lstater) + + if ok1 { + fi, b, err := llayer.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + + if !u.isNotExist(err) { + return nil, b, err + } + } + + if ok2 { + fi, b, err := lbase.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + if !u.isNotExist(err) { + return nil, b, err + } + } + + fi, err := u.Stat(name) + + return fi, false, err +} + +func (u *CopyOnWriteFs) isNotExist(err error) bool { + if e, ok := err.(*os.PathError); ok { + err = e.Err + } + if err == os.ErrNotExist || err == syscall.ENOENT || err == syscall.ENOTDIR { + return true + } + return false +} + // Renaming files present only in the base layer is not permitted func (u *CopyOnWriteFs) Rename(oldname, newname string) error { b, err := u.isBaseFile(oldname) @@ -219,7 +258,7 @@ func (u *CopyOnWriteFs) Open(name string) (File, error) { return nil, fmt.Errorf("BaseErr: %v\nOverlayErr: %v", bErr, lErr) } - return &UnionFile{base: bfile, layer: lfile}, nil + return &UnionFile{Base: bfile, Layer: lfile}, nil } func (u *CopyOnWriteFs) Mkdir(name string, perm os.FileMode) error { @@ -228,7 +267,7 @@ func (u *CopyOnWriteFs) Mkdir(name string, perm os.FileMode) error { return u.layer.MkdirAll(name, perm) } if dir { - return syscall.EEXIST + return ErrFileExists } return u.layer.MkdirAll(name, perm) } @@ -243,7 +282,8 @@ func (u *CopyOnWriteFs) MkdirAll(name string, perm os.FileMode) error { return u.layer.MkdirAll(name, perm) } if dir { - return syscall.EEXIST + // This is in line with how os.MkdirAll behaves. 
+ return nil } return u.layer.MkdirAll(name, perm) } diff --git a/vendor/github.com/spf13/afero/copyOnWriteFs_test.go b/vendor/github.com/spf13/afero/copyOnWriteFs_test.go index 2a00fab..94ba725 100644 --- a/vendor/github.com/spf13/afero/copyOnWriteFs_test.go +++ b/vendor/github.com/spf13/afero/copyOnWriteFs_test.go @@ -1,23 +1,62 @@ package afero -import "testing" +import ( + "os" + "path/filepath" + "testing" +) func TestCopyOnWrite(t *testing.T) { - var fs Fs - var err error - base := NewOsFs() - roBase := NewReadOnlyFs(base) - ufs := NewCopyOnWriteFs(roBase, NewMemMapFs()) - fs = ufs - err = fs.MkdirAll("nonexistent/directory/", 0744) + osFs := NewOsFs() + writeDir, err := TempDir(osFs, "", "copy-on-write-test") if err != nil { - t.Error(err) - return + t.Fatal("error creating tempDir", err) } - _, err = fs.Create("nonexistent/directory/newfile") + defer osFs.RemoveAll(writeDir) + + compositeFs := NewCopyOnWriteFs(NewReadOnlyFs(NewOsFs()), osFs) + + var dir = filepath.Join(writeDir, "some/path") + + err = compositeFs.MkdirAll(dir, 0744) + if err != nil { + t.Fatal(err) + } + _, err = compositeFs.Create(filepath.Join(dir, "newfile")) if err != nil { - t.Error(err) - return + t.Fatal(err) + } + + // https://github.com/spf13/afero/issues/189 + // We want the composite file system to behave like the OS file system + // on Mkdir and MkdirAll + for _, fs := range []Fs{osFs, compositeFs} { + err = fs.Mkdir(dir, 0744) + if err == nil || !os.IsExist(err) { + t.Errorf("Mkdir: Got %q for %T", err, fs) + } + + // MkdirAll does not return an error when the directory already exists + err = fs.MkdirAll(dir, 0744) + if err != nil { + t.Errorf("MkdirAll: Got %q for %T", err, fs) + } + + } +} + +func TestCopyOnWriteFileInMemMapBase(t *testing.T) { + base := &MemMapFs{} + layer := &MemMapFs{} + + if err := WriteFile(base, "base.txt", []byte("base"), 0755); err != nil { + t.Fatalf("Failed to write file: %s", err) } + ufs := NewCopyOnWriteFs(base, layer) + + _, err := ufs.Stat("base.txt") + if err != nil { + t.Fatal(err) + } } diff --git a/vendor/github.com/spf13/afero/go.mod b/vendor/github.com/spf13/afero/go.mod new file mode 100644 index 0000000..0868550 --- /dev/null +++ b/vendor/github.com/spf13/afero/go.mod @@ -0,0 +1,3 @@ +module github.com/spf13/afero + +require golang.org/x/text v0.3.0 diff --git a/vendor/github.com/spf13/afero/go.sum b/vendor/github.com/spf13/afero/go.sum new file mode 100644 index 0000000..6bad37b --- /dev/null +++ b/vendor/github.com/spf13/afero/go.sum @@ -0,0 +1,2 @@ +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/vendor/github.com/spf13/afero/memradix.go b/vendor/github.com/spf13/afero/lstater.go similarity index 54% rename from vendor/github.com/spf13/afero/memradix.go rename to vendor/github.com/spf13/afero/lstater.go index 87527f3..89c1bfc 100644 --- a/vendor/github.com/spf13/afero/memradix.go +++ b/vendor/github.com/spf13/afero/lstater.go @@ -1,4 +1,4 @@ -// Copyright © 2014 Steve Francia . +// Copyright © 2018 Steve Francia . // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,3 +12,16 @@ // limitations under the License. package afero + +import ( + "os" +) + +// Lstater is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It will call Lstat if the filesystem iself is, or it delegates to, the os filesystem. 
+// Else it will call Stat. +// In addtion to the FileInfo, it will return a boolean telling whether Lstat was called or not. +type Lstater interface { + LstatIfPossible(name string) (os.FileInfo, bool, error) +} diff --git a/vendor/github.com/spf13/afero/lstater_test.go b/vendor/github.com/spf13/afero/lstater_test.go new file mode 100644 index 0000000..477924f --- /dev/null +++ b/vendor/github.com/spf13/afero/lstater_test.go @@ -0,0 +1,102 @@ +// Copyright ©2018 Steve Francia +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "path/filepath" + "testing" +) + +func TestLstatIfPossible(t *testing.T) { + wd, _ := os.Getwd() + defer func() { + os.Chdir(wd) + }() + + osFs := &OsFs{} + + workDir, err := TempDir(osFs, "", "afero-lstate") + if err != nil { + t.Fatal(err) + } + + defer func() { + osFs.RemoveAll(workDir) + }() + + memWorkDir := "/lstate" + + memFs := NewMemMapFs() + overlayFs1 := &CopyOnWriteFs{base: osFs, layer: memFs} + overlayFs2 := &CopyOnWriteFs{base: memFs, layer: osFs} + overlayFsMemOnly := &CopyOnWriteFs{base: memFs, layer: NewMemMapFs()} + basePathFs := &BasePathFs{source: osFs, path: workDir} + basePathFsMem := &BasePathFs{source: memFs, path: memWorkDir} + roFs := &ReadOnlyFs{source: osFs} + roFsMem := &ReadOnlyFs{source: memFs} + + pathFileMem := filepath.Join(memWorkDir, "aferom.txt") + + WriteFile(osFs, filepath.Join(workDir, "afero.txt"), []byte("Hi, Afero!"), 0777) + WriteFile(memFs, filepath.Join(pathFileMem), []byte("Hi, Afero!"), 0777) + + os.Chdir(workDir) + if err := os.Symlink("afero.txt", "symafero.txt"); err != nil { + t.Fatal(err) + } + + pathFile := filepath.Join(workDir, "afero.txt") + pathSymlink := filepath.Join(workDir, "symafero.txt") + + checkLstat := func(l Lstater, name string, shouldLstat bool) os.FileInfo { + statFile, isLstat, err := l.LstatIfPossible(name) + if err != nil { + t.Fatalf("Lstat check failed: %s", err) + } + if isLstat != shouldLstat { + t.Fatalf("Lstat status was %t for %s", isLstat, name) + } + return statFile + } + + testLstat := func(l Lstater, pathFile, pathSymlink string) { + shouldLstat := pathSymlink != "" + statRegular := checkLstat(l, pathFile, shouldLstat) + statSymlink := checkLstat(l, pathSymlink, shouldLstat) + if statRegular == nil || statSymlink == nil { + t.Fatal("got nil FileInfo") + } + + symSym := statSymlink.Mode()&os.ModeSymlink == os.ModeSymlink + if symSym == (pathSymlink == "") { + t.Fatal("expected the FileInfo to describe the symlink") + } + + _, _, err := l.LstatIfPossible("this-should-not-exist.txt") + if err == nil || !os.IsNotExist(err) { + t.Fatalf("expected file to not exist, got %s", err) + } + } + + testLstat(osFs, pathFile, pathSymlink) + testLstat(overlayFs1, pathFile, pathSymlink) + testLstat(overlayFs2, pathFile, pathSymlink) + testLstat(basePathFs, "afero.txt", "symafero.txt") + testLstat(overlayFsMemOnly, pathFileMem, "") + testLstat(basePathFsMem, "aferom.txt", "") + testLstat(roFs, pathFile, pathSymlink) + testLstat(roFsMem, pathFileMem, "") +} 
diff --git a/vendor/github.com/spf13/afero/match.go b/vendor/github.com/spf13/afero/match.go new file mode 100644 index 0000000..c18a87f --- /dev/null +++ b/vendor/github.com/spf13/afero/match.go @@ -0,0 +1,110 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2009 The Go Authors. All rights reserved. + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "path/filepath" + "sort" + "strings" +) + +// Glob returns the names of all files matching pattern or nil +// if there is no matching file. The syntax of patterns is the same +// as in Match. The pattern may describe hierarchical names such as +// /usr/*/bin/ed (assuming the Separator is '/'). +// +// Glob ignores file system errors such as I/O errors reading directories. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// This was adapted from (http://golang.org/pkg/path/filepath) and uses several +// built-ins from that package. +func Glob(fs Fs, pattern string) (matches []string, err error) { + if !hasMeta(pattern) { + // Lstat not supported by a ll filesystems. + if _, err = lstatIfPossible(fs, pattern); err != nil { + return nil, nil + } + return []string{pattern}, nil + } + + dir, file := filepath.Split(pattern) + switch dir { + case "": + dir = "." + case string(filepath.Separator): + // nothing + default: + dir = dir[0 : len(dir)-1] // chop off trailing separator + } + + if !hasMeta(dir) { + return glob(fs, dir, file, nil) + } + + var m []string + m, err = Glob(fs, dir) + if err != nil { + return + } + for _, d := range m { + matches, err = glob(fs, d, file, matches) + if err != nil { + return + } + } + return +} + +// glob searches for files matching pattern in the directory dir +// and appends them to matches. If the directory cannot be +// opened, it returns the existing matches. New matches are +// added in lexicographical order. +func glob(fs Fs, dir, pattern string, matches []string) (m []string, e error) { + m = matches + fi, err := fs.Stat(dir) + if err != nil { + return + } + if !fi.IsDir() { + return + } + d, err := fs.Open(dir) + if err != nil { + return + } + defer d.Close() + + names, _ := d.Readdirnames(-1) + sort.Strings(names) + + for _, n := range names { + matched, err := filepath.Match(pattern, n) + if err != nil { + return m, err + } + if matched { + m = append(m, filepath.Join(dir, n)) + } + } + return +} + +// hasMeta reports whether path contains any of the magic characters +// recognized by Match. +func hasMeta(path string) bool { + // TODO(niemeyer): Should other magic characters be added here? + return strings.IndexAny(path, "*?[") >= 0 +} diff --git a/vendor/github.com/spf13/afero/match_test.go b/vendor/github.com/spf13/afero/match_test.go new file mode 100644 index 0000000..21e1fae --- /dev/null +++ b/vendor/github.com/spf13/afero/match_test.go @@ -0,0 +1,183 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2009 The Go Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "path/filepath" + "runtime" + "testing" +) + +// contains returns true if vector contains the string s. +func contains(vector []string, s string) bool { + for _, elem := range vector { + if elem == s { + return true + } + } + return false +} + +func setupGlobDirRoot(t *testing.T, fs Fs) string { + path := testDir(fs) + setupGlobFiles(t, fs, path) + return path +} + +func setupGlobDirReusePath(t *testing.T, fs Fs, path string) string { + testRegistry[fs] = append(testRegistry[fs], path) + return setupGlobFiles(t, fs, path) +} + +func setupGlobFiles(t *testing.T, fs Fs, path string) string { + testSubDir := filepath.Join(path, "globs", "bobs") + err := fs.MkdirAll(testSubDir, 0700) + if err != nil && !os.IsExist(err) { + t.Fatal(err) + } + + f, err := fs.Create(filepath.Join(testSubDir, "/matcher")) + if err != nil { + t.Fatal(err) + } + f.WriteString("Testfile 1 content") + f.Close() + + f, err = fs.Create(filepath.Join(testSubDir, "/../submatcher")) + if err != nil { + t.Fatal(err) + } + f.WriteString("Testfile 2 content") + f.Close() + + f, err = fs.Create(filepath.Join(testSubDir, "/../../match")) + if err != nil { + t.Fatal(err) + } + f.WriteString("Testfile 3 content") + f.Close() + + return testSubDir +} + +func TestGlob(t *testing.T) { + defer removeAllTestFiles(t) + var testDir string + for i, fs := range Fss { + if i == 0 { + testDir = setupGlobDirRoot(t, fs) + } else { + setupGlobDirReusePath(t, fs, testDir) + } + } + + var globTests = []struct { + pattern, result string + }{ + {testDir + "/globs/bobs/matcher", testDir + "/globs/bobs/matcher"}, + {testDir + "/globs/*/mat?her", testDir + "/globs/bobs/matcher"}, + {testDir + "/globs/bobs/../*", testDir + "/globs/submatcher"}, + {testDir + "/match", testDir + "/match"}, + } + + for _, fs := range Fss { + + for _, tt := range globTests { + pattern := tt.pattern + result := tt.result + if runtime.GOOS == "windows" { + pattern = filepath.Clean(pattern) + result = filepath.Clean(result) + } + matches, err := Glob(fs, pattern) + if err != nil { + t.Errorf("Glob error for %q: %s", pattern, err) + continue + } + if !contains(matches, result) { + t.Errorf("Glob(%#q) = %#v want %v", pattern, matches, result) + } + } + for _, pattern := range []string{"no_match", "../*/no_match"} { + matches, err := Glob(fs, pattern) + if err != nil { + t.Errorf("Glob error for %q: %s", pattern, err) + continue + } + if len(matches) != 0 { + t.Errorf("Glob(%#q) = %#v want []", pattern, matches) + } + } + + } +} + +func TestGlobSymlink(t *testing.T) { + defer removeAllTestFiles(t) + + fs := &OsFs{} + testDir := setupGlobDirRoot(t, fs) + + err := os.Symlink("target", filepath.Join(testDir, "symlink")) + if err != nil { + t.Skipf("skipping on %s", runtime.GOOS) + } + + var globSymlinkTests = []struct { + path, dest string + brokenLink bool + }{ + {"test1", "link1", false}, + {"test2", "link2", true}, + } + + for _, tt := range globSymlinkTests { + path := 
filepath.Join(testDir, tt.path) + dest := filepath.Join(testDir, tt.dest) + f, err := fs.Create(path) + if err != nil { + t.Fatal(err) + } + if err := f.Close(); err != nil { + t.Fatal(err) + } + err = os.Symlink(path, dest) + if err != nil { + t.Fatal(err) + } + if tt.brokenLink { + // Break the symlink. + fs.Remove(path) + } + matches, err := Glob(fs, dest) + if err != nil { + t.Errorf("GlobSymlink error for %q: %s", dest, err) + } + if !contains(matches, dest) { + t.Errorf("Glob(%#q) = %#v want %v", dest, matches, dest) + } + } +} + + +func TestGlobError(t *testing.T) { + for _, fs := range Fss { + _, err := Glob(fs, "[7]") + if err != nil { + t.Error("expected error for bad pattern; got none") + } + } +} diff --git a/vendor/github.com/spf13/afero/mem/file.go b/vendor/github.com/spf13/afero/mem/file.go index e41e012..7af2fb5 100644 --- a/vendor/github.com/spf13/afero/mem/file.go +++ b/vendor/github.com/spf13/afero/mem/file.go @@ -74,14 +74,24 @@ func CreateDir(name string) *FileData { } func ChangeFileName(f *FileData, newname string) { + f.Lock() f.name = newname + f.Unlock() } func SetMode(f *FileData, mode os.FileMode) { + f.Lock() f.mode = mode + f.Unlock() } func SetModTime(f *FileData, mtime time.Time) { + f.Lock() + setModTime(f, mtime) + f.Unlock() +} + +func setModTime(f *FileData, mtime time.Time) { f.modtime = mtime } @@ -102,7 +112,7 @@ func (f *File) Close() error { f.fileData.Lock() f.closed = true if !f.readOnly { - SetModTime(f.fileData, time.Now()) + setModTime(f.fileData, time.Now()) } f.fileData.Unlock() return nil @@ -121,6 +131,9 @@ func (f *File) Sync() error { } func (f *File) Readdir(count int) (res []os.FileInfo, err error) { + if !f.fileData.dir { + return nil, &os.PathError{Op: "readdir", Path: f.fileData.name, Err: errors.New("not a dir")} + } var outLength int64 f.fileData.Lock() @@ -166,6 +179,9 @@ func (f *File) Read(b []byte) (n int, err error) { if len(b) > 0 && int(f.at) == len(f.fileData.data) { return 0, io.EOF } + if int(f.at) > len(f.fileData.data) { + return 0, io.ErrUnexpectedEOF + } if len(f.fileData.data)-int(f.at) >= len(b) { n = len(b) } else { @@ -197,7 +213,7 @@ func (f *File) Truncate(size int64) error { } else { f.fileData.data = f.fileData.data[0:size] } - SetModTime(f.fileData, time.Now()) + setModTime(f.fileData, time.Now()) return nil } @@ -236,7 +252,7 @@ func (f *File) Write(b []byte) (n int, err error) { f.fileData.data = append(f.fileData.data[:cur], b...) f.fileData.data = append(f.fileData.data, tail...) 
} - SetModTime(f.fileData, time.Now()) + setModTime(f.fileData, time.Now()) atomic.StoreInt64(&f.at, int64(len(f.fileData.data))) return @@ -261,17 +277,33 @@ type FileInfo struct { // Implements os.FileInfo func (s *FileInfo) Name() string { + s.Lock() _, name := filepath.Split(s.name) + s.Unlock() return name } -func (s *FileInfo) Mode() os.FileMode { return s.mode } -func (s *FileInfo) ModTime() time.Time { return s.modtime } -func (s *FileInfo) IsDir() bool { return s.dir } -func (s *FileInfo) Sys() interface{} { return nil } +func (s *FileInfo) Mode() os.FileMode { + s.Lock() + defer s.Unlock() + return s.mode +} +func (s *FileInfo) ModTime() time.Time { + s.Lock() + defer s.Unlock() + return s.modtime +} +func (s *FileInfo) IsDir() bool { + s.Lock() + defer s.Unlock() + return s.dir +} +func (s *FileInfo) Sys() interface{} { return nil } func (s *FileInfo) Size() int64 { if s.IsDir() { return int64(42) } + s.Lock() + defer s.Unlock() return int64(len(s.data)) } diff --git a/vendor/github.com/spf13/afero/mem/file_test.go b/vendor/github.com/spf13/afero/mem/file_test.go new file mode 100644 index 0000000..5769067 --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/file_test.go @@ -0,0 +1,154 @@ +package mem + +import ( + "testing" + "time" +) + +func TestFileDataNameRace(t *testing.T) { + t.Parallel() + const someName = "someName" + const someOtherName = "someOtherName" + d := FileData{ + name: someName, + } + + if d.Name() != someName { + t.Errorf("Failed to read correct Name, was %v", d.Name()) + } + + ChangeFileName(&d, someOtherName) + if d.Name() != someOtherName { + t.Errorf("Failed to set Name, was %v", d.Name()) + } + + go func() { + ChangeFileName(&d, someName) + }() + + if d.Name() != someName && d.Name() != someOtherName { + t.Errorf("Failed to read either Name, was %v", d.Name()) + } +} + +func TestFileDataModTimeRace(t *testing.T) { + t.Parallel() + someTime := time.Now() + someOtherTime := someTime.Add(1 * time.Minute) + + d := FileData{ + modtime: someTime, + } + + s := FileInfo{ + FileData: &d, + } + + if s.ModTime() != someTime { + t.Errorf("Failed to read correct value, was %v", s.ModTime()) + } + + SetModTime(&d, someOtherTime) + if s.ModTime() != someOtherTime { + t.Errorf("Failed to set ModTime, was %v", s.ModTime()) + } + + go func() { + SetModTime(&d, someTime) + }() + + if s.ModTime() != someTime && s.ModTime() != someOtherTime { + t.Errorf("Failed to read either modtime, was %v", s.ModTime()) + } +} + +func TestFileDataModeRace(t *testing.T) { + t.Parallel() + const someMode = 0777 + const someOtherMode = 0660 + + d := FileData{ + mode: someMode, + } + + s := FileInfo{ + FileData: &d, + } + + if s.Mode() != someMode { + t.Errorf("Failed to read correct value, was %v", s.Mode()) + } + + SetMode(&d, someOtherMode) + if s.Mode() != someOtherMode { + t.Errorf("Failed to set Mode, was %v", s.Mode()) + } + + go func() { + SetMode(&d, someMode) + }() + + if s.Mode() != someMode && s.Mode() != someOtherMode { + t.Errorf("Failed to read either mode, was %v", s.Mode()) + } +} + +func TestFileDataIsDirRace(t *testing.T) { + t.Parallel() + + d := FileData{ + dir: true, + } + + s := FileInfo{ + FileData: &d, + } + + if s.IsDir() != true { + t.Errorf("Failed to read correct value, was %v", s.IsDir()) + } + + go func() { + s.Lock() + d.dir = false + s.Unlock() + }() + + //just logging the value to trigger a read: + t.Logf("Value is %v", s.IsDir()) +} + +func TestFileDataSizeRace(t *testing.T) { + t.Parallel() + + const someData = "Hello" + const someOtherDataSize = "Hello 
World" + + d := FileData{ + data: []byte(someData), + dir: false, + } + + s := FileInfo{ + FileData: &d, + } + + if s.Size() != int64(len(someData)) { + t.Errorf("Failed to read correct value, was %v", s.Size()) + } + + go func() { + s.Lock() + d.data = []byte(someOtherDataSize) + s.Unlock() + }() + + //just logging the value to trigger a read: + t.Logf("Value is %v", s.Size()) + + //Testing the Dir size case + d.dir = true + if s.Size() != int64(42) { + t.Errorf("Failed to read correct value for dir, was %v", s.Size()) + } +} diff --git a/vendor/github.com/spf13/afero/memmap.go b/vendor/github.com/spf13/afero/memmap.go index 767ac1d..09498e7 100644 --- a/vendor/github.com/spf13/afero/memmap.go +++ b/vendor/github.com/spf13/afero/memmap.go @@ -66,7 +66,10 @@ func (m *MemMapFs) unRegisterWithParent(fileName string) error { if parent == nil { log.Panic("parent of ", f.Name(), " is nil") } + + parent.Lock() mem.RemoveFromMemDir(parent, f) + parent.Unlock() return nil } @@ -99,8 +102,10 @@ func (m *MemMapFs) registerWithParent(f *mem.FileData) { } } + parent.Lock() mem.InitializeDir(parent) mem.AddToMemDir(parent, f) + parent.Unlock() } func (m *MemMapFs) lockfreeMkdir(name string, perm os.FileMode) error { @@ -136,7 +141,7 @@ func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error { m.registerWithParent(item) m.mu.Unlock() - m.Chmod(name, perm) + m.Chmod(name, perm|os.ModeDir) return nil } @@ -146,9 +151,8 @@ func (m *MemMapFs) MkdirAll(path string, perm os.FileMode) error { if err != nil { if err.(*os.PathError).Err == ErrFileExists { return nil - } else { - return err } + return err } return nil } diff --git a/vendor/github.com/spf13/afero/memmap_test.go b/vendor/github.com/spf13/afero/memmap_test.go index ca5abbc..47414ab 100644 --- a/vendor/github.com/spf13/afero/memmap_test.go +++ b/vendor/github.com/spf13/afero/memmap_test.go @@ -1,6 +1,8 @@ package afero import ( + "fmt" + "io" "os" "path/filepath" "runtime" @@ -109,6 +111,8 @@ func TestPermSet(t *testing.T) { const dirPathAll = "/my/path/to/dir" const fileMode = os.FileMode(0765) + // directories will also have the directory bit set + const dirMode = fileMode | os.ModeDir fs := NewMemMapFs() @@ -131,7 +135,7 @@ func TestPermSet(t *testing.T) { } // Test Mkdir - err = fs.Mkdir(dirPath, fileMode) + err = fs.Mkdir(dirPath, dirMode) if err != nil { t.Errorf("MkDir Create failed: %s", err) return @@ -141,13 +145,14 @@ func TestPermSet(t *testing.T) { t.Errorf("Stat failed: %s", err) return } - if s.Mode().String() != fileMode.String() { - t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), fileMode.String()) + // sets File + if s.Mode().String() != dirMode.String() { + t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), dirMode.String()) return } // Test MkdirAll - err = fs.MkdirAll(dirPathAll, fileMode) + err = fs.MkdirAll(dirPathAll, dirMode) if err != nil { t.Errorf("MkDir Create failed: %s", err) return @@ -157,8 +162,8 @@ func TestPermSet(t *testing.T) { t.Errorf("Stat failed: %s", err) return } - if s.Mode().String() != fileMode.String() { - t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), fileMode.String()) + if s.Mode().String() != dirMode.String() { + t.Errorf("Permissions Incorrect: %s != %s", s.Mode().String(), dirMode.String()) return } } @@ -343,3 +348,104 @@ func TestRacingDeleteAndClose(t *testing.T) { }() close(in) } + +// This test should be run with the race detector on: +// go test -run TestMemFsDataRace -race +func TestMemFsDataRace(t *testing.T) { + const dir = "test_dir" + fs 
:= NewMemMapFs() + + if err := fs.MkdirAll(dir, 0777); err != nil { + t.Fatal(err) + } + + const n = 1000 + done := make(chan struct{}) + + go func() { + defer close(done) + for i := 0; i < n; i++ { + fname := filepath.Join(dir, fmt.Sprintf("%d.txt", i)) + if err := WriteFile(fs, fname, []byte(""), 0777); err != nil { + panic(err) + } + if err := fs.Remove(fname); err != nil { + panic(err) + } + } + }() + +loop: + for { + select { + case <-done: + break loop + default: + _, err := ReadDir(fs, dir) + if err != nil { + t.Fatal(err) + } + } + } +} + +func TestMemFsDirMode(t *testing.T) { + fs := NewMemMapFs() + err := fs.Mkdir("/testDir1", 0644) + if err != nil { + t.Error(err) + } + err = fs.MkdirAll("/sub/testDir2", 0644) + if err != nil { + t.Error(err) + } + info, err := fs.Stat("/testDir1") + if err != nil { + t.Error(err) + } + if !info.IsDir() { + t.Error("should be a directory") + } + if !info.Mode().IsDir() { + t.Error("FileMode is not directory") + } + info, err = fs.Stat("/sub/testDir2") + if err != nil { + t.Error(err) + } + if !info.IsDir() { + t.Error("should be a directory") + } + if !info.Mode().IsDir() { + t.Error("FileMode is not directory") + } +} + +func TestMemFsUnexpectedEOF(t *testing.T) { + t.Parallel() + + fs := NewMemMapFs() + + if err := WriteFile(fs, "file.txt", []byte("abc"), 0777); err != nil { + t.Fatal(err) + } + + f, err := fs.Open("file.txt") + if err != nil { + t.Fatal(err) + } + defer f.Close() + + // Seek beyond the end. + _, err = f.Seek(512, 0) + if err != nil { + t.Fatal(err) + } + + buff := make([]byte, 256) + _, err = io.ReadAtLeast(f, buff, 256) + + if err != io.ErrUnexpectedEOF { + t.Fatal("Expected ErrUnexpectedEOF") + } +} diff --git a/vendor/github.com/spf13/afero/os.go b/vendor/github.com/spf13/afero/os.go index 6b8bce1..13cc1b8 100644 --- a/vendor/github.com/spf13/afero/os.go +++ b/vendor/github.com/spf13/afero/os.go @@ -19,6 +19,8 @@ import ( "time" ) +var _ Lstater = (*OsFs)(nil) + // OsFs is a Fs implementation that uses functions provided by the os package. 
// // For details in any method, check the documentation of the os package @@ -92,3 +94,8 @@ func (OsFs) Chmod(name string, mode os.FileMode) error { func (OsFs) Chtimes(name string, atime time.Time, mtime time.Time) error { return os.Chtimes(name, atime, mtime) } + +func (OsFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + fi, err := os.Lstat(name) + return fi, true, err +} diff --git a/vendor/github.com/spf13/afero/path.go b/vendor/github.com/spf13/afero/path.go index 1d90e46..18f60a0 100644 --- a/vendor/github.com/spf13/afero/path.go +++ b/vendor/github.com/spf13/afero/path.go @@ -60,7 +60,7 @@ func walk(fs Fs, path string, info os.FileInfo, walkFn filepath.WalkFunc) error for _, name := range names { filename := filepath.Join(path, name) - fileInfo, err := lstatIfOs(fs, filename) + fileInfo, err := lstatIfPossible(fs, filename) if err != nil { if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir { return err @@ -77,15 +77,13 @@ func walk(fs Fs, path string, info os.FileInfo, walkFn filepath.WalkFunc) error return nil } -// if the filesystem is OsFs use Lstat, else use fs.Stat -func lstatIfOs(fs Fs, path string) (info os.FileInfo, err error) { - _, ok := fs.(*OsFs) - if ok { - info, err = os.Lstat(path) - } else { - info, err = fs.Stat(path) +// if the filesystem supports it, use Lstat, else use fs.Stat +func lstatIfPossible(fs Fs, path string) (os.FileInfo, error) { + if lfs, ok := fs.(Lstater); ok { + fi, _, err := lfs.LstatIfPossible(path) + return fi, err } - return + return fs.Stat(path) } // Walk walks the file tree rooted at root, calling walkFn for each file or @@ -100,7 +98,7 @@ func (a Afero) Walk(root string, walkFn filepath.WalkFunc) error { } func Walk(fs Fs, root string, walkFn filepath.WalkFunc) error { - info, err := lstatIfOs(fs, root) + info, err := lstatIfPossible(fs, root) if err != nil { return walkFn(root, nil, err) } diff --git a/vendor/github.com/spf13/afero/readonlyfs.go b/vendor/github.com/spf13/afero/readonlyfs.go index f1fa55b..c6376ec 100644 --- a/vendor/github.com/spf13/afero/readonlyfs.go +++ b/vendor/github.com/spf13/afero/readonlyfs.go @@ -6,6 +6,8 @@ import ( "time" ) +var _ Lstater = (*ReadOnlyFs)(nil) + type ReadOnlyFs struct { source Fs } @@ -34,6 +36,14 @@ func (r *ReadOnlyFs) Stat(name string) (os.FileInfo, error) { return r.source.Stat(name) } +func (r *ReadOnlyFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + if lsf, ok := r.source.(Lstater); ok { + return lsf.LstatIfPossible(name) + } + fi, err := r.Stat(name) + return fi, false, err +} + func (r *ReadOnlyFs) Rename(o, n string) error { return syscall.EPERM } diff --git a/vendor/github.com/spf13/afero/unionFile.go b/vendor/github.com/spf13/afero/unionFile.go index 99f9e5d..eda9631 100644 --- a/vendor/github.com/spf13/afero/unionFile.go +++ b/vendor/github.com/spf13/afero/unionFile.go @@ -21,32 +21,33 @@ import ( // successful read in the overlay will move the cursor position in the base layer // by the number of bytes read. type UnionFile struct { - base File - layer File - off int - files []os.FileInfo + Base File + Layer File + Merger DirsMerger + off int + files []os.FileInfo } func (f *UnionFile) Close() error { // first close base, so we have a newer timestamp in the overlay. 
If we'd close // the overlay first, we'd get a cacheStale the next time we access this file // -> cache would be useless ;-) - if f.base != nil { - f.base.Close() + if f.Base != nil { + f.Base.Close() } - if f.layer != nil { - return f.layer.Close() + if f.Layer != nil { + return f.Layer.Close() } return BADFD } func (f *UnionFile) Read(s []byte) (int, error) { - if f.layer != nil { - n, err := f.layer.Read(s) - if (err == nil || err == io.EOF) && f.base != nil { + if f.Layer != nil { + n, err := f.Layer.Read(s) + if (err == nil || err == io.EOF) && f.Base != nil { // advance the file position also in the base file, the next // call may be a write at this position (or a seek with SEEK_CUR) - if _, seekErr := f.base.Seek(int64(n), os.SEEK_CUR); seekErr != nil { + if _, seekErr := f.Base.Seek(int64(n), os.SEEK_CUR); seekErr != nil { // only overwrite err in case the seek fails: we need to // report an eventual io.EOF to the caller err = seekErr @@ -54,109 +55,154 @@ func (f *UnionFile) Read(s []byte) (int, error) { } return n, err } - if f.base != nil { - return f.base.Read(s) + if f.Base != nil { + return f.Base.Read(s) } return 0, BADFD } func (f *UnionFile) ReadAt(s []byte, o int64) (int, error) { - if f.layer != nil { - n, err := f.layer.ReadAt(s, o) - if (err == nil || err == io.EOF) && f.base != nil { - _, err = f.base.Seek(o+int64(n), os.SEEK_SET) + if f.Layer != nil { + n, err := f.Layer.ReadAt(s, o) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o+int64(n), os.SEEK_SET) } return n, err } - if f.base != nil { - return f.base.ReadAt(s, o) + if f.Base != nil { + return f.Base.ReadAt(s, o) } return 0, BADFD } func (f *UnionFile) Seek(o int64, w int) (pos int64, err error) { - if f.layer != nil { - pos, err = f.layer.Seek(o, w) - if (err == nil || err == io.EOF) && f.base != nil { - _, err = f.base.Seek(o, w) + if f.Layer != nil { + pos, err = f.Layer.Seek(o, w) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o, w) } return pos, err } - if f.base != nil { - return f.base.Seek(o, w) + if f.Base != nil { + return f.Base.Seek(o, w) } return 0, BADFD } func (f *UnionFile) Write(s []byte) (n int, err error) { - if f.layer != nil { - n, err = f.layer.Write(s) - if err == nil && f.base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? - _, err = f.base.Write(s) + if f.Layer != nil { + n, err = f.Layer.Write(s) + if err == nil && f.Base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? + _, err = f.Base.Write(s) } return n, err } - if f.base != nil { - return f.base.Write(s) + if f.Base != nil { + return f.Base.Write(s) } return 0, BADFD } func (f *UnionFile) WriteAt(s []byte, o int64) (n int, err error) { - if f.layer != nil { - n, err = f.layer.WriteAt(s, o) - if err == nil && f.base != nil { - _, err = f.base.WriteAt(s, o) + if f.Layer != nil { + n, err = f.Layer.WriteAt(s, o) + if err == nil && f.Base != nil { + _, err = f.Base.WriteAt(s, o) } return n, err } - if f.base != nil { - return f.base.WriteAt(s, o) + if f.Base != nil { + return f.Base.WriteAt(s, o) } return 0, BADFD } func (f *UnionFile) Name() string { - if f.layer != nil { - return f.layer.Name() + if f.Layer != nil { + return f.Layer.Name() } - return f.base.Name() + return f.Base.Name() +} + +// DirsMerger is how UnionFile weaves two directories together. +// It takes the FileInfo slices from the layer and the base and returns a +// single view. 
+type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) + +var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { + var files = make(map[string]os.FileInfo) + + for _, fi := range lofi { + files[fi.Name()] = fi + } + + for _, fi := range bofi { + if _, exists := files[fi.Name()]; !exists { + files[fi.Name()] = fi + } + } + + rfi := make([]os.FileInfo, len(files)) + + i := 0 + for _, fi := range files { + rfi[i] = fi + i++ + } + + return rfi, nil + } // Readdir will weave the two directories together and -// return a single view of the overlayed directories +// return a single view of the overlayed directories. +// At the end of the directory view, the error is io.EOF if c > 0. func (f *UnionFile) Readdir(c int) (ofi []os.FileInfo, err error) { + var merge DirsMerger = f.Merger + if merge == nil { + merge = defaultUnionMergeDirsFn + } + if f.off == 0 { - var files = make(map[string]os.FileInfo) - var rfi []os.FileInfo - if f.layer != nil { - rfi, err = f.layer.Readdir(-1) + var lfi []os.FileInfo + if f.Layer != nil { + lfi, err = f.Layer.Readdir(-1) if err != nil { return nil, err } - for _, fi := range rfi { - files[fi.Name()] = fi - } } - if f.base != nil { - rfi, err = f.base.Readdir(-1) + var bfi []os.FileInfo + if f.Base != nil { + bfi, err = f.Base.Readdir(-1) if err != nil { return nil, err } - for _, fi := range rfi { - if _, exists := files[fi.Name()]; !exists { - files[fi.Name()] = fi - } - } + } - for _, fi := range files { - f.files = append(f.files, fi) + merged, err := merge(lfi, bfi) + if err != nil { + return nil, err } + f.files = append(f.files, merged...) + } + + if c <= 0 && len(f.files) == 0 { + return f.files, nil } - if c == -1 { + + if f.off >= len(f.files) { + return nil, io.EOF + } + + if c <= 0 { return f.files[f.off:], nil } + + if c > len(f.files) { + c = len(f.files) + } + defer func() { f.off += c }() return f.files[f.off:c], nil } @@ -174,53 +220,53 @@ func (f *UnionFile) Readdirnames(c int) ([]string, error) { } func (f *UnionFile) Stat() (os.FileInfo, error) { - if f.layer != nil { - return f.layer.Stat() + if f.Layer != nil { + return f.Layer.Stat() } - if f.base != nil { - return f.base.Stat() + if f.Base != nil { + return f.Base.Stat() } return nil, BADFD } func (f *UnionFile) Sync() (err error) { - if f.layer != nil { - err = f.layer.Sync() - if err == nil && f.base != nil { - err = f.base.Sync() + if f.Layer != nil { + err = f.Layer.Sync() + if err == nil && f.Base != nil { + err = f.Base.Sync() } return err } - if f.base != nil { - return f.base.Sync() + if f.Base != nil { + return f.Base.Sync() } return BADFD } func (f *UnionFile) Truncate(s int64) (err error) { - if f.layer != nil { - err = f.layer.Truncate(s) - if err == nil && f.base != nil { - err = f.base.Truncate(s) + if f.Layer != nil { + err = f.Layer.Truncate(s) + if err == nil && f.Base != nil { + err = f.Base.Truncate(s) } return err } - if f.base != nil { - return f.base.Truncate(s) + if f.Base != nil { + return f.Base.Truncate(s) } return BADFD } func (f *UnionFile) WriteString(s string) (n int, err error) { - if f.layer != nil { - n, err = f.layer.WriteString(s) - if err == nil && f.base != nil { - _, err = f.base.WriteString(s) + if f.Layer != nil { + n, err = f.Layer.WriteString(s) + if err == nil && f.Base != nil { + _, err = f.Base.WriteString(s) } return n, err } - if f.base != nil { - return f.base.WriteString(s) + if f.Base != nil { + return f.Base.WriteString(s) } return 0, BADFD } diff --git a/vendor/github.com/spf13/afero/util.go 
b/vendor/github.com/spf13/afero/util.go index 2f44e6a..4f253f4 100644 --- a/vendor/github.com/spf13/afero/util.go +++ b/vendor/github.com/spf13/afero/util.go @@ -20,7 +20,6 @@ import ( "bytes" "fmt" "io" - "log" "os" "path/filepath" "strings" @@ -46,7 +45,7 @@ func WriteReader(fs Fs, path string, r io.Reader) (err error) { err = fs.MkdirAll(ospath, 0777) // rwx, rw, r if err != nil { if err != os.ErrExist { - log.Panicln(err) + return err } } } @@ -157,7 +156,7 @@ func UnicodeSanitize(s string) string { return string(target) } -// Transform characters with accents into plan forms +// Transform characters with accents into plain forms. func NeuterAccents(s string) string { t := transform.Chain(norm.NFD, transform.RemoveFunc(isMn), norm.NFC) result, _, _ := transform.String(t, string(s)) diff --git a/vendor/github.com/spf13/cast/.travis.yml b/vendor/github.com/spf13/cast/.travis.yml index 4da9766..6420d1c 100644 --- a/vendor/github.com/spf13/cast/.travis.yml +++ b/vendor/github.com/spf13/cast/.travis.yml @@ -1,8 +1,9 @@ language: go +env: + - GO111MODULE=on sudo: required go: - - 1.7.5 - - 1.8 + - "1.11.x" - tip os: - linux diff --git a/vendor/github.com/spf13/cast/cast.go b/vendor/github.com/spf13/cast/cast.go index 8b8c208..9fba638 100644 --- a/vendor/github.com/spf13/cast/cast.go +++ b/vendor/github.com/spf13/cast/cast.go @@ -122,6 +122,18 @@ func ToStringMapBool(i interface{}) map[string]bool { return v } +// ToStringMapInt casts an interface to a map[string]int type. +func ToStringMapInt(i interface{}) map[string]int { + v, _ := ToStringMapIntE(i) + return v +} + +// ToStringMapInt64 casts an interface to a map[string]int64 type. +func ToStringMapInt64(i interface{}) map[string]int64 { + v, _ := ToStringMapInt64E(i) + return v +} + // ToStringMap casts an interface to a map[string]interface{} type. 
func ToStringMap(i interface{}) map[string]interface{} { v, _ := ToStringMapE(i) diff --git a/vendor/github.com/spf13/cast/cast_test.go b/vendor/github.com/spf13/cast/cast_test.go index 404fe76..d9a1479 100644 --- a/vendor/github.com/spf13/cast/cast_test.go +++ b/vendor/github.com/spf13/cast/cast_test.go @@ -697,6 +697,10 @@ func TestStringMapStringSliceE(t *testing.T) { var stringMapInterface1 = map[string]interface{}{"key 1": []string{"value 1"}, "key 2": []string{"value 2"}} var stringMapInterfaceResult1 = map[string][]string{"key 1": {"value 1"}, "key 2": {"value 2"}} + var jsonStringMapString = `{"key 1": "value 1", "key 2": "value 2"}` + var jsonStringMapStringArray = `{"key 1": ["value 1"], "key 2": ["value 2", "value 3"]}` + var jsonStringMapStringArrayResult = map[string][]string{"key 1": {"value 1"}, "key 2": {"value 2", "value 3"}} + type Key struct { k string } @@ -718,11 +722,15 @@ func TestStringMapStringSliceE(t *testing.T) { {interfaceMapInterfaceSlice, stringMapStringSlice, false}, {interfaceMapString, stringMapStringSingleSliceFieldsResult, false}, {interfaceMapInterface, stringMapStringSingleSliceFieldsResult, false}, + {jsonStringMapStringArray, jsonStringMapStringArrayResult, false}, + // errors {nil, nil, true}, {testing.T{}, nil, true}, {map[interface{}]interface{}{"foo": testing.T{}}, nil, true}, {map[interface{}]interface{}{Key{"foo"}: "bar"}, nil, true}, // ToStringE(Key{"foo"}) should fail + {jsonStringMapString, nil, true}, + {"", nil, true}, } for i, test := range tests { @@ -751,9 +759,13 @@ func TestToStringMapE(t *testing.T) { }{ {map[interface{}]interface{}{"tag": "tags", "group": "groups"}, map[string]interface{}{"tag": "tags", "group": "groups"}, false}, {map[string]interface{}{"tag": "tags", "group": "groups"}, map[string]interface{}{"tag": "tags", "group": "groups"}, false}, + {`{"tag": "tags", "group": "groups"}`, map[string]interface{}{"tag": "tags", "group": "groups"}, false}, + {`{"tag": "tags", "group": true}`, map[string]interface{}{"tag": "tags", "group": true}, false}, + // errors {nil, nil, true}, {testing.T{}, nil, true}, + {"", nil, true}, } for i, test := range tests { @@ -783,9 +795,12 @@ func TestToStringMapBoolE(t *testing.T) { {map[interface{}]interface{}{"v1": true, "v2": false}, map[string]bool{"v1": true, "v2": false}, false}, {map[string]interface{}{"v1": true, "v2": false}, map[string]bool{"v1": true, "v2": false}, false}, {map[string]bool{"v1": true, "v2": false}, map[string]bool{"v1": true, "v2": false}, false}, + {`{"v1": true, "v2": false}`, map[string]bool{"v1": true, "v2": false}, false}, + // errors {nil, nil, true}, {testing.T{}, nil, true}, + {"", nil, true}, } for i, test := range tests { @@ -806,11 +821,91 @@ func TestToStringMapBoolE(t *testing.T) { } } +func TestToStringMapIntE(t *testing.T) { + tests := []struct { + input interface{} + expect map[string]int + iserr bool + }{ + {map[interface{}]interface{}{"v1": 1, "v2": 222}, map[string]int{"v1": 1, "v2": 222}, false}, + {map[string]interface{}{"v1": 342, "v2": 5141}, map[string]int{"v1": 342, "v2": 5141}, false}, + {map[string]int{"v1": 33, "v2": 88}, map[string]int{"v1": 33, "v2": 88}, false}, + {map[string]int32{"v1": int32(33), "v2": int32(88)}, map[string]int{"v1": 33, "v2": 88}, false}, + {map[string]uint16{"v1": uint16(33), "v2": uint16(88)}, map[string]int{"v1": 33, "v2": 88}, false}, + {map[string]float64{"v1": float64(8.22), "v2": float64(43.32)}, map[string]int{"v1": 8, "v2": 43}, false}, + {`{"v1": 67, "v2": 56}`, map[string]int{"v1": 67, "v2": 56}, false}, 
+ + // errors + {nil, nil, true}, + {testing.T{}, nil, true}, + {"", nil, true}, + } + + for i, test := range tests { + errmsg := fmt.Sprintf("i = %d", i) // assert helper message + + v, err := ToStringMapIntE(test.input) + if test.iserr { + assert.Error(t, err, errmsg) + continue + } + + assert.NoError(t, err, errmsg) + assert.Equal(t, test.expect, v, errmsg) + + // Non-E test + v = ToStringMapInt(test.input) + assert.Equal(t, test.expect, v, errmsg) + } +} + +func TestToStringMapInt64E(t *testing.T) { + tests := []struct { + input interface{} + expect map[string]int64 + iserr bool + }{ + {map[interface{}]interface{}{"v1": int32(8), "v2": int32(888)}, map[string]int64{"v1": int64(8), "v2": int64(888)}, false}, + {map[string]interface{}{"v1": int64(45), "v2": int64(67)}, map[string]int64{"v1": 45, "v2": 67}, false}, + {map[string]int64{"v1": 33, "v2": 88}, map[string]int64{"v1": 33, "v2": 88}, false}, + {map[string]int{"v1": 33, "v2": 88}, map[string]int64{"v1": 33, "v2": 88}, false}, + {map[string]int32{"v1": int32(33), "v2": int32(88)}, map[string]int64{"v1": 33, "v2": 88}, false}, + {map[string]uint16{"v1": uint16(33), "v2": uint16(88)}, map[string]int64{"v1": 33, "v2": 88}, false}, + {map[string]float64{"v1": float64(8.22), "v2": float64(43.32)}, map[string]int64{"v1": 8, "v2": 43}, false}, + {`{"v1": 67, "v2": 56}`, map[string]int64{"v1": 67, "v2": 56}, false}, + + // errors + {nil, nil, true}, + {testing.T{}, nil, true}, + {"", nil, true}, + } + + for i, test := range tests { + errmsg := fmt.Sprintf("i = %d", i) // assert helper message + + v, err := ToStringMapInt64E(test.input) + if test.iserr { + assert.Error(t, err, errmsg) + continue + } + + assert.NoError(t, err, errmsg) + assert.Equal(t, test.expect, v, errmsg) + + // Non-E test + v = ToStringMapInt64(test.input) + assert.Equal(t, test.expect, v, errmsg) + } +} + func TestToStringMapStringE(t *testing.T) { var stringMapString = map[string]string{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"} var stringMapInterface = map[string]interface{}{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"} var interfaceMapString = map[interface{}]string{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"} var interfaceMapInterface = map[interface{}]interface{}{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"} + var jsonString = `{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"}` + var invalidJsonString = `{"key 1": "value 1", "key 2": "value 2", "key 3": "value 3"` + var emptyString = "" tests := []struct { input interface{} @@ -821,9 +916,13 @@ func TestToStringMapStringE(t *testing.T) { {stringMapInterface, stringMapString, false}, {interfaceMapString, stringMapString, false}, {interfaceMapInterface, stringMapString, false}, + {jsonString, stringMapString, false}, + // errors {nil, nil, true}, {testing.T{}, nil, true}, + {invalidJsonString, nil, true}, + {emptyString, nil, true}, } for i, test := range tests { @@ -984,9 +1083,12 @@ func TestToDurationSliceE(t *testing.T) { {[]string{"1s", "1m"}, []time.Duration{time.Second, time.Minute}, false}, {[]int{1, 2}, []time.Duration{1, 2}, false}, {[]interface{}{1, 3}, []time.Duration{1, 3}, false}, + {[]time.Duration{1, 3}, []time.Duration{1, 3}, false}, + // errors {nil, nil, true}, {testing.T{}, nil, true}, + {[]string{"invalid"}, nil, true}, } for i, test := range tests { @@ -1087,6 +1189,7 @@ func TestToTimeEE(t *testing.T) { {"Tue, 10 Nov 2009 23:00:00 UTC", time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC), false}, // RFC1123 {"Tue, 10 Nov 2009 
23:00:00 +0000", time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC), false}, // RFC1123Z {"2009-11-10T23:00:00Z", time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC), false}, // RFC3339 + {"2018-10-21T23:21:29+0200", time.Date(2018, 10, 21, 21, 21, 29, 0, time.UTC), false}, // RFC3339 without timezone hh:mm colon {"2009-11-10T23:00:00Z", time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC), false}, // RFC3339Nano {"11:00PM", time.Date(0, 1, 1, 23, 0, 0, 0, time.UTC), false}, // Kitchen {"Nov 10 23:00:00", time.Date(0, 11, 10, 23, 0, 0, 0, time.UTC), false}, // Stamp @@ -1094,6 +1197,7 @@ func TestToTimeEE(t *testing.T) { {"Nov 10 23:00:00.000000", time.Date(0, 11, 10, 23, 0, 0, 0, time.UTC), false}, // StampMicro {"Nov 10 23:00:00.000000000", time.Date(0, 11, 10, 23, 0, 0, 0, time.UTC), false}, // StampNano {"2016-03-06 15:28:01-00:00", time.Date(2016, 3, 6, 15, 28, 1, 0, time.UTC), false}, // RFC3339 without T + {"2016-03-06 15:28:01-0000", time.Date(2016, 3, 6, 15, 28, 1, 0, time.UTC), false}, // RFC3339 without T or timezone hh:mm colon {"2016-03-06 15:28:01", time.Date(2016, 3, 6, 15, 28, 1, 0, time.UTC), false}, {"2016-03-06 15:28:01 -0000", time.Date(2016, 3, 6, 15, 28, 1, 0, time.UTC), false}, {"2016-03-06 15:28:01 -00:00", time.Date(2016, 3, 6, 15, 28, 1, 0, time.UTC), false}, diff --git a/vendor/github.com/spf13/cast/caste.go b/vendor/github.com/spf13/cast/caste.go index 81511fe..a4859fb 100644 --- a/vendor/github.com/spf13/cast/caste.go +++ b/vendor/github.com/spf13/cast/caste.go @@ -6,6 +6,7 @@ package cast import ( + "encoding/json" "errors" "fmt" "html/template" @@ -872,6 +873,9 @@ func ToStringMapStringE(i interface{}) (map[string]string, error) { m[ToString(k)] = ToString(val) } return m, nil + case string: + err := jsonStringToObject(v, &m) + return m, err default: return m, fmt.Errorf("unable to cast %#v of type %T to map[string]string", i, i) } @@ -932,6 +936,9 @@ func ToStringMapStringSliceE(i interface{}) (map[string][]string, error) { } m[key] = value } + case string: + err := jsonStringToObject(v, &m) + return m, err default: return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) } @@ -955,6 +962,9 @@ func ToStringMapBoolE(i interface{}) (map[string]bool, error) { return m, nil case map[string]bool: return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err default: return m, fmt.Errorf("unable to cast %#v of type %T to map[string]bool", i, i) } @@ -972,11 +982,95 @@ func ToStringMapE(i interface{}) (map[string]interface{}, error) { return m, nil case map[string]interface{}: return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err default: return m, fmt.Errorf("unable to cast %#v of type %T to map[string]interface{}", i, i) } } +// ToStringMapIntE casts an interface to a map[string]int{} type. 
+func ToStringMapIntE(i interface{}) (map[string]int, error) { + var m = map[string]int{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt(val) + } + return m, nil + case map[string]int: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToIntE(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + +// ToStringMapInt64E casts an interface to a map[string]int64{} type. +func ToStringMapInt64E(i interface{}) (map[string]int64, error) { + var m = map[string]int64{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt64(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt64(val) + } + return m, nil + case map[string]int64: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToInt64E(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + // ToSliceE casts an interface to a []interface{} type. func ToSliceE(i interface{}) ([]interface{}, error) { var s []interface{} @@ -1124,9 +1218,11 @@ func StringToDate(s string) (time.Time, error) { "2006-01-02 15:04:05.999999999 -0700 MST", // Time.String() "2006-01-02", "02 Jan 2006", + "2006-01-02T15:04:05-0700", // RFC3339 without timezone hh:mm colon "2006-01-02 15:04:05 -07:00", "2006-01-02 15:04:05 -0700", "2006-01-02 15:04:05Z07:00", // RFC3339 without T + "2006-01-02 15:04:05Z0700", // RFC3339 without T or timezone hh:mm colon "2006-01-02 15:04:05", time.Kitchen, time.Stamp, @@ -1144,3 +1240,10 @@ func parseDateWith(s string, dates []string) (d time.Time, e error) { } return d, fmt.Errorf("unable to parse date: %s", s) } + +// jsonStringToObject attempts to unmarshall a string as JSON into +// the object passed as pointer. 
+func jsonStringToObject(s string, v interface{}) error { + data := []byte(s) + return json.Unmarshal(data, v) +} diff --git a/vendor/github.com/spf13/cast/go.mod b/vendor/github.com/spf13/cast/go.mod new file mode 100644 index 0000000..c1c0232 --- /dev/null +++ b/vendor/github.com/spf13/cast/go.mod @@ -0,0 +1,7 @@ +module github.com/spf13/cast + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/testify v1.2.2 +) diff --git a/vendor/github.com/spf13/cast/go.sum b/vendor/github.com/spf13/cast/go.sum new file mode 100644 index 0000000..e03ee77 --- /dev/null +++ b/vendor/github.com/spf13/cast/go.sum @@ -0,0 +1,6 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= diff --git a/vendor/github.com/spf13/cobra/.circleci/config.yml b/vendor/github.com/spf13/cobra/.circleci/config.yml new file mode 100644 index 0000000..8194464 --- /dev/null +++ b/vendor/github.com/spf13/cobra/.circleci/config.yml @@ -0,0 +1,53 @@ +version: 2 + +references: + workspace: &workspace + /go/src/github.com/spf13/cobra + + run_tests: &run_tests + run: + name: "All Commands" + command: | + mkdir -p bin + curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.4.6/shellcheck + chmod +x bin/shellcheck + go get -t -v ./... + PATH=$PATH:$PWD/bin go test -v ./... + go build + if [ -z $NOVET ]; then + diff -u <(echo -n) <(go vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint'); + fi + +jobs: + go-current: + docker: + - image: circleci/golang:1.12 + working_directory: *workspace + steps: + - checkout + - *run_tests + - run: + name: "Check formatting" + command: diff -u <(echo -n) <(gofmt -d -s .) + go-previous: + docker: + - image: circleci/golang:1.11 + working_directory: *workspace + steps: + - checkout + - *run_tests + go-latest: + docker: + - image: circleci/golang:latest + working_directory: *workspace + steps: + - checkout + - *run_tests + +workflows: + version: 2 + main: + jobs: + - go-current + - go-previous + - go-latest diff --git a/vendor/github.com/spf13/cobra/.travis.yml b/vendor/github.com/spf13/cobra/.travis.yml index cb2bf0d..38b85f4 100644 --- a/vendor/github.com/spf13/cobra/.travis.yml +++ b/vendor/github.com/spf13/cobra/.travis.yml @@ -1,21 +1,31 @@ language: go +stages: + - diff + - test + +go: + - 1.10.x + - 1.11.x + - 1.12.x + - tip + matrix: - include: - - go: 1.7.5 - - go: 1.8.1 - - go: tip allow_failures: - go: tip + include: + - stage: diff + go: 1.12.x + script: diff -u <(echo -n) <(gofmt -d -s .) before_install: - mkdir -p bin - - curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.4.3/shellcheck + - curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.6.0/shellcheck - chmod +x bin/shellcheck + - go get -u github.com/kyoh86/richgo script: - - PATH=$PATH:$PWD/bin go test -v ./... + - PATH=$PATH:$PWD/bin richgo test -v ./... - go build - - diff -u <(echo -n) <(gofmt -d -s .) 
- if [ -z $NOVET ]; then - diff -u <(echo -n) <(go tool vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint'); + diff -u <(echo -n) <(go vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint'); fi diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md index e249c1b..ff16e3f 100644 --- a/vendor/github.com/spf13/cobra/README.md +++ b/vendor/github.com/spf13/cobra/README.md @@ -2,30 +2,54 @@ Cobra is both a library for creating powerful modern CLI applications as well as a program to generate applications and command files. -Many of the most widely used Go projects are built using Cobra including: - -* [Kubernetes](http://kubernetes.io/) -* [Hugo](http://gohugo.io) -* [rkt](https://github.com/coreos/rkt) -* [etcd](https://github.com/coreos/etcd) -* [Moby (former Docker)](https://github.com/moby/moby) -* [Docker (distribution)](https://github.com/docker/distribution) -* [OpenShift](https://www.openshift.com/) -* [Delve](https://github.com/derekparker/delve) -* [GopherJS](http://www.gopherjs.org/) -* [CockroachDB](http://www.cockroachlabs.com/) -* [Bleve](http://www.blevesearch.com/) -* [ProjectAtomic (enterprise)](http://www.projectatomic.io/) -* [GiantSwarm's swarm](https://github.com/giantswarm/cli) -* [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) -* [rclone](http://rclone.org/) - +Many of the most widely used Go projects are built using Cobra, such as: +[Kubernetes](http://kubernetes.io/), +[Hugo](http://gohugo.io), +[rkt](https://github.com/coreos/rkt), +[etcd](https://github.com/coreos/etcd), +[Moby (former Docker)](https://github.com/moby/moby), +[Docker (distribution)](https://github.com/docker/distribution), +[OpenShift](https://www.openshift.com/), +[Delve](https://github.com/derekparker/delve), +[GopherJS](http://www.gopherjs.org/), +[CockroachDB](http://www.cockroachlabs.com/), +[Bleve](http://www.blevesearch.com/), +[ProjectAtomic (enterprise)](http://www.projectatomic.io/), +[Giant Swarm's gsctl](https://github.com/giantswarm/gsctl), +[Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack), +[rclone](http://rclone.org/), +[nehm](https://github.com/bogem/nehm), +[Pouch](https://github.com/alibaba/pouch), +[Istio](https://istio.io), +[Prototool](https://github.com/uber/prototool), +[mattermost-server](https://github.com/mattermost/mattermost-server), +etc. 
[![Build Status](https://travis-ci.org/spf13/cobra.svg "Travis CI status")](https://travis-ci.org/spf13/cobra) [![CircleCI status](https://circleci.com/gh/spf13/cobra.png?circle-token=:circle-token "CircleCI status")](https://circleci.com/gh/spf13/cobra) [![GoDoc](https://godoc.org/github.com/spf13/cobra?status.svg)](https://godoc.org/github.com/spf13/cobra) -![cobra](https://cloud.githubusercontent.com/assets/173412/10911369/84832a8e-8212-11e5-9f82-cc96660a4794.gif) +# Table of Contents + +- [Overview](#overview) +- [Concepts](#concepts) + * [Commands](#commands) + * [Flags](#flags) +- [Installing](#installing) +- [Getting Started](#getting-started) + * [Using the Cobra Generator](#using-the-cobra-generator) + * [Using the Cobra Library](#using-the-cobra-library) + * [Working with Flags](#working-with-flags) + * [Positional and Custom Arguments](#positional-and-custom-arguments) + * [Example](#example) + * [Help Command](#help-command) + * [Usage Message](#usage-message) + * [PreRun and PostRun Hooks](#prerun-and-postrun-hooks) + * [Suggestions when "unknown command" happens](#suggestions-when-unknown-command-happens) + * [Generating documentation for your command](#generating-documentation-for-your-command) + * [Generating bash completions](#generating-bash-completions) +- [Contributing](#contributing) +- [License](#license) # Overview @@ -43,7 +67,6 @@ Cobra provides: * Easy generation of applications & commands with `cobra init appname` & `cobra add cmdname` * Intelligent suggestions (`app srver`... did you mean `app server`?) * Automatic help generation for commands and flags -* Automatic detailed help for `app help [command]` * Automatic help flag recognition of `-h`, `--help`, etc. * Automatically generated bash autocomplete for your application * Automatically generated man pages for your application @@ -51,16 +74,6 @@ Cobra provides: * The flexibility to define your own help, usage, etc. * Optional tight integration with [viper](http://github.com/spf13/viper) for 12-factor apps -Cobra has an exceptionally clean interface and simple design without needless -constructors or initialization methods. - -Applications built with Cobra commands are designed to be as user-friendly as -possible. Flags can be placed before or after the command (as long as a -confusing space isn’t provided). Both short and long flags can be used. A -command need not even be fully typed. Help is automatically generated and -available for the application or for a specific command using either the help -command or the `--help` flag. - # Concepts Cobra is built on a structure of commands, arguments & flags. @@ -93,20 +106,11 @@ have children commands and optionally run an action. In the example above, 'server' is the command. -A Command has the following structure: - -```go -type Command struct { - Use string // The one-line usage message. - Short string // The short description shown in the 'help' output. - Long string // The long message shown in the 'help ' output. - Run func(cmd *Command, args []string) // Run runs the command. -} -``` +[More about cobra.Command](https://godoc.org/github.com/spf13/cobra#Command) ## Flags -A Flag is a way to modify the behavior of a command. Cobra supports +A flag is a way to modify the behavior of a command. Cobra supports fully POSIX-compliant flags as well as the Go [flag package](https://golang.org/pkg/flag/). A Cobra command can define flags that persist through to children commands and flags that are only available to that command. 
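A minimal sketch of the command-plus-flag structure described above (the `serverCmd` name, the `--port` flag, and its default value are illustrative assumptions, not part of the patch; `rootCmd` is the root command the surrounding examples define):

```go
package cmd

import (
	"fmt"

	"github.com/spf13/cobra"
)

// serverPort backs the local --port flag; all names here are illustrative only.
var serverPort int

var serverCmd = &cobra.Command{
	Use:   "server",
	Short: "Run the built-in web server",
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Printf("serving on port %d\n", serverPort)
	},
}

func init() {
	// A local flag, visible only to `app server`; a flag meant to be inherited
	// by child commands would be registered on PersistentFlags() instead.
	serverCmd.Flags().IntVarP(&serverPort, "port", "p", 1313, "port to serve on")
	rootCmd.AddCommand(serverCmd)
}
```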
@@ -117,14 +121,6 @@ Flag functionality is provided by the [pflag library](https://github.com/spf13/pflag), a fork of the flag standard library which maintains the same interface while adding POSIX compliance. -## Usage - -Cobra works by creating a set of commands and then organizing them into a tree. -The tree defines the structure of the application. - -Once each command is defined with its corresponding flags, then the -tree is assigned to the commander which is finally executed. - # Installing Using Cobra is easy. First, use `go get` to install the latest version of the library. This command will install the `cobra` generator executable @@ -159,17 +155,11 @@ In a Cobra app, typically the main.go file is very bare. It serves one purpose: package main import ( - "fmt" - "os" - - "{pathToYourApp}/cmd" + "{pathToYourApp}/cmd" ) func main() { - if err := cmd.RootCmd.Execute(); err != nil { - fmt.Println(err) - os.Exit(1) - } + cmd.Execute() } ``` @@ -178,121 +168,36 @@ func main() { Cobra provides its own program that will create your application and add any commands you want. It's the easiest way to incorporate Cobra into your application. -In order to use the cobra command, compile it using the following command: - - go get github.com/spf13/cobra/cobra - -This will create the cobra executable under your `$GOPATH/bin` directory. - -### cobra init - -The `cobra init [yourApp]` command will create your initial application code -for you. It is a very powerful application that will populate your program with -the right structure so you can immediately enjoy all the benefits of Cobra. It -will also automatically apply the license you specify to your application. - -Cobra init is pretty smart. You can provide it a full path, or simply a path -similar to what is expected in the import. - -``` -cobra init github.com/spf13/newAppName -``` - -### cobra add - -Once an application is initialized Cobra can create additional commands for you. -Let's say you created an app and you wanted the following commands for it: - -* app serve -* app config -* app config create - -In your project directory (where your main.go file is) you would run the following: - -``` -cobra add serve -cobra add config -cobra add create -p 'configCmd' -``` - -*Note: Use camelCase (not snake_case/snake-case) for command names. -Otherwise, you will encounter errors. -For example, `cobra add add-user` is incorrect, but `cobra add addUser` is valid.* - -Once you have run these three commands you would have an app structure similar to -the following: - -``` - ▾ app/ - ▾ cmd/ - serve.go - config.go - create.go - main.go -``` - -At this point you can run `go run main.go` and it would run your app. `go run -main.go serve`, `go run main.go config`, `go run main.go config create` along -with `go run main.go help serve`, etc. would all work. - -Obviously you haven't added your own code to these yet. The commands are ready -for you to give them their tasks. Have fun! +[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it. -### Configuring the cobra generator +## Using the Cobra Library -The Cobra generator will be easier to use if you provide a simple configuration -file which will help you eliminate providing a bunch of repeated information in -flags over and over. 
- -An example ~/.cobra.yaml file: - -```yaml -author: Steve Francia -license: MIT -``` - -You can specify no license by setting `license` to `none` or you can specify -a custom license: - -```yaml -license: - header: This file is part of {{ .appName }}. - text: | - {{ .copyright }} - - This is my license. There are many like it, but this one is mine. - My license is my best friend. It is my life. I must master it as I must - master my life. -``` - -You can also use built-in licenses. For example, **GPLv2**, **GPLv3**, **LGPL**, -**AGPL**, **MIT**, **2-Clause BSD** or **3-Clause BSD**. - -## Manually implementing Cobra - -To manually implement Cobra you need to create a bare main.go file and a RootCmd file. +To manually implement Cobra you need to create a bare main.go file and a rootCmd file. You will optionally provide additional commands as you see fit. -### Create the root command - -The root command represents your binary itself. - -#### Manually create rootCmd +### Create rootCmd Cobra doesn't require any special constructors. Simply create your commands. Ideally you place this in app/cmd/root.go: ```go -var RootCmd = &cobra.Command{ - Use: "hugo", - Short: "Hugo is a very fast static site generator", - Long: `A Fast and Flexible Static Site Generator built with +var rootCmd = &cobra.Command{ + Use: "hugo", + Short: "Hugo is a very fast static site generator", + Long: `A Fast and Flexible Static Site Generator built with love by spf13 and friends in Go. Complete documentation is available at http://hugo.spf13.com`, - Run: func(cmd *cobra.Command, args []string) { - // Do Stuff Here - }, + Run: func(cmd *cobra.Command, args []string) { + // Do Stuff Here + }, +} + +func Execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } } ``` @@ -302,54 +207,50 @@ For example cmd/root.go: ```go import ( - "fmt" - "os" + "fmt" + "os" - homedir "github.com/mitchellh/go-homedir" - "github.com/spf13/cobra" - "github.com/spf13/viper" + homedir "github.com/mitchellh/go-homedir" + "github.com/spf13/cobra" + "github.com/spf13/viper" ) func init() { - cobra.OnInitialize(initConfig) - RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") - RootCmd.PersistentFlags().StringVarP(&projectBase, "projectbase", "b", "", "base project directory eg. github.com/spf13/") - RootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "Author name for copyright attribution") - RootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "Name of license for the project (can provide `licensetext` in config)") - RootCmd.PersistentFlags().Bool("viper", true, "Use Viper for configuration") - viper.BindPFlag("author", RootCmd.PersistentFlags().Lookup("author")) - viper.BindPFlag("projectbase", RootCmd.PersistentFlags().Lookup("projectbase")) - viper.BindPFlag("useViper", RootCmd.PersistentFlags().Lookup("viper")) - viper.SetDefault("author", "NAME HERE ") - viper.SetDefault("license", "apache") -} - -func Execute() { - RootCmd.Execute() + cobra.OnInitialize(initConfig) + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") + rootCmd.PersistentFlags().StringVarP(&projectBase, "projectbase", "b", "", "base project directory eg. 
github.com/spf13/") + rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "Author name for copyright attribution") + rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "Name of license for the project (can provide `licensetext` in config)") + rootCmd.PersistentFlags().Bool("viper", true, "Use Viper for configuration") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) + viper.BindPFlag("projectbase", rootCmd.PersistentFlags().Lookup("projectbase")) + viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) + viper.SetDefault("author", "NAME HERE ") + viper.SetDefault("license", "apache") } func initConfig() { // Don't forget to read config either from cfgFile or from home directory! - if cfgFile != "" { - // Use config file from the flag. - viper.SetConfigFile(cfgFile) - } else { - // Find home directory. - home, err := homedir.Dir() - if err != nil { - fmt.Println(err) - os.Exit(1) - } - - // Search config in home directory with name ".cobra" (without extension). - viper.AddConfigPath(home) - viper.SetConfigName(".cobra") - } - - if err := viper.ReadInConfig(); err != nil { - fmt.Println("Can't read config:", err) - os.Exit(1) - } + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := homedir.Dir() + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + // Search config in home directory with name ".cobra" (without extension). + viper.AddConfigPath(home) + viper.SetConfigName(".cobra") + } + + if err := viper.ReadInConfig(); err != nil { + fmt.Println("Can't read config:", err) + os.Exit(1) + } } ``` @@ -364,17 +265,11 @@ In a Cobra app, typically the main.go file is very bare. It serves, one purpose, package main import ( - "fmt" - "os" - - "{pathToYourApp}/cmd" + "{pathToYourApp}/cmd" ) func main() { - if err := cmd.RootCmd.Execute(); err != nil { - fmt.Println(err) - os.Exit(1) - } + cmd.Execute() } ``` @@ -390,48 +285,25 @@ populate it with the following: package cmd import ( - "github.com/spf13/cobra" - "fmt" + "fmt" + + "github.com/spf13/cobra" ) func init() { - RootCmd.AddCommand(versionCmd) + rootCmd.AddCommand(versionCmd) } var versionCmd = &cobra.Command{ - Use: "version", - Short: "Print the version number of Hugo", - Long: `All software has versions. This is Hugo's`, - Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Hugo Static Site Generator v0.9 -- HEAD") - }, + Use: "version", + Short: "Print the version number of Hugo", + Long: `All software has versions. This is Hugo's`, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Hugo Static Site Generator v0.9 -- HEAD") + }, } ``` -### Attach command to its parent - - -If you notice in the above example we attach the command to its parent. In -this case the parent is the rootCmd. In this example we are attaching it to the -root, but commands can be attached at any level. - -```go -RootCmd.AddCommand(versionCmd) -``` - -### Remove a command from its parent - -Removing a command is not a common action in simple programs, but it allows 3rd -parties to customize an existing command tree. - -In this example, we remove the existing `VersionCmd` command of an existing -root command, and we replace it with our own version: - -```go -mainlib.RootCmd.RemoveCommand(mainlib.VersionCmd) -mainlib.RootCmd.AddCommand(versionCmd) -``` - ## Working with Flags Flags provide modifiers to control how the action command operates. 
@@ -456,7 +328,7 @@ command it's assigned to as well as every command under that command. For global flags, assign a flag as a persistent flag on the root. ```go -RootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output") +rootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output") ``` ### Local Flags @@ -464,7 +336,20 @@ RootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose out A flag can also be assigned locally which will only apply to that specific command. ```go -RootCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from") +rootCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from") +``` + +### Local Flag on Parent Commands + +By default Cobra only parses local flags on the target command, any local flags on +parent commands are ignored. By enabling `Command.TraverseChildren` Cobra will +parse local flags on each command before executing the target command. + +```go +command := cobra.Command{ + Use: "print [OPTIONS] [COMMANDS]", + TraverseChildren: true, +} ``` ### Bind Flags with Config @@ -474,8 +359,8 @@ You can also bind your flags with [viper](https://github.com/spf13/viper): var author string func init() { - RootCmd.PersistentFlags().StringVar(&author, "author", "YOUR NAME", "Author name for copyright attribution") - viper.BindPFlag("author", RootCmd.PersistentFlags().Lookup("author")) + rootCmd.PersistentFlags().StringVar(&author, "author", "YOUR NAME", "Author name for copyright attribution") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) } ``` @@ -485,6 +370,51 @@ when the `--author` flag is not provided by user. More in [viper documentation](https://github.com/spf13/viper#working-with-flags). +### Required flags + +Flags are optional by default. If instead you wish your command to report an error +when a flag has not been set, mark it as required: +```go +rootCmd.Flags().StringVarP(&Region, "region", "r", "", "AWS region (required)") +rootCmd.MarkFlagRequired("region") +``` + +## Positional and Custom Arguments + +Validation of positional arguments can be specified using the `Args` field +of `Command`. + +The following validators are built in: + +- `NoArgs` - the command will report an error if there are any positional args. +- `ArbitraryArgs` - the command will accept any args. +- `OnlyValidArgs` - the command will report an error if there are any positional args that are not in the `ValidArgs` field of `Command`. +- `MinimumNArgs(int)` - the command will report an error if there are not at least N positional args. +- `MaximumNArgs(int)` - the command will report an error if there are more than N positional args. +- `ExactArgs(int)` - the command will report an error if there are not exactly N positional args. +- `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command` +- `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args. 
+ +An example of setting the custom validator: + +```go +var cmd = &cobra.Command{ + Short: "hello", + Args: func(cmd *cobra.Command, args []string) error { + if len(args) < 1 { + return errors.New("requires a color argument") + } + if myapp.IsValidColor(args[0]) { + return nil + } + return fmt.Errorf("invalid color specified: %s", args[0]) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Hello, World!") + }, +} +``` + ## Example In the example below, we have defined three commands. Two are at the top level @@ -500,62 +430,62 @@ More documentation about flags is available at https://github.com/spf13/pflag package main import ( - "fmt" - "strings" + "fmt" + "strings" - "github.com/spf13/cobra" + "github.com/spf13/cobra" ) func main() { - - var echoTimes int - - var cmdPrint = &cobra.Command{ - Use: "print [string to print]", - Short: "Print anything to the screen", - Long: `print is for printing anything back to the screen. - For many years people have printed back to the screen. - `, - Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Print: " + strings.Join(args, " ")) - }, - } - - var cmdEcho = &cobra.Command{ - Use: "echo [string to echo]", - Short: "Echo anything to the screen", - Long: `echo is for echoing anything back. - Echo works a lot like print, except it has a child command. - `, - Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Print: " + strings.Join(args, " ")) - }, - } - - var cmdTimes = &cobra.Command{ - Use: "times [# times] [string to echo]", - Short: "Echo anything to the screen more times", - Long: `echo things multiple times back to the user by providing - a count and a string.`, - Run: func(cmd *cobra.Command, args []string) { - for i := 0; i < echoTimes; i++ { - fmt.Println("Echo: " + strings.Join(args, " ")) - } - }, - } - - cmdTimes.Flags().IntVarP(&echoTimes, "times", "t", 1, "times to echo the input") - - var rootCmd = &cobra.Command{Use: "app"} - rootCmd.AddCommand(cmdPrint, cmdEcho) - cmdEcho.AddCommand(cmdTimes) - rootCmd.Execute() + var echoTimes int + + var cmdPrint = &cobra.Command{ + Use: "print [string to print]", + Short: "Print anything to the screen", + Long: `print is for printing anything back to the screen. +For many years people have printed back to the screen.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Print: " + strings.Join(args, " ")) + }, + } + + var cmdEcho = &cobra.Command{ + Use: "echo [string to echo]", + Short: "Echo anything to the screen", + Long: `echo is for echoing anything back. +Echo works a lot like print, except it has a child command.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Print: " + strings.Join(args, " ")) + }, + } + + var cmdTimes = &cobra.Command{ + Use: "times [string to echo]", + Short: "Echo anything to the screen more times", + Long: `echo things multiple times back to the user by providing +a count and a string.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + for i := 0; i < echoTimes; i++ { + fmt.Println("Echo: " + strings.Join(args, " ")) + } + }, + } + + cmdTimes.Flags().IntVarP(&echoTimes, "times", "t", 1, "times to echo the input") + + var rootCmd = &cobra.Command{Use: "app"} + rootCmd.AddCommand(cmdPrint, cmdEcho) + cmdEcho.AddCommand(cmdTimes) + rootCmd.Execute() } ``` For a more complete example of a larger application, please checkout [Hugo](http://gohugo.io/). 
-## The Help Command +## Help Command Cobra automatically adds a help command to your application when you have subcommands. This will be called when a user runs 'app help'. Additionally, help will also @@ -568,60 +498,28 @@ create' is called. Every command will automatically have the '--help' flag adde The following output is automatically generated by Cobra. Nothing beyond the command and flag definitions are needed. - > hugo help + $ cobra help - hugo is the main command, used to build your Hugo site. - - Hugo is a Fast and Flexible Static Site Generator - built with love by spf13 and friends in Go. - - Complete documentation is available at http://gohugo.io/. + Cobra is a CLI library for Go that empowers applications. + This application is a tool to generate the needed files + to quickly create a Cobra application. Usage: - hugo [flags] - hugo [command] + cobra [command] Available Commands: - server Hugo runs its own webserver to render the files - version Print the version number of Hugo - config Print the site configuration - check Check content in the source directory - benchmark Benchmark hugo by building a site a number of times. - convert Convert your content to different formats - new Create new content for your site - list Listing out various types of content - undraft Undraft changes the content's draft status from 'True' to 'False' - genautocomplete Generate shell autocompletion script for Hugo - gendoc Generate Markdown documentation for the Hugo CLI. - genman Generate man page for Hugo - import Import your site from others. + add Add a command to a Cobra Application + help Help about any command + init Initialize a Cobra Application Flags: - -b, --baseURL="": hostname (and path) to the root, e.g. http://spf13.com/ - -D, --buildDrafts[=false]: include content marked as draft - -F, --buildFuture[=false]: include content with publishdate in the future - --cacheDir="": filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/ - --canonifyURLs[=false]: if true, all relative URLs will be canonicalized using baseURL - --config="": config file (default is path/config.yaml|json|toml) - -d, --destination="": filesystem path to write files to - --disableRSS[=false]: Do not build RSS files - --disableSitemap[=false]: Do not build Sitemap file - --editor="": edit new content with this editor, if provided - --ignoreCache[=false]: Ignores the cache directory for reading but still writes to it - --log[=false]: Enable Logging - --logFile="": Log File path (if set, logging enabled automatically) - --noTimes[=false]: Don't sync modification time of files - --pluralizeListTitles[=true]: Pluralize titles in lists using inflect - --preserveTaxonomyNames[=false]: Preserve taxonomy names as written ("Gérard Depardieu" vs "gerard-depardieu") - -s, --source="": filesystem path to read files relative from - --stepAnalysis[=false]: display memory and timing of different steps of the program - -t, --theme="": theme to use (located in /themes/THEMENAME/) - --uglyURLs[=false]: if true, use /filename.html instead of /filename/ - -v, --verbose[=false]: verbose output - --verboseLog[=false]: verbose logging - -w, --watch[=false]: watch filesystem for changes and recreate as needed - - Use "hugo [command] --help" for more information about a command. 
+ -a, --author string author name for copyright attribution (default "YOUR NAME") + --config string config file (default is $HOME/.cobra.yaml) + -h, --help help for cobra + -l, --license string name of license for the project + --viper use Viper for configuration (default true) + + Use "cobra [command] --help" for more information about a command. Help is just a command like any other. There is no special logic or behavior @@ -629,38 +527,18 @@ around it. In fact, you can provide your own if you want. ### Defining your own help -You can provide your own Help command or your own template for the default command to use. - -The default help command is +You can provide your own Help command or your own template for the default command to use +with following functions: ```go -func (c *Command) initHelp() { - if c.helpCommand == nil { - c.helpCommand = &Command{ - Use: "help [command]", - Short: "Help about any command", - Long: `Help provides help for any command in the application. - Simply type ` + c.Name() + ` help [path to command] for full details.`, - Run: c.HelpFunc(), - } - } - c.AddCommand(c.helpCommand) -} -``` - -You can provide your own command, function or template through the following methods: - -```go -command.SetHelpCommand(cmd *Command) - -command.SetHelpFunc(f func(*Command, []string)) - -command.SetHelpTemplate(s string) +cmd.SetHelpCommand(cmd *Command) +cmd.SetHelpFunc(f func(*Command, []string)) +cmd.SetHelpTemplate(s string) ``` The latter two will also apply to any children commands. -## Usage +## Usage Message When the user provides an invalid flag or invalid command, Cobra responds by showing the user the 'usage'. @@ -669,71 +547,42 @@ showing the user the 'usage'. You may recognize this from the help above. That's because the default help embeds the usage as part of its output. + $ cobra --invalid + Error: unknown flag: --invalid Usage: - hugo [flags] - hugo [command] + cobra [command] Available Commands: - server Hugo runs its own webserver to render the files - version Print the version number of Hugo - config Print the site configuration - check Check content in the source directory - benchmark Benchmark hugo by building a site a number of times. - convert Convert your content to different formats - new Create new content for your site - list Listing out various types of content - undraft Undraft changes the content's draft status from 'True' to 'False' - genautocomplete Generate shell autocompletion script for Hugo - gendoc Generate Markdown documentation for the Hugo CLI. - genman Generate man page for Hugo - import Import your site from others. + add Add a command to a Cobra Application + help Help about any command + init Initialize a Cobra Application Flags: - -b, --baseURL="": hostname (and path) to the root, e.g. http://spf13.com/ - -D, --buildDrafts[=false]: include content marked as draft - -F, --buildFuture[=false]: include content with publishdate in the future - --cacheDir="": filesystem path to cache directory. 
Defaults: $TMPDIR/hugo_cache/ - --canonifyURLs[=false]: if true, all relative URLs will be canonicalized using baseURL - --config="": config file (default is path/config.yaml|json|toml) - -d, --destination="": filesystem path to write files to - --disableRSS[=false]: Do not build RSS files - --disableSitemap[=false]: Do not build Sitemap file - --editor="": edit new content with this editor, if provided - --ignoreCache[=false]: Ignores the cache directory for reading but still writes to it - --log[=false]: Enable Logging - --logFile="": Log File path (if set, logging enabled automatically) - --noTimes[=false]: Don't sync modification time of files - --pluralizeListTitles[=true]: Pluralize titles in lists using inflect - --preserveTaxonomyNames[=false]: Preserve taxonomy names as written ("Gérard Depardieu" vs "gerard-depardieu") - -s, --source="": filesystem path to read files relative from - --stepAnalysis[=false]: display memory and timing of different steps of the program - -t, --theme="": theme to use (located in /themes/THEMENAME/) - --uglyURLs[=false]: if true, use /filename.html instead of /filename/ - -v, --verbose[=false]: verbose output - --verboseLog[=false]: verbose logging - -w, --watch[=false]: watch filesystem for changes and recreate as needed + -a, --author string author name for copyright attribution (default "YOUR NAME") + --config string config file (default is $HOME/.cobra.yaml) + -h, --help help for cobra + -l, --license string name of license for the project + --viper use Viper for configuration (default true) + + Use "cobra [command] --help" for more information about a command. ### Defining your own usage You can provide your own usage function or template for Cobra to use. - -The default usage function is: +Like help, the function and template are overridable through public methods: ```go -return func(c *Command) error { - err := tmpl(c.Out(), c.UsageTemplate(), c) - return err -} +cmd.SetUsageFunc(f func(*Command) error) +cmd.SetUsageTemplate(s string) ``` -Like help, the function and template are overridable through public methods: +## Version Flag -```go -command.SetUsageFunc(f func(*Command) error) +Cobra adds a top-level '--version' flag if the Version field is set on the root command. +Running an application with the '--version' flag will print the version to stdout using +the version template. The template can be customized using the +`cmd.SetVersionTemplate(s string)` function. -command.SetUsageTemplate(s string) -``` - -## PreRun or PostRun Hooks +## PreRun and PostRun Hooks It is possible to run functions before or after the main `Run` function of your command. The `PersistentPreRun` and `PreRun` functions will be executed before `Run`. `PersistentPostRun` and `PostRun` will be executed after `Run`. The `Persistent*Run` functions will be inherited by children if they do not declare their own. These functions are run in the following order: @@ -749,105 +598,73 @@ An example of two commands which use all of these features is below. 
When the s package main import ( - "fmt" + "fmt" - "github.com/spf13/cobra" + "github.com/spf13/cobra" ) func main() { - var rootCmd = &cobra.Command{ - Use: "root [sub]", - Short: "My root command", - PersistentPreRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside rootCmd PersistentPreRun with args: %v\n", args) - }, - PreRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside rootCmd PreRun with args: %v\n", args) - }, - Run: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside rootCmd Run with args: %v\n", args) - }, - PostRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside rootCmd PostRun with args: %v\n", args) - }, - PersistentPostRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside rootCmd PersistentPostRun with args: %v\n", args) - }, - } - - var subCmd = &cobra.Command{ - Use: "sub [no options!]", - Short: "My subcommand", - PreRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside subCmd PreRun with args: %v\n", args) - }, - Run: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside subCmd Run with args: %v\n", args) - }, - PostRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside subCmd PostRun with args: %v\n", args) - }, - PersistentPostRun: func(cmd *cobra.Command, args []string) { - fmt.Printf("Inside subCmd PersistentPostRun with args: %v\n", args) - }, - } - - rootCmd.AddCommand(subCmd) - - rootCmd.SetArgs([]string{""}) - _ = rootCmd.Execute() - fmt.Print("\n") - rootCmd.SetArgs([]string{"sub", "arg1", "arg2"}) - _ = rootCmd.Execute() + var rootCmd = &cobra.Command{ + Use: "root [sub]", + Short: "My root command", + PersistentPreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PersistentPreRun with args: %v\n", args) + }, + PreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PreRun with args: %v\n", args) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd Run with args: %v\n", args) + }, + PostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PostRun with args: %v\n", args) + }, + PersistentPostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PersistentPostRun with args: %v\n", args) + }, + } + + var subCmd = &cobra.Command{ + Use: "sub [no options!]", + Short: "My subcommand", + PreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PreRun with args: %v\n", args) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd Run with args: %v\n", args) + }, + PostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PostRun with args: %v\n", args) + }, + PersistentPostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PersistentPostRun with args: %v\n", args) + }, + } + + rootCmd.AddCommand(subCmd) + + rootCmd.SetArgs([]string{""}) + rootCmd.Execute() + fmt.Println() + rootCmd.SetArgs([]string{"sub", "arg1", "arg2"}) + rootCmd.Execute() } ``` +Output: +``` +Inside rootCmd PersistentPreRun with args: [] +Inside rootCmd PreRun with args: [] +Inside rootCmd Run with args: [] +Inside rootCmd PostRun with args: [] +Inside rootCmd PersistentPostRun with args: [] -## Alternative Error Handling - -Cobra also has functions where the return signature is an error. This allows for errors to bubble up to the top, -providing a way to handle the errors in one location. 
The current list of functions that return an error is: - -* PersistentPreRunE -* PreRunE -* RunE -* PostRunE -* PersistentPostRunE - -If you would like to silence the default `error` and `usage` output in favor of your own, you can set `SilenceUsage` -and `SilenceErrors` to `true` on the command. A child command respects these flags if they are set on the parent -command. - -**Example Usage using RunE:** - -```go -package main - -import ( - "errors" - "log" - - "github.com/spf13/cobra" -) - -func main() { - var rootCmd = &cobra.Command{ - Use: "hugo", - Short: "Hugo is a very fast static site generator", - Long: `A Fast and Flexible Static Site Generator built with - love by spf13 and friends in Go. - Complete documentation is available at http://hugo.spf13.com`, - RunE: func(cmd *cobra.Command, args []string) error { - // Do Stuff Here - return errors.New("some random error") - }, - } - - if err := rootCmd.Execute(); err != nil { - log.Fatal(err) - } -} +Inside rootCmd PersistentPreRun with args: [arg1 arg2] +Inside subCmd PreRun with args: [arg1 arg2] +Inside subCmd Run with args: [arg1 arg2] +Inside subCmd PostRun with args: [arg1 arg2] +Inside subCmd PersistentPostRun with args: [arg1 arg2] ``` ## Suggestions when "unknown command" happens @@ -890,51 +707,28 @@ Did you mean this? Run 'kubectl help' for usage. ``` -## Generating Markdown-formatted documentation for your command - -Cobra can generate a Markdown-formatted document based on the subcommands, flags, etc. A simple example of how to do this for your command can be found in [Markdown Docs](doc/md_docs.md). +## Generating documentation for your command -## Generating man pages for your command +Cobra can generate documentation based on subcommands, flags, etc. in the following formats: -Cobra can generate a man page based on the subcommands, flags, etc. A simple example of how to do this for your command can be found in [Man Docs](doc/man_docs.md). +- [Markdown](doc/md_docs.md) +- [ReStructured Text](doc/rest_docs.md) +- [Man Page](doc/man_docs.md) -## Generating bash completions for your command +## Generating bash completions Cobra can generate a bash-completion file. If you add more information to your command, these completions can be amazingly powerful and flexible. Read more about it in [Bash Completions](bash_completions.md). -## Debugging - -Cobra provides a ‘DebugFlags’ method on a command which, when called, will print -out everything Cobra knows about the flags for each command. - -### Example - -```go -command.DebugFlags() -``` - -## Extensions - -Libraries for extending Cobra: - -* [cmdns](https://github.com/gosuri/cmdns): Enables name spacing a command's immediate children. It provides an alternative way to structure subcommands, similar to `heroku apps:create` and `ovrclk clusters:launch`. - -## Contributing +# Contributing 1. Fork it -2. Create your feature branch (`git checkout -b my-new-feature`) -3. Commit your changes (`git commit -am 'Add some feature'`) -4. Push to the branch (`git push origin my-new-feature`) -5. Create new Pull Request - -## Contributors - -Names in no particular order: - -* [spf13](https://github.com/spf13), -[eparis](https://github.com/eparis), -[bep](https://github.com/bep), and many more! - -## License +2. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`) +3. Create your feature branch (`git checkout -b my-new-feature`) +4. Make changes and add them (`git add .`) +5. Commit your changes (`git commit -m 'Add some feature'`) +6. 
Push to the branch (`git push origin my-new-feature`) +7. Create new pull request + +# License Cobra is released under the Apache 2.0 license. See [LICENSE.txt](https://github.com/spf13/cobra/blob/master/LICENSE.txt) diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go new file mode 100644 index 0000000..c4d820b --- /dev/null +++ b/vendor/github.com/spf13/cobra/args.go @@ -0,0 +1,101 @@ +package cobra + +import ( + "fmt" +) + +type PositionalArgs func(cmd *Command, args []string) error + +// Legacy arg validation has the following behaviour: +// - root commands with no subcommands can take arbitrary arguments +// - root commands with subcommands will do subcommand validity checking +// - subcommands will always accept arbitrary arguments +func legacyArgs(cmd *Command, args []string) error { + // no subcommand, always take args + if !cmd.HasSubCommands() { + return nil + } + + // root command with subcommands, do subcommand checking. + if !cmd.HasParent() && len(args) > 0 { + return fmt.Errorf("unknown command %q for %q%s", args[0], cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + return nil +} + +// NoArgs returns an error if any args are included. +func NoArgs(cmd *Command, args []string) error { + if len(args) > 0 { + return fmt.Errorf("unknown command %q for %q", args[0], cmd.CommandPath()) + } + return nil +} + +// OnlyValidArgs returns an error if any args are not in the list of ValidArgs. +func OnlyValidArgs(cmd *Command, args []string) error { + if len(cmd.ValidArgs) > 0 { + for _, v := range args { + if !stringInSlice(v, cmd.ValidArgs) { + return fmt.Errorf("invalid argument %q for %q%s", v, cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + } + } + return nil +} + +// ArbitraryArgs never returns an error. +func ArbitraryArgs(cmd *Command, args []string) error { + return nil +} + +// MinimumNArgs returns an error if there is not at least N args. +func MinimumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < n { + return fmt.Errorf("requires at least %d arg(s), only received %d", n, len(args)) + } + return nil + } +} + +// MaximumNArgs returns an error if there are more than N args. +func MaximumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) > n { + return fmt.Errorf("accepts at most %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// ExactArgs returns an error if there are not exactly n args. +func ExactArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) != n { + return fmt.Errorf("accepts %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// ExactValidArgs returns an error if +// there are not exactly N positional args OR +// there are any positional args that are not in the `ValidArgs` field of `Command` +func ExactValidArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if err := ExactArgs(n)(cmd, args); err != nil { + return err + } + return OnlyValidArgs(cmd, args) + } +} + +// RangeArgs returns an error if the number of args is not within the expected range. 
+func RangeArgs(min int, max int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < min || len(args) > max { + return fmt.Errorf("accepts between %d and %d arg(s), received %d", min, max, len(args)) + } + return nil + } +} diff --git a/vendor/github.com/spf13/cobra/args_test.go b/vendor/github.com/spf13/cobra/args_test.go new file mode 100644 index 0000000..c81b212 --- /dev/null +++ b/vendor/github.com/spf13/cobra/args_test.go @@ -0,0 +1,287 @@ +package cobra + +import ( + "strings" + "testing" +) + +func TestNoArgs(t *testing.T) { + c := &Command{Use: "c", Args: NoArgs, Run: emptyRun} + + output, err := executeCommand(c) + if output != "" { + t.Errorf("Unexpected string: %v", output) + } + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } +} + +func TestNoArgsWithArgs(t *testing.T) { + c := &Command{Use: "c", Args: NoArgs, Run: emptyRun} + + _, err := executeCommand(c, "illegal") + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := `unknown command "illegal" for "c"` + if got != expected { + t.Errorf("Expected: %q, got: %q", expected, got) + } +} + +func TestOnlyValidArgs(t *testing.T) { + c := &Command{ + Use: "c", + Args: OnlyValidArgs, + ValidArgs: []string{"one", "two"}, + Run: emptyRun, + } + + output, err := executeCommand(c, "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } +} + +func TestOnlyValidArgsWithInvalidArgs(t *testing.T) { + c := &Command{ + Use: "c", + Args: OnlyValidArgs, + ValidArgs: []string{"one", "two"}, + Run: emptyRun, + } + + _, err := executeCommand(c, "three") + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := `invalid argument "three" for "c"` + if got != expected { + t.Errorf("Expected: %q, got: %q", expected, got) + } +} + +func TestArbitraryArgs(t *testing.T) { + c := &Command{Use: "c", Args: ArbitraryArgs, Run: emptyRun} + output, err := executeCommand(c, "a", "b") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestMinimumNArgs(t *testing.T) { + c := &Command{Use: "c", Args: MinimumNArgs(2), Run: emptyRun} + output, err := executeCommand(c, "a", "b", "c") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestMinimumNArgsWithLessArgs(t *testing.T) { + c := &Command{Use: "c", Args: MinimumNArgs(2), Run: emptyRun} + _, err := executeCommand(c, "a") + + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := "requires at least 2 arg(s), only received 1" + if got != expected { + t.Fatalf("Expected %q, got %q", expected, got) + } +} + +func TestMaximumNArgs(t *testing.T) { + c := &Command{Use: "c", Args: MaximumNArgs(3), Run: emptyRun} + output, err := executeCommand(c, "a", "b") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestMaximumNArgsWithMoreArgs(t *testing.T) { + c := &Command{Use: "c", Args: MaximumNArgs(2), Run: emptyRun} + _, err := executeCommand(c, "a", "b", "c") + + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := "accepts at most 2 arg(s), received 3" + if got != expected { + t.Fatalf("Expected %q, got %q", expected, got) + } +} + +func TestExactArgs(t *testing.T) { + c := &Command{Use: "c", 
Args: ExactArgs(3), Run: emptyRun} + output, err := executeCommand(c, "a", "b", "c") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestExactArgsWithInvalidCount(t *testing.T) { + c := &Command{Use: "c", Args: ExactArgs(2), Run: emptyRun} + _, err := executeCommand(c, "a", "b", "c") + + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := "accepts 2 arg(s), received 3" + if got != expected { + t.Fatalf("Expected %q, got %q", expected, got) + } +} + +func TestExactValidArgs(t *testing.T) { + c := &Command{Use: "c", Args: ExactValidArgs(3), ValidArgs: []string{"a", "b", "c"}, Run: emptyRun} + output, err := executeCommand(c, "a", "b", "c") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestExactValidArgsWithInvalidCount(t *testing.T) { + c := &Command{Use: "c", Args: ExactValidArgs(2), Run: emptyRun} + _, err := executeCommand(c, "a", "b", "c") + + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := "accepts 2 arg(s), received 3" + if got != expected { + t.Fatalf("Expected %q, got %q", expected, got) + } +} + +func TestExactValidArgsWithInvalidArgs(t *testing.T) { + c := &Command{ + Use: "c", + Args: ExactValidArgs(1), + ValidArgs: []string{"one", "two"}, + Run: emptyRun, + } + + _, err := executeCommand(c, "three") + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := `invalid argument "three" for "c"` + if got != expected { + t.Errorf("Expected: %q, got: %q", expected, got) + } +} + +func TestRangeArgs(t *testing.T) { + c := &Command{Use: "c", Args: RangeArgs(2, 4), Run: emptyRun} + output, err := executeCommand(c, "a", "b", "c") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestRangeArgsWithInvalidCount(t *testing.T) { + c := &Command{Use: "c", Args: RangeArgs(2, 4), Run: emptyRun} + _, err := executeCommand(c, "a") + + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := "accepts between 2 and 4 arg(s), received 1" + if got != expected { + t.Fatalf("Expected %q, got %q", expected, got) + } +} + +func TestRootTakesNoArgs(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + _, err := executeCommand(rootCmd, "illegal", "args") + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := `unknown command "illegal" for "root"` + if !strings.Contains(got, expected) { + t.Errorf("expected %q, got %q", expected, got) + } +} + +func TestRootTakesArgs(t *testing.T) { + rootCmd := &Command{Use: "root", Args: ArbitraryArgs, Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + _, err := executeCommand(rootCmd, "legal", "args") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } +} + +func TestChildTakesNoArgs(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Args: NoArgs, Run: emptyRun} + rootCmd.AddCommand(childCmd) + + _, err := executeCommand(rootCmd, "child", "illegal", "args") + if err == nil { + t.Fatal("Expected an error") + } + + got := err.Error() + expected := `unknown command "illegal" for "root child"` + if !strings.Contains(got, 
expected) { + t.Errorf("expected %q, got %q", expected, got) + } +} + +func TestChildTakesArgs(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Args: ArbitraryArgs, Run: emptyRun} + rootCmd.AddCommand(childCmd) + + _, err := executeCommand(rootCmd, "child", "legal", "args") + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } +} diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go index e0cfb34..c3c1e50 100644 --- a/vendor/github.com/spf13/cobra/bash_completions.go +++ b/vendor/github.com/spf13/cobra/bash_completions.go @@ -21,8 +21,8 @@ const ( func writePreamble(buf *bytes.Buffer, name string) { buf.WriteString(fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name)) - buf.WriteString(` -__debug() + buf.WriteString(fmt.Sprintf(` +__%[1]s_debug() { if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then echo "$*" >> "${BASH_COMP_DEBUG_FILE}" @@ -31,13 +31,13 @@ __debug() # Homebrew on Macs have version 1.3 of bash-completion which doesn't include # _init_completion. This is a very minimal version of that function. -__my_init_completion() +__%[1]s_init_completion() { COMPREPLY=() _get_comp_words_by_ref "$@" cur prev words cword } -__index_of_word() +__%[1]s_index_of_word() { local w word=$1 shift @@ -49,7 +49,7 @@ __index_of_word() index=-1 } -__contains_word() +__%[1]s_contains_word() { local w word=$1; shift for w in "$@"; do @@ -58,9 +58,9 @@ __contains_word() return 1 } -__handle_reply() +__%[1]s_handle_reply() { - __debug "${FUNCNAME[0]}" + __%[1]s_debug "${FUNCNAME[0]}" case $cur in -*) if [[ $(type -t compopt) = "builtin" ]]; then @@ -85,14 +85,14 @@ __handle_reply() local index flag flag="${cur%%=*}" - __index_of_word "${flag}" "${flags_with_completion[@]}" + __%[1]s_index_of_word "${flag}" "${flags_with_completion[@]}" COMPREPLY=() if [[ ${index} -ge 0 ]]; then PREFIX="" cur="${cur#*=}" ${flags_completion[${index}]} if [ -n "${ZSH_VERSION}" ]; then - # zfs completion needs --flag= prefix + # zsh completion needs --flag= prefix eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )" fi fi @@ -103,7 +103,7 @@ __handle_reply() # check if we are handling a flag with special work handling local index - __index_of_word "${prev}" "${flags_with_completion[@]}" + __%[1]s_index_of_word "${prev}" "${flags_with_completion[@]}" if [[ ${index} -ge 0 ]]; then ${flags_completion[${index}]} return @@ -129,31 +129,43 @@ __handle_reply() fi if [[ ${#COMPREPLY[@]} -eq 0 ]]; then - declare -F __custom_func >/dev/null && __custom_func + if declare -F __%[1]s_custom_func >/dev/null; then + # try command name qualified custom func + __%[1]s_custom_func + else + # otherwise fall back to unqualified for compatibility + declare -F __custom_func >/dev/null && __custom_func + fi fi # available in bash-completion >= 2, not always present on macOS if declare -F __ltrim_colon_completions >/dev/null; then __ltrim_colon_completions "$cur" fi + + # If there is only 1 completion and it is a flag with an = it will be completed + # but we don't want a space after the = + if [[ "${#COMPREPLY[@]}" -eq "1" ]] && [[ $(type -t compopt) = "builtin" ]] && [[ "${COMPREPLY[0]}" == --*= ]]; then + compopt -o nospace + fi } # The arguments should be in the form "ext1|ext2|extn" -__handle_filename_extension_flag() +__%[1]s_handle_filename_extension_flag() { local ext="$1" _filedir "@(${ext})" } -__handle_subdirs_in_dir_flag() +__%[1]s_handle_subdirs_in_dir_flag() { local dir="$1" pushd "${dir}" >/dev/null 
2>&1 && _filedir -d && popd >/dev/null 2>&1 } -__handle_flag() +__%[1]s_handle_flag() { - __debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" # if a command required a flag, and we found it, unset must_have_one_flag() local flagname=${words[c]} @@ -164,27 +176,31 @@ __handle_flag() flagname=${flagname%%=*} # strip everything after the = flagname="${flagname}=" # but put the = back fi - __debug "${FUNCNAME[0]}: looking for ${flagname}" - if __contains_word "${flagname}" "${must_have_one_flag[@]}"; then + __%[1]s_debug "${FUNCNAME[0]}: looking for ${flagname}" + if __%[1]s_contains_word "${flagname}" "${must_have_one_flag[@]}"; then must_have_one_flag=() fi # if you set a flag which only applies to this command, don't show subcommands - if __contains_word "${flagname}" "${local_nonpersistent_flags[@]}"; then + if __%[1]s_contains_word "${flagname}" "${local_nonpersistent_flags[@]}"; then commands=() fi # keep flag value with flagname as flaghash - if [ -n "${flagvalue}" ] ; then - flaghash[${flagname}]=${flagvalue} - elif [ -n "${words[ $((c+1)) ]}" ] ; then - flaghash[${flagname}]=${words[ $((c+1)) ]} - else - flaghash[${flagname}]="true" # pad "true" for bool flag + # flaghash variable is an associative array which is only supported in bash > 3. + if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + if [ -n "${flagvalue}" ] ; then + flaghash[${flagname}]=${flagvalue} + elif [ -n "${words[ $((c+1)) ]}" ] ; then + flaghash[${flagname}]=${words[ $((c+1)) ]} + else + flaghash[${flagname}]="true" # pad "true" for bool flag + fi fi # skip the argument to a two word flag - if __contains_word "${words[c]}" "${two_word_flags[@]}"; then + if [[ ${words[c]} != *"="* ]] && __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then + __%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument" c=$((c+1)) # if we are looking for a flags value, don't show commands if [[ $c -eq $cword ]]; then @@ -196,13 +212,13 @@ __handle_flag() } -__handle_noun() +__%[1]s_handle_noun() { - __debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" - if __contains_word "${words[c]}" "${must_have_one_noun[@]}"; then + if __%[1]s_contains_word "${words[c]}" "${must_have_one_noun[@]}"; then must_have_one_noun=() - elif __contains_word "${words[c]}" "${noun_aliases[@]}"; then + elif __%[1]s_contains_word "${words[c]}" "${noun_aliases[@]}"; then must_have_one_noun=() fi @@ -210,45 +226,53 @@ __handle_noun() c=$((c+1)) } -__handle_command() +__%[1]s_handle_command() { - __debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" local next_command if [[ -n ${last_command} ]]; then next_command="_${last_command}_${words[c]//:/__}" else if [[ $c -eq 0 ]]; then - next_command="_$(basename "${words[c]//:/__}")" + next_command="_%[1]s_root_command" else next_command="_${words[c]//:/__}" fi fi c=$((c+1)) - __debug "${FUNCNAME[0]}: looking for ${next_command}" + __%[1]s_debug "${FUNCNAME[0]}: looking for ${next_command}" declare -F "$next_command" >/dev/null && $next_command } -__handle_word() +__%[1]s_handle_word() { if [[ $c -ge $cword ]]; then - __handle_reply + __%[1]s_handle_reply return fi - __debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" if [[ "${words[c]}" == -* ]]; then - __handle_flag - elif __contains_word 
"${words[c]}" "${commands[@]}"; then - __handle_command - elif [[ $c -eq 0 ]] && __contains_word "$(basename "${words[c]}")" "${commands[@]}"; then - __handle_command + __%[1]s_handle_flag + elif __%[1]s_contains_word "${words[c]}" "${commands[@]}"; then + __%[1]s_handle_command + elif [[ $c -eq 0 ]]; then + __%[1]s_handle_command + elif __%[1]s_contains_word "${words[c]}" "${command_aliases[@]}"; then + # aliashash variable is an associative array which is only supported in bash > 3. + if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + words[c]=${aliashash[${words[c]}]} + __%[1]s_handle_command + else + __%[1]s_handle_noun + fi else - __handle_noun + __%[1]s_handle_noun fi - __handle_word + __%[1]s_handle_word } -`) +`, name)) } func writePostscript(buf *bytes.Buffer, name string) { @@ -257,10 +281,11 @@ func writePostscript(buf *bytes.Buffer, name string) { buf.WriteString(fmt.Sprintf(`{ local cur prev words cword declare -A flaghash 2>/dev/null || : + declare -A aliashash 2>/dev/null || : if declare -F _init_completion >/dev/null 2>&1; then _init_completion -s || return else - __my_init_completion -n "=" || return + __%[1]s_init_completion -n "=" || return fi local c=0 @@ -269,13 +294,13 @@ func writePostscript(buf *bytes.Buffer, name string) { local local_nonpersistent_flags=() local flags_with_completion=() local flags_completion=() - local commands=("%s") + local commands=("%[1]s") local must_have_one_flag=() local must_have_one_noun=() local last_command local nouns=() - __handle_word + __%[1]s_handle_word } `, name)) @@ -296,11 +321,12 @@ func writeCommands(buf *bytes.Buffer, cmd *Command) { continue } buf.WriteString(fmt.Sprintf(" commands+=(%q)\n", c.Name())) + writeCmdAliases(buf, c) } buf.WriteString("\n") } -func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]string) { +func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]string, cmd *Command) { for key, value := range annotations { switch key { case BashCompFilenameExt: @@ -308,7 +334,7 @@ func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]s var ext string if len(value) > 0 { - ext = "__handle_filename_extension_flag " + strings.Join(value, "|") + ext = fmt.Sprintf("__%s_handle_filename_extension_flag ", cmd.Root().Name()) + strings.Join(value, "|") } else { ext = "_filedir" } @@ -326,7 +352,7 @@ func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]s var ext string if len(value) == 1 { - ext = "__handle_subdirs_in_dir_flag " + value[0] + ext = fmt.Sprintf("__%s_handle_subdirs_in_dir_flag ", cmd.Root().Name()) + value[0] } else { ext = "_filedir -d" } @@ -335,7 +361,7 @@ func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]s } } -func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag) { +func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { name := flag.Shorthand format := " " if len(flag.NoOptDefVal) == 0 { @@ -343,10 +369,10 @@ func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag) { } format += "flags+=(\"-%s\")\n" buf.WriteString(fmt.Sprintf(format, name)) - writeFlagHandler(buf, "-"+name, flag.Annotations) + writeFlagHandler(buf, "-"+name, flag.Annotations, cmd) } -func writeFlag(buf *bytes.Buffer, flag *pflag.Flag) { +func writeFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { name := flag.Name format := " flags+=(\"--%s" if len(flag.NoOptDefVal) == 0 { @@ -354,7 +380,11 @@ func writeFlag(buf *bytes.Buffer, flag *pflag.Flag) { } format += 
"\")\n" buf.WriteString(fmt.Sprintf(format, name)) - writeFlagHandler(buf, "--"+name, flag.Annotations) + if len(flag.NoOptDefVal) == 0 { + format = " two_word_flags+=(\"--%s\")\n" + buf.WriteString(fmt.Sprintf(format, name)) + } + writeFlagHandler(buf, "--"+name, flag.Annotations, cmd) } func writeLocalNonPersistentFlag(buf *bytes.Buffer, flag *pflag.Flag) { @@ -380,9 +410,9 @@ func writeFlags(buf *bytes.Buffer, cmd *Command) { if nonCompletableFlag(flag) { return } - writeFlag(buf, flag) + writeFlag(buf, flag, cmd) if len(flag.Shorthand) > 0 { - writeShortFlag(buf, flag) + writeShortFlag(buf, flag, cmd) } if localNonPersistentFlags.Lookup(flag.Name) != nil { writeLocalNonPersistentFlag(buf, flag) @@ -392,9 +422,9 @@ func writeFlags(buf *bytes.Buffer, cmd *Command) { if nonCompletableFlag(flag) { return } - writeFlag(buf, flag) + writeFlag(buf, flag, cmd) if len(flag.Shorthand) > 0 { - writeShortFlag(buf, flag) + writeShortFlag(buf, flag, cmd) } }) @@ -434,6 +464,21 @@ func writeRequiredNouns(buf *bytes.Buffer, cmd *Command) { } } +func writeCmdAliases(buf *bytes.Buffer, cmd *Command) { + if len(cmd.Aliases) == 0 { + return + } + + sort.Sort(sort.StringSlice(cmd.Aliases)) + + buf.WriteString(fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n")) + for _, value := range cmd.Aliases { + buf.WriteString(fmt.Sprintf(" command_aliases+=(%q)\n", value)) + buf.WriteString(fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name())) + } + buf.WriteString(` fi`) + buf.WriteString("\n") +} func writeArgAliases(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" noun_aliases=()\n") sort.Sort(sort.StringSlice(cmd.ArgAliases)) @@ -452,8 +497,18 @@ func gen(buf *bytes.Buffer, cmd *Command) { commandName := cmd.CommandPath() commandName = strings.Replace(commandName, " ", "_", -1) commandName = strings.Replace(commandName, ":", "__", -1) - buf.WriteString(fmt.Sprintf("_%s()\n{\n", commandName)) + + if cmd.Root() == cmd { + buf.WriteString(fmt.Sprintf("_%s_root_command()\n{\n", commandName)) + } else { + buf.WriteString(fmt.Sprintf("_%s()\n{\n", commandName)) + } + buf.WriteString(fmt.Sprintf(" last_command=%q\n", commandName)) + buf.WriteString("\n") + buf.WriteString(" command_aliases=()\n") + buf.WriteString("\n") + writeCommands(buf, cmd) writeFlags(buf, cmd) writeRequiredFlag(buf, cmd) @@ -463,14 +518,14 @@ func gen(buf *bytes.Buffer, cmd *Command) { } // GenBashCompletion generates bash completion file and writes to the passed writer. -func (cmd *Command) GenBashCompletion(w io.Writer) error { +func (c *Command) GenBashCompletion(w io.Writer) error { buf := new(bytes.Buffer) - writePreamble(buf, cmd.Name()) - if len(cmd.BashCompletionFunction) > 0 { - buf.WriteString(cmd.BashCompletionFunction + "\n") + writePreamble(buf, c.Name()) + if len(c.BashCompletionFunction) > 0 { + buf.WriteString(c.BashCompletionFunction + "\n") } - gen(buf, cmd) - writePostscript(buf, cmd.Name()) + gen(buf, c) + writePostscript(buf, c.Name()) _, err := buf.WriteTo(w) return err @@ -481,47 +536,50 @@ func nonCompletableFlag(flag *pflag.Flag) bool { } // GenBashCompletionFile generates bash completion file. 
-func (cmd *Command) GenBashCompletionFile(filename string) error { +func (c *Command) GenBashCompletionFile(filename string) error { outFile, err := os.Create(filename) if err != nil { return err } defer outFile.Close() - return cmd.GenBashCompletion(outFile) + return c.GenBashCompletion(outFile) } -// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag, if it exists. -func (cmd *Command) MarkFlagRequired(name string) error { - return MarkFlagRequired(cmd.Flags(), name) +// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkFlagRequired(name string) error { + return MarkFlagRequired(c.Flags(), name) } -// MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag, if it exists. -func (cmd *Command) MarkPersistentFlagRequired(name string) error { - return MarkFlagRequired(cmd.PersistentFlags(), name) +// MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag if it exists, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkPersistentFlagRequired(name string) error { + return MarkFlagRequired(c.PersistentFlags(), name) } -// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag in the flag set, if it exists. +// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// and causes your command to report an error if invoked without the flag. func MarkFlagRequired(flags *pflag.FlagSet, name string) error { return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"}) } // MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag, if it exists. // Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. -func (cmd *Command) MarkFlagFilename(name string, extensions ...string) error { - return MarkFlagFilename(cmd.Flags(), name, extensions...) +func (c *Command) MarkFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.Flags(), name, extensions...) } // MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. // Generated bash autocompletion will call the bash function f for the flag. -func (cmd *Command) MarkFlagCustom(name string, f string) error { - return MarkFlagCustom(cmd.Flags(), name, f) +func (c *Command) MarkFlagCustom(name string, f string) error { + return MarkFlagCustom(c.Flags(), name, f) } // MarkPersistentFlagFilename adds the BashCompFilenameExt annotation to the named persistent flag, if it exists. // Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. -func (cmd *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { - return MarkFlagFilename(cmd.PersistentFlags(), name, extensions...) +func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.PersistentFlags(), name, extensions...) } // MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag in the flag set, if it exists. 
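The completion helpers above only change their receiver name from `cmd` to `c`; the exported API is unchanged. As a minimal sketch of how that API is typically used (the `app` command and `--config` flag below are hypothetical, not part of this repository), a program can mark flags for completion and write the generated script to a file:

```go
package main

import (
	"log"

	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical root command, used only to illustrate the completion API.
	rootCmd := &cobra.Command{
		Use:   "app",
		Short: "Example application",
		Run:   func(cmd *cobra.Command, args []string) {},
	}

	// Flags marked as required or as filenames are reflected in the generated
	// script (the must_have_one_flag and _filedir handling shown above).
	rootCmd.PersistentFlags().String("config", "", "path to a config file")
	if err := rootCmd.MarkPersistentFlagRequired("config"); err != nil {
		log.Fatal(err)
	}
	if err := rootCmd.MarkPersistentFlagFilename("config", "json", "yaml", "yml"); err != nil {
		log.Fatal(err)
	}

	// Write the bash completion script for the whole command tree to a file.
	if err := rootCmd.GenBashCompletionFile("app-completion.sh"); err != nil {
		log.Fatal(err)
	}
}
```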
diff --git a/vendor/github.com/spf13/cobra/bash_completions.md b/vendor/github.com/spf13/cobra/bash_completions.md index 52bd39d..4ac61ee 100644 --- a/vendor/github.com/spf13/cobra/bash_completions.md +++ b/vendor/github.com/spf13/cobra/bash_completions.md @@ -1,20 +1,56 @@ # Generating Bash Completions For Your Own cobra.Command +If you are using the generator, you can create a completion command by running + +```bash +cobra add completion +``` + +Update the help text to show how to install bash_completion on Linux; the [Kubectl docs](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion) show the macOS options. + +Writing the shell script to stdout allows the most flexible use. + +```go +// completionCmd represents the completion command +var completionCmd = &cobra.Command{ + Use: "completion", + Short: "Generates bash completion scripts", + Long: `To load completion run + +. <(bitbucket completion) + +To configure your bash shell to load completions for each session, add to your bashrc + +# ~/.bashrc or ~/.profile +. <(bitbucket completion) +`, + Run: func(cmd *cobra.Command, args []string) { + rootCmd.GenBashCompletion(os.Stdout) + }, +} +``` + +**Note:** The cobra generator may print messages to stdout (for example, when the config file is loaded); this will break the autocomplete script. + + +## Example from kubectl + Generating bash completions from a cobra command is incredibly easy. An actual program which does so for the kubernetes kubectl binary is as follows: ```go package main import ( - "io/ioutil" - "os" + "io/ioutil" + "os" - "github.com/GoogleCloudPlatform/kubernetes/pkg/kubectl/cmd" + "k8s.io/kubernetes/pkg/kubectl/cmd" + "k8s.io/kubernetes/pkg/kubectl/cmd/util" ) func main() { - kubectl := cmd.NewFactory(nil).NewKubectlCommand(os.Stdin, ioutil.Discard, ioutil.Discard) - kubectl.GenBashCompletionFile("out.sh") + kubectl := cmd.NewKubectlCommand(util.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard) + kubectl.GenBashCompletionFile("out.sh") } ``` @@ -46,7 +82,7 @@ __kubectl_get_resource() fi } -__custom_func() { +__kubectl_custom_func() { case ${last_command} in kubectl_get | kubectl_describe | kubectl_delete | kubectl_stop) __kubectl_get_resource @@ -73,7 +109,7 @@ Find more information at https://github.com/GoogleCloudPlatform/kubernetes.`, } ``` -The `BashCompletionFunction` option is really only valid/useful on the root command. Doing the above will cause `__custom_func()` to be called when the built in processor was unable to find a solution. In the case of kubernetes a valid command might look something like `kubectl get pod [mypod]`. If you type `kubectl get pod [tab][tab]` the `__customc_func()` will run because the cobra.Command only understood "kubectl" and "get." `__custom_func()` will see that the cobra.Command is "kubectl_get" and will thus call another helper `__kubectl_get_resource()`. `__kubectl_get_resource` will look at the 'nouns' collected. In our example the only noun will be `pod`. So it will call `__kubectl_parse_get pod`. `__kubectl_parse_get` will actually call out to kubernetes and get any pods. It will then set `COMPREPLY` to valid pods! +The `BashCompletionFunction` option is really only valid/useful on the root command. Doing the above will cause `__kubectl_custom_func()` (`__custom_func()`) to be called when the built-in processor was unable to find a solution. In the case of kubernetes a valid command might look something like `kubectl get pod [mypod]`.
If you type `kubectl get pod [tab][tab]` the `__kubectl_customc_func()` will run because the cobra.Command only understood "kubectl" and "get." `__kubectl_custom_func()` will see that the cobra.Command is "kubectl_get" and will thus call another helper `__kubectl_get_resource()`. `__kubectl_get_resource` will look at the 'nouns' collected. In our example the only noun will be `pod`. So it will call `__kubectl_parse_get pod`. `__kubectl_parse_get` will actually call out to kubernetes and get any pods. It will then set `COMPREPLY` to valid pods! ## Have the completions code complete your 'nouns' @@ -173,14 +209,14 @@ hello.yml test.json So while there are many other files in the CWD it only shows me subdirs and those with valid extensions. -# Specifiy custom flag completion +# Specify custom flag completion -Similar to the filename completion and filtering using cobra.BashCompFilenameExt, you can specifiy +Similar to the filename completion and filtering using cobra.BashCompFilenameExt, you can specify a custom flag completion function with cobra.BashCompCustom: ```go annotation := make(map[string][]string) - annotation[cobra.BashCompFilenameExt] = []string{"__kubectl_get_namespaces"} + annotation[cobra.BashCompCustom] = []string{"__kubectl_get_namespaces"} flag := &pflag.Flag{ Name: "namespace", @@ -204,3 +240,17 @@ __kubectl_get_namespaces() fi } ``` +# Using bash aliases for commands + +You can also configure the `bash aliases` for the commands and they will also support completions. + +```bash +alias aliasname=origcommand +complete -o default -F __start_origcommand aliasname + +# and now when you run `aliasname` completion will make +# suggestions as it did for `origcommand`. + +$) aliasname +completion firstcommand secondcommand +``` diff --git a/vendor/github.com/spf13/cobra/bash_completions_test.go b/vendor/github.com/spf13/cobra/bash_completions_test.go index 7511376..eefa3de 100644 --- a/vendor/github.com/spf13/cobra/bash_completions_test.go +++ b/vendor/github.com/spf13/cobra/bash_completions_test.go @@ -2,21 +2,40 @@ package cobra import ( "bytes" + "fmt" "os" "os/exec" + "regexp" "strings" "testing" ) func checkOmit(t *testing.T, found, unexpected string) { if strings.Contains(found, unexpected) { - t.Errorf("Unexpected response.\nGot: %q\nBut should not have!\n", unexpected) + t.Errorf("Got: %q\nBut should not have!\n", unexpected) } } func check(t *testing.T, found, expected string) { if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) + t.Errorf("Expecting to contain: \n %q\nGot:\n %q\n", expected, found) + } +} + +func checkNumOccurrences(t *testing.T, found, expected string, expectedOccurrences int) { + numOccurrences := strings.Count(found, expected) + if numOccurrences != expectedOccurrences { + t.Errorf("Expecting to contain %d occurrences of: \n %q\nGot %d:\n %q\n", expectedOccurrences, expected, numOccurrences, found) + } +} + +func checkRegex(t *testing.T, found, pattern string) { + matched, err := regexp.MatchString(pattern, found) + if err != nil { + t.Errorf("Error thrown performing MatchString: \n %s\n", err) + } + if !matched { + t.Errorf("Expecting to match: \n %q\nGot:\n %q\n", pattern, found) } } @@ -33,160 +52,186 @@ func runShellCheck(s string) error { return err } go func() { - defer stdin.Close() stdin.Write([]byte(s)) + stdin.Close() }() return cmd.Run() } // World worst custom function, just keep telling you to enter hello! 
-const ( - bashCompletionFunc = `__custom_func() { -COMPREPLY=( "hello" ) +const bashCompletionFunc = `__root_custom_func() { + COMPREPLY=( "hello" ) } ` -) func TestBashCompletions(t *testing.T) { - c := initializeWithRootCmd() - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdEcho, cmdPrint, cmdDeprecated, cmdColon) - - // custom completion function - c.BashCompletionFunction = bashCompletionFunc - - // required flag - c.MarkFlagRequired("introot") - - // valid nouns - validArgs := []string{"pod", "node", "service", "replicationcontroller"} - c.ValidArgs = validArgs - - // noun aliases - argAliases := []string{"pods", "nodes", "services", "replicationcontrollers", "po", "no", "svc", "rc"} - c.ArgAliases = argAliases - - // filename - var flagval string - c.Flags().StringVar(&flagval, "filename", "", "Enter a filename") - c.MarkFlagFilename("filename", "json", "yaml", "yml") - - // persistent filename - var flagvalPersistent string - c.PersistentFlags().StringVar(&flagvalPersistent, "persistent-filename", "", "Enter a filename") - c.MarkPersistentFlagFilename("persistent-filename") - c.MarkPersistentFlagRequired("persistent-filename") - - // filename extensions - var flagvalExt string - c.Flags().StringVar(&flagvalExt, "filename-ext", "", "Enter a filename (extension limited)") - c.MarkFlagFilename("filename-ext") - - // filename extensions - var flagvalCustom string - c.Flags().StringVar(&flagvalCustom, "custom", "", "Enter a filename (extension limited)") - c.MarkFlagCustom("custom", "__complete_custom") - - // subdirectories in a given directory - var flagvalTheme string - c.Flags().StringVar(&flagvalTheme, "theme", "", "theme to use (located in /themes/THEMENAME/)") - c.Flags().SetAnnotation("theme", BashCompSubdirsInDir, []string{"themes"}) - - out := new(bytes.Buffer) - c.GenBashCompletion(out) - str := out.String() - - check(t, str, "_cobra-test") - check(t, str, "_cobra-test_echo") - check(t, str, "_cobra-test_echo_times") - check(t, str, "_cobra-test_print") - check(t, str, "_cobra-test_cmd__colon") + rootCmd := &Command{ + Use: "root", + ArgAliases: []string{"pods", "nodes", "services", "replicationcontrollers", "po", "no", "svc", "rc"}, + ValidArgs: []string{"pod", "node", "service", "replicationcontroller"}, + BashCompletionFunction: bashCompletionFunc, + Run: emptyRun, + } + rootCmd.Flags().IntP("introot", "i", -1, "help message for flag introot") + rootCmd.MarkFlagRequired("introot") + + // Filename. + rootCmd.Flags().String("filename", "", "Enter a filename") + rootCmd.MarkFlagFilename("filename", "json", "yaml", "yml") + + // Persistent filename. + rootCmd.PersistentFlags().String("persistent-filename", "", "Enter a filename") + rootCmd.MarkPersistentFlagFilename("persistent-filename") + rootCmd.MarkPersistentFlagRequired("persistent-filename") + + // Filename extensions. + rootCmd.Flags().String("filename-ext", "", "Enter a filename (extension limited)") + rootCmd.MarkFlagFilename("filename-ext") + rootCmd.Flags().String("custom", "", "Enter a filename (extension limited)") + rootCmd.MarkFlagCustom("custom", "__complete_custom") + + // Subdirectories in a given directory. 
+ rootCmd.Flags().String("theme", "", "theme to use (located in /themes/THEMENAME/)") + rootCmd.Flags().SetAnnotation("theme", BashCompSubdirsInDir, []string{"themes"}) + + // For two word flags check + rootCmd.Flags().StringP("two", "t", "", "this is two word flags") + rootCmd.Flags().BoolP("two-w-default", "T", false, "this is not two word flags") + + echoCmd := &Command{ + Use: "echo [string to echo]", + Aliases: []string{"say"}, + Short: "Echo anything to the screen", + Long: "an utterly useless command for testing.", + Example: "Just run cobra-test echo", + Run: emptyRun, + } + + echoCmd.Flags().String("filename", "", "Enter a filename") + echoCmd.MarkFlagFilename("filename", "json", "yaml", "yml") + echoCmd.Flags().String("config", "", "config to use (located in /config/PROFILE/)") + echoCmd.Flags().SetAnnotation("config", BashCompSubdirsInDir, []string{"config"}) + + printCmd := &Command{ + Use: "print [string to print]", + Args: MinimumNArgs(1), + Short: "Print anything to the screen", + Long: "an absolutely utterly useless command for testing.", + Run: emptyRun, + } + + deprecatedCmd := &Command{ + Use: "deprecated [can't do anything here]", + Args: NoArgs, + Short: "A command which is deprecated", + Long: "an absolutely utterly useless command for testing deprecation!.", + Deprecated: "Please use echo instead", + Run: emptyRun, + } + + colonCmd := &Command{ + Use: "cmd:colon", + Run: emptyRun, + } + + timesCmd := &Command{ + Use: "times [# times] [string to echo]", + SuggestFor: []string{"counts"}, + Args: OnlyValidArgs, + ValidArgs: []string{"one", "two", "three", "four"}, + Short: "Echo anything to the screen more times", + Long: "a slightly useless command for testing.", + Run: emptyRun, + } + + echoCmd.AddCommand(timesCmd) + rootCmd.AddCommand(echoCmd, printCmd, deprecatedCmd, colonCmd) + + buf := new(bytes.Buffer) + rootCmd.GenBashCompletion(buf) + output := buf.String() + + check(t, output, "_root") + check(t, output, "_root_echo") + check(t, output, "_root_echo_times") + check(t, output, "_root_print") + check(t, output, "_root_cmd__colon") // check for required flags - check(t, str, `must_have_one_flag+=("--introot=")`) - check(t, str, `must_have_one_flag+=("--persistent-filename=")`) - // check for custom completion function - check(t, str, `COMPREPLY=( "hello" )`) + check(t, output, `must_have_one_flag+=("--introot=")`) + check(t, output, `must_have_one_flag+=("--persistent-filename=")`) + // check for custom completion function with both qualified and unqualified name + checkNumOccurrences(t, output, `__custom_func`, 2) // 1. check existence, 2. invoke + checkNumOccurrences(t, output, `__root_custom_func`, 3) // 1. check existence, 2. invoke, 3. 
actual definition + // check for custom completion function body + check(t, output, `COMPREPLY=( "hello" )`) // check for required nouns - check(t, str, `must_have_one_noun+=("pod")`) + check(t, output, `must_have_one_noun+=("pod")`) // check for noun aliases - check(t, str, `noun_aliases+=("pods")`) - check(t, str, `noun_aliases+=("rc")`) - checkOmit(t, str, `must_have_one_noun+=("pods")`) + check(t, output, `noun_aliases+=("pods")`) + check(t, output, `noun_aliases+=("rc")`) + checkOmit(t, output, `must_have_one_noun+=("pods")`) + // check for filename extension flags + check(t, output, `flags_completion+=("_filedir")`) // check for filename extension flags - check(t, str, `flags_completion+=("_filedir")`) + check(t, output, `must_have_one_noun+=("three")`) // check for filename extension flags - check(t, str, `flags_completion+=("__handle_filename_extension_flag json|yaml|yml")`) + check(t, output, fmt.Sprintf(`flags_completion+=("__%s_handle_filename_extension_flag json|yaml|yml")`, rootCmd.Name())) + // check for filename extension flags in a subcommand + checkRegex(t, output, fmt.Sprintf(`_root_echo\(\)\n{[^}]*flags_completion\+=\("__%s_handle_filename_extension_flag json\|yaml\|yml"\)`, rootCmd.Name())) // check for custom flags - check(t, str, `flags_completion+=("__complete_custom")`) + check(t, output, `flags_completion+=("__complete_custom")`) // check for subdirs_in_dir flags - check(t, str, `flags_completion+=("__handle_subdirs_in_dir_flag themes")`) + check(t, output, fmt.Sprintf(`flags_completion+=("__%s_handle_subdirs_in_dir_flag themes")`, rootCmd.Name())) + // check for subdirs_in_dir flags in a subcommand + checkRegex(t, output, fmt.Sprintf(`_root_echo\(\)\n{[^}]*flags_completion\+=\("__%s_handle_subdirs_in_dir_flag config"\)`, rootCmd.Name())) - checkOmit(t, str, cmdDeprecated.Name()) + // check two word flags + check(t, output, `two_word_flags+=("--two")`) + check(t, output, `two_word_flags+=("-t")`) + checkOmit(t, output, `two_word_flags+=("--two-w-default")`) + checkOmit(t, output, `two_word_flags+=("-T")`) - // if available, run shellcheck against the script + checkOmit(t, output, deprecatedCmd.Name()) + + // If available, run shellcheck against the script. 
if err := exec.Command("which", "shellcheck").Run(); err != nil { return } - err := runShellCheck(str) - if err != nil { + if err := runShellCheck(output); err != nil { t.Fatalf("shellcheck failed: %v", err) } } func TestBashCompletionHiddenFlag(t *testing.T) { - var cmdTrue = &Command{ - Use: "does nothing", - Run: func(cmd *Command, args []string) {}, - } + c := &Command{Use: "c", Run: emptyRun} - const flagName = "hidden-foo-bar-baz" + const flagName = "hiddenFlag" + c.Flags().Bool(flagName, false, "") + c.Flags().MarkHidden(flagName) - var flagValue bool - cmdTrue.Flags().BoolVar(&flagValue, flagName, false, "hidden flag") - cmdTrue.Flags().MarkHidden(flagName) + buf := new(bytes.Buffer) + c.GenBashCompletion(buf) + output := buf.String() - out := new(bytes.Buffer) - cmdTrue.GenBashCompletion(out) - bashCompletion := out.String() - if strings.Contains(bashCompletion, flagName) { - t.Errorf("expected completion to not include %q flag: Got %v", flagName, bashCompletion) + if strings.Contains(output, flagName) { + t.Errorf("Expected completion to not include %q flag: Got %v", flagName, output) } } func TestBashCompletionDeprecatedFlag(t *testing.T) { - var cmdTrue = &Command{ - Use: "does nothing", - Run: func(cmd *Command, args []string) {}, - } - - const flagName = "deprecated-foo-bar-baz" - - var flagValue bool - cmdTrue.Flags().BoolVar(&flagValue, flagName, false, "hidden flag") - cmdTrue.Flags().MarkDeprecated(flagName, "use --does-not-exist instead") - - out := new(bytes.Buffer) - cmdTrue.GenBashCompletion(out) - bashCompletion := out.String() - if strings.Contains(bashCompletion, flagName) { - t.Errorf("expected completion to not include %q flag: Got %v", flagName, bashCompletion) - } -} + c := &Command{Use: "c", Run: emptyRun} -func BenchmarkBashCompletion(b *testing.B) { - c := initializeWithRootCmd() - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdEcho, cmdPrint, cmdDeprecated, cmdColon) + const flagName = "deprecated-flag" + c.Flags().Bool(flagName, false, "") + c.Flags().MarkDeprecated(flagName, "use --not-deprecated instead") buf := new(bytes.Buffer) + c.GenBashCompletion(buf) + output := buf.String() - b.ResetTimer() - for i := 0; i < b.N; i++ { - buf.Reset() - if err := c.GenBashCompletion(buf); err != nil { - b.Fatal(err) - } + if strings.Contains(output, flagName) { + t.Errorf("expected completion to not include %q flag: Got %v", flagName, output) } } diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go index 8928cef..6505c07 100644 --- a/vendor/github.com/spf13/cobra/cobra.go +++ b/vendor/github.com/spf13/cobra/cobra.go @@ -23,6 +23,7 @@ import ( "strconv" "strings" "text/template" + "time" "unicode" ) @@ -56,6 +57,12 @@ var MousetrapHelpText string = `This is a command line tool. You need to open cmd.exe and run it from there. ` +// MousetrapDisplayDuration controls how long the MousetrapHelpText message is displayed on Windows +// if the CLI is started from explorer.exe. Set to 0 to wait for the return key to be pressed. +// To disable the mousetrap, just set MousetrapHelpText to blank string (""). +// Works only on Microsoft Windows. +var MousetrapDisplayDuration time.Duration = 5 * time.Second + // AddTemplateFunc adds a template function that's available to Usage and Help // template generation. func AddTemplateFunc(name string, tmplFunc interface{}) { @@ -70,7 +77,8 @@ func AddTemplateFuncs(tmplFuncs template.FuncMap) { } } -// OnInitialize takes a series of func() arguments and appends them to a slice of func(). 
+// OnInitialize sets the passed functions to be run when each command's +// Execute method is called. func OnInitialize(y ...func()) { initializers = append(initializers, y...) } @@ -188,3 +196,12 @@ func ld(s, t string, ignoreCase bool) int { } return d[len(s)][len(t)] } + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} diff --git a/vendor/github.com/spf13/cobra/cobra/README.md b/vendor/github.com/spf13/cobra/cobra/README.md new file mode 100644 index 0000000..6054f95 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/README.md @@ -0,0 +1,94 @@ +# Cobra Generator + +Cobra provides its own program that will create your application and add any +commands you want. It's the easiest way to incorporate Cobra into your application. + +In order to use the cobra command, compile it using the following command: + + go get github.com/spf13/cobra/cobra + +This will create the cobra executable under your `$GOPATH/bin` directory. + +### cobra init + +The `cobra init [app]` command will create your initial application code +for you. It is a very powerful application that will populate your program with +the right structure so you can immediately enjoy all the benefits of Cobra. It +will also automatically apply the license you specify to your application. + +Cobra init is pretty smart. You can provide it a full path, or simply a path +similar to what is expected in the import. + +``` +cobra init github.com/spf13/newApp +``` + +### cobra add + +Once an application is initialized, Cobra can create additional commands for you. +Let's say you created an app and you wanted the following commands for it: + +* app serve +* app config +* app config create + +In your project directory (where your main.go file is) you would run the following: + +``` +cobra add serve +cobra add config +cobra add create -p 'configCmd' +``` + +*Note: Use camelCase (not snake_case/snake-case) for command names. +Otherwise, you will encounter errors. +For example, `cobra add add-user` is incorrect, but `cobra add addUser` is valid.* + +Once you have run these three commands you would have an app structure similar to +the following: + +``` + ▾ app/ + ▾ cmd/ + serve.go + config.go + create.go + main.go +``` + +At this point you can run `go run main.go` and it would run your app. `go run +main.go serve`, `go run main.go config`, `go run main.go config create` along +with `go run main.go help serve`, etc. would all work. + +Obviously you haven't added your own code to these yet. The commands are ready +for you to give them their tasks. Have fun! + +### Configuring the cobra generator + +The Cobra generator will be easier to use if you provide a simple configuration +file which will help you eliminate providing a bunch of repeated information in +flags over and over. + +An example ~/.cobra.yaml file: + +```yaml +author: Steve Francia +license: MIT +``` + +You can specify no license by setting `license` to `none` or you can specify +a custom license: + +```yaml +license: + header: This file is part of {{ .appName }}. + text: | + {{ .copyright }} + + This is my license. There are many like it, but this one is mine. + My license is my best friend. It is my life. I must master it as I must + master my life. +``` + +You can also use built-in licenses. For example, **GPLv2**, **GPLv3**, **LGPL**, +**AGPL**, **MIT**, **2-Clause BSD** or **3-Clause BSD**. 
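As a rough, hand-written equivalent of what `cobra add serve` scaffolds (the command names and strings below are hypothetical, not taken from the generator's templates), a subcommand is simply a `*cobra.Command` registered on its parent with `AddCommand`; the `-p`/`--parent` flag handled in the `add.go` change that follows only decides which parent variable that registration targets (default `rootCmd`):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// rootCmd plays the role of the generator's root.go command.
var rootCmd = &cobra.Command{
	Use:   "app",
	Short: "Example application scaffolded with the cobra generator",
}

// serveCmd mirrors a file created by `cobra add serve`.
var serveCmd = &cobra.Command{
	Use:   "serve",
	Short: "Start the example server",
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Println("serve called")
	},
}

func main() {
	// The generator emits this registration in an init() function of serve.go;
	// with `cobra add create -p 'configCmd'` it would target configCmd instead.
	rootCmd.AddCommand(serveCmd)

	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```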
diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/add.go b/vendor/github.com/spf13/cobra/cobra/cmd/add.go index 45f00bb..fb22096 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/add.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/add.go @@ -24,7 +24,7 @@ import ( func init() { addCmd.Flags().StringVarP(&packageName, "package", "t", "", "target package name (e.g. github.com/spf13/hugo)") - addCmd.Flags().StringVarP(&parentName, "parent", "p", "RootCmd", "name of parent command for this command") + addCmd.Flags().StringVarP(&parentName, "parent", "p", "rootCmd", "variable name of parent command for this command") } var packageName, parentName string @@ -35,7 +35,7 @@ var addCmd = &cobra.Command{ Short: "Add a command to a Cobra Application", Long: `Add (cobra add) will create a new command, with a license and the appropriate structure for a Cobra-based CLI application, -and register it to its parent (default RootCmd). +and register it to its parent (default rootCmd). If you want your command to be public, pass in the command name with an initial uppercase letter. @@ -121,7 +121,7 @@ func validateCmdName(source string) string { func createCmdFile(license License, path, cmdName string) { template := `{{comment .copyright}} -{{comment .license}} +{{if .license}}{{comment .license}}{{end}} package {{.cmdPackage}} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/add_test.go b/vendor/github.com/spf13/cobra/cobra/cmd/add_test.go index dacbe83..b920e2b 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/add_test.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/add_test.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "testing" + + "github.com/spf13/viper" ) // TestGoldenAddCmd initializes the project "github.com/spf13/testproject" @@ -16,10 +18,17 @@ import ( func TestGoldenAddCmd(t *testing.T) { projectName := "github.com/spf13/testproject" project := NewProject(projectName) + defer os.RemoveAll(project.AbsPath()) - // Initialize the project at first. + viper.Set("author", "NAME HERE ") + viper.Set("license", "apache") + viper.Set("year", 2017) + defer viper.Set("author", nil) + defer viper.Set("license", nil) + defer viper.Set("year", nil) + + // Initialize the project first. initializeProject(project) - defer os.RemoveAll(project.AbsPath()) // Then add the "test" command. cmdName := "test" @@ -48,7 +57,7 @@ func TestGoldenAddCmd(t *testing.T) { goldenPath := filepath.Join("testdata", filepath.Base(path)+".golden") switch relPath { - // Know directories. + // Known directories. case ".": return nil // Known files. diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/golden_test.go b/vendor/github.com/spf13/cobra/cobra/cmd/golden_test.go index 0ac7e89..9010caa 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/golden_test.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/golden_test.go @@ -17,6 +17,11 @@ func init() { initCmd.SetOutput(new(bytes.Buffer)) } +// ensureLF converts any \r\n to \n +func ensureLF(content []byte) []byte { + return bytes.Replace(content, []byte("\r\n"), []byte("\n"), -1) +} + // compareFiles compares the content of files with pathA and pathB. // If contents are equal, it returns nil. 
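The ensureLF helper added to golden_test.go above normalizes Windows line endings before golden-file comparison. A small standalone sketch of the same idea; the file names are hypothetical.

```go
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
)

// ensureLF converts any \r\n to \n so golden files compare equal on Windows.
func ensureLF(content []byte) []byte {
	return bytes.Replace(content, []byte("\r\n"), []byte("\n"), -1)
}

func main() {
	got, _ := ioutil.ReadFile("got.txt")        // hypothetical paths
	want, _ := ioutil.ReadFile("want.golden")
	fmt.Println("equal:", bytes.Equal(ensureLF(got), ensureLF(want)))
}
```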
// If not, it returns which files are not equal @@ -30,7 +35,7 @@ func compareFiles(pathA, pathB string) error { if err != nil { return err } - if !bytes.Equal(contentA, contentB) { + if !bytes.Equal(ensureLF(contentA), ensureLF(contentB)) { output := new(bytes.Buffer) output.WriteString(fmt.Sprintf("%q and %q are not equal!\n\n", pathA, pathB)) @@ -39,11 +44,11 @@ func compareFiles(pathA, pathB string) error { // Don't execute diff if it can't be found. return nil } - diffCmd := exec.Command(diffPath, pathA, pathB) + diffCmd := exec.Command(diffPath, "-u", pathA, pathB) diffCmd.Stdout = output diffCmd.Stderr = output - output.WriteString("$ diff " + pathA + " " + pathB + "\n") + output.WriteString("$ diff -u " + pathA + " " + pathB + "\n") if err := diffCmd.Run(); err != nil { output.WriteString("\n" + err.Error()) } diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go b/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go index 6114227..cd94b3e 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go @@ -18,12 +18,12 @@ import ( "fmt" "io" "os" + "os/exec" "path/filepath" "strings" "text/template" ) -var cmdDirs = [...]string{"cmd", "cmds", "command", "commands"} var srcPaths []string func init() { @@ -31,7 +31,27 @@ func init() { envGoPath := os.Getenv("GOPATH") goPaths := filepath.SplitList(envGoPath) if len(goPaths) == 0 { - er("$GOPATH is not set") + // Adapted from https://github.com/Masterminds/glide/pull/798/files. + // As of Go 1.8 the GOPATH is no longer required to be set. Instead there + // is a default value. If there is no GOPATH check for the default value. + // Note, checking the GOPATH first to avoid invoking the go toolchain if + // possible. + + goExecutable := os.Getenv("COBRA_GO_EXECUTABLE") + if len(goExecutable) <= 0 { + goExecutable = "go" + } + + out, err := exec.Command(goExecutable, "env", "GOPATH").Output() + if err != nil { + er(err) + } + + toolchainGoPath := strings.TrimSpace(string(out)) + goPaths = filepath.SplitList(toolchainGoPath) + if len(goPaths) == 0 { + er("$GOPATH is not set") + } } srcPaths = make([]string, 0, len(goPaths)) for _, goPath := range goPaths { @@ -45,24 +65,34 @@ func er(msg interface{}) { } // isEmpty checks if a given path is empty. +// Hidden files in path are ignored. func isEmpty(path string) bool { fi, err := os.Stat(path) if err != nil { er(err) } - if fi.IsDir() { - f, err := os.Open(path) - if err != nil { - er(err) - } - defer f.Close() - dirs, err := f.Readdirnames(1) - if err != nil && err != io.EOF { - er(err) + + if !fi.IsDir() { + return fi.Size() == 0 + } + + f, err := os.Open(path) + if err != nil { + er(err) + } + defer f.Close() + + names, err := f.Readdirnames(-1) + if err != nil && err != io.EOF { + er(err) + } + + for _, name := range names { + if len(name) > 0 && name[0] != '.' { + return false } - return len(dirs) == 0 } - return fi.Size() == 0 + return true } // exists checks if a file or directory exists. @@ -97,8 +127,6 @@ func writeStringToFile(path string, s string) error { // writeToFile writes r to file with path only // if file/directory on given path doesn't exist. -// If file/directory exists on given path, then -// it terminates app and prints an appropriate error. 
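The helpers.go change above falls back to asking the go tool for GOPATH, since Go 1.8 provides a default value even when the environment variable is unset. A hedged, self-contained sketch of that lookup, including the COBRA_GO_EXECUTABLE override the patch introduces.

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)

// goPaths returns the GOPATH entries, asking the go tool when the
// environment variable is unset (Go 1.8+ supplies a default value).
func goPaths() ([]string, error) {
	if env := os.Getenv("GOPATH"); env != "" {
		return filepath.SplitList(env), nil
	}
	goExe := os.Getenv("COBRA_GO_EXECUTABLE")
	if goExe == "" {
		goExe = "go"
	}
	out, err := exec.Command(goExe, "env", "GOPATH").Output()
	if err != nil {
		return nil, err
	}
	return filepath.SplitList(strings.TrimSpace(string(out))), nil
}

func main() {
	paths, err := goPaths()
	fmt.Println(paths, err)
}
```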
func writeToFile(path string, r io.Reader) error { if exists(path) { return fmt.Errorf("%v already exists", path) diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/init.go b/vendor/github.com/spf13/cobra/cobra/cmd/init.go index 149aabe..d65e6c8 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/init.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/init.go @@ -65,7 +65,7 @@ Init will not use an existing directory with contents.`, initializeProject(project) fmt.Fprintln(cmd.OutOrStdout(), `Your Cobra application is ready at -`+project.AbsPath()+`. +`+project.AbsPath()+` Give it a try by going there and running `+"`go run main.go`."+` Add commands to it by running `+"`cobra add [cmdname]`.") @@ -150,8 +150,8 @@ import ( var cfgFile string{{end}} -// RootCmd represents the base command when called without any subcommands -var RootCmd = &cobra.Command{ +// rootCmd represents the base command when called without any subcommands +var rootCmd = &cobra.Command{ Use: "{{.appName}}", Short: "A brief description of your application", Long: ` + "`" + `A longer description that spans multiple lines and likely contains @@ -168,24 +168,24 @@ to quickly create a Cobra application.` + "`" + `, // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { - if err := RootCmd.Execute(); err != nil { + if err := rootCmd.Execute(); err != nil { fmt.Println(err) os.Exit(1) } } -func init() { {{if .viper}} +func init() { {{- if .viper}} cobra.OnInitialize(initConfig) {{end}} // Here you will define your flags and configuration settings. // Cobra supports persistent flags, which, if defined here, // will be global for your application.{{ if .viper }} - RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ else }} - // RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ end }} + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ else }} + // rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ end }} // Cobra also supports local flags, which will only run // when this action is called directly. - RootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") + rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") }{{ if .viper }} // initConfig reads in config file and ENV variables if set. 
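The regenerated root.go template wires viper through cobra.OnInitialize, which (per the updated comment in cobra.go) runs when each command's Execute method is called. A condensed sketch of that pattern, assuming a hypothetical "myapp" CLI; os.UserHomeDir stands in for the go-homedir lookup the real template uses.

```go
package cmd

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

var cfgFile string

var rootCmd = &cobra.Command{Use: "myapp"}

func init() {
	// initConfig runs once Execute is called on any command in the tree.
	cobra.OnInitialize(initConfig)
	rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.myapp.yaml)")
}

func initConfig() {
	if cfgFile != "" {
		// Use config file from the flag.
		viper.SetConfigFile(cfgFile)
	} else {
		// Search for .myapp.yaml in the home directory.
		home, _ := os.UserHomeDir()
		viper.AddConfigPath(home)
		viper.SetConfigName(".myapp")
	}
	viper.AutomaticEnv()
	if err := viper.ReadInConfig(); err == nil {
		fmt.Println("Using config file:", viper.ConfigFileUsed())
	}
}
```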
diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/init_test.go b/vendor/github.com/spf13/cobra/cobra/cmd/init_test.go index 9a918b9..40eb403 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/init_test.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/init_test.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "testing" + + "github.com/spf13/viper" ) // TestGoldenInitCmd initializes the project "github.com/spf13/testproject" @@ -17,6 +19,13 @@ func TestGoldenInitCmd(t *testing.T) { project := NewProject(projectName) defer os.RemoveAll(project.AbsPath()) + viper.Set("author", "NAME HERE ") + viper.Set("license", "apache") + viper.Set("year", 2017) + defer viper.Set("author", nil) + defer viper.Set("license", nil) + defer viper.Set("year", nil) + os.Args = []string{"cobra", "init", projectName} if err := rootCmd.Execute(); err != nil { t.Fatal("Error by execution:", err) @@ -44,7 +53,7 @@ func TestGoldenInitCmd(t *testing.T) { goldenPath := filepath.Join("testdata", filepath.Base(path)+".golden") switch relPath { - // Know directories. + // Known directories. case ".", "cmd": return nil // Known files. diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go index 4ea036e..bc22e97 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go @@ -4,8 +4,7 @@ func initAgpl() { Licenses["agpl"] = License{ Name: "GNU Affero General Public License", PossibleMatches: []string{"agpl", "affero gpl", "gnu agpl"}, - Header: `{{.copyright}} - + Header: ` This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go index 3f33086..38393d5 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go @@ -19,7 +19,8 @@ func initApache2() { Licenses["apache"] = License{ Name: "Apache 2.0", PossibleMatches: []string{"apache", "apache20", "apache 2.0", "apache2.0", "apache-2.0"}, - Header: `Licensed under the Apache License, Version 2.0 (the "License"); + Header: ` +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go index f2982da..4a847e0 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go @@ -20,8 +20,7 @@ func initBsdClause2() { Name: "Simplified BSD License", PossibleMatches: []string{"freebsd", "simpbsd", "simple bsd", "2-clause bsd", "2 clause bsd", "simplified bsd license"}, - Header: ` -All rights reserved. + Header: `All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go index 39c9571..c7476b3 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go @@ -19,8 +19,7 @@ func initBsdClause3() { Licenses["bsd"] = License{ Name: "NewBSD", PossibleMatches: []string{"bsd", "newbsd", "3 clause bsd", "3-clause bsd"}, - Header: ` -All rights reserved. + Header: `All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go index 054b470..03e05b3 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go @@ -19,20 +19,19 @@ func initGpl2() { Licenses["gpl2"] = License{ Name: "GNU General Public License 2.0", PossibleMatches: []string{"gpl2", "gnu gpl2", "gplv2"}, - Header: `{{.copyright}} - - This program is free software; you can redistribute it and/or - modify it under the terms of the GNU General Public License - as published by the Free Software Foundation; either version 2 - of the License, or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU Lesser General Public License - along with this program. If not, see .`, + Header: ` +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with this program. 
If not, see .`, Text: ` GNU GENERAL PUBLIC LICENSE Version 2, June 1991 diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go index d1ef656..ce07679 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go @@ -19,8 +19,7 @@ func initGpl3() { Licenses["gpl3"] = License{ Name: "GNU General Public License 3.0", PossibleMatches: []string{"gpl3", "gplv3", "gpl", "gnu gpl3", "gnu gpl"}, - Header: `{{.copyright}} - + Header: ` This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go index 75fd043..0f8b96c 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go @@ -4,8 +4,7 @@ func initLgpl() { Licenses["lgpl"] = License{ Name: "GNU Lesser General Public License", PossibleMatches: []string{"lgpl", "lesser gpl", "gnu lgpl"}, - Header: `{{.copyright}} - + Header: ` This program is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go index 4ff5a4c..bd2d0c4 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go @@ -17,7 +17,7 @@ package cmd func initMit() { Licenses["mit"] = License{ - Name: "Mit", + Name: "MIT License", PossibleMatches: []string{"mit"}, Header: ` Permission is hereby granted, free of charge, to any person obtaining a copy diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go b/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go index d73e6fb..a070134 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go @@ -63,7 +63,7 @@ func getLicense() License { // If user wants to have custom license, use that. if viper.IsSet("license.header") || viper.IsSet("license.text") { return License{Header: viper.GetString("license.header"), - Text: "license.text"} + Text: viper.GetString("license.text")} } // If user wants to have built-in license, use that. @@ -77,7 +77,11 @@ func getLicense() License { func copyrightLine() string { author := viper.GetString("author") - year := time.Now().Format("2006") + + year := viper.GetString("year") // For tests. + if year == "" { + year = time.Now().Format("2006") + } return "Copyright © " + year + " " + author } diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/project.go b/vendor/github.com/spf13/cobra/cobra/cmd/project.go index de1168a..7ddb825 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/project.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/project.go @@ -17,10 +17,9 @@ type Project struct { } // NewProject returns Project with specified project name. -// If projectName is blank string, it returns nil. 
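Both golden tests now pin author, license, and year through viper so the generated copyright header is deterministic (copyrightLine only falls back to time.Now when "year" is unset). A hedged sketch of that setup; the test name and placeholder author are illustrative.

```go
package cmd

import (
	"testing"

	"github.com/spf13/viper"
)

// TestGoldenOutputIsDeterministic sketches the setup the golden tests use:
// pinning author, license and year makes the generated header reproducible.
func TestGoldenOutputIsDeterministic(t *testing.T) {
	viper.Set("author", "NAME HERE") // placeholder author, as in the cobra testdata
	viper.Set("license", "apache")
	viper.Set("year", 2017)
	defer viper.Set("author", nil)
	defer viper.Set("license", nil)
	defer viper.Set("year", nil)

	// Here the real tests run `cobra init` / `cobra add` and compare the
	// generated files against testdata/*.golden.
}
```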
func NewProject(projectName string) *Project { if projectName == "" { - return nil + er("can't create project with blank name") } p := new(Project) @@ -54,8 +53,6 @@ func NewProject(projectName string) *Project { } // findPackage returns full path to existing go package in GOPATHs. -// findPackage returns "", if it can't find path. -// If packageName is "", findPackage returns "". func findPackage(packageName string) string { if packageName == "" { return "" @@ -73,16 +70,29 @@ func findPackage(packageName string) string { // NewProjectFromPath returns Project with specified absolute path to // package. -// If absPath is blank string or if absPath is not actually absolute, -// it returns nil. func NewProjectFromPath(absPath string) *Project { - if absPath == "" || !filepath.IsAbs(absPath) { - return nil + if absPath == "" { + er("can't create project: absPath can't be blank") + } + if !filepath.IsAbs(absPath) { + er("can't create project: absPath is not absolute") + } + + // If absPath is symlink, use its destination. + fi, err := os.Lstat(absPath) + if err != nil { + er("can't read path info: " + err.Error()) + } + if fi.Mode()&os.ModeSymlink != 0 { + path, err := os.Readlink(absPath) + if err != nil { + er("can't read the destination of symlink: " + err.Error()) + } + absPath = path } p := new(Project) - p.absPath = absPath - p.absPath = strings.TrimSuffix(p.absPath, findCmdDir(p.absPath)) + p.absPath = strings.TrimSuffix(absPath, findCmdDir(absPath)) p.name = filepath.ToSlash(trimSrcPath(p.absPath, p.SrcPath())) return p } @@ -91,7 +101,7 @@ func NewProjectFromPath(absPath string) *Project { func trimSrcPath(absPath, srcPath string) string { relPath, err := filepath.Rel(srcPath, absPath) if err != nil { - er("Cobra supports project only within $GOPATH: " + err.Error()) + er(err) } return relPath } @@ -101,7 +111,6 @@ func (p *Project) License() License { if p.license.Text == "" && p.license.Name != "None" { p.license = getLicense() } - return p.license } @@ -111,8 +120,6 @@ func (p Project) Name() string { } // CmdPath returns absolute path to directory, where all commands are located. -// -// CmdPath returns blank string, only if p.AbsPath() is a blank string. func (p *Project) CmdPath() string { if p.absPath == "" { return "" @@ -125,8 +132,6 @@ func (p *Project) CmdPath() string { // findCmdDir checks if base of absPath is cmd dir and returns it or // looks for existing cmd dir in absPath. -// If the cmd dir doesn't exist, empty, or cannot be found, -// it returns "cmd". func findCmdDir(absPath string) string { if !exists(absPath) || isEmpty(absPath) { return "cmd" @@ -149,7 +154,7 @@ func findCmdDir(absPath string) string { // isCmdDir checks if base of name is one of cmdDir. 
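NewProjectFromPath now follows a symlinked absPath via os.Lstat and os.Readlink instead of silently returning nil. A minimal standalone sketch of the same resolution step, assuming a hypothetical path.

```go
package main

import (
	"fmt"
	"os"
)

// resolveSymlink returns the destination of path if it is a symlink,
// otherwise it returns path unchanged (mirrors the project.go change).
func resolveSymlink(path string) (string, error) {
	fi, err := os.Lstat(path)
	if err != nil {
		return "", err
	}
	if fi.Mode()&os.ModeSymlink != 0 {
		return os.Readlink(path)
	}
	return path, nil
}

func main() {
	p, err := resolveSymlink("/tmp/some-project") // hypothetical path
	fmt.Println(p, err)
}
```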
func isCmdDir(name string) bool { name = filepath.Base(name) - for _, cmdDir := range cmdDirs { + for _, cmdDir := range []string{"cmd", "cmds", "command", "commands"} { if name == cmdDir { return true } diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/root.go b/vendor/github.com/spf13/cobra/cobra/cmd/root.go index 1c5e690..19568f9 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/root.go +++ b/vendor/github.com/spf13/cobra/cobra/cmd/root.go @@ -40,7 +40,7 @@ func Execute() { } func init() { - initViper() + cobra.OnInitialize(initConfig) rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution") @@ -55,7 +55,7 @@ func init() { rootCmd.AddCommand(initCmd) } -func initViper() { +func initConfig() { if cfgFile != "" { // Use config file from the flag. viper.SetConfigFile(cfgFile) diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/main.go.golden b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/main.go.golden index 69ecbd4..cdbe38d 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/main.go.golden +++ b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/main.go.golden @@ -1,4 +1,5 @@ // Copyright © 2017 NAME HERE +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/root.go.golden b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/root.go.golden index ecc8760..d74f4cd 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/root.go.golden +++ b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/root.go.golden @@ -1,4 +1,5 @@ // Copyright © 2017 NAME HERE +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -24,8 +25,8 @@ import ( var cfgFile string -// RootCmd represents the base command when called without any subcommands -var RootCmd = &cobra.Command{ +// rootCmd represents the base command when called without any subcommands +var rootCmd = &cobra.Command{ Use: "testproject", Short: "A brief description of your application", Long: `A longer description that spans multiple lines and likely contains @@ -42,23 +43,23 @@ to quickly create a Cobra application.`, // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { - if err := RootCmd.Execute(); err != nil { + if err := rootCmd.Execute(); err != nil { fmt.Println(err) os.Exit(1) } } -func init() { +func init() { cobra.OnInitialize(initConfig) // Here you will define your flags and configuration settings. // Cobra supports persistent flags, which, if defined here, // will be global for your application. - RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.testproject.yaml)") + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.testproject.yaml)") // Cobra also supports local flags, which will only run // when this action is called directly. - RootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") + rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") } // initConfig reads in config file and ENV variables if set. 
diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/test.go.golden b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/test.go.golden index c8319d1..ed64427 100644 --- a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/test.go.golden +++ b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/test.go.golden @@ -1,4 +1,5 @@ // Copyright © 2017 NAME HERE +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -35,7 +36,7 @@ to quickly create a Cobra application.`, } func init() { - RootCmd.AddCommand(testCmd) + rootCmd.AddCommand(testCmd) // Here you will define your flags and configuration settings. diff --git a/vendor/github.com/spf13/cobra/cobra_test.go b/vendor/github.com/spf13/cobra/cobra_test.go index 576c97d..0d1755b 100644 --- a/vendor/github.com/spf13/cobra/cobra_test.go +++ b/vendor/github.com/spf13/cobra/cobra_test.go @@ -1,1129 +1,10 @@ package cobra import ( - "bytes" - "fmt" - "os" - "reflect" - "runtime" - "strings" "testing" "text/template" - - "github.com/spf13/pflag" ) -var tp, te, tt, tr []string -var rootPersPre, echoPre, echoPersPre, timesPersPre []string -var flagb1, flagb2, flagb3, flagbr, flagbp bool -var flags1, flags2a, flags2b, flags3, outs string -var flagi1, flagi2, flagi3, flagi4, flagir int -var rootcalled bool -var versionUsed int - -const strtwoParentHelp = "help message for parent flag strtwo" -const strtwoChildHelp = "help message for child flag strtwo" - -var cmdHidden = &Command{ - Use: "hide [secret string to print]", - Short: "Print anything to screen (if command is known)", - Long: `an absolutely utterly useless command for testing.`, - Run: func(cmd *Command, args []string) { - outs = "hidden" - }, - Hidden: true, -} - -var cmdPrint = &Command{ - Use: "print [string to print]", - Short: "Print anything to the screen", - Long: `an absolutely utterly useless command for testing.`, - Run: func(cmd *Command, args []string) { - tp = args - }, -} - -var cmdEcho = &Command{ - Use: "echo [string to echo]", - Aliases: []string{"say"}, - Short: "Echo anything to the screen", - Long: `an utterly useless command for testing.`, - Example: "Just run cobra-test echo", - PersistentPreRun: func(cmd *Command, args []string) { - echoPersPre = args - }, - PreRun: func(cmd *Command, args []string) { - echoPre = args - }, - Run: func(cmd *Command, args []string) { - te = args - }, -} - -var cmdEchoSub = &Command{ - Use: "echosub [string to print]", - Short: "second sub command for echo", - Long: `an absolutely utterly useless command for testing gendocs!.`, - Run: func(cmd *Command, args []string) { - }, -} - -var cmdDeprecated = &Command{ - Use: "deprecated [can't do anything here]", - Short: "A command which is deprecated", - Long: `an absolutely utterly useless command for testing deprecation!.`, - Deprecated: "Please use echo instead", - Run: func(cmd *Command, args []string) { - }, -} - -var cmdTimes = &Command{ - Use: "times [# times] [string to echo]", - SuggestFor: []string{"counts"}, - Short: "Echo anything to the screen more times", - Long: `a slightly useless command for testing.`, - PersistentPreRun: func(cmd *Command, args []string) { - timesPersPre = args - }, - Run: func(cmd *Command, args []string) { - tt = args - }, -} - -var cmdRootNoRun = &Command{ - Use: "cobra-test", - Short: "The root can run its own function", - Long: "The root description for help", - PersistentPreRun: func(cmd *Command, args []string) { - rootPersPre 
= args - }, -} - -var cmdRootSameName = &Command{ - Use: "print", - Short: "Root with the same name as a subcommand", - Long: "The root description for help", -} - -var cmdRootWithRun = &Command{ - Use: "cobra-test", - Short: "The root can run its own function", - Long: "The root description for help", - Run: func(cmd *Command, args []string) { - tr = args - rootcalled = true - }, -} - -var cmdSubNoRun = &Command{ - Use: "subnorun", - Short: "A subcommand without a Run function", - Long: "A long output about a subcommand without a Run function", -} - -var cmdCustomFlags = &Command{ - Use: "customflags [flags] -- REMOTE_COMMAND", - Short: "A command that expects flags in a custom location", - Long: "A long output about a command that expects flags in a custom location", - Run: func(cmd *Command, args []string) { - }, -} - -var cmdVersion1 = &Command{ - Use: "version", - Short: "Print the version number", - Long: `First version of the version command`, - Run: func(cmd *Command, args []string) { - versionUsed = 1 - }, -} - -var cmdVersion2 = &Command{ - Use: "version", - Short: "Print the version number", - Long: `Second version of the version command`, - Run: func(cmd *Command, args []string) { - versionUsed = 2 - }, -} - -var cmdColon = &Command{ - Use: "cmd:colon", - Run: func(cmd *Command, args []string) { - }, -} - -func flagInit() { - cmdEcho.ResetFlags() - cmdPrint.ResetFlags() - cmdTimes.ResetFlags() - cmdRootNoRun.ResetFlags() - cmdRootSameName.ResetFlags() - cmdRootWithRun.ResetFlags() - cmdSubNoRun.ResetFlags() - cmdCustomFlags.ResetFlags() - cmdVersion1.ResetFlags() - cmdVersion2.ResetFlags() - - cmdRootNoRun.PersistentFlags().StringVarP(&flags2a, "strtwo", "t", "two", strtwoParentHelp) - cmdCustomFlags.Flags().IntVar(&flagi4, "intfour", 456, "help message for flag intfour") - cmdEcho.Flags().BoolVarP(&flagb1, "boolone", "b", true, "help message for flag boolone") - cmdEcho.Flags().IntVarP(&flagi1, "intone", "i", 123, "help message for flag intone") - cmdEcho.PersistentFlags().BoolVarP(&flagbp, "persistentbool", "p", false, "help message for flag persistentbool") - cmdEcho.PersistentFlags().StringVarP(&flags1, "strone", "s", "one", "help message for flag strone") - cmdPrint.Flags().IntVarP(&flagi3, "intthree", "i", 345, "help message for flag intthree") - cmdTimes.Flags().BoolVarP(&flagb2, "booltwo", "c", false, "help message for flag booltwo") - cmdTimes.Flags().IntVarP(&flagi2, "inttwo", "j", 234, "help message for flag inttwo") - cmdTimes.Flags().StringVarP(&flags2b, "strtwo", "t", "2", strtwoChildHelp) - cmdTimes.PersistentFlags().StringVarP(&flags2b, "strtwo", "t", "2", strtwoChildHelp) - cmdPrint.Flags().BoolVarP(&flagb3, "boolthree", "b", true, "help message for flag boolthree") - cmdPrint.PersistentFlags().StringVarP(&flags3, "strthree", "s", "three", "help message for flag strthree") -} - -func commandInit() { - cmdEcho.ResetCommands() - cmdPrint.ResetCommands() - cmdTimes.ResetCommands() - cmdRootNoRun.ResetCommands() - cmdRootSameName.ResetCommands() - cmdRootWithRun.ResetCommands() - cmdSubNoRun.ResetCommands() - cmdCustomFlags.ResetCommands() -} - -func initialize() *Command { - tt, tp, te = nil, nil, nil - rootPersPre, echoPre, echoPersPre, timesPersPre = nil, nil, nil, nil - - var c = cmdRootNoRun - flagInit() - commandInit() - return c -} - -func initializeWithSameName() *Command { - tt, tp, te = nil, nil, nil - rootPersPre, echoPre, echoPersPre, timesPersPre = nil, nil, nil, nil - var c = cmdRootSameName - flagInit() - commandInit() - return c -} - -func 
initializeWithRootCmd() *Command { - cmdRootWithRun.ResetCommands() - tt, tp, te, tr, rootcalled = nil, nil, nil, nil, false - flagInit() - cmdRootWithRun.Flags().BoolVarP(&flagbr, "boolroot", "b", false, "help message for flag boolroot") - cmdRootWithRun.Flags().IntVarP(&flagir, "introot", "i", 321, "help message for flag introot") - commandInit() - return cmdRootWithRun -} - -type resulter struct { - Error error - Output string - Command *Command -} - -func fullSetupTest(args ...string) resulter { - c := initializeWithRootCmd() - - return fullTester(c, args...) -} - -func noRRSetupTestSilenced(args ...string) resulter { - c := initialize() - c.SilenceErrors = true - c.SilenceUsage = true - return fullTester(c, args...) -} - -func noRRSetupTest(args ...string) resulter { - c := initialize() - - return fullTester(c, args...) -} - -func rootOnlySetupTest(args ...string) resulter { - c := initializeWithRootCmd() - - return simpleTester(c, args...) -} - -func simpleTester(c *Command, args ...string) resulter { - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - c.SetArgs(args) - - err := c.Execute() - output := buf.String() - - return resulter{err, output, c} -} - -func simpleTesterC(c *Command, args ...string) resulter { - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - c.SetArgs(args) - - cmd, err := c.ExecuteC() - output := buf.String() - - return resulter{err, output, cmd} -} - -func fullTester(c *Command, args ...string) resulter { - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdPrint, cmdEcho, cmdSubNoRun, cmdCustomFlags, cmdDeprecated) - c.SetArgs(args) - - err := c.Execute() - output := buf.String() - - return resulter{err, output, c} -} - -func logErr(t *testing.T, found, expected string) { - out := new(bytes.Buffer) - - _, _, line, ok := runtime.Caller(2) - if ok { - fmt.Fprintf(out, "Line: %d ", line) - } - fmt.Fprintf(out, "Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - t.Errorf(out.String()) -} - -func checkStringContains(t *testing.T, found, expected string) { - if !strings.Contains(found, expected) { - logErr(t, found, expected) - } -} - -func checkResultContains(t *testing.T, x resulter, check string) { - checkStringContains(t, x.Output, check) -} - -func checkStringOmits(t *testing.T, found, expected string) { - if strings.Contains(found, expected) { - logErr(t, found, expected) - } -} - -func checkResultOmits(t *testing.T, x resulter, check string) { - checkStringOmits(t, x.Output, check) -} - -func checkOutputContains(t *testing.T, c *Command, check string) { - buf := new(bytes.Buffer) - c.SetOutput(buf) - c.Execute() - - if !strings.Contains(buf.String(), check) { - logErr(t, buf.String(), check) - } -} - -func TestSingleCommand(t *testing.T) { - noRRSetupTest("print", "one", "two") - - if te != nil || tt != nil { - t.Error("Wrong command called") - } - if tp == nil { - t.Error("Wrong command called") - } - if strings.Join(tp, " ") != "one two" { - t.Error("Command didn't parse correctly") - } -} - -func TestChildCommand(t *testing.T) { - noRRSetupTest("echo", "times", "one", "two") - - if te != nil || tp != nil { - t.Error("Wrong command called") - } - if tt == nil { - t.Error("Wrong command called") - } - if strings.Join(tt, " ") != "one two" { - t.Error("Command didn't parse correctly") - } -} - -func TestCommandAlias(t *testing.T) { - noRRSetupTest("say", "times", "one", "two") - - 
if te != nil || tp != nil { - t.Error("Wrong command called") - } - if tt == nil { - t.Error("Wrong command called") - } - if strings.Join(tt, " ") != "one two" { - t.Error("Command didn't parse correctly") - } -} - -func TestPrefixMatching(t *testing.T) { - EnablePrefixMatching = true - noRRSetupTest("ech", "times", "one", "two") - - if te != nil || tp != nil { - t.Error("Wrong command called") - } - if tt == nil { - t.Error("Wrong command called") - } - if strings.Join(tt, " ") != "one two" { - t.Error("Command didn't parse correctly") - } - - EnablePrefixMatching = false -} - -func TestNoPrefixMatching(t *testing.T) { - EnablePrefixMatching = false - - noRRSetupTest("ech", "times", "one", "two") - - if !(tt == nil && te == nil && tp == nil) { - t.Error("Wrong command called") - } -} - -func TestAliasPrefixMatching(t *testing.T) { - EnablePrefixMatching = true - noRRSetupTest("sa", "times", "one", "two") - - if te != nil || tp != nil { - t.Error("Wrong command called") - } - if tt == nil { - t.Error("Wrong command called") - } - if strings.Join(tt, " ") != "one two" { - t.Error("Command didn't parse correctly") - } - EnablePrefixMatching = false -} - -func TestChildSameName(t *testing.T) { - c := initializeWithSameName() - c.AddCommand(cmdPrint, cmdEcho) - c.SetArgs([]string{"print", "one", "two"}) - c.Execute() - - if te != nil || tt != nil { - t.Error("Wrong command called") - } - if tp == nil { - t.Error("Wrong command called") - } - if strings.Join(tp, " ") != "one two" { - t.Error("Command didn't parse correctly") - } -} - -func TestGrandChildSameName(t *testing.T) { - c := initializeWithSameName() - cmdTimes.AddCommand(cmdPrint) - c.AddCommand(cmdTimes) - c.SetArgs([]string{"times", "print", "one", "two"}) - c.Execute() - - if te != nil || tt != nil { - t.Error("Wrong command called") - } - if tp == nil { - t.Error("Wrong command called") - } - if strings.Join(tp, " ") != "one two" { - t.Error("Command didn't parse correctly") - } -} - -func TestUsage(t *testing.T) { - x := fullSetupTest("help") - checkResultContains(t, x, cmdRootWithRun.Use+" [flags]") - x = fullSetupTest("help", "customflags") - checkResultContains(t, x, cmdCustomFlags.Use) - checkResultOmits(t, x, cmdCustomFlags.Use+" [flags]") -} - -func TestFlagLong(t *testing.T) { - noRRSetupTest("echo", "--intone=13", "something", "--", "here") - - if cmdEcho.ArgsLenAtDash() != 1 { - t.Errorf("expected argsLenAtDash: %d but got %d", 1, cmdRootNoRun.ArgsLenAtDash()) - } - if strings.Join(te, " ") != "something here" { - t.Errorf("flags didn't leave proper args remaining..%s given", te) - } - if flagi1 != 13 { - t.Errorf("int flag didn't get correct value, had %d", flagi1) - } - if flagi2 != 234 { - t.Errorf("default flag value changed, 234 expected, %d given", flagi2) - } -} - -func TestFlagShort(t *testing.T) { - noRRSetupTest("echo", "-i13", "--", "something", "here") - - if cmdEcho.ArgsLenAtDash() != 0 { - t.Errorf("expected argsLenAtDash: %d but got %d", 0, cmdRootNoRun.ArgsLenAtDash()) - } - if strings.Join(te, " ") != "something here" { - t.Errorf("flags didn't leave proper args remaining..%s given", te) - } - if flagi1 != 13 { - t.Errorf("int flag didn't get correct value, had %d", flagi1) - } - if flagi2 != 234 { - t.Errorf("default flag value changed, 234 expected, %d given", flagi2) - } - - noRRSetupTest("echo", "-i", "13", "something", "here") - - if strings.Join(te, " ") != "something here" { - t.Errorf("flags didn't leave proper args remaining..%s given", te) - } - if flagi1 != 13 { - t.Errorf("int flag didn't 
get correct value, had %d", flagi1) - } - if flagi2 != 234 { - t.Errorf("default flag value changed, 234 expected, %d given", flagi2) - } - - noRRSetupTest("print", "-i99", "one", "two") - - if strings.Join(tp, " ") != "one two" { - t.Errorf("flags didn't leave proper args remaining..%s given", tp) - } - if flagi3 != 99 { - t.Errorf("int flag didn't get correct value, had %d", flagi3) - } - if flagi1 != 123 { - t.Errorf("default flag value changed on different command with same shortname, 234 expected, %d given", flagi2) - } -} - -func TestChildCommandFlags(t *testing.T) { - noRRSetupTest("echo", "times", "-j", "99", "one", "two") - - if strings.Join(tt, " ") != "one two" { - t.Errorf("flags didn't leave proper args remaining..%s given", tt) - } - - // Testing with flag that shouldn't be persistent - r := noRRSetupTest("echo", "times", "-j", "99", "-i77", "one", "two") - - if r.Error == nil { - t.Errorf("invalid flag should generate error") - } - - if !strings.Contains(r.Error.Error(), "unknown shorthand") { - t.Errorf("Wrong error message displayed, \n %s", r.Error) - } - - if flagi2 != 99 { - t.Errorf("flag value should be 99, %d given", flagi2) - } - - if flagi1 != 123 { - t.Errorf("unset flag should have default value, expecting 123, given %d", flagi1) - } - - // Testing with flag only existing on child - r = noRRSetupTest("echo", "-j", "99", "-i77", "one", "two") - - if r.Error == nil { - t.Errorf("invalid flag should generate error") - } - if !strings.Contains(r.Error.Error(), "unknown shorthand flag") { - t.Errorf("Wrong error message displayed, \n %s", r.Error) - } - - // Testing with persistent flag overwritten by child - noRRSetupTest("echo", "times", "--strtwo=child", "one", "two") - - if flags2b != "child" { - t.Errorf("flag value should be child, %s given", flags2b) - } - - if flags2a != "two" { - t.Errorf("unset flag should have default value, expecting two, given %s", flags2a) - } - - // Testing flag with invalid input - r = noRRSetupTest("echo", "-i10E") - - if r.Error == nil { - t.Errorf("invalid input should generate error") - } - if !strings.Contains(r.Error.Error(), "invalid syntax") { - t.Errorf("Wrong error message displayed, \n %s", r.Error) - } -} - -func TestTrailingCommandFlags(t *testing.T) { - x := fullSetupTest("echo", "two", "-x") - - if x.Error == nil { - t.Errorf("invalid flag should generate error") - } -} - -func TestInvalidSubcommandFlags(t *testing.T) { - cmd := initializeWithRootCmd() - cmd.AddCommand(cmdTimes) - - result := simpleTester(cmd, "times", "--inttwo=2", "--badflag=bar") - // given that we are not checking here result.Error we check for - // stock usage message - checkResultContains(t, result, "cobra-test times [# times]") - if strings.Contains(result.Error.Error(), "unknown flag: --inttwo") { - t.Errorf("invalid --badflag flag shouldn't fail on 'unknown' --inttwo flag") - } - -} - -func TestSubcommandExecuteC(t *testing.T) { - cmd := initializeWithRootCmd() - double := &Command{ - Use: "double message", - Run: func(c *Command, args []string) { - msg := strings.Join(args, " ") - c.Println(msg, msg) - }, - } - - echo := &Command{ - Use: "echo message", - Run: func(c *Command, args []string) { - msg := strings.Join(args, " ") - c.Println(msg) - }, - } - - cmd.AddCommand(double, echo) - - result := simpleTesterC(cmd, "double", "hello", "world") - checkResultContains(t, result, "hello world hello world") - - if result.Command.Name() != "double" { - t.Errorf("invalid cmd returned from ExecuteC: should be 'double' but got %s", 
result.Command.Name()) - } - - result = simpleTesterC(cmd, "echo", "msg", "to", "be", "echoed") - checkResultContains(t, result, "msg to be echoed") - - if result.Command.Name() != "echo" { - t.Errorf("invalid cmd returned from ExecuteC: should be 'echo' but got %s", result.Command.Name()) - } -} - -func TestSubcommandArgEvaluation(t *testing.T) { - cmd := initializeWithRootCmd() - - first := &Command{ - Use: "first", - Run: func(cmd *Command, args []string) { - }, - } - cmd.AddCommand(first) - - second := &Command{ - Use: "second", - Run: func(cmd *Command, args []string) { - fmt.Fprintf(cmd.OutOrStdout(), "%v", args) - }, - } - first.AddCommand(second) - - result := simpleTester(cmd, "first", "second", "first", "third") - - expectedOutput := fmt.Sprint([]string{"first third"}) - if result.Output != expectedOutput { - t.Errorf("exptected %v, got %v", expectedOutput, result.Output) - } -} - -func TestPersistentFlags(t *testing.T) { - fullSetupTest("echo", "-s", "something", "-p", "more", "here") - - // persistentFlag should act like normal flag on its own command - if strings.Join(te, " ") != "more here" { - t.Errorf("flags didn't leave proper args remaining..%s given", te) - } - if flags1 != "something" { - t.Errorf("string flag didn't get correct value, had %v", flags1) - } - if !flagbp { - t.Errorf("persistent bool flag not parsed correctly. Expected true, had %v", flagbp) - } - - // persistentFlag should act like normal flag on its own command - fullSetupTest("echo", "times", "-s", "again", "-c", "-p", "test", "here") - - if strings.Join(tt, " ") != "test here" { - t.Errorf("flags didn't leave proper args remaining. %s given", tt) - } - - if flags1 != "again" { - t.Errorf("string flag didn't get correct value, had %v", flags1) - } - - if !flagb2 { - t.Errorf("local flag not parsed correctly. Expected true, had %v", flagb2) - } - if !flagbp { - t.Errorf("persistent bool flag not parsed correctly. 
Expected true, had %v", flagbp) - } -} - -func TestHelpCommand(t *testing.T) { - x := fullSetupTest("help") - checkResultContains(t, x, cmdRootWithRun.Long) - - x = fullSetupTest("help", "echo") - checkResultContains(t, x, cmdEcho.Long) - - x = fullSetupTest("help", "echo", "times") - checkResultContains(t, x, cmdTimes.Long) -} - -func TestChildCommandHelp(t *testing.T) { - c := noRRSetupTest("print", "--help") - checkResultContains(t, c, strtwoParentHelp) - r := noRRSetupTest("echo", "times", "--help") - checkResultContains(t, r, strtwoChildHelp) -} - -func TestNonRunChildHelp(t *testing.T) { - x := noRRSetupTest("subnorun") - checkResultContains(t, x, cmdSubNoRun.Long) -} - -func TestRunnableRootCommand(t *testing.T) { - x := fullSetupTest("") - - if !rootcalled { - t.Errorf("Root Function was not called\n out:%v", x.Error) - } -} - -func TestVisitParents(t *testing.T) { - c := &Command{Use: "app"} - sub := &Command{Use: "sub"} - dsub := &Command{Use: "dsub"} - sub.AddCommand(dsub) - c.AddCommand(sub) - total := 0 - add := func(x *Command) { - total++ - } - sub.VisitParents(add) - if total != 1 { - t.Errorf("Should have visited 1 parent but visited %d", total) - } - - total = 0 - dsub.VisitParents(add) - if total != 2 { - t.Errorf("Should have visited 2 parent but visited %d", total) - } - - total = 0 - c.VisitParents(add) - if total != 0 { - t.Errorf("Should have not visited any parent but visited %d", total) - } -} - -func TestRunnableRootCommandNilInput(t *testing.T) { - c := initializeWithRootCmd() - - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdPrint, cmdEcho) - c.SetArgs([]string{}) - - err := c.Execute() - if err != nil { - t.Errorf("Execute() failed with %v", err) - } - - if !rootcalled { - t.Errorf("Root Function was not called") - } -} - -func TestRunnableRootCommandEmptyInput(t *testing.T) { - args := []string{"", "--introot=12", ""} - c := initializeWithRootCmd() - - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdPrint, cmdEcho) - c.SetArgs(args) - - c.Execute() - - if !rootcalled { - t.Errorf("Root Function was not called.\nOutput was:\n%s\n", buf) - } -} - -func TestInvalidSubcommandWhenArgsAllowed(t *testing.T) { - fullSetupTest("echo", "invalid-sub") - - if te[0] != "invalid-sub" { - t.Errorf("Subcommand didn't work...") - } -} - -func TestRootFlags(t *testing.T) { - fullSetupTest("-i", "17", "-b") - - if !flagbr { - t.Errorf("flag value should be true, %v given", flagbr) - } - - if flagir != 17 { - t.Errorf("flag value should be 17, %d given", flagir) - } -} - -func TestRootHelp(t *testing.T) { - x := fullSetupTest("--help") - - checkResultContains(t, x, "Available Commands:") - checkResultContains(t, x, "for more information about a command") - - if strings.Contains(x.Output, "unknown flag: --help") { - t.Errorf("--help shouldn't trigger an error, Got: \n %s", x.Output) - } - - if strings.Contains(x.Output, cmdEcho.Use) { - t.Errorf("--help shouldn't display subcommand's usage, Got: \n %s", x.Output) - } - - x = fullSetupTest("echo", "--help") - - if strings.Contains(x.Output, cmdTimes.Use) { - t.Errorf("--help shouldn't display subsubcommand's usage, Got: \n %s", x.Output) - } - - checkResultContains(t, x, "Available Commands:") - checkResultContains(t, x, "for more information about a command") - - if strings.Contains(x.Output, "unknown flag: --help") { - t.Errorf("--help shouldn't trigger an error, 
Got: \n %s", x.Output) - } - -} - -func TestFlagAccess(t *testing.T) { - initialize() - - local := cmdTimes.LocalFlags() - inherited := cmdTimes.InheritedFlags() - - for _, f := range []string{"inttwo", "strtwo", "booltwo"} { - if local.Lookup(f) == nil { - t.Errorf("LocalFlags expected to contain %s, Got: nil", f) - } - } - if inherited.Lookup("strone") == nil { - t.Errorf("InheritedFlags expected to contain strone, Got: nil") - } - if inherited.Lookup("strtwo") != nil { - t.Errorf("InheritedFlags shouldn not contain overwritten flag strtwo") - } -} - -func TestNoNRunnableRootCommandNilInput(t *testing.T) { - c := initialize() - - buf := new(bytes.Buffer) - // Testing flag with invalid input - c.SetOutput(buf) - cmdEcho.AddCommand(cmdTimes) - c.AddCommand(cmdPrint, cmdEcho) - c.SetArgs([]string{}) - - c.Execute() - - if !strings.Contains(buf.String(), cmdRootNoRun.Long) { - t.Errorf("Expected to get help output, Got: \n %s", buf) - } -} - -func TestRootNoCommandHelp(t *testing.T) { - x := rootOnlySetupTest("--help") - - checkResultOmits(t, x, "Available Commands:") - checkResultOmits(t, x, "for more information about a command") - - if strings.Contains(x.Output, "unknown flag: --help") { - t.Errorf("--help shouldn't trigger an error, Got: \n %s", x.Output) - } - - x = rootOnlySetupTest("echo", "--help") - - checkResultOmits(t, x, "Available Commands:") - checkResultOmits(t, x, "for more information about a command") - - if strings.Contains(x.Output, "unknown flag: --help") { - t.Errorf("--help shouldn't trigger an error, Got: \n %s", x.Output) - } -} - -func TestRootUnknownCommand(t *testing.T) { - r := noRRSetupTest("bogus") - s := "Error: unknown command \"bogus\" for \"cobra-test\"\nRun 'cobra-test --help' for usage.\n" - - if r.Output != s { - t.Errorf("Unexpected response.\nExpecting to be:\n %q\nGot:\n %q\n", s, r.Output) - } - - r = noRRSetupTest("--strtwo=a", "bogus") - if r.Output != s { - t.Errorf("Unexpected response.\nExpecting to be:\n %q\nGot:\n %q\n", s, r.Output) - } -} - -func TestRootUnknownCommandSilenced(t *testing.T) { - r := noRRSetupTestSilenced("bogus") - - if r.Output != "" { - t.Errorf("Unexpected response.\nExpecting to be: \n\"\"\n Got:\n %q\n", r.Output) - } - - r = noRRSetupTestSilenced("--strtwo=a", "bogus") - if r.Output != "" { - t.Errorf("Unexpected response.\nExpecting to be:\n\"\"\nGot:\n %q\n", r.Output) - } -} - -func TestRootSuggestions(t *testing.T) { - outputWithSuggestions := "Error: unknown command \"%s\" for \"cobra-test\"\n\nDid you mean this?\n\t%s\n\nRun 'cobra-test --help' for usage.\n" - outputWithoutSuggestions := "Error: unknown command \"%s\" for \"cobra-test\"\nRun 'cobra-test --help' for usage.\n" - - cmd := initializeWithRootCmd() - cmd.AddCommand(cmdTimes) - - tests := map[string]string{ - "time": "times", - "tiems": "times", - "tims": "times", - "timeS": "times", - "rimes": "times", - "ti": "times", - "t": "times", - "timely": "times", - "ri": "", - "timezone": "", - "foo": "", - "counts": "times", - } - - for typo, suggestion := range tests { - for _, suggestionsDisabled := range []bool{false, true} { - cmd.DisableSuggestions = suggestionsDisabled - result := simpleTester(cmd, typo) - expected := "" - if len(suggestion) == 0 || suggestionsDisabled { - expected = fmt.Sprintf(outputWithoutSuggestions, typo) - } else { - expected = fmt.Sprintf(outputWithSuggestions, typo, suggestion) - } - if result.Output != expected { - t.Errorf("Unexpected response.\nExpecting to be:\n %q\nGot:\n %q\n", expected, result.Output) - } - } - } -} - 
-func TestFlagsBeforeCommand(t *testing.T) { - // short without space - x := fullSetupTest("-i10", "echo") - if x.Error != nil { - t.Errorf("Valid Input shouldn't have errors, got:\n %q", x.Error) - } - - x = noRRSetupTest("echo", "-i=10") - if x.Error != nil { - t.Errorf("Valid Input shouldn't have errors, got:\n %s", x.Error) - } - - // long with equals - x = noRRSetupTest("--intone=123", "echo", "one", "two") - if x.Error != nil { - t.Errorf("Valid Input shouldn't have errors, got:\n %s", x.Error) - } - - // With parsing error properly reported - x = fullSetupTest("-i10E", "echo") - if !strings.Contains(x.Error.Error(), "invalid syntax") { - t.Errorf("Wrong error message displayed, \n %s", x.Error) - } -} - -func TestRemoveCommand(t *testing.T) { - versionUsed = 0 - c := initializeWithRootCmd() - c.AddCommand(cmdVersion1) - c.RemoveCommand(cmdVersion1) - x := fullTester(c, "version") - if x.Error == nil { - t.Errorf("Removed command should not have been called\n") - return - } -} - -func TestCommandWithoutSubcommands(t *testing.T) { - c := initializeWithRootCmd() - - x := simpleTester(c, "") - if x.Error != nil { - t.Errorf("Calling command without subcommands should not have error: %v", x.Error) - return - } -} - -func TestCommandWithoutSubcommandsWithArg(t *testing.T) { - c := initializeWithRootCmd() - expectedArgs := []string{"arg"} - - x := simpleTester(c, "arg") - if x.Error != nil { - t.Errorf("Calling command without subcommands but with arg should not have error: %v", x.Error) - return - } - if !reflect.DeepEqual(expectedArgs, tr) { - t.Errorf("Calling command without subcommands but with arg has wrong args: expected: %v, actual: %v", expectedArgs, tr) - return - } -} - -func TestReplaceCommandWithRemove(t *testing.T) { - versionUsed = 0 - c := initializeWithRootCmd() - c.AddCommand(cmdVersion1) - c.RemoveCommand(cmdVersion1) - c.AddCommand(cmdVersion2) - x := fullTester(c, "version") - if x.Error != nil { - t.Errorf("Valid Input shouldn't have errors, got:\n %q", x.Error) - return - } - if versionUsed == 1 { - t.Errorf("Removed command shouldn't be called\n") - } - if versionUsed != 2 { - t.Errorf("Replacing command should have been called but didn't\n") - } -} - -func TestDeprecatedSub(t *testing.T) { - c := fullSetupTest("deprecated") - - checkResultContains(t, c, cmdDeprecated.Deprecated) -} - -func TestPreRun(t *testing.T) { - noRRSetupTest("echo", "one", "two") - if echoPre == nil || echoPersPre == nil { - t.Error("PreRun or PersistentPreRun not called") - } - if rootPersPre != nil || timesPersPre != nil { - t.Error("Wrong *Pre functions called!") - } - - noRRSetupTest("echo", "times", "one", "two") - if timesPersPre == nil { - t.Error("PreRun or PersistentPreRun not called") - } - if echoPre != nil || echoPersPre != nil || rootPersPre != nil { - t.Error("Wrong *Pre functions called!") - } - - noRRSetupTest("print", "one", "two") - if rootPersPre == nil { - t.Error("Parent PersistentPreRun not called but should not have been") - } - if echoPre != nil || echoPersPre != nil || timesPersPre != nil { - t.Error("Wrong *Pre functions called!") - } -} - -// Check if cmdEchoSub gets PersistentPreRun from rootCmd even if is added last -func TestPeristentPreRunPropagation(t *testing.T) { - rootCmd := initialize() - - // First add the cmdEchoSub to cmdPrint - cmdPrint.AddCommand(cmdEchoSub) - // Now add cmdPrint to rootCmd - rootCmd.AddCommand(cmdPrint) - - rootCmd.SetArgs([]string{"print", "echosub", "lala"}) - rootCmd.Execute() - - if len(rootPersPre) == 0 || rootPersPre[0] != 
"lala" { - t.Error("RootCmd PersistentPreRun not called but should have been") - } -} - -func TestGlobalNormFuncPropagation(t *testing.T) { - normFunc := func(f *pflag.FlagSet, name string) pflag.NormalizedName { - return pflag.NormalizedName(name) - } - - rootCmd := initialize() - rootCmd.SetGlobalNormalizationFunc(normFunc) - if reflect.ValueOf(normFunc) != reflect.ValueOf(rootCmd.GlobalNormalizationFunc()) { - t.Error("rootCmd seems to have a wrong normalization function") - } - - // First add the cmdEchoSub to cmdPrint - cmdPrint.AddCommand(cmdEchoSub) - if cmdPrint.GlobalNormalizationFunc() != nil && cmdEchoSub.GlobalNormalizationFunc() != nil { - t.Error("cmdPrint and cmdEchoSub should had no normalization functions") - } - - // Now add cmdPrint to rootCmd - rootCmd.AddCommand(cmdPrint) - if reflect.ValueOf(cmdPrint.GlobalNormalizationFunc()).Pointer() != reflect.ValueOf(rootCmd.GlobalNormalizationFunc()).Pointer() || - reflect.ValueOf(cmdEchoSub.GlobalNormalizationFunc()).Pointer() != reflect.ValueOf(rootCmd.GlobalNormalizationFunc()).Pointer() { - t.Error("cmdPrint and cmdEchoSub should had the normalization function of rootCmd") - } -} - -func TestFlagOnPflagCommandLine(t *testing.T) { - flagName := "flagOnCommandLine" - pflag.String(flagName, "", "about my flag") - r := fullSetupTest("--help") - - checkResultContains(t, r, flagName) - - // Reset pflag.CommandLine flagset. - pflag.CommandLine = pflag.NewFlagSet(os.Args[0], pflag.ExitOnError) -} - func TestAddTemplateFunctions(t *testing.T) { AddTemplateFunc("t", func() bool { return true }) AddTemplateFuncs(template.FuncMap{ @@ -1131,43 +12,11 @@ func TestAddTemplateFunctions(t *testing.T) { "h": func() string { return "Hello," }, "w": func() string { return "world." }}) - const usage = "Hello, world." - c := &Command{} c.SetUsageTemplate(`{{if t}}{{h}}{{end}}{{if f}}{{h}}{{end}} {{w}}`) - if us := c.UsageString(); us != usage { - t.Errorf("c.UsageString() != \"%s\", is \"%s\"", usage, us) - } -} - -func TestUsageIsNotPrintedTwice(t *testing.T) { - var cmd = &Command{Use: "root"} - var sub = &Command{Use: "sub"} - cmd.AddCommand(sub) - - r := simpleTester(cmd, "") - if strings.Count(r.Output, "Usage:") != 1 { - t.Error("Usage output is not printed exactly once") - } -} - -func BenchmarkInheritedFlags(b *testing.B) { - initialize() - cmdEcho.AddCommand(cmdTimes) - b.ResetTimer() - - for i := 0; i < b.N; i++ { - cmdTimes.InheritedFlags() - } -} - -func BenchmarkLocalFlags(b *testing.B) { - initialize() - cmdEcho.AddCommand(cmdTimes) - b.ResetTimer() - - for i := 0; i < b.N; i++ { - cmdTimes.LocalFlags() + const expected = "Hello, world." + if got := c.UsageString(); got != expected { + t.Errorf("Expected UsageString: %v\nGot: %v", expected, got) } } diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go index c3e16b1..b257f91 100644 --- a/vendor/github.com/spf13/cobra/command.go +++ b/vendor/github.com/spf13/cobra/command.go @@ -27,6 +27,9 @@ import ( flag "github.com/spf13/pflag" ) +// FParseErrWhitelist configures Flag parse errors to be ignored +type FParseErrWhitelist flag.ParseErrorsWhitelist + // Command is just that, a command for your application. // E.g. 'go run ...' - 'run' is the command. 
Cobra requires // you to define the usage and description as part of your command @@ -54,6 +57,9 @@ type Command struct { // ValidArgs is list of all valid non-flag arguments that are accepted in bash completions ValidArgs []string + // Expected arguments + Args PositionalArgs + // ArgAliases is List of aliases for ValidArgs. // These are not suggested to the user in the bash completion, // but accepted if entered manually. @@ -72,6 +78,11 @@ type Command struct { // group commands. Annotations map[string]string + // Version defines the version for this command. If this value is non-empty and the command does not + // define a "version" flag, a "version" boolean flag will be added to the command and, if specified, + // will print content of the "Version" variable. + Version string + // The *Run functions are executed in the following order: // * PersistentPreRun() // * PreRun() @@ -115,6 +126,10 @@ type Command struct { // will be printed by generating docs for this command. DisableAutoGenTag bool + // DisableFlagsInUseLine will disable the addition of [flags] to the usage + // line of a command when printing help or generating docs + DisableFlagsInUseLine bool + // DisableSuggestions disables the suggestions based on Levenshtein distance // that go along with 'unknown command' messages. DisableSuggestions bool @@ -122,8 +137,12 @@ type Command struct { // Must be > 0. SuggestionsMinimumDistance int - // name is the command name, usually the executable's name. - name string + // TraverseChildren parses flags on all parents before executing child command. + TraverseChildren bool + + //FParseErrWhitelist flag parse errors to be ignored + FParseErrWhitelist FParseErrWhitelist + // commands is the list of commands supported by this program. commands []*Command // parent is a parent command for this command. @@ -134,6 +153,11 @@ type Command struct { commandsMaxNameLen int // commandsAreSorted defines, if command slice are sorted or not. commandsAreSorted bool + // commandCalledAs is the name or alias value used to call this command. + commandCalledAs struct { + name string + called bool + } // args is actual args parsed from flags. args []string @@ -169,6 +193,8 @@ type Command struct { // helpCommand is command with usage 'help'. If it's not defined by user, // cobra uses default help command. helpCommand *Command + // versionTemplate is the version template defined by user. + versionTemplate string } // SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden @@ -214,6 +240,11 @@ func (c *Command) SetHelpTemplate(s string) { c.helpTemplate = s } +// SetVersionTemplate sets version template to be used. Application can use it to set custom template. +func (c *Command) SetVersionTemplate(s string) { + c.versionTemplate = s +} + // SetGlobalNormalizationFunc sets a normalization function to all flag sets and also to child commands. // The user should not have a cyclic dependency on commands. func (c *Command) SetGlobalNormalizationFunc(n func(f *flag.FlagSet, name string) flag.NormalizedName) { @@ -403,6 +434,19 @@ func (c *Command) HelpTemplate() string { {{end}}{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}` } +// VersionTemplate return version template for the command. 
+func (c *Command) VersionTemplate() string { + if c.versionTemplate != "" { + return c.versionTemplate + } + + if c.HasParent() { + return c.parent.VersionTemplate() + } + return `{{with .Name}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}} +` +} + func hasNoOptDefVal(name string, fs *flag.FlagSet) bool { flag := fs.Lookup(name) if flag == nil { @@ -437,6 +481,9 @@ Loop: s := args[0] args = args[1:] switch { + case s == "--": + // "--" terminates the flags + break Loop case strings.HasPrefix(s, "--") && !strings.Contains(s, "=") && !hasNoOptDefVal(s[2:], flags): // If '--flag arg' then // delete arg from args. @@ -472,13 +519,14 @@ func argsMinusFirstX(args []string, x string) []string { return args } +func isFlagArg(arg string) bool { + return ((len(arg) >= 3 && arg[1] == '-') || + (len(arg) >= 2 && arg[0] == '-' && arg[1] != '-')) +} + // Find the target command given the args and command tree // Meant to be run on the highest node. Only searches down. func (c *Command) Find(args []string) (*Command, []string, error) { - if c == nil { - return nil, nil, fmt.Errorf("Called find() on a nil Command") - } - var innerfind func(*Command, []string) (*Command, []string) innerfind = func(c *Command, innerArgs []string) (*Command, []string) { @@ -487,57 +535,98 @@ func (c *Command) Find(args []string) (*Command, []string, error) { return c, innerArgs } nextSubCmd := argsWOflags[0] - matches := make([]*Command, 0) - for _, cmd := range c.commands { - if cmd.Name() == nextSubCmd || cmd.HasAlias(nextSubCmd) { // exact name or alias match - return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd)) - } - if EnablePrefixMatching { - if strings.HasPrefix(cmd.Name(), nextSubCmd) { // prefix match - matches = append(matches, cmd) - } - for _, x := range cmd.Aliases { - if strings.HasPrefix(x, nextSubCmd) { - matches = append(matches, cmd) - } - } - } - } - // only accept a single prefix match - multiple matches would be ambiguous - if len(matches) == 1 { - return innerfind(matches[0], argsMinusFirstX(innerArgs, argsWOflags[0])) + cmd := c.findNext(nextSubCmd) + if cmd != nil { + return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd)) } - return c, innerArgs } commandFound, a := innerfind(c, args) - argsWOflags := stripFlags(a, commandFound) + if commandFound.Args == nil { + return commandFound, a, legacyArgs(commandFound, stripFlags(a, commandFound)) + } + return commandFound, a, nil +} - // no subcommand, always take args - if !commandFound.HasSubCommands() { - return commandFound, a, nil +func (c *Command) findSuggestions(arg string) string { + if c.DisableSuggestions { + return "" } + if c.SuggestionsMinimumDistance <= 0 { + c.SuggestionsMinimumDistance = 2 + } + suggestionsString := "" + if suggestions := c.SuggestionsFor(arg); len(suggestions) > 0 { + suggestionsString += "\n\nDid you mean this?\n" + for _, s := range suggestions { + suggestionsString += fmt.Sprintf("\t%v\n", s) + } + } + return suggestionsString +} - // root command with subcommands, do subcommand checking - if commandFound == c && len(argsWOflags) > 0 { - suggestionsString := "" - if !c.DisableSuggestions { - if c.SuggestionsMinimumDistance <= 0 { - c.SuggestionsMinimumDistance = 2 - } - if suggestions := c.SuggestionsFor(argsWOflags[0]); len(suggestions) > 0 { - suggestionsString += "\n\nDid you mean this?\n" - for _, s := range suggestions { - suggestionsString += fmt.Sprintf("\t%v\n", s) - } - } +func (c *Command) findNext(next string) *Command { + matches := make([]*Command, 0) + for _, cmd := range 
c.commands { + if cmd.Name() == next || cmd.HasAlias(next) { + cmd.commandCalledAs.name = next + return cmd + } + if EnablePrefixMatching && cmd.hasNameOrAliasPrefix(next) { + matches = append(matches, cmd) } - return commandFound, a, fmt.Errorf("unknown command %q for %q%s", argsWOflags[0], commandFound.CommandPath(), suggestionsString) } - return commandFound, a, nil + if len(matches) == 1 { + return matches[0] + } + + return nil +} + +// Traverse the command tree to find the command, and parse args for +// each parent. +func (c *Command) Traverse(args []string) (*Command, []string, error) { + flags := []string{} + inFlag := false + + for i, arg := range args { + switch { + // A long flag with a space separated value + case strings.HasPrefix(arg, "--") && !strings.Contains(arg, "="): + // TODO: this isn't quite right, we should really check ahead for 'true' or 'false' + inFlag = !hasNoOptDefVal(arg[2:], c.Flags()) + flags = append(flags, arg) + continue + // A short flag with a space separated value + case strings.HasPrefix(arg, "-") && !strings.Contains(arg, "=") && len(arg) == 2 && !shortHasNoOptDefVal(arg[1:], c.Flags()): + inFlag = true + flags = append(flags, arg) + continue + // The value for a flag + case inFlag: + inFlag = false + flags = append(flags, arg) + continue + // A flag without a value, or with an `=` separated value + case isFlagArg(arg): + flags = append(flags, arg) + continue + } + + cmd := c.findNext(arg) + if cmd == nil { + return c, args, nil + } + + if err := c.ParseFlags(flags); err != nil { + return nil, args, err + } + return cmd.Traverse(args[i+1:]) + } + return c, args, nil } // SuggestionsFor provides suggestions for the typedName. @@ -577,10 +666,8 @@ func (c *Command) Root() *Command { return c } -// ArgsLenAtDash will return the length of f.Args at the moment when a -- was -// found during arg parsing. This allows your program to know which args were -// before the -- and which came after. (Description from -// https://godoc.org/github.com/spf13/pflag#FlagSet.ArgsLenAtDash). +// ArgsLenAtDash will return the length of c.Flags().Args at the moment +// when a -- was found during args parsing. func (c *Command) ArgsLenAtDash() int { return c.Flags().ArgsLenAtDash() } @@ -594,9 +681,10 @@ func (c *Command) execute(a []string) (err error) { c.Printf("Command %q is deprecated, %s\n", c.Name(), c.Deprecated) } - // initialize help flag as the last point possible to allow for user + // initialize help and version flag at the last point possible to allow for user // overriding c.InitDefaultHelpFlag() + c.InitDefaultVersionFlag() err = c.ParseFlags(a) if err != nil { @@ -613,7 +701,27 @@ func (c *Command) execute(a []string) (err error) { return err } - if helpVal || !c.Runnable() { + if helpVal { + return flag.ErrHelp + } + + // for back-compat, only add version flag behavior if version is defined + if c.Version != "" { + versionVal, err := c.Flags().GetBool("version") + if err != nil { + c.Println("\"version\" flag declared as non-bool. 
Please correct your code") + return err + } + if versionVal { + err := tmpl(c.OutOrStdout(), c.VersionTemplate(), c) + if err != nil { + c.Println(err) + } + return err + } + } + + if !c.Runnable() { return flag.ErrHelp } @@ -624,6 +732,10 @@ func (c *Command) execute(a []string) (err error) { argWoFlags = a } + if err := c.ValidateArgs(argWoFlags); err != nil { + return err + } + for p := c; p != nil; p = p.Parent() { if p.PersistentPreRunE != nil { if err := p.PersistentPreRunE(c, argWoFlags); err != nil { @@ -643,6 +755,9 @@ func (c *Command) execute(a []string) (err error) { c.PreRun(c, argWoFlags) } + if err := c.validateRequiredFlags(); err != nil { + return err + } if c.RunE != nil { if err := c.RunE(c, argWoFlags); err != nil { return err @@ -702,16 +817,19 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { // overriding c.InitDefaultHelpCmd() - var args []string + args := c.args // Workaround FAIL with "go test -v" or "cobra.test -test.v", see #155 if c.args == nil && filepath.Base(os.Args[0]) != "cobra.test" { args = os.Args[1:] - } else { - args = c.args } - cmd, flags, err := c.Find(args) + var flags []string + if c.TraverseChildren { + cmd, flags, err = c.Traverse(args) + } else { + cmd, flags, err = c.Find(args) + } if err != nil { // If found parse to a subcommand and then failed, talk about the subcommand if cmd != nil { @@ -723,6 +841,12 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { } return c, err } + + cmd.commandCalledAs.called = true + if cmd.commandCalledAs.name == "" { + cmd.commandCalledAs.name = cmd.Name() + } + err = cmd.execute(flags) if err != nil { // Always show help if requested, even if SilenceErrors is in @@ -747,6 +871,32 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { return cmd, err } +func (c *Command) ValidateArgs(args []string) error { + if c.Args == nil { + return nil + } + return c.Args(c, args) +} + +func (c *Command) validateRequiredFlags() error { + flags := c.Flags() + missingFlagNames := []string{} + flags.VisitAll(func(pflag *flag.Flag) { + requiredAnnotation, found := pflag.Annotations[BashCompOneRequiredFlag] + if !found { + return + } + if (requiredAnnotation[0] == "true") && !pflag.Changed { + missingFlagNames = append(missingFlagNames, pflag.Name) + } + }) + + if len(missingFlagNames) > 0 { + return fmt.Errorf(`required flag(s) "%s" not set`, strings.Join(missingFlagNames, `", "`)) + } + return nil +} + // InitDefaultHelpFlag adds default help flag to c. // It is called automatically by executing the c or by calling help and usage. // If c already has help flag, it will do nothing. @@ -763,6 +913,27 @@ func (c *Command) InitDefaultHelpFlag() { } } +// InitDefaultVersionFlag adds default version flag to c. +// It is called automatically by executing the c. +// If c already has a version flag, it will do nothing. +// If c.Version is empty, it will do nothing. +func (c *Command) InitDefaultVersionFlag() { + if c.Version == "" { + return + } + + c.mergePersistentFlags() + if c.Flags().Lookup("version") == nil { + usage := "version for " + if c.Name() == "" { + usage += "this command" + } else { + usage += c.Name() + } + c.Flags().Bool("version", false, usage) + } +} + // InitDefaultHelpCmd adds default help command to c. // It is called automatically by executing the c or by calling help and usage. // If c already has help command or c has no subcommands, it will do nothing. 
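For illustration only (not part of the patch): the hunks above introduce a Command.Version field, a default --version flag added by InitDefaultVersionFlag, and SetVersionTemplate. A minimal sketch of how downstream code might use this new API, assuming a placeholder binary name "app" and version "0.1.0":

package main

import "github.com/spf13/cobra"

func main() {
	rootCmd := &cobra.Command{
		Use:     "app",   // placeholder binary name, not taken from the patch
		Version: "0.1.0", // a non-empty Version enables the default --version flag
		Run:     func(*cobra.Command, []string) {},
	}
	// Optional: override the default template, which would otherwise print "app version 0.1.0".
	rootCmd.SetVersionTemplate("{{.Name}} {{.Version}}\n")
	rootCmd.Execute() // "app --version" then prints "app 0.1.0"
}
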
@@ -794,8 +965,9 @@ Simply type ` + c.Name() + ` help [path to command] for full details.`, c.AddCommand(c.helpCommand) } -// ResetCommands used for testing. +// ResetCommands delete parent, subcommand and help command from c. func (c *Command) ResetCommands() { + c.parent = nil c.commands = nil c.helpCommand = nil c.parentsPflags = nil @@ -912,6 +1084,9 @@ func (c *Command) UseLine() string { } else { useline = c.Use } + if c.DisableFlagsInUseLine { + return useline + } if c.HasAvailableFlags() && !strings.Contains(useline, "[flags]") { useline += " [flags]" } @@ -961,15 +1136,12 @@ func (c *Command) DebugFlags() { // Name returns the command's name: the first word in the use line. func (c *Command) Name() string { - if c.name == "" { - name := c.Use - i := strings.Index(name, " ") - if i >= 0 { - name = name[:i] - } - c.name = name + name := c.Use + i := strings.Index(name, " ") + if i >= 0 { + name = name[:i] } - return c.name + return name } // HasAlias determines if a given string is an alias of the command. @@ -982,7 +1154,32 @@ func (c *Command) HasAlias(s string) bool { return false } -// NameAndAliases returns string containing name and all aliases +// CalledAs returns the command name or alias that was used to invoke +// this command or an empty string if the command has not been called. +func (c *Command) CalledAs() string { + if c.commandCalledAs.called { + return c.commandCalledAs.name + } + return "" +} + +// hasNameOrAliasPrefix returns true if the Name or any of aliases start +// with prefix +func (c *Command) hasNameOrAliasPrefix(prefix string) bool { + if strings.HasPrefix(c.Name(), prefix) { + c.commandCalledAs.name = c.Name() + return true + } + for _, alias := range c.Aliases { + if strings.HasPrefix(alias, prefix) { + c.commandCalledAs.name = alias + return true + } + } + return false +} + +// NameAndAliases returns a list of the command name and all aliases func (c *Command) NameAndAliases() string { return strings.Join(append([]string{c.Name()}, c.Aliases...), ", ") } @@ -1068,7 +1265,7 @@ func (c *Command) HasAvailableSubCommands() bool { } } - // the command either has no sub comamnds, or no available (non deprecated/help/hidden) + // the command either has no sub commands, or no available (non deprecated/help/hidden) // sub commands return false } @@ -1078,7 +1275,7 @@ func (c *Command) HasParent() bool { return c.parent != nil } -// GlobalNormalizationFunc returns the global normalization function or nil if doesn't exists. +// GlobalNormalizationFunc returns the global normalization function or nil if it doesn't exist. func (c *Command) GlobalNormalizationFunc() func(f *flag.FlagSet, name string) flag.NormalizedName { return c.globNormFunc } @@ -1122,6 +1319,9 @@ func (c *Command) LocalFlags() *flag.FlagSet { c.lflags.SetOutput(c.flagErrorBuf) } c.lflags.SortFlags = c.Flags().SortFlags + if c.globNormFunc != nil { + c.lflags.SetNormalizeFunc(c.globNormFunc) + } addToLocal := func(f *flag.Flag) { if c.lflags.Lookup(f.Name) == nil && c.parentsPflags.Lookup(f.Name) == nil { @@ -1133,7 +1333,7 @@ func (c *Command) LocalFlags() *flag.FlagSet { return c.lflags } -// InheritedFlags returns all flags which were inherited from parents commands. +// InheritedFlags returns all flags which were inherited from parent commands. 
func (c *Command) InheritedFlags() *flag.FlagSet { c.mergePersistentFlags() @@ -1146,6 +1346,10 @@ func (c *Command) InheritedFlags() *flag.FlagSet { } local := c.LocalFlags() + if c.globNormFunc != nil { + c.iflags.SetNormalizeFunc(c.globNormFunc) + } + c.parentsPflags.VisitAll(func(f *flag.Flag) { if c.iflags.Lookup(f.Name) == nil && local.Lookup(f.Name) == nil { c.iflags.AddFlag(f) @@ -1171,7 +1375,7 @@ func (c *Command) PersistentFlags() *flag.FlagSet { return c.pflags } -// ResetFlags is used in testing. +// ResetFlags deletes all flags from command. func (c *Command) ResetFlags() { c.flagErrorBuf = new(bytes.Buffer) c.flagErrorBuf.Reset() @@ -1179,6 +1383,10 @@ func (c *Command) ResetFlags() { c.flags.SetOutput(c.flagErrorBuf) c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) c.pflags.SetOutput(c.flagErrorBuf) + + c.lflags = nil + c.iflags = nil + c.parentsPflags = nil } // HasFlags checks if the command contains any flags (local plus persistent from the entire structure). @@ -1254,8 +1462,15 @@ func (c *Command) ParseFlags(args []string) error { return nil } + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } beforeErrorBufLen := c.flagErrorBuf.Len() c.mergePersistentFlags() + + //do it here after merging all flags and just before parse + c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist) + err := c.Flags().Parse(args) // Print warnings if they occurred (e.g. deprecated flag messages). if c.flagErrorBuf.Len()-beforeErrorBufLen > 0 && err == nil { @@ -1288,6 +1503,10 @@ func (c *Command) updateParentsPflags() { c.parentsPflags.SortFlags = false } + if c.globNormFunc != nil { + c.parentsPflags.SetNormalizeFunc(c.globNormFunc) + } + c.Root().PersistentFlags().AddFlagSet(flag.CommandLine) c.VisitParents(func(parent *Command) { diff --git a/vendor/github.com/spf13/cobra/command_test.go b/vendor/github.com/spf13/cobra/command_test.go index aa6658f..6e483a3 100644 --- a/vendor/github.com/spf13/cobra/command_test.go +++ b/vendor/github.com/spf13/cobra/command_test.go @@ -11,144 +11,1322 @@ import ( "github.com/spf13/pflag" ) -// test to ensure hidden commands run as intended -func TestHiddenCommandExecutes(t *testing.T) { +func emptyRun(*Command, []string) {} + +func executeCommand(root *Command, args ...string) (output string, err error) { + _, output, err = executeCommandC(root, args...) 
+ return output, err +} + +func executeCommandC(root *Command, args ...string) (c *Command, output string, err error) { + buf := new(bytes.Buffer) + root.SetOutput(buf) + root.SetArgs(args) + + c, err = root.ExecuteC() + + return c, buf.String(), err +} + +func resetCommandLineFlagSet() { + pflag.CommandLine = pflag.NewFlagSet(os.Args[0], pflag.ExitOnError) +} + +func checkStringContains(t *testing.T, got, expected string) { + if !strings.Contains(got, expected) { + t.Errorf("Expected to contain: \n %v\nGot:\n %v\n", expected, got) + } +} + +func checkStringOmits(t *testing.T, got, expected string) { + if strings.Contains(got, expected) { + t.Errorf("Expected to not contain: \n %v\nGot: %v", expected, got) + } +} + +func TestSingleCommand(t *testing.T) { + var rootCmdArgs []string + rootCmd := &Command{ + Use: "root", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { rootCmdArgs = args }, + } + aCmd := &Command{Use: "a", Args: NoArgs, Run: emptyRun} + bCmd := &Command{Use: "b", Args: NoArgs, Run: emptyRun} + rootCmd.AddCommand(aCmd, bCmd) + + output, err := executeCommand(rootCmd, "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(rootCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("rootCmdArgs expected: %q, got: %q", expected, got) + } +} + +func TestChildCommand(t *testing.T) { + var child1CmdArgs []string + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + child1Cmd := &Command{ + Use: "child1", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { child1CmdArgs = args }, + } + child2Cmd := &Command{Use: "child2", Args: NoArgs, Run: emptyRun} + rootCmd.AddCommand(child1Cmd, child2Cmd) + + output, err := executeCommand(rootCmd, "child1", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(child1CmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("child1CmdArgs expected: %q, got: %q", expected, got) + } +} + +func TestCallCommandWithoutSubcommands(t *testing.T) { + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + _, err := executeCommand(rootCmd) + if err != nil { + t.Errorf("Calling command without subcommands should not have error: %v", err) + } +} + +func TestRootExecuteUnknownCommand(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + rootCmd.AddCommand(&Command{Use: "child", Run: emptyRun}) + + output, _ := executeCommand(rootCmd, "unknown") + + expected := "Error: unknown command \"unknown\" for \"root\"\nRun 'root --help' for usage.\n" + + if output != expected { + t.Errorf("Expected:\n %q\nGot:\n %q\n", expected, output) + } +} + +func TestSubcommandExecuteC(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + c, output, err := executeCommandC(rootCmd, "child") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if c.Name() != "child" { + t.Errorf(`invalid command returned from ExecuteC: expected "child"', got %q`, c.Name()) + } +} + +func TestRootUnknownCommandSilenced(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + rootCmd.SilenceErrors = true + rootCmd.SilenceUsage = true + rootCmd.AddCommand(&Command{Use: "child", Run: emptyRun}) + + output, 
_ := executeCommand(rootCmd, "unknown") + if output != "" { + t.Errorf("Expected blank output, because of silenced usage.\nGot:\n %q\n", output) + } +} + +func TestCommandAlias(t *testing.T) { + var timesCmdArgs []string + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + echoCmd := &Command{ + Use: "echo", + Aliases: []string{"say", "tell"}, + Args: NoArgs, + Run: emptyRun, + } + timesCmd := &Command{ + Use: "times", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { timesCmdArgs = args }, + } + echoCmd.AddCommand(timesCmd) + rootCmd.AddCommand(echoCmd) + + output, err := executeCommand(rootCmd, "tell", "times", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(timesCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("timesCmdArgs expected: %v, got: %v", expected, got) + } +} + +func TestEnablePrefixMatching(t *testing.T) { + EnablePrefixMatching = true + + var aCmdArgs []string + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + aCmd := &Command{ + Use: "aCmd", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { aCmdArgs = args }, + } + bCmd := &Command{Use: "bCmd", Args: NoArgs, Run: emptyRun} + rootCmd.AddCommand(aCmd, bCmd) + + output, err := executeCommand(rootCmd, "a", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(aCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("aCmdArgs expected: %q, got: %q", expected, got) + } + + EnablePrefixMatching = false +} + +func TestAliasPrefixMatching(t *testing.T) { + EnablePrefixMatching = true + + var timesCmdArgs []string + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + echoCmd := &Command{ + Use: "echo", + Aliases: []string{"say", "tell"}, + Args: NoArgs, + Run: emptyRun, + } + timesCmd := &Command{ + Use: "times", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { timesCmdArgs = args }, + } + echoCmd.AddCommand(timesCmd) + rootCmd.AddCommand(echoCmd) + + output, err := executeCommand(rootCmd, "sa", "times", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(timesCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("timesCmdArgs expected: %v, got: %v", expected, got) + } + + EnablePrefixMatching = false +} + +// TestChildSameName checks the correct behaviour of cobra in cases, +// when an application with name "foo" and with subcommand "foo" +// is executed with args "foo foo". 
+func TestChildSameName(t *testing.T) { + var fooCmdArgs []string + rootCmd := &Command{Use: "foo", Args: NoArgs, Run: emptyRun} + fooCmd := &Command{ + Use: "foo", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { fooCmdArgs = args }, + } + barCmd := &Command{Use: "bar", Args: NoArgs, Run: emptyRun} + rootCmd.AddCommand(fooCmd, barCmd) + + output, err := executeCommand(rootCmd, "foo", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(fooCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("fooCmdArgs expected: %v, got: %v", expected, got) + } +} + +// TestGrandChildSameName checks the correct behaviour of cobra in cases, +// when user has a root command and a grand child +// with the same name. +func TestGrandChildSameName(t *testing.T) { + var fooCmdArgs []string + rootCmd := &Command{Use: "foo", Args: NoArgs, Run: emptyRun} + barCmd := &Command{Use: "bar", Args: NoArgs, Run: emptyRun} + fooCmd := &Command{ + Use: "foo", + Args: ExactArgs(2), + Run: func(_ *Command, args []string) { fooCmdArgs = args }, + } + barCmd.AddCommand(fooCmd) + rootCmd.AddCommand(barCmd) + + output, err := executeCommand(rootCmd, "bar", "foo", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(fooCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("fooCmdArgs expected: %v, got: %v", expected, got) + } +} + +func TestFlagLong(t *testing.T) { + var cArgs []string + c := &Command{ + Use: "c", + Args: ArbitraryArgs, + Run: func(_ *Command, args []string) { cArgs = args }, + } + + var intFlagValue int + var stringFlagValue string + c.Flags().IntVar(&intFlagValue, "intf", -1, "") + c.Flags().StringVar(&stringFlagValue, "sf", "", "") + + output, err := executeCommand(c, "--intf=7", "--sf=abc", "one", "--", "two") + if output != "" { + t.Errorf("Unexpected output: %v", err) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if c.ArgsLenAtDash() != 1 { + t.Errorf("Expected ArgsLenAtDash: %v but got %v", 1, c.ArgsLenAtDash()) + } + if intFlagValue != 7 { + t.Errorf("Expected intFlagValue: %v, got %v", 7, intFlagValue) + } + if stringFlagValue != "abc" { + t.Errorf("Expected stringFlagValue: %q, got %q", "abc", stringFlagValue) + } + + got := strings.Join(cArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("Expected arguments: %q, got %q", expected, got) + } +} + +func TestFlagShort(t *testing.T) { + var cArgs []string + c := &Command{ + Use: "c", + Args: ArbitraryArgs, + Run: func(_ *Command, args []string) { cArgs = args }, + } + + var intFlagValue int + var stringFlagValue string + c.Flags().IntVarP(&intFlagValue, "intf", "i", -1, "") + c.Flags().StringVarP(&stringFlagValue, "sf", "s", "", "") + + output, err := executeCommand(c, "-i", "7", "-sabc", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", err) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if intFlagValue != 7 { + t.Errorf("Expected flag value: %v, got %v", 7, intFlagValue) + } + if stringFlagValue != "abc" { + t.Errorf("Expected stringFlagValue: %q, got %q", "abc", stringFlagValue) + } + + got := strings.Join(cArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("Expected arguments: %q, got %q", expected, got) + } +} + +func TestChildFlag(t *testing.T) { + rootCmd := &Command{Use: 
"root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + var intFlagValue int + childCmd.Flags().IntVarP(&intFlagValue, "intf", "i", -1, "") + + output, err := executeCommand(rootCmd, "child", "-i7") + if output != "" { + t.Errorf("Unexpected output: %v", err) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if intFlagValue != 7 { + t.Errorf("Expected flag value: %v, got %v", 7, intFlagValue) + } +} + +func TestChildFlagWithParentLocalFlag(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + var intFlagValue int + rootCmd.Flags().StringP("sf", "s", "", "") + childCmd.Flags().IntVarP(&intFlagValue, "intf", "i", -1, "") + + _, err := executeCommand(rootCmd, "child", "-i7", "-sabc") + if err == nil { + t.Errorf("Invalid flag should generate error") + } + + checkStringContains(t, err.Error(), "unknown shorthand") + + if intFlagValue != 7 { + t.Errorf("Expected flag value: %v, got %v", 7, intFlagValue) + } +} + +func TestFlagInvalidInput(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + rootCmd.Flags().IntP("intf", "i", -1, "") + + _, err := executeCommand(rootCmd, "-iabc") + if err == nil { + t.Errorf("Invalid flag value should generate error") + } + + checkStringContains(t, err.Error(), "invalid syntax") +} + +func TestFlagBeforeCommand(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + var flagValue int + childCmd.Flags().IntVarP(&flagValue, "intf", "i", -1, "") + + // With short flag. + _, err := executeCommand(rootCmd, "-i7", "child") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if flagValue != 7 { + t.Errorf("Expected flag value: %v, got %v", 7, flagValue) + } + + // With long flag. 
+ _, err = executeCommand(rootCmd, "--intf=8", "child") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if flagValue != 8 { + t.Errorf("Expected flag value: %v, got %v", 9, flagValue) + } +} + +func TestStripFlags(t *testing.T) { + tests := []struct { + input []string + output []string + }{ + { + []string{"foo", "bar"}, + []string{"foo", "bar"}, + }, + { + []string{"foo", "--str", "-s"}, + []string{"foo"}, + }, + { + []string{"-s", "foo", "--str", "bar"}, + []string{}, + }, + { + []string{"-i10", "echo"}, + []string{"echo"}, + }, + { + []string{"-i=10", "echo"}, + []string{"echo"}, + }, + { + []string{"--int=100", "echo"}, + []string{"echo"}, + }, + { + []string{"-ib", "echo", "-sfoo", "baz"}, + []string{"echo", "baz"}, + }, + { + []string{"-i=baz", "bar", "-i", "foo", "blah"}, + []string{"bar", "blah"}, + }, + { + []string{"--int=baz", "-sbar", "-i", "foo", "blah"}, + []string{"blah"}, + }, + { + []string{"--bool", "bar", "-i", "foo", "blah"}, + []string{"bar", "blah"}, + }, + { + []string{"-b", "bar", "-i", "foo", "blah"}, + []string{"bar", "blah"}, + }, + { + []string{"--persist", "bar"}, + []string{"bar"}, + }, + { + []string{"-p", "bar"}, + []string{"bar"}, + }, + } + + c := &Command{Use: "c", Run: emptyRun} + c.PersistentFlags().BoolP("persist", "p", false, "") + c.Flags().IntP("int", "i", -1, "") + c.Flags().StringP("str", "s", "", "") + c.Flags().BoolP("bool", "b", false, "") + + for i, test := range tests { + got := stripFlags(test.input, c) + if !reflect.DeepEqual(test.output, got) { + t.Errorf("(%v) Expected: %v, got: %v", i, test.output, got) + } + } +} + +func TestDisableFlagParsing(t *testing.T) { + var cArgs []string + c := &Command{ + Use: "c", + DisableFlagParsing: true, + Run: func(_ *Command, args []string) { + cArgs = args + }, + } + + args := []string{"cmd", "-v", "-race", "-file", "foo.go"} + output, err := executeCommand(c, args...) + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if !reflect.DeepEqual(args, cArgs) { + t.Errorf("Expected: %v, got: %v", args, cArgs) + } +} + +func TestPersistentFlagsOnSameCommand(t *testing.T) { + var rootCmdArgs []string + rootCmd := &Command{ + Use: "root", + Args: ArbitraryArgs, + Run: func(_ *Command, args []string) { rootCmdArgs = args }, + } + + var flagValue int + rootCmd.PersistentFlags().IntVarP(&flagValue, "intf", "i", -1, "") + + output, err := executeCommand(rootCmd, "-i7", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(rootCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("rootCmdArgs expected: %q, got %q", expected, got) + } + if flagValue != 7 { + t.Errorf("flagValue expected: %v, got %v", 7, flagValue) + } +} + +// TestEmptyInputs checks, +// if flags correctly parsed with blank strings in args. +func TestEmptyInputs(t *testing.T) { + c := &Command{Use: "c", Run: emptyRun} + + var flagValue int + c.Flags().IntVarP(&flagValue, "intf", "i", -1, "") + + output, err := executeCommand(c, "", "-i7", "") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if flagValue != 7 { + t.Errorf("flagValue expected: %v, got %v", 7, flagValue) + } +} + +func TestOverwrittenFlag(t *testing.T) { + // TODO: This test fails, but should work. 
+ t.Skip() + + parent := &Command{Use: "parent", Run: emptyRun} + child := &Command{Use: "child", Run: emptyRun} + + parent.PersistentFlags().Bool("boolf", false, "") + parent.PersistentFlags().Int("intf", -1, "") + child.Flags().String("strf", "", "") + child.Flags().Int("intf", -1, "") + + parent.AddCommand(child) + + childInherited := child.InheritedFlags() + childLocal := child.LocalFlags() + + if childLocal.Lookup("strf") == nil { + t.Error(`LocalFlags expected to contain "strf", got "nil"`) + } + if childInherited.Lookup("boolf") == nil { + t.Error(`InheritedFlags expected to contain "boolf", got "nil"`) + } + + if childInherited.Lookup("intf") != nil { + t.Errorf(`InheritedFlags should not contain overwritten flag "intf"`) + } + if childLocal.Lookup("intf") == nil { + t.Error(`LocalFlags expected to contain "intf", got "nil"`) + } +} + +func TestPersistentFlagsOnChild(t *testing.T) { + var childCmdArgs []string + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{ + Use: "child", + Args: ArbitraryArgs, + Run: func(_ *Command, args []string) { childCmdArgs = args }, + } + rootCmd.AddCommand(childCmd) + + var parentFlagValue int + var childFlagValue int + rootCmd.PersistentFlags().IntVarP(&parentFlagValue, "parentf", "p", -1, "") + childCmd.Flags().IntVarP(&childFlagValue, "childf", "c", -1, "") + + output, err := executeCommand(rootCmd, "child", "-c7", "-p8", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + got := strings.Join(childCmdArgs, " ") + expected := "one two" + if got != expected { + t.Errorf("childCmdArgs expected: %q, got %q", expected, got) + } + if parentFlagValue != 8 { + t.Errorf("parentFlagValue expected: %v, got %v", 8, parentFlagValue) + } + if childFlagValue != 7 { + t.Errorf("childFlagValue expected: %v, got %v", 7, childFlagValue) + } +} + +func TestRequiredFlags(t *testing.T) { + c := &Command{Use: "c", Run: emptyRun} + c.Flags().String("foo1", "", "") + c.MarkFlagRequired("foo1") + c.Flags().String("foo2", "", "") + c.MarkFlagRequired("foo2") + c.Flags().String("bar", "", "") + + expected := fmt.Sprintf("required flag(s) %q, %q not set", "foo1", "foo2") + + _, err := executeCommand(c) + got := err.Error() + + if got != expected { + t.Errorf("Expected error: %q, got: %q", expected, got) + } +} + +func TestPersistentRequiredFlags(t *testing.T) { + parent := &Command{Use: "parent", Run: emptyRun} + parent.PersistentFlags().String("foo1", "", "") + parent.MarkPersistentFlagRequired("foo1") + parent.PersistentFlags().String("foo2", "", "") + parent.MarkPersistentFlagRequired("foo2") + parent.Flags().String("foo3", "", "") + + child := &Command{Use: "child", Run: emptyRun} + child.Flags().String("bar1", "", "") + child.MarkFlagRequired("bar1") + child.Flags().String("bar2", "", "") + child.MarkFlagRequired("bar2") + child.Flags().String("bar3", "", "") + + parent.AddCommand(child) + + expected := fmt.Sprintf("required flag(s) %q, %q, %q, %q not set", "bar1", "bar2", "foo1", "foo2") + + _, err := executeCommand(parent, "child") + if err.Error() != expected { + t.Errorf("Expected %q, got %q", expected, err.Error()) + } +} + +func TestInitHelpFlagMergesFlags(t *testing.T) { + usage := "custom flag" + rootCmd := &Command{Use: "root"} + rootCmd.PersistentFlags().Bool("help", false, "custom flag") + childCmd := &Command{Use: "child"} + rootCmd.AddCommand(childCmd) + + childCmd.InitDefaultHelpFlag() + got := childCmd.Flags().Lookup("help").Usage + if got 
!= usage { + t.Errorf("Expected the help flag from the root command with usage: %v\nGot the default with usage: %v", usage, got) + } +} + +func TestHelpCommandExecuted(t *testing.T) { + rootCmd := &Command{Use: "root", Long: "Long description", Run: emptyRun} + rootCmd.AddCommand(&Command{Use: "child", Run: emptyRun}) + + output, err := executeCommand(rootCmd, "help") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, rootCmd.Long) +} + +func TestHelpCommandExecutedOnChild(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Long: "Long description", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + output, err := executeCommand(rootCmd, "help", "child") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, childCmd.Long) +} + +func TestSetHelpCommand(t *testing.T) { + c := &Command{Use: "c", Run: emptyRun} + c.AddCommand(&Command{Use: "empty", Run: emptyRun}) + + expected := "WORKS" + c.SetHelpCommand(&Command{ + Use: "help [command]", + Short: "Help about any command", + Long: `Help provides help for any command in the application. + Simply type ` + c.Name() + ` help [path to command] for full details.`, + Run: func(c *Command, _ []string) { c.Print(expected) }, + }) + + got, err := executeCommand(c, "help") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if got != expected { + t.Errorf("Expected to contain %q, got %q", expected, got) + } +} + +func TestHelpFlagExecuted(t *testing.T) { + rootCmd := &Command{Use: "root", Long: "Long description", Run: emptyRun} + + output, err := executeCommand(rootCmd, "--help") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, rootCmd.Long) +} + +func TestHelpFlagExecutedOnChild(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Long: "Long description", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + output, err := executeCommand(rootCmd, "child", "--help") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, childCmd.Long) +} + +// TestHelpFlagInHelp checks, +// if '--help' flag is shown in help for child (executing `parent help child`), +// that has no other flags. +// Related to https://github.com/spf13/cobra/issues/302. 
+func TestHelpFlagInHelp(t *testing.T) { + parentCmd := &Command{Use: "parent", Run: func(*Command, []string) {}} + + childCmd := &Command{Use: "child", Run: func(*Command, []string) {}} + parentCmd.AddCommand(childCmd) + + output, err := executeCommand(parentCmd, "help", "child") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, "[flags]") +} + +func TestFlagsInUsage(t *testing.T) { + rootCmd := &Command{Use: "root", Args: NoArgs, Run: func(*Command, []string) {}} + output, err := executeCommand(rootCmd, "--help") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, "[flags]") +} + +func TestHelpExecutedOnNonRunnableChild(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Long: "Long description"} + rootCmd.AddCommand(childCmd) + + output, err := executeCommand(rootCmd, "child") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, childCmd.Long) +} + +func TestVersionFlagExecuted(t *testing.T) { + rootCmd := &Command{Use: "root", Version: "1.0.0", Run: emptyRun} + + output, err := executeCommand(rootCmd, "--version", "arg1") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, "root version 1.0.0") +} + +func TestVersionTemplate(t *testing.T) { + rootCmd := &Command{Use: "root", Version: "1.0.0", Run: emptyRun} + rootCmd.SetVersionTemplate(`customized version: {{.Version}}`) + + output, err := executeCommand(rootCmd, "--version", "arg1") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, "customized version: 1.0.0") +} + +func TestVersionFlagExecutedOnSubcommand(t *testing.T) { + rootCmd := &Command{Use: "root", Version: "1.0.0"} + rootCmd.AddCommand(&Command{Use: "sub", Run: emptyRun}) + + output, err := executeCommand(rootCmd, "--version", "sub") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, "root version 1.0.0") +} + +func TestVersionFlagOnlyAddedToRoot(t *testing.T) { + rootCmd := &Command{Use: "root", Version: "1.0.0", Run: emptyRun} + rootCmd.AddCommand(&Command{Use: "sub", Run: emptyRun}) + + _, err := executeCommand(rootCmd, "sub", "--version") + if err == nil { + t.Errorf("Expected error") + } + + checkStringContains(t, err.Error(), "unknown flag: --version") +} + +func TestVersionFlagOnlyExistsIfVersionNonEmpty(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + + _, err := executeCommand(rootCmd, "--version") + if err == nil { + t.Errorf("Expected error") + } + checkStringContains(t, err.Error(), "unknown flag: --version") +} + +func TestUsageIsNotPrintedTwice(t *testing.T) { + var cmd = &Command{Use: "root"} + var sub = &Command{Use: "sub"} + cmd.AddCommand(sub) + + output, _ := executeCommand(cmd, "") + if strings.Count(output, "Usage:") != 1 { + t.Error("Usage output is not printed exactly once") + } +} + +func TestVisitParents(t *testing.T) { + c := &Command{Use: "app"} + sub := &Command{Use: "sub"} + dsub := &Command{Use: "dsub"} + sub.AddCommand(dsub) + c.AddCommand(sub) + + total := 0 + add := func(x *Command) { + total++ + } + sub.VisitParents(add) + if total != 1 { + t.Errorf("Should have visited 1 parent but visited %d", total) + } + + total = 0 + dsub.VisitParents(add) + if total != 2 { + t.Errorf("Should have visited 2 parents but visited %d", total) + } + + total = 0 + c.VisitParents(add) + if total != 0 { + t.Errorf("Should 
have visited no parents but visited %d", total) + } +} + +func TestSuggestions(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + timesCmd := &Command{ + Use: "times", + SuggestFor: []string{"counts"}, + Run: emptyRun, + } + rootCmd.AddCommand(timesCmd) + + templateWithSuggestions := "Error: unknown command \"%s\" for \"root\"\n\nDid you mean this?\n\t%s\n\nRun 'root --help' for usage.\n" + templateWithoutSuggestions := "Error: unknown command \"%s\" for \"root\"\nRun 'root --help' for usage.\n" + + tests := map[string]string{ + "time": "times", + "tiems": "times", + "tims": "times", + "timeS": "times", + "rimes": "times", + "ti": "times", + "t": "times", + "timely": "times", + "ri": "", + "timezone": "", + "foo": "", + "counts": "times", + } + + for typo, suggestion := range tests { + for _, suggestionsDisabled := range []bool{true, false} { + rootCmd.DisableSuggestions = suggestionsDisabled + + var expected string + output, _ := executeCommand(rootCmd, typo) + + if suggestion == "" || suggestionsDisabled { + expected = fmt.Sprintf(templateWithoutSuggestions, typo) + } else { + expected = fmt.Sprintf(templateWithSuggestions, typo, suggestion) + } + + if output != expected { + t.Errorf("Unexpected response.\nExpected:\n %q\nGot:\n %q\n", expected, output) + } + } + } +} + +func TestRemoveCommand(t *testing.T) { + rootCmd := &Command{Use: "root", Args: NoArgs, Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + rootCmd.RemoveCommand(childCmd) + + _, err := executeCommand(rootCmd, "child") + if err == nil { + t.Error("Expected error on calling removed command. Got nil.") + } +} + +func TestReplaceCommandWithRemove(t *testing.T) { + childUsed := 0 + rootCmd := &Command{Use: "root", Run: emptyRun} + child1Cmd := &Command{ + Use: "child", + Run: func(*Command, []string) { childUsed = 1 }, + } + child2Cmd := &Command{ + Use: "child", + Run: func(*Command, []string) { childUsed = 2 }, + } + rootCmd.AddCommand(child1Cmd) + rootCmd.RemoveCommand(child1Cmd) + rootCmd.AddCommand(child2Cmd) + + output, err := executeCommand(rootCmd, "child") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if childUsed == 1 { + t.Error("Removed command shouldn't be called") + } + if childUsed != 2 { + t.Error("Replacing command should have been called but didn't") + } +} + +func TestDeprecatedCommand(t *testing.T) { + rootCmd := &Command{Use: "root", Run: emptyRun} + deprecatedCmd := &Command{ + Use: "deprecated", + Deprecated: "This command is deprecated", + Run: emptyRun, + } + rootCmd.AddCommand(deprecatedCmd) + + output, err := executeCommand(rootCmd, "deprecated") + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + checkStringContains(t, output, deprecatedCmd.Deprecated) +} - // ensure that outs does not already equal what the command will be setting it - // to, if it did this test would not actually be testing anything... 
- if outs == "hidden" { - t.Errorf("outs should NOT EQUAL hidden") - } +func TestHooks(t *testing.T) { + var ( + persPreArgs string + preArgs string + runArgs string + postArgs string + persPostArgs string + ) - cmdHidden.Execute() + c := &Command{ + Use: "c", + PersistentPreRun: func(_ *Command, args []string) { + persPreArgs = strings.Join(args, " ") + }, + PreRun: func(_ *Command, args []string) { + preArgs = strings.Join(args, " ") + }, + Run: func(_ *Command, args []string) { + runArgs = strings.Join(args, " ") + }, + PostRun: func(_ *Command, args []string) { + postArgs = strings.Join(args, " ") + }, + PersistentPostRun: func(_ *Command, args []string) { + persPostArgs = strings.Join(args, " ") + }, + } - // upon running the command, the value of outs should now be 'hidden' - if outs != "hidden" { - t.Errorf("Hidden command failed to run!") + output, err := executeCommand(c, "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) } -} -// test to ensure hidden commands do not show up in usage/help text -func TestHiddenCommandIsHidden(t *testing.T) { - if cmdHidden.IsAvailableCommand() { - t.Errorf("Hidden command found!") + if persPreArgs != "one two" { + t.Errorf("Expected persPreArgs %q, got %q", "one two", persPreArgs) + } + if preArgs != "one two" { + t.Errorf("Expected preArgs %q, got %q", "one two", preArgs) + } + if runArgs != "one two" { + t.Errorf("Expected runArgs %q, got %q", "one two", runArgs) + } + if postArgs != "one two" { + t.Errorf("Expected postArgs %q, got %q", "one two", postArgs) + } + if persPostArgs != "one two" { + t.Errorf("Expected persPostArgs %q, got %q", "one two", persPostArgs) } } -func TestStripFlags(t *testing.T) { - tests := []struct { - input []string - output []string - }{ - { - []string{"foo", "bar"}, - []string{"foo", "bar"}, - }, - { - []string{"foo", "--bar", "-b"}, - []string{"foo"}, - }, - { - []string{"-b", "foo", "--bar", "bar"}, - []string{}, - }, - { - []string{"-i10", "echo"}, - []string{"echo"}, +func TestPersistentHooks(t *testing.T) { + var ( + parentPersPreArgs string + parentPreArgs string + parentRunArgs string + parentPostArgs string + parentPersPostArgs string + ) + + var ( + childPersPreArgs string + childPreArgs string + childRunArgs string + childPostArgs string + childPersPostArgs string + ) + + parentCmd := &Command{ + Use: "parent", + PersistentPreRun: func(_ *Command, args []string) { + parentPersPreArgs = strings.Join(args, " ") }, - { - []string{"-i=10", "echo"}, - []string{"echo"}, + PreRun: func(_ *Command, args []string) { + parentPreArgs = strings.Join(args, " ") }, - { - []string{"--int=100", "echo"}, - []string{"echo"}, + Run: func(_ *Command, args []string) { + parentRunArgs = strings.Join(args, " ") }, - { - []string{"-ib", "echo", "-bfoo", "baz"}, - []string{"echo", "baz"}, + PostRun: func(_ *Command, args []string) { + parentPostArgs = strings.Join(args, " ") }, - { - []string{"-i=baz", "bar", "-i", "foo", "blah"}, - []string{"bar", "blah"}, + PersistentPostRun: func(_ *Command, args []string) { + parentPersPostArgs = strings.Join(args, " ") }, - { - []string{"--int=baz", "-bbar", "-i", "foo", "blah"}, - []string{"blah"}, + } + + childCmd := &Command{ + Use: "child", + PersistentPreRun: func(_ *Command, args []string) { + childPersPreArgs = strings.Join(args, " ") }, - { - []string{"--cat", "bar", "-i", "foo", "blah"}, - []string{"bar", "blah"}, + PreRun: func(_ *Command, args []string) { + childPreArgs = strings.Join(args, " 
") }, - { - []string{"-c", "bar", "-i", "foo", "blah"}, - []string{"bar", "blah"}, + Run: func(_ *Command, args []string) { + childRunArgs = strings.Join(args, " ") }, - { - []string{"--persist", "bar"}, - []string{"bar"}, + PostRun: func(_ *Command, args []string) { + childPostArgs = strings.Join(args, " ") }, - { - []string{"-p", "bar"}, - []string{"bar"}, + PersistentPostRun: func(_ *Command, args []string) { + childPersPostArgs = strings.Join(args, " ") }, } + parentCmd.AddCommand(childCmd) - cmdPrint := &Command{ - Use: "print [string to print]", - Short: "Print anything to the screen", - Long: `an utterly useless command for testing.`, - Run: func(cmd *Command, args []string) { - tp = args - }, + output, err := executeCommand(parentCmd, "child", "one", "two") + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) } - var flagi int - var flagstr string - var flagbool bool - cmdPrint.PersistentFlags().BoolVarP(&flagbool, "persist", "p", false, "help for persistent one") - cmdPrint.Flags().IntVarP(&flagi, "int", "i", 345, "help message for flag int") - cmdPrint.Flags().StringVarP(&flagstr, "bar", "b", "bar", "help message for flag string") - cmdPrint.Flags().BoolVarP(&flagbool, "cat", "c", false, "help message for flag bool") + // TODO: currently PersistenPreRun* defined in parent does not + // run if the matchin child subcommand has PersistenPreRun. + // If the behavior changes (https://github.com/spf13/cobra/issues/252) + // this test must be fixed. + if parentPersPreArgs != "" { + t.Errorf("Expected blank parentPersPreArgs, got %q", parentPersPreArgs) + } + if parentPreArgs != "" { + t.Errorf("Expected blank parentPreArgs, got %q", parentPreArgs) + } + if parentRunArgs != "" { + t.Errorf("Expected blank parentRunArgs, got %q", parentRunArgs) + } + if parentPostArgs != "" { + t.Errorf("Expected blank parentPostArgs, got %q", parentPostArgs) + } + // TODO: currently PersistenPostRun* defined in parent does not + // run if the matchin child subcommand has PersistenPostRun. + // If the behavior changes (https://github.com/spf13/cobra/issues/252) + // this test must be fixed. + if parentPersPostArgs != "" { + t.Errorf("Expected blank parentPersPostArgs, got %q", parentPersPostArgs) + } - for _, test := range tests { - output := stripFlags(test.input, cmdPrint) - if !reflect.DeepEqual(test.output, output) { - t.Errorf("expected: %v, got: %v", test.output, output) - } + if childPersPreArgs != "one two" { + t.Errorf("Expected childPersPreArgs %q, got %q", "one two", childPersPreArgs) + } + if childPreArgs != "one two" { + t.Errorf("Expected childPreArgs %q, got %q", "one two", childPreArgs) + } + if childRunArgs != "one two" { + t.Errorf("Expected childRunArgs %q, got %q", "one two", childRunArgs) + } + if childPostArgs != "one two" { + t.Errorf("Expected childPostArgs %q, got %q", "one two", childPostArgs) + } + if childPersPostArgs != "one two" { + t.Errorf("Expected childPersPostArgs %q, got %q", "one two", childPersPostArgs) } } -func TestDisableFlagParsing(t *testing.T) { - targs := []string{} - cmdPrint := &Command{ - DisableFlagParsing: true, - Run: func(cmd *Command, args []string) { - targs = args - }, +// Related to https://github.com/spf13/cobra/issues/521. 
+func TestGlobalNormFuncPropagation(t *testing.T) { + normFunc := func(f *pflag.FlagSet, name string) pflag.NormalizedName { + return pflag.NormalizedName(name) } - args := []string{"cmd", "-v", "-race", "-file", "foo.go"} - cmdPrint.SetArgs(args) - err := cmdPrint.Execute() - if err != nil { - t.Error(err) + + rootCmd := &Command{Use: "root", Run: emptyRun} + childCmd := &Command{Use: "child", Run: emptyRun} + rootCmd.AddCommand(childCmd) + + rootCmd.SetGlobalNormalizationFunc(normFunc) + if reflect.ValueOf(normFunc).Pointer() != reflect.ValueOf(rootCmd.GlobalNormalizationFunc()).Pointer() { + t.Error("rootCmd seems to have a wrong normalization function") } - if !reflect.DeepEqual(args, targs) { - t.Errorf("expected: %v, got: %v", args, targs) + + if reflect.ValueOf(normFunc).Pointer() != reflect.ValueOf(childCmd.GlobalNormalizationFunc()).Pointer() { + t.Error("childCmd should have had the normalization function of rootCmd") } } -func TestInitHelpFlagMergesFlags(t *testing.T) { - usage := "custom flag" - baseCmd := Command{Use: "testcmd"} - baseCmd.PersistentFlags().Bool("help", false, usage) - cmd := Command{Use: "do"} - baseCmd.AddCommand(&cmd) +// Related to https://github.com/spf13/cobra/issues/521. +func TestNormPassedOnLocal(t *testing.T) { + toUpper := func(f *pflag.FlagSet, name string) pflag.NormalizedName { + return pflag.NormalizedName(strings.ToUpper(name)) + } + + c := &Command{} + c.Flags().Bool("flagname", true, "this is a dummy flag") + c.SetGlobalNormalizationFunc(toUpper) + if c.LocalFlags().Lookup("flagname") != c.LocalFlags().Lookup("FLAGNAME") { + t.Error("Normalization function should be passed on to Local flag set") + } +} + +// Related to https://github.com/spf13/cobra/issues/521. +func TestNormPassedOnInherited(t *testing.T) { + toUpper := func(f *pflag.FlagSet, name string) pflag.NormalizedName { + return pflag.NormalizedName(strings.ToUpper(name)) + } + + c := &Command{} + c.SetGlobalNormalizationFunc(toUpper) + + child1 := &Command{} + c.AddCommand(child1) + + c.PersistentFlags().Bool("flagname", true, "") + + child2 := &Command{} + c.AddCommand(child2) + + inherited := child1.InheritedFlags() + if inherited.Lookup("flagname") == nil || inherited.Lookup("flagname") != inherited.Lookup("FLAGNAME") { + t.Error("Normalization function should be passed on to inherited flag set in command added before flag") + } + + inherited = child2.InheritedFlags() + if inherited.Lookup("flagname") == nil || inherited.Lookup("flagname") != inherited.Lookup("FLAGNAME") { + t.Error("Normalization function should be passed on to inherited flag set in command added after flag") + } +} + +// Related to https://github.com/spf13/cobra/issues/521. 
+func TestConsistentNormalizedName(t *testing.T) { + toUpper := func(f *pflag.FlagSet, name string) pflag.NormalizedName { + return pflag.NormalizedName(strings.ToUpper(name)) + } + n := func(f *pflag.FlagSet, name string) pflag.NormalizedName { + return pflag.NormalizedName(name) + } + + c := &Command{} + c.Flags().Bool("flagname", true, "") + c.SetGlobalNormalizationFunc(toUpper) + c.SetGlobalNormalizationFunc(n) + + if c.LocalFlags().Lookup("flagname") == c.LocalFlags().Lookup("FLAGNAME") { + t.Error("Normalizing flag names should not result in duplicate flags") + } +} + +func TestFlagOnPflagCommandLine(t *testing.T) { + flagName := "flagOnCommandLine" + pflag.String(flagName, "", "about my flag") + + c := &Command{Use: "c", Run: emptyRun} + c.AddCommand(&Command{Use: "child", Run: emptyRun}) + + output, _ := executeCommand(c, "--help") + checkStringContains(t, output, flagName) + + resetCommandLineFlagSet() +} - cmd.InitDefaultHelpFlag() - actual := cmd.Flags().Lookup("help").Usage - if actual != usage { - t.Fatalf("Expected the help flag from the base command with usage '%s', but got the default with usage '%s'", usage, actual) +// TestHiddenCommandExecutes checks, +// if hidden commands run as intended. +func TestHiddenCommandExecutes(t *testing.T) { + executed := false + c := &Command{ + Use: "c", + Hidden: true, + Run: func(*Command, []string) { executed = true }, + } + + output, err := executeCommand(c) + if output != "" { + t.Errorf("Unexpected output: %v", output) + } + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + + if !executed { + t.Error("Hidden command should have been executed") + } +} + +// test to ensure hidden commands do not show up in usage/help text +func TestHiddenCommandIsHidden(t *testing.T) { + c := &Command{Use: "c", Hidden: true, Run: emptyRun} + if c.IsAvailableCommand() { + t.Errorf("Hidden command should be unavailable") } } @@ -158,15 +1336,16 @@ func TestCommandsAreSorted(t *testing.T) { originalNames := []string{"middle", "zlast", "afirst"} expectedNames := []string{"afirst", "middle", "zlast"} - var tmpCommand = &Command{Use: "tmp"} + var rootCmd = &Command{Use: "root"} for _, name := range originalNames { - tmpCommand.AddCommand(&Command{Use: name}) + rootCmd.AddCommand(&Command{Use: name}) } - for i, c := range tmpCommand.Commands() { - if expectedNames[i] != c.Name() { - t.Errorf("expected: %s, got: %s", expectedNames[i], c.Name()) + for i, c := range rootCmd.Commands() { + got := c.Name() + if expectedNames[i] != got { + t.Errorf("Expected: %s, got: %s", expectedNames[i], got) } } @@ -178,15 +1357,16 @@ func TestEnableCommandSortingIsDisabled(t *testing.T) { originalNames := []string{"middle", "zlast", "afirst"} - var tmpCommand = &Command{Use: "tmp"} + var rootCmd = &Command{Use: "root"} for _, name := range originalNames { - tmpCommand.AddCommand(&Command{Use: name}) + rootCmd.AddCommand(&Command{Use: name}) } - for i, c := range tmpCommand.Commands() { - if originalNames[i] != c.Name() { - t.Errorf("expected: %s, got: %s", originalNames[i], c.Name()) + for i, c := range rootCmd.Commands() { + got := c.Name() + if originalNames[i] != got { + t.Errorf("expected: %s, got: %s", originalNames[i], got) } } @@ -194,33 +1374,27 @@ func TestEnableCommandSortingIsDisabled(t *testing.T) { } func TestSetOutput(t *testing.T) { - cmd := &Command{} - cmd.SetOutput(nil) - if out := cmd.OutOrStdout(); out != os.Stdout { - t.Fatalf("expected setting output to nil to revert back to stdout, got %v", out) + c := &Command{} + c.SetOutput(nil) + if out 
:= c.OutOrStdout(); out != os.Stdout { + t.Errorf("Expected setting output to nil to revert back to stdout") } } func TestFlagErrorFunc(t *testing.T) { - cmd := &Command{ - Use: "print", - RunE: func(cmd *Command, args []string) error { - return nil - }, - } - expectedFmt := "This is expected: %s" + c := &Command{Use: "c", Run: emptyRun} - cmd.SetFlagErrorFunc(func(c *Command, err error) error { + expectedFmt := "This is expected: %v" + c.SetFlagErrorFunc(func(_ *Command, err error) error { return fmt.Errorf(expectedFmt, err) }) - cmd.SetArgs([]string{"--bogus-flag"}) - cmd.SetOutput(new(bytes.Buffer)) - err := cmd.Execute() + _, err := executeCommand(c, "--unknown-flag") - expected := fmt.Sprintf(expectedFmt, "unknown flag: --bogus-flag") - if err.Error() != expected { - t.Errorf("expected %v, got %v", expected, err.Error()) + got := err.Error() + expected := fmt.Sprintf(expectedFmt, "unknown flag: --unknown-flag") + if got != expected { + t.Errorf("Expected %v, got %v", expected, got) } } @@ -228,19 +1402,19 @@ func TestFlagErrorFunc(t *testing.T) { // if cmd.LocalFlags() is unsorted when cmd.Flags().SortFlags set to false. // Related to https://github.com/spf13/cobra/issues/404. func TestSortedFlags(t *testing.T) { - cmd := &Command{} - cmd.Flags().SortFlags = false + c := &Command{} + c.Flags().SortFlags = false names := []string{"C", "B", "A", "D"} for _, name := range names { - cmd.Flags().Bool(name, false, "") + c.Flags().Bool(name, false, "") } i := 0 - cmd.LocalFlags().VisitAll(func(f *pflag.Flag) { + c.LocalFlags().VisitAll(func(f *pflag.Flag) { if i == len(names) { return } - if isStringInStringSlice(f.Name, names) { + if stringInSlice(f.Name, names) { if names[i] != f.Name { t.Errorf("Incorrect order. Expected %v, got %v", names[i], f.Name) } @@ -249,101 +1423,313 @@ func TestSortedFlags(t *testing.T) { }) } -// contains checks, if s is in ss. -func isStringInStringSlice(s string, ss []string) bool { - for _, v := range ss { - if v == s { - return true - } - } - return false -} - -// TestHelpFlagInHelp checks, -// if '--help' flag is shown in help for child (executing `parent help child`), -// that has no other flags. -// Related to https://github.com/spf13/cobra/issues/302. -func TestHelpFlagInHelp(t *testing.T) { - output := new(bytes.Buffer) - parent := &Command{Use: "parent", Run: func(*Command, []string) {}} - parent.SetOutput(output) - - child := &Command{Use: "child", Run: func(*Command, []string) {}} - parent.AddCommand(child) - - parent.SetArgs([]string{"help", "child"}) - err := parent.Execute() - if err != nil { - t.Fatal(err) - } - - if !strings.Contains(output.String(), "[flags]") { - t.Errorf("\nExpecting to contain: %v\nGot: %v", "[flags]", output.String()) - } -} - // TestMergeCommandLineToFlags checks, // if pflag.CommandLine is correctly merged to c.Flags() after first call // of c.mergePersistentFlags. // Related to https://github.com/spf13/cobra/issues/443. func TestMergeCommandLineToFlags(t *testing.T) { pflag.Bool("boolflag", false, "") - c := &Command{Use: "c", Run: func(*Command, []string) {}} + c := &Command{Use: "c", Run: emptyRun} c.mergePersistentFlags() if c.Flags().Lookup("boolflag") == nil { t.Fatal("Expecting to have flag from CommandLine in c.Flags()") } - // Reset pflag.CommandLine flagset. - pflag.CommandLine = pflag.NewFlagSet(os.Args[0], pflag.ExitOnError) + resetCommandLineFlagSet() } // TestUseDeprecatedFlags checks, // if cobra.Execute() prints a message, if a deprecated flag is used. 
// Related to https://github.com/spf13/cobra/issues/463. func TestUseDeprecatedFlags(t *testing.T) { - c := &Command{Use: "c", Run: func(*Command, []string) {}} - output := new(bytes.Buffer) - c.SetOutput(output) + c := &Command{Use: "c", Run: emptyRun} c.Flags().BoolP("deprecated", "d", false, "deprecated flag") c.Flags().MarkDeprecated("deprecated", "This flag is deprecated") - c.SetArgs([]string{"c", "-d"}) - if err := c.Execute(); err != nil { + output, err := executeCommand(c, "c", "-d") + if err != nil { t.Error("Unexpected error:", err) } - if !strings.Contains(output.String(), "This flag is deprecated") { - t.Errorf("Expected to contain deprecated message, but got %q", output.String()) + checkStringContains(t, output, "This flag is deprecated") +} + +func TestTraverseWithParentFlags(t *testing.T) { + rootCmd := &Command{Use: "root", TraverseChildren: true} + rootCmd.Flags().String("str", "", "") + rootCmd.Flags().BoolP("bool", "b", false, "") + + childCmd := &Command{Use: "child"} + childCmd.Flags().Int("int", -1, "") + + rootCmd.AddCommand(childCmd) + + c, args, err := rootCmd.Traverse([]string{"-b", "--str", "ok", "child", "--int"}) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if len(args) != 1 && args[0] != "--add" { + t.Errorf("Wrong args: %v", args) + } + if c.Name() != childCmd.Name() { + t.Errorf("Expected command: %q, got: %q", childCmd.Name(), c.Name()) } } -// TestSetHelpCommand checks, if SetHelpCommand works correctly. -func TestSetHelpCommand(t *testing.T) { - c := &Command{Use: "c", Run: func(*Command, []string) {}} +func TestTraverseNoParentFlags(t *testing.T) { + rootCmd := &Command{Use: "root", TraverseChildren: true} + rootCmd.Flags().String("foo", "", "foo things") + + childCmd := &Command{Use: "child"} + childCmd.Flags().String("str", "", "") + rootCmd.AddCommand(childCmd) + + c, args, err := rootCmd.Traverse([]string{"child"}) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if len(args) != 0 { + t.Errorf("Wrong args %v", args) + } + if c.Name() != childCmd.Name() { + t.Errorf("Expected command: %q, got: %q", childCmd.Name(), c.Name()) + } +} + +func TestTraverseWithBadParentFlags(t *testing.T) { + rootCmd := &Command{Use: "root", TraverseChildren: true} + + childCmd := &Command{Use: "child"} + childCmd.Flags().String("str", "", "") + rootCmd.AddCommand(childCmd) + + expected := "unknown flag: --str" + + c, _, err := rootCmd.Traverse([]string{"--str", "ok", "child"}) + if err == nil || !strings.Contains(err.Error(), expected) { + t.Errorf("Expected error, %q, got %q", expected, err) + } + if c != nil { + t.Errorf("Expected nil command") + } +} + +func TestTraverseWithBadChildFlag(t *testing.T) { + rootCmd := &Command{Use: "root", TraverseChildren: true} + rootCmd.Flags().String("str", "", "") + + childCmd := &Command{Use: "child"} + rootCmd.AddCommand(childCmd) + + // Expect no error because the last commands args shouldn't be parsed in + // Traverse. 
+ c, args, err := rootCmd.Traverse([]string{"child", "--str"}) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if len(args) != 1 && args[0] != "--str" { + t.Errorf("Wrong args: %v", args) + } + if c.Name() != childCmd.Name() { + t.Errorf("Expected command %q, got: %q", childCmd.Name(), c.Name()) + } +} + +func TestTraverseWithTwoSubcommands(t *testing.T) { + rootCmd := &Command{Use: "root", TraverseChildren: true} + + subCmd := &Command{Use: "sub", TraverseChildren: true} + rootCmd.AddCommand(subCmd) + + subsubCmd := &Command{ + Use: "subsub", + } + subCmd.AddCommand(subsubCmd) + + c, _, err := rootCmd.Traverse([]string{"sub", "subsub"}) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if c.Name() != subsubCmd.Name() { + t.Fatalf("Expected command: %q, got %q", subsubCmd.Name(), c.Name()) + } +} + +// TestUpdateName checks if c.Name() updates on changed c.Use. +// Related to https://github.com/spf13/cobra/pull/422#discussion_r143918343. +func TestUpdateName(t *testing.T) { + c := &Command{Use: "name xyz"} + originalName := c.Name() + + c.Use = "changedName abc" + if originalName == c.Name() || c.Name() != "changedName" { + t.Error("c.Name() should be updated on changed c.Use") + } +} + +type calledAsTestcase struct { + args []string + call string + want string + epm bool + tc bool +} + +func (tc *calledAsTestcase) test(t *testing.T) { + defer func(ov bool) { EnablePrefixMatching = ov }(EnablePrefixMatching) + EnablePrefixMatching = tc.epm + + var called *Command + run := func(c *Command, _ []string) { t.Logf("called: %q", c.Name()); called = c } + + parent := &Command{Use: "parent", Run: run} + child1 := &Command{Use: "child1", Run: run, Aliases: []string{"this"}} + child2 := &Command{Use: "child2", Run: run, Aliases: []string{"that"}} + + parent.AddCommand(child1) + parent.AddCommand(child2) + parent.SetArgs(tc.args) + output := new(bytes.Buffer) - c.SetOutput(output) - c.SetArgs([]string{"help"}) + parent.SetOutput(output) - // Help will not be shown, if c has no subcommands. - c.AddCommand(&Command{ - Use: "empty", - Run: func(cmd *Command, args []string) {}, - }) + parent.Execute() - correctMessage := "WORKS" - c.SetHelpCommand(&Command{ - Use: "help [command]", - Short: "Help about any command", - Long: `Help provides help for any command in the application. 
- Simply type ` + c.Name() + ` help [path to command] for full details.`, - Run: func(c *Command, args []string) { c.Print(correctMessage) }, - }) + if called == nil { + if tc.call != "" { + t.Errorf("missing expected call to command: %s", tc.call) + } + return + } - if err := c.Execute(); err != nil { - t.Error("Unexpected error:", err) + if called.Name() != tc.call { + t.Errorf("called command == %q; Wanted %q", called.Name(), tc.call) + } else if got := called.CalledAs(); got != tc.want { + t.Errorf("%s.CalledAs() == %q; Wanted: %q", tc.call, got, tc.want) + } +} + +func TestCalledAs(t *testing.T) { + tests := map[string]calledAsTestcase{ + "find/no-args": {nil, "parent", "parent", false, false}, + "find/real-name": {[]string{"child1"}, "child1", "child1", false, false}, + "find/full-alias": {[]string{"that"}, "child2", "that", false, false}, + "find/part-no-prefix": {[]string{"thi"}, "", "", false, false}, + "find/part-alias": {[]string{"thi"}, "child1", "this", true, false}, + "find/conflict": {[]string{"th"}, "", "", true, false}, + "traverse/no-args": {nil, "parent", "parent", false, true}, + "traverse/real-name": {[]string{"child1"}, "child1", "child1", false, true}, + "traverse/full-alias": {[]string{"that"}, "child2", "that", false, true}, + "traverse/part-no-prefix": {[]string{"thi"}, "", "", false, true}, + "traverse/part-alias": {[]string{"thi"}, "child1", "this", true, true}, + "traverse/conflict": {[]string{"th"}, "", "", true, true}, + } + + for name, tc := range tests { + t.Run(name, tc.test) + } +} + +func TestFParseErrWhitelistBackwardCompatibility(t *testing.T) { + c := &Command{Use: "c", Run: emptyRun} + c.Flags().BoolP("boola", "a", false, "a boolean flag") + + output, err := executeCommand(c, "c", "-a", "--unknown", "flag") + if err == nil { + t.Error("expected unknown flag error") + } + checkStringContains(t, output, "unknown flag: --unknown") +} + +func TestFParseErrWhitelistSameCommand(t *testing.T) { + c := &Command{ + Use: "c", + Run: emptyRun, + FParseErrWhitelist: FParseErrWhitelist{ + UnknownFlags: true, + }, + } + c.Flags().BoolP("boola", "a", false, "a boolean flag") + + _, err := executeCommand(c, "c", "-a", "--unknown", "flag") + if err != nil { + t.Error("unexpected error: ", err) + } +} + +func TestFParseErrWhitelistParentCommand(t *testing.T) { + root := &Command{ + Use: "root", + Run: emptyRun, + FParseErrWhitelist: FParseErrWhitelist{ + UnknownFlags: true, + }, + } + + c := &Command{ + Use: "child", + Run: emptyRun, + } + c.Flags().BoolP("boola", "a", false, "a boolean flag") + + root.AddCommand(c) + + output, err := executeCommand(root, "child", "-a", "--unknown", "flag") + if err == nil { + t.Error("expected unknown flag error") + } + checkStringContains(t, output, "unknown flag: --unknown") +} + +func TestFParseErrWhitelistChildCommand(t *testing.T) { + root := &Command{ + Use: "root", + Run: emptyRun, } - if output.String() != correctMessage { - t.Errorf("Expected to contain %q message, but got %q", correctMessage, output.String()) + c := &Command{ + Use: "child", + Run: emptyRun, + FParseErrWhitelist: FParseErrWhitelist{ + UnknownFlags: true, + }, + } + c.Flags().BoolP("boola", "a", false, "a boolean flag") + + root.AddCommand(c) + + _, err := executeCommand(root, "child", "-a", "--unknown", "flag") + if err != nil { + t.Error("unexpected error: ", err.Error()) + } +} + +func TestFParseErrWhitelistSiblingCommand(t *testing.T) { + root := &Command{ + Use: "root", + Run: emptyRun, + } + + c := &Command{ + Use: "child", + Run: emptyRun, + 
FParseErrWhitelist: FParseErrWhitelist{ + UnknownFlags: true, + }, + } + c.Flags().BoolP("boola", "a", false, "a boolean flag") + + s := &Command{ + Use: "sibling", + Run: emptyRun, + } + s.Flags().BoolP("boolb", "b", false, "a boolean flag") + + root.AddCommand(c) + root.AddCommand(s) + + output, err := executeCommand(root, "sibling", "-b", "--unknown", "flag") + if err == nil { + t.Error("expected unknown flag error") } + checkStringContains(t, output, "unknown flag: --unknown") } diff --git a/vendor/github.com/spf13/cobra/command_win.go b/vendor/github.com/spf13/cobra/command_win.go index edec728..8768b17 100644 --- a/vendor/github.com/spf13/cobra/command_win.go +++ b/vendor/github.com/spf13/cobra/command_win.go @@ -3,6 +3,7 @@ package cobra import ( + "fmt" "os" "time" @@ -14,7 +15,12 @@ var preExecHookFn = preExecHook func preExecHook(c *Command) { if MousetrapHelpText != "" && mousetrap.StartedByExplorer() { c.Print(MousetrapHelpText) - time.Sleep(5 * time.Second) + if MousetrapDisplayDuration > 0 { + time.Sleep(MousetrapDisplayDuration) + } else { + c.Println("Press return to continue...") + fmt.Scanln() + } os.Exit(1) } } diff --git a/vendor/github.com/spf13/cobra/doc/cmd_test.go b/vendor/github.com/spf13/cobra/doc/cmd_test.go index a4b5568..d29c577 100644 --- a/vendor/github.com/spf13/cobra/doc/cmd_test.go +++ b/vendor/github.com/spf13/cobra/doc/cmd_test.go @@ -1,145 +1,86 @@ package doc import ( - "bytes" - "fmt" - "runtime" "strings" "testing" "github.com/spf13/cobra" ) -var flagb1, flagb2, flagb3, flagbr, flagbp bool -var flags1, flags2a, flags2b, flags3 string -var flagi1, flagi2, flagi3, flagir int +func emptyRun(*cobra.Command, []string) {} -const strtwoParentHelp = "help message for parent flag strtwo" -const strtwoChildHelp = "help message for child flag strtwo" +func init() { + rootCmd.PersistentFlags().StringP("rootflag", "r", "two", "") + rootCmd.PersistentFlags().StringP("strtwo", "t", "two", "help message for parent flag strtwo") -var cmdEcho = &cobra.Command{ + echoCmd.PersistentFlags().StringP("strone", "s", "one", "help message for flag strone") + echoCmd.PersistentFlags().BoolP("persistentbool", "p", false, "help message for flag persistentbool") + echoCmd.Flags().IntP("intone", "i", 123, "help message for flag intone") + echoCmd.Flags().BoolP("boolone", "b", true, "help message for flag boolone") + + timesCmd.PersistentFlags().StringP("strtwo", "t", "2", "help message for child flag strtwo") + timesCmd.Flags().IntP("inttwo", "j", 234, "help message for flag inttwo") + timesCmd.Flags().BoolP("booltwo", "c", false, "help message for flag booltwo") + + printCmd.PersistentFlags().StringP("strthree", "s", "three", "help message for flag strthree") + printCmd.Flags().IntP("intthree", "i", 345, "help message for flag intthree") + printCmd.Flags().BoolP("boolthree", "b", true, "help message for flag boolthree") + + echoCmd.AddCommand(timesCmd, echoSubCmd, deprecatedCmd) + rootCmd.AddCommand(printCmd, echoCmd) +} + +var rootCmd = &cobra.Command{ + Use: "root", + Short: "Root short description", + Long: "Root long description", + Run: emptyRun, +} + +var echoCmd = &cobra.Command{ Use: "echo [string to echo]", Aliases: []string{"say"}, Short: "Echo anything to the screen", - Long: `an utterly useless command for testing.`, + Long: "an utterly useless command for testing", Example: "Just run cobra-test echo", } -var cmdEchoSub = &cobra.Command{ +var echoSubCmd = &cobra.Command{ Use: "echosub [string to print]", Short: "second sub command for echo", - Long: `an absolutely 
utterly useless command for testing gendocs!.`, - Run: func(cmd *cobra.Command, args []string) {}, + Long: "an absolutely utterly useless command for testing gendocs!.", + Run: emptyRun, +} + +var timesCmd = &cobra.Command{ + Use: "times [# times] [string to echo]", + SuggestFor: []string{"counts"}, + Short: "Echo anything to the screen more times", + Long: `a slightly useless command for testing.`, + Run: emptyRun, } -var cmdDeprecated = &cobra.Command{ +var deprecatedCmd = &cobra.Command{ Use: "deprecated [can't do anything here]", Short: "A command which is deprecated", Long: `an absolutely utterly useless command for testing deprecation!.`, Deprecated: "Please use echo instead", } -var cmdTimes = &cobra.Command{ - Use: "times [# times] [string to echo]", - SuggestFor: []string{"counts"}, - Short: "Echo anything to the screen more times", - Long: `a slightly useless command for testing.`, - PersistentPreRun: func(cmd *cobra.Command, args []string) {}, - Run: func(cmd *cobra.Command, args []string) {}, -} - -var cmdPrint = &cobra.Command{ +var printCmd = &cobra.Command{ Use: "print [string to print]", Short: "Print anything to the screen", Long: `an absolutely utterly useless command for testing.`, } -var cmdRootNoRun = &cobra.Command{ - Use: "cobra-test", - Short: "The root can run its own function", - Long: "The root description for help", -} - -var cmdRootSameName = &cobra.Command{ - Use: "print", - Short: "Root with the same name as a subcommand", - Long: "The root description for help", -} - -var cmdRootWithRun = &cobra.Command{ - Use: "cobra-test", - Short: "The root can run its own function", - Long: "The root description for help", -} - -var cmdSubNoRun = &cobra.Command{ - Use: "subnorun", - Short: "A subcommand without a Run function", - Long: "A long output about a subcommand without a Run function", -} - -var cmdVersion1 = &cobra.Command{ - Use: "version", - Short: "Print the version number", - Long: `First version of the version command`, -} - -var cmdVersion2 = &cobra.Command{ - Use: "version", - Short: "Print the version number", - Long: `Second version of the version command`, -} - -func flagInit() { - cmdEcho.ResetFlags() - cmdPrint.ResetFlags() - cmdTimes.ResetFlags() - cmdRootNoRun.ResetFlags() - cmdRootSameName.ResetFlags() - cmdRootWithRun.ResetFlags() - cmdSubNoRun.ResetFlags() - cmdRootNoRun.PersistentFlags().StringVarP(&flags2a, "strtwo", "t", "two", strtwoParentHelp) - cmdEcho.Flags().IntVarP(&flagi1, "intone", "i", 123, "help message for flag intone") - cmdTimes.Flags().IntVarP(&flagi2, "inttwo", "j", 234, "help message for flag inttwo") - cmdPrint.Flags().IntVarP(&flagi3, "intthree", "i", 345, "help message for flag intthree") - cmdEcho.PersistentFlags().StringVarP(&flags1, "strone", "s", "one", "help message for flag strone") - cmdEcho.PersistentFlags().BoolVarP(&flagbp, "persistentbool", "p", false, "help message for flag persistentbool") - cmdTimes.PersistentFlags().StringVarP(&flags2b, "strtwo", "t", "2", strtwoChildHelp) - cmdPrint.PersistentFlags().StringVarP(&flags3, "strthree", "s", "three", "help message for flag strthree") - cmdEcho.Flags().BoolVarP(&flagb1, "boolone", "b", true, "help message for flag boolone") - cmdTimes.Flags().BoolVarP(&flagb2, "booltwo", "c", false, "help message for flag booltwo") - cmdPrint.Flags().BoolVarP(&flagb3, "boolthree", "b", true, "help message for flag boolthree") - cmdVersion1.ResetFlags() - cmdVersion2.ResetFlags() -} - -func initializeWithRootCmd() *cobra.Command { - cmdRootWithRun.ResetCommands() - flagInit() - 
cmdRootWithRun.Flags().BoolVarP(&flagbr, "boolroot", "b", false, "help message for flag boolroot") - cmdRootWithRun.Flags().IntVarP(&flagir, "introot", "i", 321, "help message for flag introot") - return cmdRootWithRun -} - -func checkStringContains(t *testing.T, found, expected string) { - if !strings.Contains(found, expected) { - logErr(t, found, expected) +func checkStringContains(t *testing.T, got, expected string) { + if !strings.Contains(got, expected) { + t.Errorf("Expected to contain: \n %v\nGot:\n %v\n", expected, got) } } -func checkStringOmits(t *testing.T, found, expected string) { - if strings.Contains(found, expected) { - logErr(t, found, expected) - } -} - -func logErr(t *testing.T, found, expected string) { - out := new(bytes.Buffer) - - _, _, line, ok := runtime.Caller(2) - if ok { - fmt.Fprintf(out, "Line: %d ", line) +func checkStringOmits(t *testing.T, got, expected string) { + if strings.Contains(got, expected) { + t.Errorf("Expected to not contain: \n %v\nGot: %v", expected, got) } - fmt.Fprintf(out, "Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - t.Errorf(out.String()) } diff --git a/vendor/github.com/spf13/cobra/doc/man_docs.go b/vendor/github.com/spf13/cobra/doc/man_docs.go index ce92332..4a06233 100644 --- a/vendor/github.com/spf13/cobra/doc/man_docs.go +++ b/vendor/github.com/spf13/cobra/doc/man_docs.go @@ -20,6 +20,7 @@ import ( "os" "path/filepath" "sort" + "strconv" "strings" "time" @@ -87,7 +88,7 @@ type GenManTreeOptions struct { // GenManHeader is a lot like the .TH header at the start of man pages. These // include the title, section, date, source, and manual. We will use the -// current time if Date if unset and will use "Auto generated by spf13/cobra" +// current time if Date is unset and will use "Auto generated by spf13/cobra" // if the Source is unset. 
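A minimal caller-side sketch of how this header is usually supplied, assuming an application root *cobra.Command named rootCmd and the standard "github.com/spf13/cobra/doc", "log" and "os" imports; fields left empty are filled in by fillHeader below:

header := &doc.GenManHeader{
	Title:   "MYAPP",
	Section: "1",
	// Date stays nil here: fillHeader falls back to time.Now(), or to
	// SOURCE_DATE_EPOCH when that environment variable is set (see below).
}
if err := doc.GenMan(rootCmd, header, os.Stdout); err != nil {
	log.Fatal(err)
}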
type GenManHeader struct { Title string @@ -104,14 +105,16 @@ func GenMan(cmd *cobra.Command, header *GenManHeader, w io.Writer) error { if header == nil { header = &GenManHeader{} } - fillHeader(header, cmd.CommandPath()) + if err := fillHeader(header, cmd.CommandPath()); err != nil { + return err + } b := genMan(cmd, header) _, err := w.Write(md2man.Render(b)) return err } -func fillHeader(header *GenManHeader, name string) { +func fillHeader(header *GenManHeader, name string) error { if header.Title == "" { header.Title = strings.ToUpper(strings.Replace(name, " ", "\\-", -1)) } @@ -120,12 +123,20 @@ func fillHeader(header *GenManHeader, name string) { } if header.Date == nil { now := time.Now() + if epoch := os.Getenv("SOURCE_DATE_EPOCH"); epoch != "" { + unixEpoch, err := strconv.ParseInt(epoch, 10, 64) + if err != nil { + return fmt.Errorf("invalid SOURCE_DATE_EPOCH: %v", err) + } + now = time.Unix(unixEpoch, 0) + } header.Date = &now } header.date = (*header.Date).Format("Jan 2006") if header.Source == "" { header.Source = "Auto generated by spf13/cobra" } + return nil } func manPreamble(buf *bytes.Buffer, header *GenManHeader, cmd *cobra.Command, dashedName string) { @@ -176,13 +187,13 @@ func manPrintFlags(buf *bytes.Buffer, flags *pflag.FlagSet) { func manPrintOptions(buf *bytes.Buffer, command *cobra.Command) { flags := command.NonInheritedFlags() - if flags.HasFlags() { + if flags.HasAvailableFlags() { buf.WriteString("# OPTIONS\n") manPrintFlags(buf, flags) buf.WriteString("\n") } flags = command.InheritedFlags() - if flags.HasFlags() { + if flags.HasAvailableFlags() { buf.WriteString("# OPTIONS INHERITED FROM PARENT COMMANDS\n") manPrintFlags(buf, flags) buf.WriteString("\n") diff --git a/vendor/github.com/spf13/cobra/doc/man_docs_test.go b/vendor/github.com/spf13/cobra/doc/man_docs_test.go index 8799106..2c400f5 100644 --- a/vendor/github.com/spf13/cobra/doc/man_docs_test.go +++ b/vendor/github.com/spf13/cobra/doc/man_docs_test.go @@ -18,135 +18,133 @@ func translate(in string) string { } func TestGenManDoc(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - - out := new(bytes.Buffer) - header := &GenManHeader{ Title: "Project", Section: "2", } + // We generate on a subcommand so we have both subcommands and parents - if err := GenMan(cmdEcho, header, out); err != nil { + buf := new(bytes.Buffer) + if err := GenMan(echoCmd, header, buf); err != nil { t.Fatal(err) } - found := out.String() + output := buf.String() // Make sure parent has - in CommandPath() in SEE ALSO: - parentPath := cmdEcho.Parent().CommandPath() + parentPath := echoCmd.Parent().CommandPath() dashParentPath := strings.Replace(parentPath, " ", "-", -1) expected := translate(dashParentPath) expected = expected + "(" + header.Section + ")" - checkStringContains(t, found, expected) - - // Our description - expected = translate(cmdEcho.Name()) - checkStringContains(t, found, expected) - - // Better have our example - expected = translate(cmdEcho.Name()) - checkStringContains(t, found, expected) - - // A local flag - expected = "boolone" - checkStringContains(t, found, expected) - - // persistent flag on parent - expected = "rootflag" - checkStringContains(t, found, expected) - - // We better output info about our parent - expected = translate(cmdRootWithRun.Name()) - 
checkStringContains(t, found, expected) + checkStringContains(t, output, expected) + + checkStringContains(t, output, translate(echoCmd.Name())) + checkStringContains(t, output, translate(echoCmd.Name())) + checkStringContains(t, output, "boolone") + checkStringContains(t, output, "rootflag") + checkStringContains(t, output, translate(rootCmd.Name())) + checkStringContains(t, output, translate(echoSubCmd.Name())) + checkStringOmits(t, output, translate(deprecatedCmd.Name())) + checkStringContains(t, output, translate("Auto generated")) +} - // And about subcommands - expected = translate(cmdEchoSub.Name()) - checkStringContains(t, found, expected) +func TestGenManNoHiddenParents(t *testing.T) { + header := &GenManHeader{ + Title: "Project", + Section: "2", + } - unexpected := translate(cmdDeprecated.Name()) - checkStringOmits(t, found, unexpected) + // We generate on a subcommand so we have both subcommands and parents + for _, name := range []string{"rootflag", "strtwo"} { + f := rootCmd.PersistentFlags().Lookup(name) + f.Hidden = true + defer func() { f.Hidden = false }() + } + buf := new(bytes.Buffer) + if err := GenMan(echoCmd, header, buf); err != nil { + t.Fatal(err) + } + output := buf.String() - // auto generated - expected = translate("Auto generated") - checkStringContains(t, found, expected) + // Make sure parent has - in CommandPath() in SEE ALSO: + parentPath := echoCmd.Parent().CommandPath() + dashParentPath := strings.Replace(parentPath, " ", "-", -1) + expected := translate(dashParentPath) + expected = expected + "(" + header.Section + ")" + checkStringContains(t, output, expected) + + checkStringContains(t, output, translate(echoCmd.Name())) + checkStringContains(t, output, translate(echoCmd.Name())) + checkStringContains(t, output, "boolone") + checkStringOmits(t, output, "rootflag") + checkStringContains(t, output, translate(rootCmd.Name())) + checkStringContains(t, output, translate(echoSubCmd.Name())) + checkStringOmits(t, output, translate(deprecatedCmd.Name())) + checkStringContains(t, output, translate("Auto generated")) + checkStringOmits(t, output, "OPTIONS INHERITED FROM PARENT COMMANDS") } func TestGenManNoGenTag(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - cmdEcho.DisableAutoGenTag = true - out := new(bytes.Buffer) + echoCmd.DisableAutoGenTag = true + defer func() { echoCmd.DisableAutoGenTag = false }() header := &GenManHeader{ Title: "Project", Section: "2", } + // We generate on a subcommand so we have both subcommands and parents - if err := GenMan(cmdEcho, header, out); err != nil { + buf := new(bytes.Buffer) + if err := GenMan(echoCmd, header, buf); err != nil { t.Fatal(err) } - found := out.String() + output := buf.String() unexpected := translate("#HISTORY") - checkStringOmits(t, found, unexpected) + checkStringOmits(t, output, unexpected) } func TestGenManSeeAlso(t *testing.T) { - noop := func(cmd *cobra.Command, args []string) {} - - top := &cobra.Command{Use: "top", Run: noop} - aaa := &cobra.Command{Use: "aaa", Run: noop, Hidden: true} // #229 - bbb := &cobra.Command{Use: "bbb", Run: noop} - ccc := &cobra.Command{Use: "ccc", Run: noop} - top.AddCommand(aaa, bbb, ccc) + rootCmd := &cobra.Command{Use: "root", Run: emptyRun} + aCmd := &cobra.Command{Use: "aaa", Run: emptyRun, Hidden: true} // #229 + bCmd 
:= &cobra.Command{Use: "bbb", Run: emptyRun} + cCmd := &cobra.Command{Use: "ccc", Run: emptyRun} + rootCmd.AddCommand(aCmd, bCmd, cCmd) - out := new(bytes.Buffer) + buf := new(bytes.Buffer) header := &GenManHeader{} - if err := GenMan(top, header, out); err != nil { + if err := GenMan(rootCmd, header, buf); err != nil { t.Fatal(err) } + scanner := bufio.NewScanner(buf) - scanner := bufio.NewScanner(out) - - if err := AssertLineFound(scanner, ".SH SEE ALSO"); err != nil { - t.Fatal(fmt.Errorf("Couldn't find SEE ALSO section header: %s", err.Error())) + if err := assertLineFound(scanner, ".SH SEE ALSO"); err != nil { + t.Fatalf("Couldn't find SEE ALSO section header: %v", err) } - - if err := AssertNextLineEquals(scanner, ".PP"); err != nil { - t.Fatal(fmt.Errorf("First line after SEE ALSO wasn't break-indent: %s", err.Error())) + if err := assertNextLineEquals(scanner, ".PP"); err != nil { + t.Fatalf("First line after SEE ALSO wasn't break-indent: %v", err) } - - if err := AssertNextLineEquals(scanner, `\fBtop\-bbb(1)\fP, \fBtop\-ccc(1)\fP`); err != nil { - t.Fatal(fmt.Errorf("Second line after SEE ALSO wasn't correct: %s", err.Error())) + if err := assertNextLineEquals(scanner, `\fBroot\-bbb(1)\fP, \fBroot\-ccc(1)\fP`); err != nil { + t.Fatalf("Second line after SEE ALSO wasn't correct: %v", err) } } func TestManPrintFlagsHidesShortDeperecated(t *testing.T) { - cmd := &cobra.Command{} - flags := cmd.Flags() - flags.StringP("foo", "f", "default", "Foo flag") - flags.MarkShorthandDeprecated("foo", "don't use it no more") + c := &cobra.Command{} + c.Flags().StringP("foo", "f", "default", "Foo flag") + c.Flags().MarkShorthandDeprecated("foo", "don't use it no more") - out := new(bytes.Buffer) - manPrintFlags(out, flags) + buf := new(bytes.Buffer) + manPrintFlags(buf, c.Flags()) + got := buf.String() expected := "**--foo**=\"default\"\n\tFoo flag\n\n" - if out.String() != expected { - t.Fatalf("Expected %s, but got %s", expected, out.String()) + if got != expected { + t.Errorf("Expected %v, got %v", expected, got) } } func TestGenManTree(t *testing.T) { - cmd := &cobra.Command{ - Use: "do [OPTIONS] arg1 arg2", - } + c := &cobra.Command{Use: "do [OPTIONS] arg1 arg2"} header := &GenManHeader{Section: "2"} tmpdir, err := ioutil.TempDir("", "test-gen-man-tree") if err != nil { @@ -154,7 +152,7 @@ func TestGenManTree(t *testing.T) { } defer os.RemoveAll(tmpdir) - if err := GenManTree(cmd, header, tmpdir); err != nil { + if err := GenManTree(c, header, tmpdir); err != nil { t.Fatalf("GenManTree failed: %s", err.Error()) } @@ -167,7 +165,7 @@ func TestGenManTree(t *testing.T) { } } -func AssertLineFound(scanner *bufio.Scanner, expectedLine string) error { +func assertLineFound(scanner *bufio.Scanner, expectedLine string) error { for scanner.Scan() { line := scanner.Text() if line == expectedLine { @@ -176,30 +174,29 @@ func AssertLineFound(scanner *bufio.Scanner, expectedLine string) error { } if err := scanner.Err(); err != nil { - return fmt.Errorf("AssertLineFound: scan failed: %s", err.Error()) + return fmt.Errorf("scan failed: %s", err) } - return fmt.Errorf("AssertLineFound: hit EOF before finding %#v", expectedLine) + return fmt.Errorf("hit EOF before finding %v", expectedLine) } -func AssertNextLineEquals(scanner *bufio.Scanner, expectedLine string) error { +func assertNextLineEquals(scanner *bufio.Scanner, expectedLine string) error { if scanner.Scan() { line := scanner.Text() if line == expectedLine { return nil } - return fmt.Errorf("AssertNextLineEquals: got %#v, not %#v", line, 
expectedLine) + return fmt.Errorf("got %v, not %v", line, expectedLine) } if err := scanner.Err(); err != nil { - return fmt.Errorf("AssertNextLineEquals: scan failed: %s", err.Error()) + return fmt.Errorf("scan failed: %v", err) } - return fmt.Errorf("AssertNextLineEquals: hit EOF before finding %#v", expectedLine) + return fmt.Errorf("hit EOF before finding %v", expectedLine) } func BenchmarkGenManToFile(b *testing.B) { - c := initializeWithRootCmd() file, err := ioutil.TempFile("", "") if err != nil { b.Fatal(err) @@ -209,7 +206,7 @@ func BenchmarkGenManToFile(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - if err := GenMan(c, nil, file); err != nil { + if err := GenMan(rootCmd, nil, file); err != nil { b.Fatal(err) } } diff --git a/vendor/github.com/spf13/cobra/doc/md_docs.go b/vendor/github.com/spf13/cobra/doc/md_docs.go index 68cf5bf..d76f6d5 100644 --- a/vendor/github.com/spf13/cobra/doc/md_docs.go +++ b/vendor/github.com/spf13/cobra/doc/md_docs.go @@ -29,7 +29,7 @@ import ( func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error { flags := cmd.NonInheritedFlags() flags.SetOutput(buf) - if flags.HasFlags() { + if flags.HasAvailableFlags() { buf.WriteString("### Options\n\n```\n") flags.PrintDefaults() buf.WriteString("```\n\n") @@ -37,7 +37,7 @@ func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error { parentFlags := cmd.InheritedFlags() parentFlags.SetOutput(buf) - if parentFlags.HasFlags() { + if parentFlags.HasAvailableFlags() { buf.WriteString("### Options inherited from parent commands\n\n```\n") parentFlags.PrintDefaults() buf.WriteString("```\n\n") @@ -67,7 +67,7 @@ func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) buf.WriteString("## " + name + "\n\n") buf.WriteString(short + "\n\n") buf.WriteString("### Synopsis\n\n") - buf.WriteString("\n" + long + "\n\n") + buf.WriteString(long + "\n\n") if cmd.Runnable() { buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.UseLine())) @@ -82,7 +82,7 @@ func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) return err } if hasSeeAlso(cmd) { - buf.WriteString("### SEE ALSO\n") + buf.WriteString("### SEE ALSO\n\n") if cmd.HasParent() { parent := cmd.Parent() pname := parent.CommandPath() diff --git a/vendor/github.com/spf13/cobra/doc/md_docs_test.go b/vendor/github.com/spf13/cobra/doc/md_docs_test.go index ba6b9a4..c060f32 100644 --- a/vendor/github.com/spf13/cobra/doc/md_docs_test.go +++ b/vendor/github.com/spf13/cobra/doc/md_docs_test.go @@ -5,100 +5,75 @@ import ( "io/ioutil" "os" "path/filepath" - "strings" "testing" "github.com/spf13/cobra" ) func TestGenMdDoc(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - - out := new(bytes.Buffer) - - // We generate on s subcommand so we have both subcommands and parents - if err := GenMarkdown(cmdEcho, out); err != nil { + // We generate on subcommand so we have both subcommands and parents. 
+ buf := new(bytes.Buffer) + if err := GenMarkdown(echoCmd, buf); err != nil { t.Fatal(err) } - found := out.String() - - // Our description - expected := cmdEcho.Long - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // Better have our example - expected = cmdEcho.Example - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // A local flag - expected = "boolone" - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // persistent flag on parent - expected = "rootflag" - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // We better output info about our parent - expected = cmdRootWithRun.Short - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } + output := buf.String() + + checkStringContains(t, output, echoCmd.Long) + checkStringContains(t, output, echoCmd.Example) + checkStringContains(t, output, "boolone") + checkStringContains(t, output, "rootflag") + checkStringContains(t, output, rootCmd.Short) + checkStringContains(t, output, echoSubCmd.Short) + checkStringOmits(t, output, deprecatedCmd.Short) + checkStringContains(t, output, "Options inherited from parent commands") +} - // And about subcommands - expected = cmdEchoSub.Short - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) +func TestGenMdNoHiddenParents(t *testing.T) { + // We generate on subcommand so we have both subcommands and parents. 
+ for _, name := range []string{"rootflag", "strtwo"} { + f := rootCmd.PersistentFlags().Lookup(name) + f.Hidden = true + defer func() { f.Hidden = false }() } - - unexpected := cmdDeprecated.Short - if strings.Contains(found, unexpected) { - t.Errorf("Unexpected response.\nFound: %v\nBut should not have!!\n", unexpected) + buf := new(bytes.Buffer) + if err := GenMarkdown(echoCmd, buf); err != nil { + t.Fatal(err) } + output := buf.String() + + checkStringContains(t, output, echoCmd.Long) + checkStringContains(t, output, echoCmd.Example) + checkStringContains(t, output, "boolone") + checkStringOmits(t, output, "rootflag") + checkStringContains(t, output, rootCmd.Short) + checkStringContains(t, output, echoSubCmd.Short) + checkStringOmits(t, output, deprecatedCmd.Short) + checkStringOmits(t, output, "Options inherited from parent commands") } func TestGenMdNoTag(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - c.DisableAutoGenTag = true - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - out := new(bytes.Buffer) + rootCmd.DisableAutoGenTag = true + defer func() { rootCmd.DisableAutoGenTag = false }() - if err := GenMarkdown(c, out); err != nil { + buf := new(bytes.Buffer) + if err := GenMarkdown(rootCmd, buf); err != nil { t.Fatal(err) } - found := out.String() - - unexpected := "Auto generated" - checkStringOmits(t, found, unexpected) + output := buf.String() + checkStringOmits(t, output, "Auto generated") } func TestGenMdTree(t *testing.T) { - cmd := &cobra.Command{ - Use: "do [OPTIONS] arg1 arg2", - } + c := &cobra.Command{Use: "do [OPTIONS] arg1 arg2"} tmpdir, err := ioutil.TempDir("", "test-gen-md-tree") if err != nil { - t.Fatalf("Failed to create tmpdir: %s", err.Error()) + t.Fatalf("Failed to create tmpdir: %v", err) } defer os.RemoveAll(tmpdir) - if err := GenMarkdownTree(cmd, tmpdir); err != nil { - t.Fatalf("GenMarkdownTree failed: %s", err.Error()) + if err := GenMarkdownTree(c, tmpdir); err != nil { + t.Fatalf("GenMarkdownTree failed: %v", err) } if _, err := os.Stat(filepath.Join(tmpdir, "do.md")); err != nil { @@ -107,7 +82,6 @@ func TestGenMdTree(t *testing.T) { } func BenchmarkGenMarkdownToFile(b *testing.B) { - c := initializeWithRootCmd() file, err := ioutil.TempFile("", "") if err != nil { b.Fatal(err) @@ -117,7 +91,7 @@ func BenchmarkGenMarkdownToFile(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - if err := GenMarkdown(c, file); err != nil { + if err := GenMarkdown(rootCmd, file); err != nil { b.Fatal(err) } } diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs.go b/vendor/github.com/spf13/cobra/doc/rest_docs.go new file mode 100644 index 0000000..051d8dc --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/rest_docs.go @@ -0,0 +1,185 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/spf13/cobra" +) + +func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("Options\n") + buf.WriteString("~~~~~~~\n\n::\n\n") + flags.PrintDefaults() + buf.WriteString("\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("Options inherited from parent commands\n") + buf.WriteString("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n::\n\n") + parentFlags.PrintDefaults() + buf.WriteString("\n") + } + return nil +} + +// linkHandler for default ReST hyperlink markup +func defaultLinkHandler(name, ref string) string { + return fmt.Sprintf("`%s <%s.rst>`_", name, ref) +} + +// GenReST creates reStructured Text output. +func GenReST(cmd *cobra.Command, w io.Writer) error { + return GenReSTCustom(cmd, w, defaultLinkHandler) +} + +// GenReSTCustom creates custom reStructured Text output. +func GenReSTCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string, string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + short := cmd.Short + long := cmd.Long + if len(long) == 0 { + long = short + } + ref := strings.Replace(name, " ", "_", -1) + + buf.WriteString(".. _" + ref + ":\n\n") + buf.WriteString(name + "\n") + buf.WriteString(strings.Repeat("-", len(name)) + "\n\n") + buf.WriteString(short + "\n\n") + buf.WriteString("Synopsis\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString("\n" + long + "\n\n") + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("::\n\n %s\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("Examples\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString(fmt.Sprintf("::\n\n%s\n\n", indentString(cmd.Example, " "))) + } + + if err := printOptionsReST(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("SEE ALSO\n") + buf.WriteString("~~~~~~~~\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + ref = strings.Replace(pname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(pname, ref), parent.Short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + ref = strings.Replace(cname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(cname, ref), child.Short)) + } + buf.WriteString("\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString("*Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "*\n") + } + _, err := buf.WriteTo(w) + return err +} + +// GenReSTTree will generate a ReST page for this command and all +// descendants in the directory given. +// This function may not work correctly if your command names have `-` in them. +// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. 
+func GenReSTTree(cmd *cobra.Command, dir string) error { + emptyStr := func(s string) string { return "" } + return GenReSTTreeCustom(cmd, dir, emptyStr, defaultLinkHandler) +} + +// GenReSTTreeCustom is the the same as GenReSTTree, but +// with custom filePrepender and linkHandler. +func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenReSTTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".rst" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenReSTCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +// adapted from: https://github.com/kr/text/blob/main/indent.go +func indentString(s, p string) string { + var res []byte + b := []byte(s) + prefix := []byte(p) + bol := true + for _, c := range b { + if bol && c != '\n' { + res = append(res, prefix...) + } + res = append(res, c) + bol = c == '\n' + } + return string(res) +} diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs.md b/vendor/github.com/spf13/cobra/doc/rest_docs.md new file mode 100644 index 0000000..6098430 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/rest_docs.md @@ -0,0 +1,114 @@ +# Generating ReStructured Text Docs For Your Own cobra.Command + +Generating ReST pages from a cobra command is incredibly easy. An example is as follows: + +```go +package main + +import ( + "log" + + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" +) + +func main() { + cmd := &cobra.Command{ + Use: "test", + Short: "my test program", + } + err := doc.GenReSTTree(cmd, "/tmp") + if err != nil { + log.Fatal(err) + } +} +``` + +That will get you a ReST document `/tmp/test.rst` + +## Generate ReST docs for the entire command tree + +This program can actually generate docs for the kubectl command in the kubernetes project + +```go +package main + +import ( + "log" + "io/ioutil" + "os" + + "k8s.io/kubernetes/pkg/kubectl/cmd" + cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" + + "github.com/spf13/cobra/doc" +) + +func main() { + kubectl := cmd.NewKubectlCommand(cmdutil.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard) + err := doc.GenReSTTree(kubectl, "./") + if err != nil { + log.Fatal(err) + } +} +``` + +This will generate a whole series of files, one for each command in the tree, in the directory specified (in this case "./") + +## Generate ReST docs for a single command + +You may wish to have more control over the output, or only generate for a single command, instead of the entire command tree. If this is the case you may prefer to `GenReST` instead of `GenReSTTree` + +```go + out := new(bytes.Buffer) + err := doc.GenReST(cmd, out) + if err != nil { + log.Fatal(err) + } +``` + +This will write the ReST doc for ONLY "cmd" into the out, buffer. + +## Customize the output + +Both `GenReST` and `GenReSTTree` have alternate versions with callbacks to get some control of the output: + +```go +func GenReSTTreeCustom(cmd *Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error { + //... 
+} +``` + +```go +func GenReSTCustom(cmd *Command, out *bytes.Buffer, linkHandler func(string, string) string) error { + //... +} +``` + +The `filePrepender` will prepend the return value given the full filepath to the rendered ReST file. A common use case is to add front matter to use the generated documentation with [Hugo](http://gohugo.io/): + +```go +const fmTemplate = `--- +date: %s +title: "%s" +slug: %s +url: %s +--- +` +filePrepender := func(filename string) string { + now := time.Now().Format(time.RFC3339) + name := filepath.Base(filename) + base := strings.TrimSuffix(name, path.Ext(name)) + url := "/commands/" + strings.ToLower(base) + "/" + return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url) +} +``` + +The `linkHandler` can be used to customize the rendered links to the commands, given a command name and reference. This is useful while converting rst to html or while generating documentation with tools like Sphinx where `:ref:` is used: + +```go +// Sphinx cross-referencing format +linkHandler := func(name, ref string) string { + return fmt.Sprintf(":ref:`%s <%s>`", name, ref) +} +``` diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs_test.go b/vendor/github.com/spf13/cobra/doc/rest_docs_test.go new file mode 100644 index 0000000..330a2e5 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/rest_docs_test.go @@ -0,0 +1,99 @@ +package doc + +import ( + "bytes" + "io/ioutil" + "os" + "path/filepath" + "testing" + + "github.com/spf13/cobra" +) + +func TestGenRSTDoc(t *testing.T) { + // We generate on a subcommand so we have both subcommands and parents + buf := new(bytes.Buffer) + if err := GenReST(echoCmd, buf); err != nil { + t.Fatal(err) + } + output := buf.String() + + checkStringContains(t, output, echoCmd.Long) + checkStringContains(t, output, echoCmd.Example) + checkStringContains(t, output, "boolone") + checkStringContains(t, output, "rootflag") + checkStringContains(t, output, rootCmd.Short) + checkStringContains(t, output, echoSubCmd.Short) + checkStringOmits(t, output, deprecatedCmd.Short) +} + +func TestGenRSTNoHiddenParents(t *testing.T) { + // We generate on a subcommand so we have both subcommands and parents + for _, name := range []string{"rootflag", "strtwo"} { + f := rootCmd.PersistentFlags().Lookup(name) + f.Hidden = true + defer func() { f.Hidden = false }() + } + buf := new(bytes.Buffer) + if err := GenReST(echoCmd, buf); err != nil { + t.Fatal(err) + } + output := buf.String() + + checkStringContains(t, output, echoCmd.Long) + checkStringContains(t, output, echoCmd.Example) + checkStringContains(t, output, "boolone") + checkStringOmits(t, output, "rootflag") + checkStringContains(t, output, rootCmd.Short) + checkStringContains(t, output, echoSubCmd.Short) + checkStringOmits(t, output, deprecatedCmd.Short) + checkStringOmits(t, output, "Options inherited from parent commands") +} + +func TestGenRSTNoTag(t *testing.T) { + rootCmd.DisableAutoGenTag = true + defer func() { rootCmd.DisableAutoGenTag = false }() + + buf := new(bytes.Buffer) + if err := GenReST(rootCmd, buf); err != nil { + t.Fatal(err) + } + output := buf.String() + + unexpected := "Auto generated" + checkStringOmits(t, output, unexpected) +} + +func TestGenRSTTree(t *testing.T) { + c := &cobra.Command{Use: "do [OPTIONS] arg1 arg2"} + + tmpdir, err := ioutil.TempDir("", "test-gen-rst-tree") + if err != nil { + t.Fatalf("Failed to create tmpdir: %s", err.Error()) + } + defer os.RemoveAll(tmpdir) + + if err := GenReSTTree(c, tmpdir); err != nil { + 
t.Fatalf("GenReSTTree failed: %s", err.Error()) + } + + if _, err := os.Stat(filepath.Join(tmpdir, "do.rst")); err != nil { + t.Fatalf("Expected file 'do.rst' to exist") + } +} + +func BenchmarkGenReSTToFile(b *testing.B) { + file, err := ioutil.TempFile("", "") + if err != nil { + b.Fatal(err) + } + defer os.Remove(file.Name()) + defer file.Close() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + if err := GenReST(rootCmd, file); err != nil { + b.Fatal(err) + } + } +} diff --git a/vendor/github.com/spf13/cobra/doc/yaml_docs_test.go b/vendor/github.com/spf13/cobra/doc/yaml_docs_test.go index 29e985e..c5a6359 100644 --- a/vendor/github.com/spf13/cobra/doc/yaml_docs_test.go +++ b/vendor/github.com/spf13/cobra/doc/yaml_docs_test.go @@ -5,92 +5,42 @@ import ( "io/ioutil" "os" "path/filepath" - "strings" "testing" "github.com/spf13/cobra" ) func TestGenYamlDoc(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - - out := new(bytes.Buffer) - // We generate on s subcommand so we have both subcommands and parents - if err := GenYaml(cmdEcho, out); err != nil { + buf := new(bytes.Buffer) + if err := GenYaml(echoCmd, buf); err != nil { t.Fatal(err) } - found := out.String() - - // Our description - expected := cmdEcho.Long - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // Better have our example - expected = cmdEcho.Example - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // A local flag - expected = "boolone" - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // persistent flag on parent - expected = "rootflag" - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // We better output info about our parent - expected = cmdRootWithRun.Short - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - // And about subcommands - expected = cmdEchoSub.Short - if !strings.Contains(found, expected) { - t.Errorf("Unexpected response.\nExpecting to contain: \n %q\nGot:\n %q\n", expected, found) - } - - unexpected := cmdDeprecated.Short - if strings.Contains(found, unexpected) { - t.Errorf("Unexpected response.\nFound: %v\nBut should not have!!\n", unexpected) - } + output := buf.String() + + checkStringContains(t, output, echoCmd.Long) + checkStringContains(t, output, echoCmd.Example) + checkStringContains(t, output, "boolone") + checkStringContains(t, output, "rootflag") + checkStringContains(t, output, rootCmd.Short) + checkStringContains(t, output, echoSubCmd.Short) } func TestGenYamlNoTag(t *testing.T) { - c := initializeWithRootCmd() - // Need two commands to run the command alphabetical sort - cmdEcho.AddCommand(cmdTimes, cmdEchoSub, cmdDeprecated) - c.AddCommand(cmdPrint, cmdEcho) - c.DisableAutoGenTag = true - cmdRootWithRun.PersistentFlags().StringVarP(&flags2a, "rootflag", "r", "two", strtwoParentHelp) - out := new(bytes.Buffer) + rootCmd.DisableAutoGenTag = true + defer func() { rootCmd.DisableAutoGenTag 
= false }() - if err := GenYaml(c, out); err != nil { + buf := new(bytes.Buffer) + if err := GenYaml(rootCmd, buf); err != nil { t.Fatal(err) } - found := out.String() - - unexpected := "Auto generated" - checkStringOmits(t, found, unexpected) + output := buf.String() + checkStringOmits(t, output, "Auto generated") } func TestGenYamlTree(t *testing.T) { - cmd := &cobra.Command{ - Use: "do [OPTIONS] arg1 arg2", - } + c := &cobra.Command{Use: "do [OPTIONS] arg1 arg2"} tmpdir, err := ioutil.TempDir("", "test-gen-yaml-tree") if err != nil { @@ -98,7 +48,7 @@ func TestGenYamlTree(t *testing.T) { } defer os.RemoveAll(tmpdir) - if err := GenYamlTree(cmd, tmpdir); err != nil { + if err := GenYamlTree(c, tmpdir); err != nil { t.Fatalf("GenYamlTree failed: %s", err.Error()) } @@ -108,7 +58,6 @@ func TestGenYamlTree(t *testing.T) { } func BenchmarkGenYamlToFile(b *testing.B) { - c := initializeWithRootCmd() file, err := ioutil.TempFile("", "") if err != nil { b.Fatal(err) @@ -118,7 +67,7 @@ func BenchmarkGenYamlToFile(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - if err := GenYaml(c, file); err != nil { + if err := GenYaml(rootCmd, file); err != nil { b.Fatal(err) } } diff --git a/vendor/github.com/spf13/cobra/go.mod b/vendor/github.com/spf13/cobra/go.mod new file mode 100644 index 0000000..9a9eb65 --- /dev/null +++ b/vendor/github.com/spf13/cobra/go.mod @@ -0,0 +1,13 @@ +module github.com/spf13/cobra + +go 1.12 + +require ( + github.com/BurntSushi/toml v0.3.1 // indirect + github.com/cpuguy83/go-md2man v1.0.10 + github.com/inconshreveable/mousetrap v1.0.0 + github.com/mitchellh/go-homedir v1.1.0 + github.com/spf13/pflag v1.0.3 + github.com/spf13/viper v1.3.2 + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/spf13/cobra/go.sum b/vendor/github.com/spf13/cobra/go.sum new file mode 100644 index 0000000..9761f4d --- /dev/null +++ b/vendor/github.com/spf13/cobra/go.sum @@ -0,0 +1,51 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= +github.com/magiconair/properties v1.8.0/go.mod 
h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go new file mode 100644 index 0000000..889c22e --- /dev/null +++ b/vendor/github.com/spf13/cobra/zsh_completions.go @@ -0,0 +1,126 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" +) + +// GenZshCompletionFile generates zsh completion file. 
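For orientation, a minimal caller-side sketch of the zsh completion generator whose implementation follows below; the command name "issue-sync" and the output filename are illustrative, not part of the vendored change.

package main

import (
	"log"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	// Any *cobra.Command works; a subcommand gives the generator a second level.
	root := &cobra.Command{Use: "issue-sync"}
	root.AddCommand(&cobra.Command{Use: "sync"})

	// Write the completion script to a file...
	if err := root.GenZshCompletionFile("_issue-sync"); err != nil {
		log.Fatal(err)
	}
	// ...or stream it to any io.Writer.
	if err := root.GenZshCompletion(os.Stdout); err != nil {
		log.Fatal(err)
	}
}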
+func (c *Command) GenZshCompletionFile(filename string) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenZshCompletion(outFile) +} + +// GenZshCompletion generates a zsh completion file and writes to the passed writer. +func (c *Command) GenZshCompletion(w io.Writer) error { + buf := new(bytes.Buffer) + + writeHeader(buf, c) + maxDepth := maxDepth(c) + writeLevelMapping(buf, maxDepth) + writeLevelCases(buf, maxDepth, c) + + _, err := buf.WriteTo(w) + return err +} + +func writeHeader(w io.Writer, cmd *Command) { + fmt.Fprintf(w, "#compdef %s\n\n", cmd.Name()) +} + +func maxDepth(c *Command) int { + if len(c.Commands()) == 0 { + return 0 + } + maxDepthSub := 0 + for _, s := range c.Commands() { + subDepth := maxDepth(s) + if subDepth > maxDepthSub { + maxDepthSub = subDepth + } + } + return 1 + maxDepthSub +} + +func writeLevelMapping(w io.Writer, numLevels int) { + fmt.Fprintln(w, `_arguments \`) + for i := 1; i <= numLevels; i++ { + fmt.Fprintf(w, ` '%d: :->level%d' \`, i, i) + fmt.Fprintln(w) + } + fmt.Fprintf(w, ` '%d: :%s'`, numLevels+1, "_files") + fmt.Fprintln(w) +} + +func writeLevelCases(w io.Writer, maxDepth int, root *Command) { + fmt.Fprintln(w, "case $state in") + defer fmt.Fprintln(w, "esac") + + for i := 1; i <= maxDepth; i++ { + fmt.Fprintf(w, " level%d)\n", i) + writeLevel(w, root, i) + fmt.Fprintln(w, " ;;") + } + fmt.Fprintln(w, " *)") + fmt.Fprintln(w, " _arguments '*: :_files'") + fmt.Fprintln(w, " ;;") +} + +func writeLevel(w io.Writer, root *Command, i int) { + fmt.Fprintf(w, " case $words[%d] in\n", i) + defer fmt.Fprintln(w, " esac") + + commands := filterByLevel(root, i) + byParent := groupByParent(commands) + + for p, c := range byParent { + names := names(c) + fmt.Fprintf(w, " %s)\n", p) + fmt.Fprintf(w, " _arguments '%d: :(%s)'\n", i, strings.Join(names, " ")) + fmt.Fprintln(w, " ;;") + } + fmt.Fprintln(w, " *)") + fmt.Fprintln(w, " _arguments '*: :_files'") + fmt.Fprintln(w, " ;;") + +} + +func filterByLevel(c *Command, l int) []*Command { + cs := make([]*Command, 0) + if l == 0 { + cs = append(cs, c) + return cs + } + for _, s := range c.Commands() { + cs = append(cs, filterByLevel(s, l-1)...) 
+ } + return cs +} + +func groupByParent(commands []*Command) map[string][]*Command { + m := make(map[string][]*Command) + for _, c := range commands { + parent := c.Parent() + if parent == nil { + continue + } + m[parent.Name()] = append(m[parent.Name()], c) + } + return m +} + +func names(commands []*Command) []string { + ns := make([]string, len(commands)) + for i, c := range commands { + ns[i] = c.Name() + } + return ns +} diff --git a/vendor/github.com/spf13/cobra/zsh_completions_test.go b/vendor/github.com/spf13/cobra/zsh_completions_test.go new file mode 100644 index 0000000..34e6949 --- /dev/null +++ b/vendor/github.com/spf13/cobra/zsh_completions_test.go @@ -0,0 +1,89 @@ +package cobra + +import ( + "bytes" + "strings" + "testing" +) + +func TestZshCompletion(t *testing.T) { + tcs := []struct { + name string + root *Command + expectedExpressions []string + }{ + { + name: "trivial", + root: &Command{Use: "trivialapp"}, + expectedExpressions: []string{"#compdef trivial"}, + }, + { + name: "linear", + root: func() *Command { + r := &Command{Use: "linear"} + + sub1 := &Command{Use: "sub1"} + r.AddCommand(sub1) + + sub2 := &Command{Use: "sub2"} + sub1.AddCommand(sub2) + + sub3 := &Command{Use: "sub3"} + sub2.AddCommand(sub3) + return r + }(), + expectedExpressions: []string{"sub1", "sub2", "sub3"}, + }, + { + name: "flat", + root: func() *Command { + r := &Command{Use: "flat"} + r.AddCommand(&Command{Use: "c1"}) + r.AddCommand(&Command{Use: "c2"}) + return r + }(), + expectedExpressions: []string{"(c1 c2)"}, + }, + { + name: "tree", + root: func() *Command { + r := &Command{Use: "tree"} + + sub1 := &Command{Use: "sub1"} + r.AddCommand(sub1) + + sub11 := &Command{Use: "sub11"} + sub12 := &Command{Use: "sub12"} + + sub1.AddCommand(sub11) + sub1.AddCommand(sub12) + + sub2 := &Command{Use: "sub2"} + r.AddCommand(sub2) + + sub21 := &Command{Use: "sub21"} + sub22 := &Command{Use: "sub22"} + + sub2.AddCommand(sub21) + sub2.AddCommand(sub22) + + return r + }(), + expectedExpressions: []string{"(sub11 sub12)", "(sub21 sub22)"}, + }, + } + + for _, tc := range tcs { + t.Run(tc.name, func(t *testing.T) { + buf := new(bytes.Buffer) + tc.root.GenZshCompletion(buf) + output := buf.String() + + for _, expectedExpression := range tc.expectedExpressions { + if !strings.Contains(output, expectedExpression) { + t.Errorf("Expected completion to contain %q somewhere; got %q", expectedExpression, output) + } + } + }) + } +} diff --git a/vendor/github.com/spf13/jwalterweatherman/.gitignore b/vendor/github.com/spf13/jwalterweatherman/.gitignore index 0026861..a71f88a 100644 --- a/vendor/github.com/spf13/jwalterweatherman/.gitignore +++ b/vendor/github.com/spf13/jwalterweatherman/.gitignore @@ -20,3 +20,5 @@ _cgo_export.* _testmain.go *.exe +*.bench +go.sum \ No newline at end of file diff --git a/vendor/github.com/spf13/jwalterweatherman/README.md b/vendor/github.com/spf13/jwalterweatherman/README.md index 350a968..932a23f 100644 --- a/vendor/github.com/spf13/jwalterweatherman/README.md +++ b/vendor/github.com/spf13/jwalterweatherman/README.md @@ -18,7 +18,7 @@ provides a few advantages over using the standard log library alone. I really wanted a very straightforward library that could seamlessly do the following things. -1. Replace all the println, printf, etc statements thought my code with +1. Replace all the println, printf, etc statements thoughout my code with something more useful 2. Allow the user to easily control what levels are printed to stdout 3. 
Allow the user to easily control what levels are logged @@ -144,5 +144,5 @@ This is an early release. I’ve been using it for a while and this is the third interface I’ve tried. I like this one pretty well, but no guarantees that it won’t change a bit. -I wrote this for use in [hugo](http://hugo.spf13.com). If you are looking +I wrote this for use in [hugo](https://gohugo.io). If you are looking for a static website engine that’s super fast please checkout Hugo. diff --git a/vendor/github.com/spf13/jwalterweatherman/default_notepad.go b/vendor/github.com/spf13/jwalterweatherman/default_notepad.go index bcb7634..a018c15 100644 --- a/vendor/github.com/spf13/jwalterweatherman/default_notepad.go +++ b/vendor/github.com/spf13/jwalterweatherman/default_notepad.go @@ -64,6 +64,13 @@ func SetStdoutThreshold(threshold Threshold) { reloadDefaultNotepad() } +// SetStdoutOutput set the stdout output for the default notepad. Default is stdout. +func SetStdoutOutput(handle io.Writer) { + defaultNotepad.outHandle = handle + defaultNotepad.init() + reloadDefaultNotepad() +} + // SetPrefix set the prefix for the default logger. Empty by default. func SetPrefix(prefix string) { defaultNotepad.SetPrefix(prefix) @@ -76,6 +83,13 @@ func SetFlags(flags int) { reloadDefaultNotepad() } +// SetLogListeners configures the default logger with one or more log listeners. +func SetLogListeners(l ...LogListener) { + defaultNotepad.logListeners = l + defaultNotepad.init() + reloadDefaultNotepad() +} + // Level returns the current global log threshold. func LogThreshold() Threshold { return defaultNotepad.logThreshold @@ -95,19 +109,3 @@ func GetLogThreshold() Threshold { func GetStdoutThreshold() Threshold { return defaultNotepad.GetStdoutThreshold() } - -// LogCountForLevel returns the number of log invocations for a given threshold. -func LogCountForLevel(l Threshold) uint64 { - return defaultNotepad.LogCountForLevel(l) -} - -// LogCountForLevelsGreaterThanorEqualTo returns the number of log invocations -// greater than or equal to a given threshold. -func LogCountForLevelsGreaterThanorEqualTo(threshold Threshold) uint64 { - return defaultNotepad.LogCountForLevelsGreaterThanorEqualTo(threshold) -} - -// ResetLogCounters resets the invocation counters for all levels. 
-func ResetLogCounters() { - defaultNotepad.ResetLogCounters() -} diff --git a/vendor/github.com/spf13/jwalterweatherman/default_notepad_test.go b/vendor/github.com/spf13/jwalterweatherman/default_notepad_test.go index 2670c8d..994bac1 100644 --- a/vendor/github.com/spf13/jwalterweatherman/default_notepad_test.go +++ b/vendor/github.com/spf13/jwalterweatherman/default_notepad_test.go @@ -59,11 +59,16 @@ func TestDefaultLogging(t *testing.T) { } func TestLogCounter(t *testing.T) { + assert := require.New(t) + defaultNotepad.logHandle = ioutil.Discard defaultNotepad.outHandle = ioutil.Discard + errorCounter := &Counter{} + SetLogThreshold(LevelTrace) SetStdoutThreshold(LevelTrace) + SetLogListeners(LogCounter(errorCounter, LevelError)) FATAL.Println("fatal err") CRITICAL.Println("critical err") @@ -82,8 +87,7 @@ func TestLogCounter(t *testing.T) { for j := 0; j < 10; j++ { ERROR.Println("error", j) // check for data races - require.True(t, LogCountForLevel(LevelError) > uint64(j)) - require.True(t, LogCountForLevelsGreaterThanorEqualTo(LevelError) > uint64(j)) + assert.True(errorCounter.Count() > uint64(j)) } }() @@ -91,12 +95,5 @@ func TestLogCounter(t *testing.T) { wg.Wait() - require.Equal(t, uint64(1), LogCountForLevel(LevelFatal)) - require.Equal(t, uint64(1), LogCountForLevel(LevelCritical)) - require.Equal(t, uint64(2), LogCountForLevel(LevelWarn)) - require.Equal(t, uint64(1), LogCountForLevel(LevelInfo)) - require.Equal(t, uint64(1), LogCountForLevel(LevelDebug)) - require.Equal(t, uint64(1), LogCountForLevel(LevelTrace)) - require.Equal(t, uint64(100), LogCountForLevel(LevelError)) - require.Equal(t, uint64(102), LogCountForLevelsGreaterThanorEqualTo(LevelError)) + assert.Equal(uint64(102), errorCounter.Count()) } diff --git a/vendor/github.com/spf13/jwalterweatherman/go.mod b/vendor/github.com/spf13/jwalterweatherman/go.mod new file mode 100644 index 0000000..1dbcfd3 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/go.mod @@ -0,0 +1,7 @@ +module github.com/spf13/jwalterweatherman + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/testify v1.2.2 +) diff --git a/vendor/github.com/spf13/jwalterweatherman/log_counter.go b/vendor/github.com/spf13/jwalterweatherman/log_counter.go index 11423ac..41285f3 100644 --- a/vendor/github.com/spf13/jwalterweatherman/log_counter.go +++ b/vendor/github.com/spf13/jwalterweatherman/log_counter.go @@ -6,50 +6,41 @@ package jwalterweatherman import ( + "io" "sync/atomic" ) -type logCounter struct { - counter uint64 +// Counter is an io.Writer that increments a counter on Write. +type Counter struct { + count uint64 } -func (c *logCounter) incr() { - atomic.AddUint64(&c.counter, 1) +func (c *Counter) incr() { + atomic.AddUint64(&c.count, 1) } -func (c *logCounter) resetCounter() { - atomic.StoreUint64(&c.counter, 0) +// Reset resets the counter. +func (c *Counter) Reset() { + atomic.StoreUint64(&c.count, 0) } -func (c *logCounter) getCount() uint64 { - return atomic.LoadUint64(&c.counter) +// Count returns the current count. +func (c *Counter) Count() uint64 { + return atomic.LoadUint64(&c.count) } -func (c *logCounter) Write(p []byte) (n int, err error) { +func (c *Counter) Write(p []byte) (n int, err error) { c.incr() return len(p), nil } -// LogCountForLevel returns the number of log invocations for a given threshold. 
-func (n *Notepad) LogCountForLevel(l Threshold) uint64 { - return n.logCounters[l].getCount() -} - -// LogCountForLevelsGreaterThanorEqualTo returns the number of log invocations -// greater than or equal to a given threshold. -func (n *Notepad) LogCountForLevelsGreaterThanorEqualTo(threshold Threshold) uint64 { - var cnt uint64 - - for i := int(threshold); i < len(n.logCounters); i++ { - cnt += n.LogCountForLevel(Threshold(i)) - } - - return cnt -} - -// ResetLogCounters resets the invocation counters for all levels. -func (n *Notepad) ResetLogCounters() { - for _, np := range n.logCounters { - np.resetCounter() +// LogCounter creates a LogListener that counts log statements >= the given threshold. +func LogCounter(counter *Counter, t1 Threshold) LogListener { + return func(t2 Threshold) io.Writer { + if t2 < t1 { + // Not interested in this threshold. + return nil + } + return counter } } diff --git a/vendor/github.com/spf13/jwalterweatherman/notepad.go b/vendor/github.com/spf13/jwalterweatherman/notepad.go index edeff3a..cc7957b 100644 --- a/vendor/github.com/spf13/jwalterweatherman/notepad.go +++ b/vendor/github.com/spf13/jwalterweatherman/notepad.go @@ -8,6 +8,7 @@ package jwalterweatherman import ( "fmt" "io" + "io/ioutil" "log" ) @@ -58,13 +59,28 @@ type Notepad struct { prefix string flags int - // One per Threshold - logCounters [7]*logCounter + logListeners []LogListener } -// NewNotepad create a new notepad. -func NewNotepad(outThreshold Threshold, logThreshold Threshold, outHandle, logHandle io.Writer, prefix string, flags int) *Notepad { - n := &Notepad{} +// A LogListener can ble supplied to a Notepad to listen on log writes for a given +// threshold. This can be used to capture log events in unit tests and similar. +// Note that this function will be invoked once for each log threshold. If +// the given threshold is not of interest to you, return nil. +// Note that these listeners will receive log events for a given threshold, even +// if the current configuration says not to log it. That way you can count ERRORs even +// if you don't print them to the console. +type LogListener func(t Threshold) io.Writer + +// NewNotepad creates a new Notepad. +func NewNotepad( + outThreshold Threshold, + logThreshold Threshold, + outHandle, logHandle io.Writer, + prefix string, flags int, + logListeners ...LogListener, +) *Notepad { + + n := &Notepad{logListeners: logListeners} n.loggers = [7]**log.Logger{&n.TRACE, &n.DEBUG, &n.INFO, &n.WARN, &n.ERROR, &n.CRITICAL, &n.FATAL} n.outHandle = outHandle @@ -95,26 +111,41 @@ func (n *Notepad) init() { for t, logger := range n.loggers { threshold := Threshold(t) - counter := &logCounter{} - n.logCounters[t] = counter prefix := n.prefix + threshold.String() + " " switch { case threshold >= n.logThreshold && threshold >= n.stdoutThreshold: - *logger = log.New(io.MultiWriter(counter, logAndOut), prefix, n.flags) + *logger = log.New(n.createLogWriters(threshold, logAndOut), prefix, n.flags) case threshold >= n.logThreshold: - *logger = log.New(io.MultiWriter(counter, n.logHandle), prefix, n.flags) + *logger = log.New(n.createLogWriters(threshold, n.logHandle), prefix, n.flags) case threshold >= n.stdoutThreshold: - *logger = log.New(io.MultiWriter(counter, n.outHandle), prefix, n.flags) + *logger = log.New(n.createLogWriters(threshold, n.outHandle), prefix, n.flags) default: - // counter doesn't care about prefix and flags, so don't use them - // for performance. 
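A usage sketch of the LogListener/LogCounter hook introduced above, applied to the package-level default notepad; the import alias jww and the log messages are illustrative.

package main

import (
	"fmt"

	jww "github.com/spf13/jwalterweatherman"
)

func main() {
	// Count every log statement at ERROR or above, whether or not it is printed.
	errorCounter := &jww.Counter{}
	jww.SetLogListeners(jww.LogCounter(errorCounter, jww.LevelError))

	jww.ERROR.Println("something went wrong") // counted
	jww.WARN.Println("below ERROR, not counted")

	fmt.Println(errorCounter.Count()) // 1
}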
- *logger = log.New(counter, "", 0) + *logger = log.New(n.createLogWriters(threshold, ioutil.Discard), prefix, n.flags) + } + } +} + +func (n *Notepad) createLogWriters(t Threshold, handle io.Writer) io.Writer { + if len(n.logListeners) == 0 { + return handle + } + writers := []io.Writer{handle} + for _, l := range n.logListeners { + w := l(t) + if w != nil { + writers = append(writers, w) } } + + if len(writers) == 1 { + return handle + } + + return io.MultiWriter(writers...) } // SetLogThreshold changes the threshold above which messages are written to the @@ -148,7 +179,7 @@ func (n *Notepad) GetStdoutThreshold() Threshold { } // SetPrefix changes the prefix used by the notepad. Prefixes are displayed between -// brackets at the begining of the line. An empty prefix won't be displayed at all. +// brackets at the beginning of the line. An empty prefix won't be displayed at all. func (n *Notepad) SetPrefix(prefix string) { if len(prefix) != 0 { n.prefix = "[" + prefix + "] " diff --git a/vendor/github.com/spf13/jwalterweatherman/notepad_test.go b/vendor/github.com/spf13/jwalterweatherman/notepad_test.go index 69ad6f8..64a7aa0 100644 --- a/vendor/github.com/spf13/jwalterweatherman/notepad_test.go +++ b/vendor/github.com/spf13/jwalterweatherman/notepad_test.go @@ -7,6 +7,8 @@ package jwalterweatherman import ( "bytes" + "io" + "io/ioutil" "testing" "github.com/stretchr/testify/require" @@ -15,7 +17,9 @@ import ( func TestNotepad(t *testing.T) { var logHandle, outHandle bytes.Buffer - n := NewNotepad(LevelCritical, LevelError, &outHandle, &logHandle, "TestNotePad", 0) + errorCounter := &Counter{} + + n := NewNotepad(LevelCritical, LevelError, &outHandle, &logHandle, "TestNotePad", 0, LogCounter(errorCounter, LevelError)) require.Equal(t, LevelCritical, n.GetStdoutThreshold()) require.Equal(t, LevelError, n.GetLogThreshold()) @@ -29,9 +33,48 @@ func TestNotepad(t *testing.T) { require.NotContains(t, outHandle.String(), "Some error") require.Contains(t, outHandle.String(), "Some critical error") - require.Equal(t, n.LogCountForLevel(LevelError), uint64(1)) - require.Equal(t, n.LogCountForLevel(LevelDebug), uint64(1)) - require.Equal(t, n.LogCountForLevel(LevelTrace), uint64(0)) + // 1 error + 1 critical + require.Equal(t, errorCounter.Count(), uint64(2)) +} + +func TestNotepadLogListener(t *testing.T) { + assert := require.New(t) + + var errorBuff, infoBuff bytes.Buffer + + errorCapture := func(t Threshold) io.Writer { + if t != LevelError { + // Only interested in ERROR + return nil + } + + return &errorBuff + } + + infoCapture := func(t Threshold) io.Writer { + if t != LevelInfo { + return nil + } + + return &infoBuff + } + + n := NewNotepad(LevelCritical, LevelError, ioutil.Discard, ioutil.Discard, "TestNotePad", 0, infoCapture, errorCapture) + + n.DEBUG.Println("Some debug") + n.INFO.Println("Some info") + n.INFO.Println("Some more info") + n.ERROR.Println("Some error") + n.CRITICAL.Println("Some critical error") + n.ERROR.Println("Some more error") + + assert.Equal(`[TestNotePad] ERROR Some error +[TestNotePad] ERROR Some more error +`, errorBuff.String()) + assert.Equal(`[TestNotePad] INFO Some info +[TestNotePad] INFO Some more info +`, infoBuff.String()) + } func TestThresholdString(t *testing.T) { diff --git a/vendor/github.com/spf13/pflag/.travis.yml b/vendor/github.com/spf13/pflag/.travis.yml index f8a63b3..00d04cb 100644 --- a/vendor/github.com/spf13/pflag/.travis.yml +++ b/vendor/github.com/spf13/pflag/.travis.yml @@ -3,8 +3,9 @@ sudo: false language: go go: - - 1.7.3 - - 1.8.1 + 
- 1.9.x + - 1.10.x + - 1.11.x - tip matrix: @@ -12,7 +13,7 @@ matrix: - go: tip install: - - go get github.com/golang/lint/golint + - go get golang.org/x/lint/golint - export PATH=$GOPATH/bin:$PATH - go install ./... diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md index b052414..7eacc5b 100644 --- a/vendor/github.com/spf13/pflag/README.md +++ b/vendor/github.com/spf13/pflag/README.md @@ -86,8 +86,8 @@ fmt.Println("ip has value ", *ip) fmt.Println("flagvar has value ", flagvar) ``` -There are helpers function to get values later if you have the FlagSet but -it was difficult to keep up with all of the flag pointers in your code. +There are helper functions available to get the value stored in a Flag if you have a FlagSet but find +it difficult to keep up with all of the pointers in your code. If you have a pflag.FlagSet with a flag called 'flagname' of type int you can use GetInt() to get the int value. But notice that 'flagname' must exist and it must be an int. GetString("flagname") will fail. diff --git a/vendor/github.com/spf13/pflag/bytes.go b/vendor/github.com/spf13/pflag/bytes.go new file mode 100644 index 0000000..67d5304 --- /dev/null +++ b/vendor/github.com/spf13/pflag/bytes.go @@ -0,0 +1,209 @@ +package pflag + +import ( + "encoding/base64" + "encoding/hex" + "fmt" + "strings" +) + +// BytesHex adapts []byte for use as a flag. Value of flag is HEX encoded +type bytesHexValue []byte + +// String implements pflag.Value.String. +func (bytesHex bytesHexValue) String() string { + return fmt.Sprintf("%X", []byte(bytesHex)) +} + +// Set implements pflag.Value.Set. +func (bytesHex *bytesHexValue) Set(value string) error { + bin, err := hex.DecodeString(strings.TrimSpace(value)) + + if err != nil { + return err + } + + *bytesHex = bin + + return nil +} + +// Type implements pflag.Value.Type. +func (*bytesHexValue) Type() string { + return "bytesHex" +} + +func newBytesHexValue(val []byte, p *[]byte) *bytesHexValue { + *p = val + return (*bytesHexValue)(p) +} + +func bytesHexConv(sval string) (interface{}, error) { + + bin, err := hex.DecodeString(sval) + + if err == nil { + return bin, nil + } + + return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err) +} + +// GetBytesHex return the []byte value of a flag with the given name +func (f *FlagSet) GetBytesHex(name string) ([]byte, error) { + val, err := f.getFlagType(name, "bytesHex", bytesHexConv) + + if err != nil { + return []byte{}, err + } + + return val.([]byte), nil +} + +// BytesHexVar defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func (f *FlagSet) BytesHexVar(p *[]byte, name string, value []byte, usage string) { + f.VarP(newBytesHexValue(value, p), name, "", usage) +} + +// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) { + f.VarP(newBytesHexValue(value, p), name, shorthand, usage) +} + +// BytesHexVar defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. 
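As a usage sketch of the hex byte flag defined in this file (the flag name "key" and its value are illustrative):

package main

import (
	"fmt"

	flag "github.com/spf13/pflag"
)

func main() {
	fs := flag.NewFlagSet("example", flag.ContinueOnError)
	// --key takes a hex-encoded string and yields the decoded raw bytes.
	key := fs.BytesHex("key", nil, "key material in hex")

	if err := fs.Parse([]string{"--key=deadbeef"}); err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", *key) // deadbeef
}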
+func BytesHexVar(p *[]byte, name string, value []byte, usage string) { + CommandLine.VarP(newBytesHexValue(value, p), name, "", usage) +} + +// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash. +func BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) { + CommandLine.VarP(newBytesHexValue(value, p), name, shorthand, usage) +} + +// BytesHex defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func (f *FlagSet) BytesHex(name string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesHexVarP(p, name, "", value, usage) + return p +} + +// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesHexP(name, shorthand string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesHexVarP(p, name, shorthand, value, usage) + return p +} + +// BytesHex defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func BytesHex(name string, value []byte, usage string) *[]byte { + return CommandLine.BytesHexP(name, "", value, usage) +} + +// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash. +func BytesHexP(name, shorthand string, value []byte, usage string) *[]byte { + return CommandLine.BytesHexP(name, shorthand, value, usage) +} + +// BytesBase64 adapts []byte for use as a flag. Value of flag is Base64 encoded +type bytesBase64Value []byte + +// String implements pflag.Value.String. +func (bytesBase64 bytesBase64Value) String() string { + return base64.StdEncoding.EncodeToString([]byte(bytesBase64)) +} + +// Set implements pflag.Value.Set. +func (bytesBase64 *bytesBase64Value) Set(value string) error { + bin, err := base64.StdEncoding.DecodeString(strings.TrimSpace(value)) + + if err != nil { + return err + } + + *bytesBase64 = bin + + return nil +} + +// Type implements pflag.Value.Type. +func (*bytesBase64Value) Type() string { + return "bytesBase64" +} + +func newBytesBase64Value(val []byte, p *[]byte) *bytesBase64Value { + *p = val + return (*bytesBase64Value)(p) +} + +func bytesBase64ValueConv(sval string) (interface{}, error) { + + bin, err := base64.StdEncoding.DecodeString(sval) + if err == nil { + return bin, nil + } + + return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err) +} + +// GetBytesBase64 return the []byte value of a flag with the given name +func (f *FlagSet) GetBytesBase64(name string) ([]byte, error) { + val, err := f.getFlagType(name, "bytesBase64", bytesBase64ValueConv) + + if err != nil { + return []byte{}, err + } + + return val.([]byte), nil +} + +// BytesBase64Var defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func (f *FlagSet) BytesBase64Var(p *[]byte, name string, value []byte, usage string) { + f.VarP(newBytesBase64Value(value, p), name, "", usage) +} + +// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) { + f.VarP(newBytesBase64Value(value, p), name, shorthand, usage) +} + +// BytesBase64Var defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func BytesBase64Var(p *[]byte, name string, value []byte, usage string) { + CommandLine.VarP(newBytesBase64Value(value, p), name, "", usage) +} + +// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash. +func BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) { + CommandLine.VarP(newBytesBase64Value(value, p), name, shorthand, usage) +} + +// BytesBase64 defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func (f *FlagSet) BytesBase64(name string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesBase64VarP(p, name, "", value, usage) + return p +} + +// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesBase64VarP(p, name, shorthand, value, usage) + return p +} + +// BytesBase64 defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func BytesBase64(name string, value []byte, usage string) *[]byte { + return CommandLine.BytesBase64P(name, "", value, usage) +} + +// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash. +func BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte { + return CommandLine.BytesBase64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/bytes_test.go b/vendor/github.com/spf13/pflag/bytes_test.go new file mode 100644 index 0000000..5251f34 --- /dev/null +++ b/vendor/github.com/spf13/pflag/bytes_test.go @@ -0,0 +1,134 @@ +package pflag + +import ( + "encoding/base64" + "fmt" + "os" + "testing" +) + +func setUpBytesHex(bytesHex *[]byte) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.BytesHexVar(bytesHex, "bytes", []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}, "Some bytes in HEX") + f.BytesHexVarP(bytesHex, "bytes2", "B", []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}, "Some bytes in HEX") + return f +} + +func TestBytesHex(t *testing.T) { + testCases := []struct { + input string + success bool + expected string + }{ + /// Positive cases + {"", true, ""}, // Is empty string OK ? 
+ {"01", true, "01"}, + {"0101", true, "0101"}, + {"1234567890abcdef", true, "1234567890ABCDEF"}, + {"1234567890ABCDEF", true, "1234567890ABCDEF"}, + + // Negative cases + {"0", false, ""}, // Short string + {"000", false, ""}, /// Odd-length string + {"qq", false, ""}, /// non-hex character + } + + devnull, _ := os.Open(os.DevNull) + os.Stderr = devnull + + for i := range testCases { + var bytesHex []byte + f := setUpBytesHex(&bytesHex) + + tc := &testCases[i] + + // --bytes + args := []string{ + fmt.Sprintf("--bytes=%s", tc.input), + fmt.Sprintf("-B %s", tc.input), + fmt.Sprintf("--bytes2=%s", tc.input), + } + + for _, arg := range args { + err := f.Parse([]string{arg}) + + if err != nil && tc.success == true { + t.Errorf("expected success, got %q", err) + continue + } else if err == nil && tc.success == false { + // bytesHex, err := f.GetBytesHex("bytes") + t.Errorf("expected failure while processing %q", tc.input) + continue + } else if tc.success { + bytesHex, err := f.GetBytesHex("bytes") + if err != nil { + t.Errorf("Got error trying to fetch the 'bytes' flag: %v", err) + } + if fmt.Sprintf("%X", bytesHex) != tc.expected { + t.Errorf("expected %q, got '%X'", tc.expected, bytesHex) + } + } + } + } +} + +func setUpBytesBase64(bytesBase64 *[]byte) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.BytesBase64Var(bytesBase64, "bytes", []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}, "Some bytes in Base64") + f.BytesBase64VarP(bytesBase64, "bytes2", "B", []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}, "Some bytes in Base64") + return f +} + +func TestBytesBase64(t *testing.T) { + testCases := []struct { + input string + success bool + expected string + }{ + /// Positive cases + {"", true, ""}, // Is empty string OK ? + {"AQ==", true, "AQ=="}, + + // Negative cases + {"AQ", false, ""}, // Padding removed + {"ï", false, ""}, // non-base64 characters + } + + devnull, _ := os.Open(os.DevNull) + os.Stderr = devnull + + for i := range testCases { + var bytesBase64 []byte + f := setUpBytesBase64(&bytesBase64) + + tc := &testCases[i] + + // --bytes + args := []string{ + fmt.Sprintf("--bytes=%s", tc.input), + fmt.Sprintf("-B %s", tc.input), + fmt.Sprintf("--bytes2=%s", tc.input), + } + + for _, arg := range args { + err := f.Parse([]string{arg}) + + if err != nil && tc.success == true { + t.Errorf("expected success, got %q", err) + continue + } else if err == nil && tc.success == false { + // bytesBase64, err := f.GetBytesBase64("bytes") + t.Errorf("expected failure while processing %q", tc.input) + continue + } else if tc.success { + bytesBase64, err := f.GetBytesBase64("bytes") + if err != nil { + t.Errorf("Got error trying to fetch the 'bytes' flag: %v", err) + } + if base64.StdEncoding.EncodeToString(bytesBase64) != tc.expected { + t.Errorf("expected %q, got '%X'", tc.expected, bytesBase64) + } + } + } + } +} diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go index 250a438..a0b2679 100644 --- a/vendor/github.com/spf13/pflag/count.go +++ b/vendor/github.com/spf13/pflag/count.go @@ -11,13 +11,13 @@ func newCountValue(val int, p *int) *countValue { } func (i *countValue) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - // -1 means that no specific value was passed, so increment - if v == -1 { + // "+1" means that no specific value was passed, so increment + if s == "+1" { *i = countValue(*i + 1) - } else { - *i = countValue(v) + return nil } + v, err := strconv.ParseInt(s, 0, 0) + *i = countValue(v) return err } @@ -46,7 +46,7 @@ func (f *FlagSet) 
GetCount(name string) (int, error) { // CountVar defines a count flag with specified name, default value, and usage string. // The argument p points to an int variable in which to store the value of the flag. -// A count flag will add 1 to its value evey time it is found on the command line +// A count flag will add 1 to its value every time it is found on the command line func (f *FlagSet) CountVar(p *int, name string, usage string) { f.CountVarP(p, name, "", usage) } @@ -54,7 +54,7 @@ func (f *FlagSet) CountVar(p *int, name string, usage string) { // CountVarP is like CountVar only take a shorthand for the flag name. func (f *FlagSet) CountVarP(p *int, name, shorthand string, usage string) { flag := f.VarPF(newCountValue(0, p), name, shorthand, usage) - flag.NoOptDefVal = "-1" + flag.NoOptDefVal = "+1" } // CountVar like CountVar only the flag is placed on the CommandLine instead of a given flag set @@ -69,7 +69,7 @@ func CountVarP(p *int, name, shorthand string, usage string) { // Count defines a count flag with specified name, default value, and usage string. // The return value is the address of an int variable that stores the value of the flag. -// A count flag will add 1 to its value evey time it is found on the command line +// A count flag will add 1 to its value every time it is found on the command line func (f *FlagSet) Count(name string, usage string) *int { p := new(int) f.CountVarP(p, name, "", usage) diff --git a/vendor/github.com/spf13/pflag/count_test.go b/vendor/github.com/spf13/pflag/count_test.go index 460d96a..3785d37 100644 --- a/vendor/github.com/spf13/pflag/count_test.go +++ b/vendor/github.com/spf13/pflag/count_test.go @@ -17,10 +17,14 @@ func TestCount(t *testing.T) { success bool expected int }{ + {[]string{}, true, 0}, + {[]string{"-v"}, true, 1}, {[]string{"-vvv"}, true, 3}, {[]string{"-v", "-v", "-v"}, true, 3}, {[]string{"-v", "--verbose", "-v"}, true, 3}, {[]string{"-v=3", "-v"}, true, 4}, + {[]string{"--verbose=0"}, true, 0}, + {[]string{"-v=0"}, true, 0}, {[]string{"-v=a"}, false, 0}, } @@ -45,7 +49,7 @@ func TestCount(t *testing.T) { t.Errorf("Got error trying to fetch the counter flag") } if c != tc.expected { - t.Errorf("expected %q, got %q", tc.expected, c) + t.Errorf("expected %d, got %d", tc.expected, c) } } } diff --git a/vendor/github.com/spf13/pflag/duration_slice.go b/vendor/github.com/spf13/pflag/duration_slice.go new file mode 100644 index 0000000..52c6b6d --- /dev/null +++ b/vendor/github.com/spf13/pflag/duration_slice.go @@ -0,0 +1,128 @@ +package pflag + +import ( + "fmt" + "strings" + "time" +) + +// -- durationSlice Value +type durationSliceValue struct { + value *[]time.Duration + changed bool +} + +func newDurationSliceValue(val []time.Duration, p *[]time.Duration) *durationSliceValue { + dsv := new(durationSliceValue) + dsv.value = p + *dsv.value = val + return dsv +} + +func (s *durationSliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]time.Duration, len(ss)) + for i, d := range ss { + var err error + out[i], err = time.ParseDuration(d) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) 
+ } + s.changed = true + return nil +} + +func (s *durationSliceValue) Type() string { + return "durationSlice" +} + +func (s *durationSliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%s", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func durationSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []time.Duration{}, nil + } + ss := strings.Split(val, ",") + out := make([]time.Duration, len(ss)) + for i, d := range ss { + var err error + out[i], err = time.ParseDuration(d) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetDurationSlice returns the []time.Duration value of a flag with the given name +func (f *FlagSet) GetDurationSlice(name string) ([]time.Duration, error) { + val, err := f.getFlagType(name, "durationSlice", durationSliceConv) + if err != nil { + return []time.Duration{}, err + } + return val.([]time.Duration), nil +} + +// DurationSliceVar defines a durationSlice flag with specified name, default value, and usage string. +// The argument p points to a []time.Duration variable in which to store the value of the flag. +func (f *FlagSet) DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) { + f.VarP(newDurationSliceValue(value, p), name, "", usage) +} + +// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) { + f.VarP(newDurationSliceValue(value, p), name, shorthand, usage) +} + +// DurationSliceVar defines a duration[] flag with specified name, default value, and usage string. +// The argument p points to a duration[] variable in which to store the value of the flag. +func DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) { + CommandLine.VarP(newDurationSliceValue(value, p), name, "", usage) +} + +// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash. +func DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) { + CommandLine.VarP(newDurationSliceValue(value, p), name, shorthand, usage) +} + +// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a []time.Duration variable that stores the value of the flag. +func (f *FlagSet) DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration { + p := []time.Duration{} + f.DurationSliceVarP(&p, name, "", value, usage) + return &p +} + +// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration { + p := []time.Duration{} + f.DurationSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a []time.Duration variable that stores the value of the flag. 
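A short sketch of the new duration-slice flag in use; the flag name "timeouts" and its default are illustrative.

package main

import (
	"fmt"
	"time"

	flag "github.com/spf13/pflag"
)

func main() {
	fs := flag.NewFlagSet("example", flag.ContinueOnError)
	// --timeouts accepts a comma-separated list of Go durations.
	timeouts := fs.DurationSlice("timeouts", []time.Duration{time.Second}, "retry timeouts")

	if err := fs.Parse([]string{"--timeouts=500ms,2s,1m"}); err != nil {
		panic(err)
	}
	fmt.Println(*timeouts) // [500ms 2s 1m0s]
}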
+func DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration { + return CommandLine.DurationSliceP(name, "", value, usage) +} + +// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash. +func DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration { + return CommandLine.DurationSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/duration_slice_test.go b/vendor/github.com/spf13/pflag/duration_slice_test.go new file mode 100644 index 0000000..489b012 --- /dev/null +++ b/vendor/github.com/spf13/pflag/duration_slice_test.go @@ -0,0 +1,165 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code ds governed by a BSD-style +// license that can be found in the LICENSE file. + +package pflag + +import ( + "fmt" + "strings" + "testing" + "time" +) + +func setUpDSFlagSet(dsp *[]time.Duration) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.DurationSliceVar(dsp, "ds", []time.Duration{}, "Command separated list!") + return f +} + +func setUpDSFlagSetWithDefault(dsp *[]time.Duration) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.DurationSliceVar(dsp, "ds", []time.Duration{0, 1}, "Command separated list!") + return f +} + +func TestEmptyDS(t *testing.T) { + var ds []time.Duration + f := setUpDSFlagSet(&ds) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getDS, err := f.GetDurationSlice("ds") + if err != nil { + t.Fatal("got an error from GetDurationSlice():", err) + } + if len(getDS) != 0 { + t.Fatalf("got ds %v with len=%d but expected length=0", getDS, len(getDS)) + } +} + +func TestDS(t *testing.T) { + var ds []time.Duration + f := setUpDSFlagSet(&ds) + + vals := []string{"1ns", "2ms", "3m", "4h"} + arg := fmt.Sprintf("--ds=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range ds { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %s but got: %d", i, vals[i], v) + } + } + getDS, err := f.GetDurationSlice("ds") + if err != nil { + t.Fatalf("got error: %v", err) + } + for i, v := range getDS { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %s but got: %d from GetDurationSlice", i, vals[i], v) + } + } +} + +func TestDSDefault(t *testing.T) { + var ds []time.Duration + f := setUpDSFlagSetWithDefault(&ds) + + vals := []string{"0s", "1ns"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range ds { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %d but got: %d", i, d, v) + } + } + + getDS, err := f.GetDurationSlice("ds") + if err != nil { + t.Fatal("got an error from GetDurationSlice():", err) + } + for i, v := range getDS { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatal("got an error from GetDurationSlice():", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %d from GetDurationSlice but got: %d", i, d, v) + } + } +} + +func TestDSWithDefault(t *testing.T) { + var ds []time.Duration + f := setUpDSFlagSetWithDefault(&ds) + + vals := []string{"1ns", "2ns"} + arg := fmt.Sprintf("--ds=%s", strings.Join(vals, 
",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range ds { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %d but got: %d", i, d, v) + } + } + + getDS, err := f.GetDurationSlice("ds") + if err != nil { + t.Fatal("got an error from GetDurationSlice():", err) + } + for i, v := range getDS { + d, err := time.ParseDuration(vals[i]) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected ds[%d] to be %d from GetDurationSlice but got: %d", i, d, v) + } + } +} + +func TestDSCalledTwice(t *testing.T) { + var ds []time.Duration + f := setUpDSFlagSet(&ds) + + in := []string{"1ns,2ns", "3ns"} + expected := []time.Duration{1, 2, 3} + argfmt := "--ds=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) + err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range ds { + if expected[i] != v { + t.Fatalf("expected ds[%d] to be %d but got: %d", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go index 6f1fc30..9beeda8 100644 --- a/vendor/github.com/spf13/pflag/flag.go +++ b/vendor/github.com/spf13/pflag/flag.go @@ -101,6 +101,7 @@ package pflag import ( "bytes" "errors" + goflag "flag" "fmt" "io" "os" @@ -123,6 +124,12 @@ const ( PanicOnError ) +// ParseErrorsWhitelist defines the parsing errors that can be ignored +type ParseErrorsWhitelist struct { + // UnknownFlags will ignore unknown flags errors and continue parsing rest of the flags + UnknownFlags bool +} + // NormalizedName is a flag name that has been normalized according to rules // for the FlagSet (e.g. making '-' and '_' equivalent). type NormalizedName string @@ -138,6 +145,9 @@ type FlagSet struct { // help/usage messages. SortFlags bool + // ParseErrorsWhitelist is used to configure a whitelist of errors + ParseErrorsWhitelist ParseErrorsWhitelist + name string parsed bool actual map[NormalizedName]*Flag @@ -153,6 +163,8 @@ type FlagSet struct { output io.Writer // nil means stderr; use out() accessor interspersed bool // allow interspersed option/non-option args normalizeNameFunc func(f *FlagSet, name string) NormalizedName + + addedGoFlagSets []*goflag.FlagSet } // A Flag represents the state of a flag. @@ -202,12 +214,18 @@ func sortFlags(flags map[NormalizedName]*Flag) []*Flag { func (f *FlagSet) SetNormalizeFunc(n func(f *FlagSet, name string) NormalizedName) { f.normalizeNameFunc = n f.sortedFormal = f.sortedFormal[:0] - for k, v := range f.orderedFormal { - delete(f.formal, NormalizedName(v.Name)) - nname := f.normalizeFlagName(v.Name) - v.Name = string(nname) - f.formal[nname] = v - f.orderedFormal[k] = v + for fname, flag := range f.formal { + nname := f.normalizeFlagName(flag.Name) + if fname == nname { + continue + } + flag.Name = string(nname) + delete(f.formal, fname) + f.formal[nname] = flag + if _, set := f.actual[fname]; set { + delete(f.actual, fname) + f.actual[nname] = flag + } } } @@ -261,16 +279,16 @@ func (f *FlagSet) VisitAll(fn func(*Flag)) { } } -// HasFlags returns a bool to indicate if the FlagSet has any flags definied. +// HasFlags returns a bool to indicate if the FlagSet has any flags defined. 
func (f *FlagSet) HasFlags() bool { return len(f.formal) > 0 } // HasAvailableFlags returns a bool to indicate if the FlagSet has any flags -// definied that are not hidden or deprecated. +// that are not hidden. func (f *FlagSet) HasAvailableFlags() bool { for _, flag := range f.formal { - if !flag.Hidden && len(flag.Deprecated) == 0 { + if !flag.Hidden { return true } } @@ -380,6 +398,7 @@ func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { return fmt.Errorf("deprecated message for flag %q must be set", name) } flag.Deprecated = usageMessage + flag.Hidden = true return nil } @@ -440,13 +459,15 @@ func (f *FlagSet) Set(name, value string) error { return fmt.Errorf("invalid argument %q for %q flag: %v", value, flagName, err) } - if f.actual == nil { - f.actual = make(map[NormalizedName]*Flag) - } - f.actual[normalName] = flag - f.orderedActual = append(f.orderedActual, flag) + if !flag.Changed { + if f.actual == nil { + f.actual = make(map[NormalizedName]*Flag) + } + f.actual[normalName] = flag + f.orderedActual = append(f.orderedActual, flag) - flag.Changed = true + flag.Changed = true + } if flag.Deprecated != "" { fmt.Fprintf(f.out(), "Flag --%s has been deprecated, %s\n", flag.Name, flag.Deprecated) @@ -556,6 +577,14 @@ func UnquoteUsage(flag *Flag) (name string, usage string) { name = "int" case "uint64": name = "uint" + case "stringSlice": + name = "strings" + case "intSlice": + name = "ints" + case "uintSlice": + name = "uints" + case "boolSlice": + name = "bools" } return @@ -570,11 +599,14 @@ func wrapN(i, slop int, s string) (string, string) { return s, "" } - w := strings.LastIndexAny(s[:i], " \t") + w := strings.LastIndexAny(s[:i], " \t\n") if w <= 0 { return s, "" } - + nlPos := strings.LastIndex(s[:i], "\n") + if nlPos > 0 && nlPos < w { + return s[:nlPos], s[nlPos+1:] + } return s[:w], s[w+1:] } @@ -583,7 +615,7 @@ func wrapN(i, slop int, s string) (string, string) { // caller). Pass `w` == 0 to do no wrapping func wrap(i, w int, s string) string { if w == 0 { - return s + return strings.Replace(s, "\n", "\n"+strings.Repeat(" ", i), -1) } // space between indent i and end of line width w into which @@ -601,7 +633,7 @@ func wrap(i, w int, s string) string { } // If still not enough space then don't even try to wrap. 
if wrap < 24 { - return s + return strings.Replace(s, "\n", r, -1) } // Try to avoid short orphan words on the final line, by @@ -613,14 +645,14 @@ func wrap(i, w int, s string) string { // Handle first line, which is indented by the caller (or the // special case above) l, s = wrapN(wrap, slop, s) - r = r + l + r = r + strings.Replace(l, "\n", "\n"+strings.Repeat(" ", i), -1) // Now wrap the rest for s != "" { var t string t, s = wrapN(wrap, slop, s) - r = r + "\n" + strings.Repeat(" ", i) + t + r = r + "\n" + strings.Repeat(" ", i) + strings.Replace(t, "\n", "\n"+strings.Repeat(" ", i), -1) } return r @@ -637,7 +669,7 @@ func (f *FlagSet) FlagUsagesWrapped(cols int) string { maxlen := 0 f.VisitAll(func(flag *Flag) { - if flag.Deprecated != "" || flag.Hidden { + if flag.Hidden { return } @@ -660,6 +692,10 @@ func (f *FlagSet) FlagUsagesWrapped(cols int) string { if flag.NoOptDefVal != "true" { line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) } + case "count": + if flag.NoOptDefVal != "+1" { + line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) + } default: line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) } @@ -680,6 +716,9 @@ func (f *FlagSet) FlagUsagesWrapped(cols int) string { line += fmt.Sprintf(" (default %s)", flag.DefValue) } } + if len(flag.Deprecated) != 0 { + line += fmt.Sprintf(" (DEPRECATED: %s)", flag.Deprecated) + } lines = append(lines, line) }) @@ -857,8 +896,10 @@ func VarP(value Value, name, shorthand, usage string) { // returns the error. func (f *FlagSet) failf(format string, a ...interface{}) error { err := fmt.Errorf(format, a...) - fmt.Fprintln(f.out(), err) - f.usage() + if f.errorHandling != ContinueOnError { + fmt.Fprintln(f.out(), err) + f.usage() + } return err } @@ -874,6 +915,28 @@ func (f *FlagSet) usage() { } } +//--unknown (args will be empty) +//--unknown --next-flag ... (args will be --next-flag ...) +//--unknown arg ... (args will be arg ...) +func stripUnknownFlagValue(args []string) []string { + if len(args) == 0 { + //--unknown + return args + } + + first := args[0] + if len(first) > 0 && first[0] == '-' { + //--unknown --next-flag ... + return args + } + + //--unknown arg ... (args will be arg ...) + if len(args) > 1 { + return args[1:] + } + return nil +} + func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []string, err error) { a = args name := s[2:] @@ -885,13 +948,24 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin split := strings.SplitN(name, "=", 2) name = split[0] flag, exists := f.formal[f.normalizeFlagName(name)] + if !exists { - if name == "help" { // special case for nice help message. + switch { + case name == "help": f.usage() return a, ErrHelp + case f.ParseErrorsWhitelist.UnknownFlags: + // --unknown=unknownval arg ... 
+ // we do not want to lose arg in this case + if len(split) >= 2 { + return a, nil + } + + return stripUnknownFlagValue(a), nil + default: + err = f.failf("unknown flag: --%s", name) + return } - err = f.failf("unknown flag: --%s", name) - return } var value string @@ -912,27 +986,43 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin } err = fn(flag, value) + if err != nil { + f.failf(err.Error()) + } return } func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parseFunc) (outShorts string, outArgs []string, err error) { + outArgs = args + if strings.HasPrefix(shorthands, "test.") { return } - outArgs = args outShorts = shorthands[1:] c := shorthands[0] flag, exists := f.shorthands[c] if !exists { - if c == 'h' { // special case for nice help message. + switch { + case c == 'h': f.usage() err = ErrHelp return + case f.ParseErrorsWhitelist.UnknownFlags: + // '-f=arg arg ...' + // we do not want to lose arg in this case + if len(shorthands) > 2 && shorthands[1] == '=' { + outShorts = "" + return + } + + outArgs = stripUnknownFlagValue(outArgs) + return + default: + err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands) + return } - err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands) - return } var value string @@ -962,6 +1052,9 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse } err = fn(flag, value) + if err != nil { + f.failf(err.Error()) + } return } @@ -1016,6 +1109,11 @@ func (f *FlagSet) parseArgs(args []string, fn parseFunc) (err error) { // are defined and before flags are accessed by the program. // The return value will be ErrHelp if -help was set but not defined. func (f *FlagSet) Parse(arguments []string) error { + if f.addedGoFlagSets != nil { + for _, goFlagSet := range f.addedGoFlagSets { + goFlagSet.Parse(nil) + } + } f.parsed = true if len(arguments) < 0 { @@ -1034,6 +1132,7 @@ func (f *FlagSet) Parse(arguments []string) error { case ContinueOnError: return err case ExitOnError: + fmt.Println(err) os.Exit(2) case PanicOnError: panic(err) diff --git a/vendor/github.com/spf13/pflag/flag_test.go b/vendor/github.com/spf13/pflag/flag_test.go index c3def0f..7d02dbc 100644 --- a/vendor/github.com/spf13/pflag/flag_test.go +++ b/vendor/github.com/spf13/pflag/flag_test.go @@ -106,8 +106,8 @@ func TestUsage(t *testing.T) { if GetCommandLine().Parse([]string{"--x"}) == nil { t.Error("parse did not fail for unknown flag") } - if !called { - t.Error("did not call Usage for unknown flag") + if called { + t.Error("did call Usage while using ContinueOnError") } } @@ -168,6 +168,7 @@ func testParse(f *FlagSet, t *testing.T) { bool3Flag := f.Bool("bool3", false, "bool3 value") intFlag := f.Int("int", 0, "int value") int8Flag := f.Int8("int8", 0, "int value") + int16Flag := f.Int16("int16", 0, "int value") int32Flag := f.Int32("int32", 0, "int value") int64Flag := f.Int64("int64", 0, "int64 value") uintFlag := f.Uint("uint", 0, "uint value") @@ -192,6 +193,7 @@ func testParse(f *FlagSet, t *testing.T) { "--bool3=false", "--int=22", "--int8=-8", + "--int16=-16", "--int32=-32", "--int64=0x23", "--uint", "24", @@ -236,9 +238,15 @@ func testParse(f *FlagSet, t *testing.T) { if *int8Flag != -8 { t.Error("int8 flag should be 0x23, is ", *int8Flag) } + if *int16Flag != -16 { + t.Error("int16 flag should be -16, is ", *int16Flag) + } if v, err := f.GetInt8("int8"); err != nil || v != *int8Flag { t.Error("GetInt8 does not work.") } + if v, err := f.GetInt16("int16"); err != nil || v != 
*int16Flag { + t.Error("GetInt16 does not work.") + } if *int32Flag != -32 { t.Error("int32 flag should be 0x23, is ", *int32Flag) } @@ -381,7 +389,83 @@ func testParseAll(f *FlagSet, t *testing.T) { } if !reflect.DeepEqual(got, want) { t.Errorf("f.ParseAll() fail to restore the args") - t.Errorf("Got: %v", got) + t.Errorf("Got: %v", got) + t.Errorf("Want: %v", want) + } +} + +func testParseWithUnknownFlags(f *FlagSet, t *testing.T) { + if f.Parsed() { + t.Error("f.Parse() = true before Parse") + } + f.ParseErrorsWhitelist.UnknownFlags = true + + f.BoolP("boola", "a", false, "bool value") + f.BoolP("boolb", "b", false, "bool2 value") + f.BoolP("boolc", "c", false, "bool3 value") + f.BoolP("boold", "d", false, "bool4 value") + f.BoolP("boole", "e", false, "bool4 value") + f.StringP("stringa", "s", "0", "string value") + f.StringP("stringz", "z", "0", "string value") + f.StringP("stringx", "x", "0", "string value") + f.StringP("stringy", "y", "0", "string value") + f.StringP("stringo", "o", "0", "string value") + f.Lookup("stringx").NoOptDefVal = "1" + args := []string{ + "-ab", + "-cs=xx", + "--stringz=something", + "--unknown1", + "unknown1Value", + "-d=true", + "-x", + "--unknown2=unknown2Value", + "-u=unknown3Value", + "-p", + "unknown4Value", + "-q", //another unknown with bool value + "-y", + "ee", + "--unknown7=unknown7value", + "--stringo=ovalue", + "--unknown8=unknown8value", + "--boole", + "--unknown6", + "", + "-uuuuu", + "", + "--unknown10", + "--unknown11", + } + want := []string{ + "boola", "true", + "boolb", "true", + "boolc", "true", + "stringa", "xx", + "stringz", "something", + "boold", "true", + "stringx", "1", + "stringy", "ee", + "stringo", "ovalue", + "boole", "true", + } + got := []string{} + store := func(flag *Flag, value string) error { + got = append(got, flag.Name) + if len(value) > 0 { + got = append(got, value) + } + return nil + } + if err := f.ParseAll(args, store); err != nil { + t.Errorf("expected no error, got %s", err) + } + if !f.Parsed() { + t.Errorf("f.Parse() = false after Parse") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("f.ParseAll() fail to restore the args") + t.Errorf("Got: %v", got) t.Errorf("Want: %v", want) } } @@ -492,6 +576,11 @@ func TestParseAll(t *testing.T) { testParseAll(GetCommandLine(), t) } +func TestIgnoreUnknownFlags(t *testing.T) { + ResetForTesting(func() { t.Error("bad parse") }) + testParseWithUnknownFlags(GetCommandLine(), t) +} + func TestFlagSetParse(t *testing.T) { testParse(NewFlagSet("test", ContinueOnError), t) } @@ -604,7 +693,6 @@ func aliasAndWordSepFlagNames(f *FlagSet, name string) NormalizedName { switch name { case oldName: name = newName - break } return NormalizedName(name) @@ -658,6 +746,70 @@ func TestNormalizationFuncShouldChangeFlagName(t *testing.T) { } } +// Related to https://github.com/spf13/cobra/issues/521. 
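// (Editor's aside, illustrative only; not part of the upstream patch.) The normalization tests
// below exercise SetNormalizeFunc, which routes every flag name through a caller-supplied
// function so that alternate spellings resolve to the same flag. A minimal sketch:
//
//	fs := NewFlagSet("demo", ContinueOnError)
//	fs.Bool("valid_flag", false, "bool value")
//	fs.SetNormalizeFunc(func(f *FlagSet, name string) NormalizedName {
//		return NormalizedName(strings.Replace(name, "_", ".", -1))
//	})
//	_ = fs.Parse([]string{"--valid.flag"}) // sets the flag that was declared as "valid_flag"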
+func TestNormalizationSharedFlags(t *testing.T) { + f := NewFlagSet("set f", ContinueOnError) + g := NewFlagSet("set g", ContinueOnError) + nfunc := wordSepNormalizeFunc + testName := "valid_flag" + normName := nfunc(nil, testName) + if testName == string(normName) { + t.Error("TestNormalizationSharedFlags meaningless: the original and normalized flag names are identical:", testName) + } + + f.Bool(testName, false, "bool value") + g.AddFlagSet(f) + + f.SetNormalizeFunc(nfunc) + g.SetNormalizeFunc(nfunc) + + if len(f.formal) != 1 { + t.Error("Normalizing flags should not result in duplications in the flag set:", f.formal) + } + if f.orderedFormal[0].Name != string(normName) { + t.Error("Flag name not normalized") + } + for k := range f.formal { + if k != "valid.flag" { + t.Errorf("The key in the flag map should have been normalized: wanted \"%s\", got \"%s\" instead", normName, k) + } + } + + if !reflect.DeepEqual(f.formal, g.formal) || !reflect.DeepEqual(f.orderedFormal, g.orderedFormal) { + t.Error("Two flag sets sharing the same flags should stay consistent after being normalized. Original set:", f.formal, "Duplicate set:", g.formal) + } +} + +func TestNormalizationSetFlags(t *testing.T) { + f := NewFlagSet("normalized", ContinueOnError) + nfunc := wordSepNormalizeFunc + testName := "valid_flag" + normName := nfunc(nil, testName) + if testName == string(normName) { + t.Error("TestNormalizationSetFlags meaningless: the original and normalized flag names are identical:", testName) + } + + f.Bool(testName, false, "bool value") + f.Set(testName, "true") + f.SetNormalizeFunc(nfunc) + + if len(f.formal) != 1 { + t.Error("Normalizing flags should not result in duplications in the flag set:", f.formal) + } + if f.orderedFormal[0].Name != string(normName) { + t.Error("Flag name not normalized") + } + for k := range f.formal { + if k != "valid.flag" { + t.Errorf("The key in the flag map should have been normalized: wanted \"%s\", got \"%s\" instead", normName, k) + } + } + + if !reflect.DeepEqual(f.formal, f.actual) { + t.Error("The map of set flags should get normalized. Formal:", f.formal, "Actual:", f.actual) + } +} + // Declare a user-defined flag type. 
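// (Editor's aside, illustrative only; not part of the upstream patch.) Any type implementing the
// Value interface — String() string, Set(string) error and Type() string — can be registered as a
// flag with Var/VarP. Assuming the flagVar test type declared below appends each value in Set(),
// usage would look roughly like:
//
//	var v flagVar
//	fs := NewFlagSet("demo", ContinueOnError)
//	fs.Var(&v, "item", "repeatable item")
//	_ = fs.Parse([]string{"--item=a", "--item=b"}) // v == flagVar{"a", "b"}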
type flagVar []string @@ -819,10 +971,14 @@ func TestTermination(t *testing.T) { } } -func TestDeprecatedFlagInDocs(t *testing.T) { +func getDeprecatedFlagSet() *FlagSet { f := NewFlagSet("bob", ContinueOnError) f.Bool("badflag", true, "always true") f.MarkDeprecated("badflag", "use --good-flag instead") + return f +} +func TestDeprecatedFlagInDocs(t *testing.T) { + f := getDeprecatedFlagSet() out := new(bytes.Buffer) f.SetOutput(out) @@ -833,6 +989,27 @@ func TestDeprecatedFlagInDocs(t *testing.T) { } } +func TestUnHiddenDeprecatedFlagInDocs(t *testing.T) { + f := getDeprecatedFlagSet() + flg := f.Lookup("badflag") + if flg == nil { + t.Fatalf("Unable to lookup 'bob' in TestUnHiddenDeprecatedFlagInDocs") + } + flg.Hidden = false + + out := new(bytes.Buffer) + f.SetOutput(out) + f.PrintDefaults() + + defaults := out.String() + if !strings.Contains(defaults, "badflag") { + t.Errorf("Did not find deprecated flag in usage!") + } + if !strings.Contains(defaults, "use --good-flag instead") { + t.Errorf("Did not find 'use --good-flag instead' in defaults") + } +} + func TestDeprecatedFlagShorthandInDocs(t *testing.T) { f := NewFlagSet("bob", ContinueOnError) name := "noshorthandflag" @@ -978,16 +1155,17 @@ const defaultOutput = ` --A for bootstrapping, allo --IP ip IP address with no default --IPMask ipMask Netmask address with no default --IPNet ipNet IP network with no default - --Ints intSlice int slice with zero default + --Ints ints int slice with zero default --N int a non-zero int (default 27) --ND1 string[="bar"] a string with NoOptDefVal (default "foo") --ND2 num[=4321] a num with NoOptDefVal (default 1234) --StringArray stringArray string array with zero default - --StringSlice stringSlice string slice with zero default + --StringSlice strings string slice with zero default --Z int an int that defaults to zero --custom custom custom Value implementation --customP custom a VarP with default (default 10) --maxT timeout set timeout for dial + -v, --verbose count verbosity ` // Custom value that satisfies the Value interface. @@ -1028,6 +1206,7 @@ func TestPrintDefaults(t *testing.T) { fs.ShorthandLookup("E").NoOptDefVal = "1234" fs.StringSlice("StringSlice", []string{}, "string slice with zero default") fs.StringArray("StringArray", []string{}, "string array with zero default") + fs.CountP("verbose", "v", "verbosity") var cv customValue fs.Var(&cv, "custom", "custom Value implementation") diff --git a/vendor/github.com/spf13/pflag/float32_slice.go b/vendor/github.com/spf13/pflag/float32_slice.go new file mode 100644 index 0000000..a80848a --- /dev/null +++ b/vendor/github.com/spf13/pflag/float32_slice.go @@ -0,0 +1,132 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- float32Slice Value +type float32SliceValue struct { + value *[]float32 + changed bool +} + +func newFloat32SliceValue(val []float32, p *[]float32) *float32SliceValue { + isv := new(float32SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *float32SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]float32, len(ss)) + for i, d := range ss { + var err error + var temp64 float64 + temp64, err = strconv.ParseFloat(d, 32) + if err != nil { + return err + } + out[i] = float32(temp64) + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) 
+ } + s.changed = true + return nil +} + +func (s *float32SliceValue) Type() string { + return "float32Slice" +} + +func (s *float32SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%f", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func float32SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []float32{}, nil + } + ss := strings.Split(val, ",") + out := make([]float32, len(ss)) + for i, d := range ss { + var err error + var temp64 float64 + temp64, err = strconv.ParseFloat(d, 32) + if err != nil { + return nil, err + } + out[i] = float32(temp64) + + } + return out, nil +} + +// GetFloat32Slice return the []float32 value of a flag with the given name +func (f *FlagSet) GetFloat32Slice(name string) ([]float32, error) { + val, err := f.getFlagType(name, "float32Slice", float32SliceConv) + if err != nil { + return []float32{}, err + } + return val.([]float32), nil +} + +// Float32SliceVar defines a float32Slice flag with specified name, default value, and usage string. +// The argument p points to a []float32 variable in which to store the value of the flag. +func (f *FlagSet) Float32SliceVar(p *[]float32, name string, value []float32, usage string) { + f.VarP(newFloat32SliceValue(value, p), name, "", usage) +} + +// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) { + f.VarP(newFloat32SliceValue(value, p), name, shorthand, usage) +} + +// Float32SliceVar defines a float32[] flag with specified name, default value, and usage string. +// The argument p points to a float32[] variable in which to store the value of the flag. +func Float32SliceVar(p *[]float32, name string, value []float32, usage string) { + CommandLine.VarP(newFloat32SliceValue(value, p), name, "", usage) +} + +// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) { + CommandLine.VarP(newFloat32SliceValue(value, p), name, shorthand, usage) +} + +// Float32Slice defines a []float32 flag with specified name, default value, and usage string. +// The return value is the address of a []float32 variable that stores the value of the flag. +func (f *FlagSet) Float32Slice(name string, value []float32, usage string) *[]float32 { + p := []float32{} + f.Float32SliceVarP(&p, name, "", value, usage) + return &p +} + +// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 { + p := []float32{} + f.Float32SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Float32Slice defines a []float32 flag with specified name, default value, and usage string. +// The return value is the address of a []float32 variable that stores the value of the flag. +func Float32Slice(name string, value []float32, usage string) *[]float32 { + return CommandLine.Float32SliceP(name, "", value, usage) +} + +// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash. 
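// Editor's note: an illustrative sketch, not part of the upstream patch, showing how the
// Float32Slice flag defined in this file is typically consumed (the flag name "ratios" is invented).
func exampleFloat32SliceUsage() {
	fs := NewFlagSet("example", ContinueOnError)
	ratios := fs.Float32Slice("ratios", []float32{}, "comma-separated float32 values")
	_ = fs.Parse([]string{"--ratios=0.25,0.5", "--ratios=1.0"})
	fmt.Println(*ratios) // [0.25 0.5 1] — values from repeated occurrences are appended
	vals, _ := fs.GetFloat32Slice("ratios") // typed accessor for the same flag
	_ = vals
}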
+func Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 { + return CommandLine.Float32SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/float32_slice_test.go b/vendor/github.com/spf13/pflag/float32_slice_test.go new file mode 100644 index 0000000..2429c8f --- /dev/null +++ b/vendor/github.com/spf13/pflag/float32_slice_test.go @@ -0,0 +1,177 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package pflag + +import ( + "fmt" + "strconv" + "strings" + "testing" +) + +func setUpF32SFlagSet(f32sp *[]float32) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Float32SliceVar(f32sp, "f32s", []float32{}, "Command separated list!") + return f +} + +func setUpF32SFlagSetWithDefault(f32sp *[]float32) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Float32SliceVar(f32sp, "f32s", []float32{0.0, 1.0}, "Command separated list!") + return f +} + +func TestEmptyF32S(t *testing.T) { + var f32s []float32 + f := setUpF32SFlagSet(&f32s) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getF32S, err := f.GetFloat32Slice("f32s") + if err != nil { + t.Fatal("got an error from GetFloat32Slice():", err) + } + if len(getF32S) != 0 { + t.Fatalf("got f32s %v with len=%d but expected length=0", getF32S, len(getF32S)) + } +} + +func TestF32S(t *testing.T) { + var f32s []float32 + f := setUpF32SFlagSet(&f32s) + + vals := []string{"1.0", "2.0", "4.0", "3.0"} + arg := fmt.Sprintf("--f32s=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f32s { + d64, err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %s but got: %f", i, vals[i], v) + } + } + getF32S, err := f.GetFloat32Slice("f32s") + if err != nil { + t.Fatalf("got error: %v", err) + } + for i, v := range getF32S { + d64, err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %s but got: %f from GetFloat32Slice", i, vals[i], v) + } + } +} + +func TestF32SDefault(t *testing.T) { + var f32s []float32 + f := setUpF32SFlagSetWithDefault(&f32s) + + vals := []string{"0.0", "1.0"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f32s { + d64, err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %f but got: %f", i, d, v) + } + } + + getF32S, err := f.GetFloat32Slice("f32s") + if err != nil { + t.Fatal("got an error from GetFloat32Slice():", err) + } + for i, v := range getF32S { + d64, err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatal("got an error from GetFloat32Slice():", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %f from GetFloat32Slice but got: %f", i, d, v) + } + } +} + +func TestF32SWithDefault(t *testing.T) { + var f32s []float32 + f := setUpF32SFlagSetWithDefault(&f32s) + + vals := []string{"1.0", "2.0"} + arg := fmt.Sprintf("--f32s=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f32s { + d64, 
err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %f but got: %f", i, d, v) + } + } + + getF32S, err := f.GetFloat32Slice("f32s") + if err != nil { + t.Fatal("got an error from GetFloat32Slice():", err) + } + for i, v := range getF32S { + d64, err := strconv.ParseFloat(vals[i], 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + + d := float32(d64) + if d != v { + t.Fatalf("expected f32s[%d] to be %f from GetFloat32Slice but got: %f", i, d, v) + } + } +} + +func TestF32SCalledTwice(t *testing.T) { + var f32s []float32 + f := setUpF32SFlagSet(&f32s) + + in := []string{"1.0,2.0", "3.0"} + expected := []float32{1.0, 2.0, 3.0} + argfmt := "--f32s=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) + err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f32s { + if expected[i] != v { + t.Fatalf("expected f32s[%d] to be %f but got: %f", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/float64_slice.go b/vendor/github.com/spf13/pflag/float64_slice.go new file mode 100644 index 0000000..45d12b8 --- /dev/null +++ b/vendor/github.com/spf13/pflag/float64_slice.go @@ -0,0 +1,128 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- float64Slice Value +type float64SliceValue struct { + value *[]float64 + changed bool +} + +func newFloat64SliceValue(val []float64, p *[]float64) *float64SliceValue { + isv := new(float64SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *float64SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]float64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseFloat(d, 64) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *float64SliceValue) Type() string { + return "float64Slice" +} + +func (s *float64SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%f", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func float64SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []float64{}, nil + } + ss := strings.Split(val, ",") + out := make([]float64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseFloat(d, 64) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetFloat64Slice return the []float64 value of a flag with the given name +func (f *FlagSet) GetFloat64Slice(name string) ([]float64, error) { + val, err := f.getFlagType(name, "float64Slice", float64SliceConv) + if err != nil { + return []float64{}, err + } + return val.([]float64), nil +} + +// Float64SliceVar defines a float64Slice flag with specified name, default value, and usage string. +// The argument p points to a []float64 variable in which to store the value of the flag. +func (f *FlagSet) Float64SliceVar(p *[]float64, name string, value []float64, usage string) { + f.VarP(newFloat64SliceValue(value, p), name, "", usage) +} + +// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash. 
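// Editor's note: an illustrative sketch, not part of the upstream patch. Float64SliceVar binds the
// flag to an existing []float64 variable; the flag name "weights" is invented for the example.
func exampleFloat64SliceVarUsage() {
	var weights []float64
	fs := NewFlagSet("example", ContinueOnError)
	fs.Float64SliceVar(&weights, "weights", []float64{1.0}, "comma-separated float64 values")
	_ = fs.Parse([]string{"--weights=0.5,2.5"})
	fmt.Println(weights) // [0.5 2.5] — an explicit value replaces the default
}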
+func (f *FlagSet) Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) { + f.VarP(newFloat64SliceValue(value, p), name, shorthand, usage) +} + +// Float64SliceVar defines a float64[] flag with specified name, default value, and usage string. +// The argument p points to a float64[] variable in which to store the value of the flag. +func Float64SliceVar(p *[]float64, name string, value []float64, usage string) { + CommandLine.VarP(newFloat64SliceValue(value, p), name, "", usage) +} + +// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) { + CommandLine.VarP(newFloat64SliceValue(value, p), name, shorthand, usage) +} + +// Float64Slice defines a []float64 flag with specified name, default value, and usage string. +// The return value is the address of a []float64 variable that stores the value of the flag. +func (f *FlagSet) Float64Slice(name string, value []float64, usage string) *[]float64 { + p := []float64{} + f.Float64SliceVarP(&p, name, "", value, usage) + return &p +} + +// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 { + p := []float64{} + f.Float64SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Float64Slice defines a []float64 flag with specified name, default value, and usage string. +// The return value is the address of a []float64 variable that stores the value of the flag. +func Float64Slice(name string, value []float64, usage string) *[]float64 { + return CommandLine.Float64SliceP(name, "", value, usage) +} + +// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash. +func Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 { + return CommandLine.Float64SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/float64_slice_test.go b/vendor/github.com/spf13/pflag/float64_slice_test.go new file mode 100644 index 0000000..fe9ede4 --- /dev/null +++ b/vendor/github.com/spf13/pflag/float64_slice_test.go @@ -0,0 +1,165 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package pflag + +import ( + "fmt" + "strconv" + "strings" + "testing" +) + +func setUpF64SFlagSet(f64sp *[]float64) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Float64SliceVar(f64sp, "f64s", []float64{}, "Command separated list!") + return f +} + +func setUpF64SFlagSetWithDefault(f64sp *[]float64) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Float64SliceVar(f64sp, "f64s", []float64{0.0, 1.0}, "Command separated list!") + return f +} + +func TestEmptyF64S(t *testing.T) { + var f64s []float64 + f := setUpF64SFlagSet(&f64s) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getF64S, err := f.GetFloat64Slice("f64s") + if err != nil { + t.Fatal("got an error from GetFloat64Slice():", err) + } + if len(getF64S) != 0 { + t.Fatalf("got f64s %v with len=%d but expected length=0", getF64S, len(getF64S)) + } +} + +func TestF64S(t *testing.T) { + var f64s []float64 + f := setUpF64SFlagSet(&f64s) + + vals := []string{"1.0", "2.0", "4.0", "3.0"} + arg := fmt.Sprintf("--f64s=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f64s { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %s but got: %f", i, vals[i], v) + } + } + getF64S, err := f.GetFloat64Slice("f64s") + if err != nil { + t.Fatalf("got error: %v", err) + } + for i, v := range getF64S { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %s but got: %f from GetFloat64Slice", i, vals[i], v) + } + } +} + +func TestF64SDefault(t *testing.T) { + var f64s []float64 + f := setUpF64SFlagSetWithDefault(&f64s) + + vals := []string{"0.0", "1.0"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f64s { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %f but got: %f", i, d, v) + } + } + + getF64S, err := f.GetFloat64Slice("f64s") + if err != nil { + t.Fatal("got an error from GetFloat64Slice():", err) + } + for i, v := range getF64S { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatal("got an error from GetFloat64Slice():", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %f from GetFloat64Slice but got: %f", i, d, v) + } + } +} + +func TestF64SWithDefault(t *testing.T) { + var f64s []float64 + f := setUpF64SFlagSetWithDefault(&f64s) + + vals := []string{"1.0", "2.0"} + arg := fmt.Sprintf("--f64s=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f64s { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %f but got: %f", i, d, v) + } + } + + getF64S, err := f.GetFloat64Slice("f64s") + if err != nil { + t.Fatal("got an error from GetFloat64Slice():", err) + } + for i, v := range getF64S { + d, err := strconv.ParseFloat(vals[i], 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected f64s[%d] to be %f from GetFloat64Slice but got: %f", i, d, v) + } + } +} + +func TestF64SCalledTwice(t *testing.T) { + var f64s []float64 + f := setUpF64SFlagSet(&f64s) + + in := []string{"1.0,2.0", 
"3.0"} + expected := []float64{1.0, 2.0, 3.0} + argfmt := "--f64s=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) + err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range f64s { + if expected[i] != v { + t.Fatalf("expected f64s[%d] to be %f but got: %f", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/golangflag.go b/vendor/github.com/spf13/pflag/golangflag.go index c4f47eb..d3dd72b 100644 --- a/vendor/github.com/spf13/pflag/golangflag.go +++ b/vendor/github.com/spf13/pflag/golangflag.go @@ -98,4 +98,8 @@ func (f *FlagSet) AddGoFlagSet(newSet *goflag.FlagSet) { newSet.VisitAll(func(goflag *goflag.Flag) { f.AddGoFlag(goflag) }) + if f.addedGoFlagSets == nil { + f.addedGoFlagSets = make([]*goflag.FlagSet, 0) + } + f.addedGoFlagSets = append(f.addedGoFlagSets, newSet) } diff --git a/vendor/github.com/spf13/pflag/golangflag_test.go b/vendor/github.com/spf13/pflag/golangflag_test.go index 77e2d7d..5bd831b 100644 --- a/vendor/github.com/spf13/pflag/golangflag_test.go +++ b/vendor/github.com/spf13/pflag/golangflag_test.go @@ -36,4 +36,12 @@ func TestGoflags(t *testing.T) { if getBool != true { t.Fatalf("expected getBool=true but got getBool=%v", getBool) } + if !f.Parsed() { + t.Fatal("f.Parsed() return false after f.Parse() called") + } + + // in fact it is useless. because `go test` called flag.Parse() + if !goflag.CommandLine.Parsed() { + t.Fatal("goflag.CommandLine.Parsed() return false after f.Parse() called") + } } diff --git a/vendor/github.com/spf13/pflag/int16.go b/vendor/github.com/spf13/pflag/int16.go new file mode 100644 index 0000000..f1a01d0 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int16.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- int16 Value +type int16Value int16 + +func newInt16Value(val int16, p *int16) *int16Value { + *p = val + return (*int16Value)(p) +} + +func (i *int16Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 16) + *i = int16Value(v) + return err +} + +func (i *int16Value) Type() string { + return "int16" +} + +func (i *int16Value) String() string { return strconv.FormatInt(int64(*i), 10) } + +func int16Conv(sval string) (interface{}, error) { + v, err := strconv.ParseInt(sval, 0, 16) + if err != nil { + return 0, err + } + return int16(v), nil +} + +// GetInt16 returns the int16 value of a flag with the given name +func (f *FlagSet) GetInt16(name string) (int16, error) { + val, err := f.getFlagType(name, "int16", int16Conv) + if err != nil { + return 0, err + } + return val.(int16), nil +} + +// Int16Var defines an int16 flag with specified name, default value, and usage string. +// The argument p points to an int16 variable in which to store the value of the flag. +func (f *FlagSet) Int16Var(p *int16, name string, value int16, usage string) { + f.VarP(newInt16Value(value, p), name, "", usage) +} + +// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int16VarP(p *int16, name, shorthand string, value int16, usage string) { + f.VarP(newInt16Value(value, p), name, shorthand, usage) +} + +// Int16Var defines an int16 flag with specified name, default value, and usage string. +// The argument p points to an int16 variable in which to store the value of the flag. 
+func Int16Var(p *int16, name string, value int16, usage string) { + CommandLine.VarP(newInt16Value(value, p), name, "", usage) +} + +// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash. +func Int16VarP(p *int16, name, shorthand string, value int16, usage string) { + CommandLine.VarP(newInt16Value(value, p), name, shorthand, usage) +} + +// Int16 defines an int16 flag with specified name, default value, and usage string. +// The return value is the address of an int16 variable that stores the value of the flag. +func (f *FlagSet) Int16(name string, value int16, usage string) *int16 { + p := new(int16) + f.Int16VarP(p, name, "", value, usage) + return p +} + +// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int16P(name, shorthand string, value int16, usage string) *int16 { + p := new(int16) + f.Int16VarP(p, name, shorthand, value, usage) + return p +} + +// Int16 defines an int16 flag with specified name, default value, and usage string. +// The return value is the address of an int16 variable that stores the value of the flag. +func Int16(name string, value int16, usage string) *int16 { + return CommandLine.Int16P(name, "", value, usage) +} + +// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash. +func Int16P(name, shorthand string, value int16, usage string) *int16 { + return CommandLine.Int16P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int32_slice.go b/vendor/github.com/spf13/pflag/int32_slice.go new file mode 100644 index 0000000..6ccedcc --- /dev/null +++ b/vendor/github.com/spf13/pflag/int32_slice.go @@ -0,0 +1,132 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- int32Slice Value +type int32SliceValue struct { + value *[]int32 + changed bool +} + +func newInt32SliceValue(val []int32, p *[]int32) *int32SliceValue { + isv := new(int32SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *int32SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]int32, len(ss)) + for i, d := range ss { + var err error + var temp64 int64 + temp64, err = strconv.ParseInt(d, 0, 32) + if err != nil { + return err + } + out[i] = int32(temp64) + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) 
+ } + s.changed = true + return nil +} + +func (s *int32SliceValue) Type() string { + return "int32Slice" +} + +func (s *int32SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func int32SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []int32{}, nil + } + ss := strings.Split(val, ",") + out := make([]int32, len(ss)) + for i, d := range ss { + var err error + var temp64 int64 + temp64, err = strconv.ParseInt(d, 0, 32) + if err != nil { + return nil, err + } + out[i] = int32(temp64) + + } + return out, nil +} + +// GetInt32Slice return the []int32 value of a flag with the given name +func (f *FlagSet) GetInt32Slice(name string) ([]int32, error) { + val, err := f.getFlagType(name, "int32Slice", int32SliceConv) + if err != nil { + return []int32{}, err + } + return val.([]int32), nil +} + +// Int32SliceVar defines a int32Slice flag with specified name, default value, and usage string. +// The argument p points to a []int32 variable in which to store the value of the flag. +func (f *FlagSet) Int32SliceVar(p *[]int32, name string, value []int32, usage string) { + f.VarP(newInt32SliceValue(value, p), name, "", usage) +} + +// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) { + f.VarP(newInt32SliceValue(value, p), name, shorthand, usage) +} + +// Int32SliceVar defines a int32[] flag with specified name, default value, and usage string. +// The argument p points to a int32[] variable in which to store the value of the flag. +func Int32SliceVar(p *[]int32, name string, value []int32, usage string) { + CommandLine.VarP(newInt32SliceValue(value, p), name, "", usage) +} + +// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) { + CommandLine.VarP(newInt32SliceValue(value, p), name, shorthand, usage) +} + +// Int32Slice defines a []int32 flag with specified name, default value, and usage string. +// The return value is the address of a []int32 variable that stores the value of the flag. +func (f *FlagSet) Int32Slice(name string, value []int32, usage string) *[]int32 { + p := []int32{} + f.Int32SliceVarP(&p, name, "", value, usage) + return &p +} + +// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 { + p := []int32{} + f.Int32SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Int32Slice defines a []int32 flag with specified name, default value, and usage string. +// The return value is the address of a []int32 variable that stores the value of the flag. +func Int32Slice(name string, value []int32, usage string) *[]int32 { + return CommandLine.Int32SliceP(name, "", value, usage) +} + +// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash. 
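// Editor's note: an illustrative sketch, not part of the upstream patch, showing the Int32Slice
// flag defined in this file (the flag name "shards" is invented).
func exampleInt32SliceUsage() {
	fs := NewFlagSet("example", ContinueOnError)
	shards := fs.Int32Slice("shards", []int32{}, "comma-separated list of shard IDs")
	_ = fs.Parse([]string{"--shards=1,2", "--shards=3"})
	fmt.Println(*shards) // [1 2 3] — comma-split values accumulate across occurrences
}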
+func Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 { + return CommandLine.Int32SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int32_slice_test.go b/vendor/github.com/spf13/pflag/int32_slice_test.go new file mode 100644 index 0000000..69aaf9b --- /dev/null +++ b/vendor/github.com/spf13/pflag/int32_slice_test.go @@ -0,0 +1,171 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package pflag + +import ( + "fmt" + "strconv" + "strings" + "testing" +) + +func setUpI32SFlagSet(isp *[]int32) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Int32SliceVar(isp, "is", []int32{}, "Command separated list!") + return f +} + +func setUpI32SFlagSetWithDefault(isp *[]int32) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Int32SliceVar(isp, "is", []int32{0, 1}, "Command separated list!") + return f +} + +func TestEmptyI32S(t *testing.T) { + var is []int32 + f := setUpI32SFlagSet(&is) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getI32S, err := f.GetInt32Slice("is") + if err != nil { + t.Fatal("got an error from GetInt32Slice():", err) + } + if len(getI32S) != 0 { + t.Fatalf("got is %v with len=%d but expected length=0", getI32S, len(getI32S)) + } +} + +func TestI32S(t *testing.T) { + var is []int32 + f := setUpI32SFlagSet(&is) + + vals := []string{"1", "2", "4", "3"} + arg := fmt.Sprintf("--is=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + d := int32(d64) + if d != v { + t.Fatalf("expected is[%d] to be %s but got: %d", i, vals[i], v) + } + } + getI32S, err := f.GetInt32Slice("is") + if err != nil { + t.Fatalf("got error: %v", err) + } + for i, v := range getI32S { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + d := int32(d64) + if d != v { + t.Fatalf("expected is[%d] to be %s but got: %d from GetInt32Slice", i, vals[i], v) + } + } +} + +func TestI32SDefault(t *testing.T) { + var is []int32 + f := setUpI32SFlagSetWithDefault(&is) + + vals := []string{"0", "1"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + d := int32(d64) + if d != v { + t.Fatalf("expected is[%d] to be %d but got: %d", i, d, v) + } + } + + getI32S, err := f.GetInt32Slice("is") + if err != nil { + t.Fatal("got an error from GetInt32Slice():", err) + } + for i, v := range getI32S { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatal("got an error from GetInt32Slice():", err) + } + d := int32(d64) + if d != v { + t.Fatalf("expected is[%d] to be %d from GetInt32Slice but got: %d", i, d, v) + } + } +} + +func TestI32SWithDefault(t *testing.T) { + var is []int32 + f := setUpI32SFlagSetWithDefault(&is) + + vals := []string{"1", "2"} + arg := fmt.Sprintf("--is=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + d := int32(d64) + if d != v { + 
t.Fatalf("expected is[%d] to be %d but got: %d", i, d, v) + } + } + + getI32S, err := f.GetInt32Slice("is") + if err != nil { + t.Fatal("got an error from GetInt32Slice():", err) + } + for i, v := range getI32S { + d64, err := strconv.ParseInt(vals[i], 0, 32) + if err != nil { + t.Fatalf("got error: %v", err) + } + d := int32(d64) + if d != v { + t.Fatalf("expected is[%d] to be %d from GetInt32Slice but got: %d", i, d, v) + } + } +} + +func TestI32SCalledTwice(t *testing.T) { + var is []int32 + f := setUpI32SFlagSet(&is) + + in := []string{"1,2", "3"} + expected := []int32{1, 2, 3} + argfmt := "--is=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) + err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + if expected[i] != v { + t.Fatalf("expected is[%d] to be %d but got: %d", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/int64_slice.go b/vendor/github.com/spf13/pflag/int64_slice.go new file mode 100644 index 0000000..86cb619 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int64_slice.go @@ -0,0 +1,128 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- int64Slice Value +type int64SliceValue struct { + value *[]int64 + changed bool +} + +func newInt64SliceValue(val []int64, p *[]int64) *int64SliceValue { + isv := new(int64SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *int64SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]int64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseInt(d, 0, 64) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *int64SliceValue) Type() string { + return "int64Slice" +} + +func (s *int64SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func int64SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []int64{}, nil + } + ss := strings.Split(val, ",") + out := make([]int64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseInt(d, 0, 64) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetInt64Slice return the []int64 value of a flag with the given name +func (f *FlagSet) GetInt64Slice(name string) ([]int64, error) { + val, err := f.getFlagType(name, "int64Slice", int64SliceConv) + if err != nil { + return []int64{}, err + } + return val.([]int64), nil +} + +// Int64SliceVar defines a int64Slice flag with specified name, default value, and usage string. +// The argument p points to a []int64 variable in which to store the value of the flag. +func (f *FlagSet) Int64SliceVar(p *[]int64, name string, value []int64, usage string) { + f.VarP(newInt64SliceValue(value, p), name, "", usage) +} + +// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) { + f.VarP(newInt64SliceValue(value, p), name, shorthand, usage) +} + +// Int64SliceVar defines a int64[] flag with specified name, default value, and usage string. 
+// The argument p points to a int64[] variable in which to store the value of the flag. +func Int64SliceVar(p *[]int64, name string, value []int64, usage string) { + CommandLine.VarP(newInt64SliceValue(value, p), name, "", usage) +} + +// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) { + CommandLine.VarP(newInt64SliceValue(value, p), name, shorthand, usage) +} + +// Int64Slice defines a []int64 flag with specified name, default value, and usage string. +// The return value is the address of a []int64 variable that stores the value of the flag. +func (f *FlagSet) Int64Slice(name string, value []int64, usage string) *[]int64 { + p := []int64{} + f.Int64SliceVarP(&p, name, "", value, usage) + return &p +} + +// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 { + p := []int64{} + f.Int64SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Int64Slice defines a []int64 flag with specified name, default value, and usage string. +// The return value is the address of a []int64 variable that stores the value of the flag. +func Int64Slice(name string, value []int64, usage string) *[]int64 { + return CommandLine.Int64SliceP(name, "", value, usage) +} + +// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash. +func Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 { + return CommandLine.Int64SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int64_slice_test.go b/vendor/github.com/spf13/pflag/int64_slice_test.go new file mode 100644 index 0000000..b536943 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int64_slice_test.go @@ -0,0 +1,165 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package pflag + +import ( + "fmt" + "strconv" + "strings" + "testing" +) + +func setUpI64SFlagSet(isp *[]int64) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Int64SliceVar(isp, "is", []int64{}, "Command separated list!") + return f +} + +func setUpI64SFlagSetWithDefault(isp *[]int64) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.Int64SliceVar(isp, "is", []int64{0, 1}, "Command separated list!") + return f +} + +func TestEmptyI64S(t *testing.T) { + var is []int64 + f := setUpI64SFlagSet(&is) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getI64S, err := f.GetInt64Slice("is") + if err != nil { + t.Fatal("got an error from GetInt64Slice():", err) + } + if len(getI64S) != 0 { + t.Fatalf("got is %v with len=%d but expected length=0", getI64S, len(getI64S)) + } +} + +func TestI64S(t *testing.T) { + var is []int64 + f := setUpI64SFlagSet(&is) + + vals := []string{"1", "2", "4", "3"} + arg := fmt.Sprintf("--is=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %s but got: %d", i, vals[i], v) + } + } + getI64S, err := f.GetInt64Slice("is") + if err != nil { + t.Fatalf("got error: %v", err) + } + for i, v := range getI64S { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %s but got: %d from GetInt64Slice", i, vals[i], v) + } + } +} + +func TestI64SDefault(t *testing.T) { + var is []int64 + f := setUpI64SFlagSetWithDefault(&is) + + vals := []string{"0", "1"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %d but got: %d", i, d, v) + } + } + + getI64S, err := f.GetInt64Slice("is") + if err != nil { + t.Fatal("got an error from GetInt64Slice():", err) + } + for i, v := range getI64S { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatal("got an error from GetInt64Slice():", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %d from GetInt64Slice but got: %d", i, d, v) + } + } +} + +func TestI64SWithDefault(t *testing.T) { + var is []int64 + f := setUpI64SFlagSetWithDefault(&is) + + vals := []string{"1", "2"} + arg := fmt.Sprintf("--is=%s", strings.Join(vals, ",")) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %d but got: %d", i, d, v) + } + } + + getI64S, err := f.GetInt64Slice("is") + if err != nil { + t.Fatal("got an error from GetInt64Slice():", err) + } + for i, v := range getI64S { + d, err := strconv.ParseInt(vals[i], 0, 64) + if err != nil { + t.Fatalf("got error: %v", err) + } + if d != v { + t.Fatalf("expected is[%d] to be %d from GetInt64Slice but got: %d", i, d, v) + } + } +} + +func TestI64SCalledTwice(t *testing.T) { + var is []int64 + f := setUpI64SFlagSet(&is) + + in := []string{"1,2", "3"} + expected := []int64{1, 2, 3} + argfmt := "--is=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) 
+ err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range is { + if expected[i] != v { + t.Fatalf("expected is[%d] to be %d but got: %d", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/ip_slice.go b/vendor/github.com/spf13/pflag/ip_slice.go index 7dd196f..6ad2e92 100644 --- a/vendor/github.com/spf13/pflag/ip_slice.go +++ b/vendor/github.com/spf13/pflag/ip_slice.go @@ -74,7 +74,7 @@ func (s *ipSliceValue) String() string { func ipSliceConv(val string) (interface{}, error) { val = strings.Trim(val, "[]") - // Emtpy string would cause a slice with one (empty) entry + // Empty string would cause a slice with one (empty) entry if len(val) == 0 { return []net.IP{}, nil } diff --git a/vendor/github.com/spf13/pflag/printusage_test.go b/vendor/github.com/spf13/pflag/printusage_test.go new file mode 100644 index 0000000..df982aa --- /dev/null +++ b/vendor/github.com/spf13/pflag/printusage_test.go @@ -0,0 +1,74 @@ +package pflag + +import ( + "bytes" + "io" + "testing" +) + +const expectedOutput = ` --long-form Some description + --long-form2 Some description + with multiline + -s, --long-name Some description + -t, --long-name2 Some description with + multiline +` + +func setUpPFlagSet(buf io.Writer) *FlagSet { + f := NewFlagSet("test", ExitOnError) + f.Bool("long-form", false, "Some description") + f.Bool("long-form2", false, "Some description\n with multiline") + f.BoolP("long-name", "s", false, "Some description") + f.BoolP("long-name2", "t", false, "Some description with\n multiline") + f.SetOutput(buf) + return f +} + +func TestPrintUsage(t *testing.T) { + buf := bytes.Buffer{} + f := setUpPFlagSet(&buf) + f.PrintDefaults() + res := buf.String() + if res != expectedOutput { + t.Errorf("Expected \n%s \nActual \n%s", expectedOutput, res) + } +} + +func setUpPFlagSet2(buf io.Writer) *FlagSet { + f := NewFlagSet("test", ExitOnError) + f.Bool("long-form", false, "Some description") + f.Bool("long-form2", false, "Some description\n with multiline") + f.BoolP("long-name", "s", false, "Some description") + f.BoolP("long-name2", "t", false, "Some description with\n multiline") + f.StringP("some-very-long-arg", "l", "test", "Some very long description having break the limit") + f.StringP("other-very-long-arg", "o", "long-default-value", "Some very long description having break the limit") + f.String("some-very-long-arg2", "very long default value", "Some very long description\nwith line break\nmultiple") + f.SetOutput(buf) + return f +} + +const expectedOutput2 = ` --long-form Some description + --long-form2 Some description + with multiline + -s, --long-name Some description + -t, --long-name2 Some description with + multiline + -o, --other-very-long-arg string Some very long description having + break the limit (default + "long-default-value") + -l, --some-very-long-arg string Some very long description having + break the limit (default "test") + --some-very-long-arg2 string Some very long description + with line break + multiple (default "very long default + value") +` + +func TestPrintUsage_2(t *testing.T) { + buf := bytes.Buffer{} + f := setUpPFlagSet2(&buf) + res := f.FlagUsagesWrapped(80) + if res != expectedOutput2 { + t.Errorf("Expected \n%q \nActual \n%q", expectedOutput2, res) + } +} diff --git a/vendor/github.com/spf13/pflag/string_array.go b/vendor/github.com/spf13/pflag/string_array.go index 276b7ed..fa7bc60 100644 --- a/vendor/github.com/spf13/pflag/string_array.go +++ 
b/vendor/github.com/spf13/pflag/string_array.go @@ -52,7 +52,7 @@ func (f *FlagSet) GetStringArray(name string) ([]string, error) { // StringArrayVar defines a string flag with specified name, default value, and usage string. // The argument p points to a []string variable in which to store the values of the multiple flags. -// The value of each argument will not try to be separated by comma +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. func (f *FlagSet) StringArrayVar(p *[]string, name string, value []string, usage string) { f.VarP(newStringArrayValue(value, p), name, "", usage) } @@ -64,7 +64,7 @@ func (f *FlagSet) StringArrayVarP(p *[]string, name, shorthand string, value []s // StringArrayVar defines a string flag with specified name, default value, and usage string. // The argument p points to a []string variable in which to store the value of the flag. -// The value of each argument will not try to be separated by comma +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. func StringArrayVar(p *[]string, name string, value []string, usage string) { CommandLine.VarP(newStringArrayValue(value, p), name, "", usage) } @@ -76,7 +76,7 @@ func StringArrayVarP(p *[]string, name, shorthand string, value []string, usage // StringArray defines a string flag with specified name, default value, and usage string. // The return value is the address of a []string variable that stores the value of the flag. -// The value of each argument will not try to be separated by comma +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. func (f *FlagSet) StringArray(name string, value []string, usage string) *[]string { p := []string{} f.StringArrayVarP(&p, name, "", value, usage) @@ -92,7 +92,7 @@ func (f *FlagSet) StringArrayP(name, shorthand string, value []string, usage str // StringArray defines a string flag with specified name, default value, and usage string. // The return value is the address of a []string variable that stores the value of the flag. -// The value of each argument will not try to be separated by comma +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. func StringArray(name string, value []string, usage string) *[]string { return CommandLine.StringArrayP(name, "", value, usage) } diff --git a/vendor/github.com/spf13/pflag/string_slice.go b/vendor/github.com/spf13/pflag/string_slice.go index 05eee75..0cd3ccc 100644 --- a/vendor/github.com/spf13/pflag/string_slice.go +++ b/vendor/github.com/spf13/pflag/string_slice.go @@ -82,6 +82,11 @@ func (f *FlagSet) GetStringSlice(name string) ([]string, error) { // StringSliceVar defines a string flag with specified name, default value, and usage string. // The argument p points to a []string variable in which to store the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" -ss="v3" +// will result in +// []string{"v1", "v2", "v3"} func (f *FlagSet) StringSliceVar(p *[]string, name string, value []string, usage string) { f.VarP(newStringSliceValue(value, p), name, "", usage) } @@ -93,6 +98,11 @@ func (f *FlagSet) StringSliceVarP(p *[]string, name, shorthand string, value []s // StringSliceVar defines a string flag with specified name, default value, and usage string. 
// The argument p points to a []string variable in which to store the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" -ss="v3" +// will result in +// []string{"v1", "v2", "v3"} func StringSliceVar(p *[]string, name string, value []string, usage string) { CommandLine.VarP(newStringSliceValue(value, p), name, "", usage) } @@ -104,6 +114,11 @@ func StringSliceVarP(p *[]string, name, shorthand string, value []string, usage // StringSlice defines a string flag with specified name, default value, and usage string. // The return value is the address of a []string variable that stores the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" -ss="v3" +// will result in +// []string{"v1", "v2", "v3"} func (f *FlagSet) StringSlice(name string, value []string, usage string) *[]string { p := []string{} f.StringSliceVarP(&p, name, "", value, usage) @@ -119,6 +134,11 @@ func (f *FlagSet) StringSliceP(name, shorthand string, value []string, usage str // StringSlice defines a string flag with specified name, default value, and usage string. // The return value is the address of a []string variable that stores the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" -ss="v3" +// will result in +// []string{"v1", "v2", "v3"} func StringSlice(name string, value []string, usage string) *[]string { return CommandLine.StringSliceP(name, "", value, usage) } diff --git a/vendor/github.com/spf13/pflag/string_to_int.go b/vendor/github.com/spf13/pflag/string_to_int.go new file mode 100644 index 0000000..5ceda39 --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_int.go @@ -0,0 +1,149 @@ +package pflag + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +// -- stringToInt Value +type stringToIntValue struct { + value *map[string]int + changed bool +} + +func newStringToIntValue(val map[string]int, p *map[string]int) *stringToIntValue { + ssv := new(stringToIntValue) + ssv.value = p + *ssv.value = val + return ssv +} + +// Format: a=1,b=2 +func (s *stringToIntValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make(map[string]int, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return fmt.Errorf("%s must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.Atoi(kv[1]) + if err != nil { + return err + } + } + if !s.changed { + *s.value = out + } else { + for k, v := range out { + (*s.value)[k] = v + } + } + s.changed = true + return nil +} + +func (s *stringToIntValue) Type() string { + return "stringToInt" +} + +func (s *stringToIntValue) String() string { + var buf bytes.Buffer + i := 0 + for k, v := range *s.value { + if i > 0 { + buf.WriteRune(',') + } + buf.WriteString(k) + buf.WriteRune('=') + buf.WriteString(strconv.Itoa(v)) + i++ + } + return "[" + buf.String() + "]" +} + +func stringToIntConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]int{}, nil + } + ss := strings.Split(val, ",") + out := make(map[string]int, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil, fmt.Errorf("%s 
must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.Atoi(kv[1]) + if err != nil { + return nil, err + } + } + return out, nil +} + +// GetStringToInt return the map[string]int value of a flag with the given name +func (f *FlagSet) GetStringToInt(name string) (map[string]int, error) { + val, err := f.getFlagType(name, "stringToInt", stringToIntConv) + if err != nil { + return map[string]int{}, err + } + return val.(map[string]int), nil +} + +// StringToIntVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]int variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) { + f.VarP(newStringToIntValue(value, p), name, "", usage) +} + +// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) { + f.VarP(newStringToIntValue(value, p), name, shorthand, usage) +} + +// StringToIntVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]int variable in which to store the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) { + CommandLine.VarP(newStringToIntValue(value, p), name, "", usage) +} + +// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash. +func StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) { + CommandLine.VarP(newStringToIntValue(value, p), name, shorthand, usage) +} + +// StringToInt defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToInt(name string, value map[string]int, usage string) *map[string]int { + p := map[string]int{} + f.StringToIntVarP(&p, name, "", value, usage) + return &p +} + +// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int { + p := map[string]int{} + f.StringToIntVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringToInt defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToInt(name string, value map[string]int, usage string) *map[string]int { + return CommandLine.StringToIntP(name, "", value, usage) +} + +// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash. 
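// Editor's sketch (not part of the vendored pflag source): minimal usage of the
// new stringToInt flag type defined in this file. The FlagSet name, flag name
// and function name are hypothetical. Repeated --counts occurrences are merged,
// with the later value for a key winning (see (*stringToIntValue).Set above).
func exampleStringToIntUsage() {
	fs := NewFlagSet("example", ContinueOnError)
	counts := fs.StringToInt("counts", map[string]int{}, "name=count pairs, e.g. --counts=a=1,b=2")

	// Two occurrences of the flag: keys merge, "b" is overwritten by the second one.
	_ = fs.Parse([]string{"--counts=a=1,b=2", "--counts=b=3"})

	fmt.Println(*counts) // map[a:1 b:3]
}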
+func StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int { + return CommandLine.StringToIntP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_to_int_test.go b/vendor/github.com/spf13/pflag/string_to_int_test.go new file mode 100644 index 0000000..b60bbaf --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_int_test.go @@ -0,0 +1,156 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of ths2i source code s2i governed by a BSD-style +// license that can be found in the LICENSE file. + +package pflag + +import ( + "bytes" + "fmt" + "strconv" + "testing" +) + +func setUpS2IFlagSet(s2ip *map[string]int) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.StringToIntVar(s2ip, "s2i", map[string]int{}, "Command separated ls2it!") + return f +} + +func setUpS2IFlagSetWithDefault(s2ip *map[string]int) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.StringToIntVar(s2ip, "s2i", map[string]int{"a": 1, "b": 2}, "Command separated ls2it!") + return f +} + +func createS2IFlag(vals map[string]int) string { + var buf bytes.Buffer + i := 0 + for k, v := range vals { + if i > 0 { + buf.WriteRune(',') + } + buf.WriteString(k) + buf.WriteRune('=') + buf.WriteString(strconv.Itoa(v)) + i++ + } + return buf.String() +} + +func TestEmptyS2I(t *testing.T) { + var s2i map[string]int + f := setUpS2IFlagSet(&s2i) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getS2I, err := f.GetStringToInt("s2i") + if err != nil { + t.Fatal("got an error from GetStringToInt():", err) + } + if len(getS2I) != 0 { + t.Fatalf("got s2i %v with len=%d but expected length=0", getS2I, len(getS2I)) + } +} + +func TestS2I(t *testing.T) { + var s2i map[string]int + f := setUpS2IFlagSet(&s2i) + + vals := map[string]int{"a": 1, "b": 2, "d": 4, "c": 3} + arg := fmt.Sprintf("--s2i=%s", createS2IFlag(vals)) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2i { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d but got: %d", k, vals[k], v) + } + } + getS2I, err := f.GetStringToInt("s2i") + if err != nil { + t.Fatalf("got error: %v", err) + } + for k, v := range getS2I { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d but got: %d from GetStringToInt", k, vals[k], v) + } + } +} + +func TestS2IDefault(t *testing.T) { + var s2i map[string]int + f := setUpS2IFlagSetWithDefault(&s2i) + + vals := map[string]int{"a": 1, "b": 2} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2i { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d but got: %d", k, vals[k], v) + } + } + + getS2I, err := f.GetStringToInt("s2i") + if err != nil { + t.Fatal("got an error from GetStringToInt():", err) + } + for k, v := range getS2I { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d from GetStringToInt but got: %d", k, vals[k], v) + } + } +} + +func TestS2IWithDefault(t *testing.T) { + var s2i map[string]int + f := setUpS2IFlagSetWithDefault(&s2i) + + vals := map[string]int{"a": 1, "b": 2} + arg := fmt.Sprintf("--s2i=%s", createS2IFlag(vals)) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2i { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d but got: %d", k, vals[k], v) + } + } + + getS2I, err := f.GetStringToInt("s2i") + if err != nil { + t.Fatal("got an error from 
GetStringToInt():", err) + } + for k, v := range getS2I { + if vals[k] != v { + t.Fatalf("expected s2i[%s] to be %d from GetStringToInt but got: %d", k, vals[k], v) + } + } +} + +func TestS2ICalledTwice(t *testing.T) { + var s2i map[string]int + f := setUpS2IFlagSet(&s2i) + + in := []string{"a=1,b=2", "b=3"} + expected := map[string]int{"a": 1, "b": 3} + argfmt := "--s2i=%s" + arg1 := fmt.Sprintf(argfmt, in[0]) + arg2 := fmt.Sprintf(argfmt, in[1]) + err := f.Parse([]string{arg1, arg2}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for i, v := range s2i { + if expected[i] != v { + t.Fatalf("expected s2i[%s] to be %d but got: %d", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/pflag/string_to_string.go b/vendor/github.com/spf13/pflag/string_to_string.go new file mode 100644 index 0000000..890a01a --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_string.go @@ -0,0 +1,160 @@ +package pflag + +import ( + "bytes" + "encoding/csv" + "fmt" + "strings" +) + +// -- stringToString Value +type stringToStringValue struct { + value *map[string]string + changed bool +} + +func newStringToStringValue(val map[string]string, p *map[string]string) *stringToStringValue { + ssv := new(stringToStringValue) + ssv.value = p + *ssv.value = val + return ssv +} + +// Format: a=1,b=2 +func (s *stringToStringValue) Set(val string) error { + var ss []string + n := strings.Count(val, "=") + switch n { + case 0: + return fmt.Errorf("%s must be formatted as key=value", val) + case 1: + ss = append(ss, strings.Trim(val, `"`)) + default: + r := csv.NewReader(strings.NewReader(val)) + var err error + ss, err = r.Read() + if err != nil { + return err + } + } + + out := make(map[string]string, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return fmt.Errorf("%s must be formatted as key=value", pair) + } + out[kv[0]] = kv[1] + } + if !s.changed { + *s.value = out + } else { + for k, v := range out { + (*s.value)[k] = v + } + } + s.changed = true + return nil +} + +func (s *stringToStringValue) Type() string { + return "stringToString" +} + +func (s *stringToStringValue) String() string { + records := make([]string, 0, len(*s.value)>>1) + for k, v := range *s.value { + records = append(records, k+"="+v) + } + + var buf bytes.Buffer + w := csv.NewWriter(&buf) + if err := w.Write(records); err != nil { + panic(err) + } + w.Flush() + return "[" + strings.TrimSpace(buf.String()) + "]" +} + +func stringToStringConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]string{}, nil + } + r := csv.NewReader(strings.NewReader(val)) + ss, err := r.Read() + if err != nil { + return nil, err + } + out := make(map[string]string, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil, fmt.Errorf("%s must be formatted as key=value", pair) + } + out[kv[0]] = kv[1] + } + return out, nil +} + +// GetStringToString return the map[string]string value of a flag with the given name +func (f *FlagSet) GetStringToString(name string) (map[string]string, error) { + val, err := f.getFlagType(name, "stringToString", stringToStringConv) + if err != nil { + return map[string]string{}, err + } + return val.(map[string]string), nil +} + +// StringToStringVar defines a string flag with specified name, default value, and usage string. 
+// The argument p points to a map[string]string variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) { + f.VarP(newStringToStringValue(value, p), name, "", usage) +} + +// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) { + f.VarP(newStringToStringValue(value, p), name, shorthand, usage) +} + +// StringToStringVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]string variable in which to store the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) { + CommandLine.VarP(newStringToStringValue(value, p), name, "", usage) +} + +// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash. +func StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) { + CommandLine.VarP(newStringToStringValue(value, p), name, shorthand, usage) +} + +// StringToString defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToString(name string, value map[string]string, usage string) *map[string]string { + p := map[string]string{} + f.StringToStringVarP(&p, name, "", value, usage) + return &p +} + +// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string { + p := map[string]string{} + f.StringToStringVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringToString defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToString(name string, value map[string]string, usage string) *map[string]string { + return CommandLine.StringToStringP(name, "", value, usage) +} + +// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash. +func StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string { + return CommandLine.StringToStringP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_to_string_test.go b/vendor/github.com/spf13/pflag/string_to_string_test.go new file mode 100644 index 0000000..0777f03 --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_string_test.go @@ -0,0 +1,162 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of ths2s source code s2s governed by a BSD-style +// license that can be found in the LICENSE file. 
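// Editor's sketch (not part of the vendored pflag sources): intended usage of
// the stringToString flag type defined in string_to_string.go. Values are
// parsed with encoding/csv, so a quoted pair may itself contain commas. The
// flag name "labels" is hypothetical; an external caller would qualify these
// identifiers with the pflag package name.
//
//	fs := NewFlagSet("example", ContinueOnError)
//	labels := fs.StringToString("labels", nil, "key=value pairs")
//	_ = fs.Parse([]string{`--labels=a=1,b=2`, `--labels="note=hello, world"`})
//	// *labels == map[string]string{"a": "1", "b": "2", "note": "hello, world"}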
+ +package pflag + +import ( + "bytes" + "encoding/csv" + "fmt" + "strings" + "testing" +) + +func setUpS2SFlagSet(s2sp *map[string]string) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.StringToStringVar(s2sp, "s2s", map[string]string{}, "Command separated ls2st!") + return f +} + +func setUpS2SFlagSetWithDefault(s2sp *map[string]string) *FlagSet { + f := NewFlagSet("test", ContinueOnError) + f.StringToStringVar(s2sp, "s2s", map[string]string{"da": "1", "db": "2", "de": "5,6"}, "Command separated ls2st!") + return f +} + +func createS2SFlag(vals map[string]string) string { + records := make([]string, 0, len(vals)>>1) + for k, v := range vals { + records = append(records, k+"="+v) + } + + var buf bytes.Buffer + w := csv.NewWriter(&buf) + if err := w.Write(records); err != nil { + panic(err) + } + w.Flush() + return strings.TrimSpace(buf.String()) +} + +func TestEmptyS2S(t *testing.T) { + var s2s map[string]string + f := setUpS2SFlagSet(&s2s) + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + + getS2S, err := f.GetStringToString("s2s") + if err != nil { + t.Fatal("got an error from GetStringToString():", err) + } + if len(getS2S) != 0 { + t.Fatalf("got s2s %v with len=%d but expected length=0", getS2S, len(getS2S)) + } +} + +func TestS2S(t *testing.T) { + var s2s map[string]string + f := setUpS2SFlagSet(&s2s) + + vals := map[string]string{"a": "1", "b": "2", "d": "4", "c": "3", "e": "5,6"} + arg := fmt.Sprintf("--s2s=%s", createS2SFlag(vals)) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2s { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s but got: %s", k, vals[k], v) + } + } + getS2S, err := f.GetStringToString("s2s") + if err != nil { + t.Fatalf("got error: %v", err) + } + for k, v := range getS2S { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s but got: %s from GetStringToString", k, vals[k], v) + } + } +} + +func TestS2SDefault(t *testing.T) { + var s2s map[string]string + f := setUpS2SFlagSetWithDefault(&s2s) + + vals := map[string]string{"da": "1", "db": "2", "de": "5,6"} + + err := f.Parse([]string{}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2s { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s but got: %s", k, vals[k], v) + } + } + + getS2S, err := f.GetStringToString("s2s") + if err != nil { + t.Fatal("got an error from GetStringToString():", err) + } + for k, v := range getS2S { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s from GetStringToString but got: %s", k, vals[k], v) + } + } +} + +func TestS2SWithDefault(t *testing.T) { + var s2s map[string]string + f := setUpS2SFlagSetWithDefault(&s2s) + + vals := map[string]string{"a": "1", "b": "2", "e": "5,6"} + arg := fmt.Sprintf("--s2s=%s", createS2SFlag(vals)) + err := f.Parse([]string{arg}) + if err != nil { + t.Fatal("expected no error; got", err) + } + for k, v := range s2s { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s but got: %s", k, vals[k], v) + } + } + + getS2S, err := f.GetStringToString("s2s") + if err != nil { + t.Fatal("got an error from GetStringToString():", err) + } + for k, v := range getS2S { + if vals[k] != v { + t.Fatalf("expected s2s[%s] to be %s from GetStringToString but got: %s", k, vals[k], v) + } + } +} + +func TestS2SCalledTwice(t *testing.T) { + var s2s map[string]string + f := setUpS2SFlagSet(&s2s) + + in := []string{"a=1,b=2", "b=3", `"e=5,6"`, `f=7,8`} + expected := 
map[string]string{"a": "1", "b": "3", "e": "5,6", "f": "7,8"} + argfmt := "--s2s=%s" + arg0 := fmt.Sprintf(argfmt, in[0]) + arg1 := fmt.Sprintf(argfmt, in[1]) + arg2 := fmt.Sprintf(argfmt, in[2]) + arg3 := fmt.Sprintf(argfmt, in[3]) + err := f.Parse([]string{arg0, arg1, arg2, arg3}) + if err != nil { + t.Fatal("expected no error; got", err) + } + if len(s2s) != len(expected) { + t.Fatalf("expected %d flags; got %d flags", len(expected), len(s2s)) + } + for i, v := range s2s { + if expected[i] != v { + t.Fatalf("expected s2s[%s] to be %s but got: %s", i, expected[i], v) + } + } +} diff --git a/vendor/github.com/spf13/viper/.gitignore b/vendor/github.com/spf13/viper/.gitignore index 352a34a..01b5c44 100644 --- a/vendor/github.com/spf13/viper/.gitignore +++ b/vendor/github.com/spf13/viper/.gitignore @@ -21,4 +21,9 @@ _testmain.go *.exe *.test -*.bench \ No newline at end of file +*.bench + +.vscode + +# exclude dependencies in the `/vendor` folder +vendor diff --git a/vendor/github.com/spf13/viper/.travis.yml b/vendor/github.com/spf13/viper/.travis.yml index d4c2559..bb83057 100644 --- a/vendor/github.com/spf13/viper/.travis.yml +++ b/vendor/github.com/spf13/viper/.travis.yml @@ -1,9 +1,13 @@ go_import_path: github.com/spf13/viper language: go + +env: + global: + - GO111MODULE="on" + go: - - 1.7.5 - - 1.8 + - 1.11.x - tip os: @@ -17,6 +21,7 @@ matrix: script: - go install ./... + - diff -u <(echo -n) <(gofmt -d .) - go test -v ./... after_success: diff --git a/vendor/github.com/spf13/viper/README.md b/vendor/github.com/spf13/viper/README.md index 577088f..0208eac 100644 --- a/vendor/github.com/spf13/viper/README.md +++ b/vendor/github.com/spf13/viper/README.md @@ -6,18 +6,19 @@ Many Go projects are built using Viper including: * [Hugo](http://gohugo.io) * [EMC RexRay](http://rexray.readthedocs.org/en/stable/) -* [Imgur's Incus](https://github.com/Imgur/incus) +* [Imgur’s Incus](https://github.com/Imgur/incus) * [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) * [Docker Notary](https://github.com/docker/Notary) * [BloomApi](https://www.bloomapi.com/) * [doctl](https://github.com/digitalocean/doctl) +* [Clairctl](https://github.com/jgsqware/clairctl) [![Build Status](https://travis-ci.org/spf13/viper.svg)](https://travis-ci.org/spf13/viper) [![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![GoDoc](https://godoc.org/github.com/spf13/viper?status.svg)](https://godoc.org/github.com/spf13/viper) ## What is Viper? -Viper is a complete configuration solution for go applications including 12 factor apps. It is designed +Viper is a complete configuration solution for Go applications including 12-Factor apps. It is designed to work within an application, and can handle all types of configuration needs and formats. It supports: @@ -68,7 +69,7 @@ Viper configuration keys are case insensitive. ### Establishing Defaults A good configuration system will support default values. A default value is not -required for a key, but it's useful in the event that a key hasn’t been set via +required for a key, but it’s useful in the event that a key hasn’t been set via config file, environment variable, remote configuration or flag. 
Examples: @@ -116,10 +117,10 @@ Optionally you can provide a function for Viper to run each time a change occurs **Make sure you add all of the configPaths prior to calling `WatchConfig()`** ```go - viper.WatchConfig() - viper.OnConfigChange(func(e fsnotify.Event) { - fmt.Println("Config file changed:", e.Name) - }) +viper.WatchConfig() +viper.OnConfigChange(func(e fsnotify.Event) { + fmt.Println("Config file changed:", e.Name) +}) ``` ### Reading Config from io.Reader @@ -178,19 +179,20 @@ viper.GetBool("verbose") // true ### Working with Environment Variables Viper has full support for environment variables. This enables 12 factor -applications out of the box. There are four methods that exist to aid working +applications out of the box. There are five methods that exist to aid working with ENV: * `AutomaticEnv()` * `BindEnv(string...) : error` * `SetEnvPrefix(string)` - * `SetEnvReplacer(string...) *strings.Replacer` + * `SetEnvKeyReplacer(string...) *strings.Replacer` + * `AllowEmptyEnvVar(bool)` _When working with ENV variables, it’s important to recognize that Viper treats ENV variables as case sensitive._ Viper provides a mechanism to try to ensure that ENV variables are unique. By -using `SetEnvPrefix`, you can tell Viper to use add a prefix while reading from +using `SetEnvPrefix`, you can tell Viper to use a prefix while reading from the environment variables. Both `BindEnv` and `AutomaticEnv` will use this prefix. @@ -211,11 +213,15 @@ time a `viper.Get` request is made. It will apply the following rules. It will check for a environment variable with a name matching the key uppercased and prefixed with the `EnvPrefix` if set. -`SetEnvReplacer` allows you to use a `strings.Replacer` object to rewrite Env +`SetEnvKeyReplacer` allows you to use a `strings.Replacer` object to rewrite Env keys to an extent. This is useful if you want to use `-` or something in your `Get()` calls, but want your environmental variables to use `_` delimiters. An example of using it can be found in `viper_test.go`. +By default empty environment variables are considered unset and will fall back to +the next configuration source. To treat empty environment variables as set, use +the `AllowEmptyEnv` method. + #### Env example ```go @@ -236,7 +242,7 @@ Like `BindEnv`, the value is not set when the binding method is called, but when it is accessed. This means you can bind as early as you want, even in an `init()` function. -The `BindPFlag()` method provides this functionality. +For individual flags, the `BindPFlag()` method provides this functionality. Example: @@ -245,6 +251,19 @@ serverCmd.Flags().Int("port", 1138, "Port to run Application server on") viper.BindPFlag("port", serverCmd.Flags().Lookup("port")) ``` +You can also bind an existing set of pflags (pflag.FlagSet): + +Example: + +```go +pflag.Int("flagname", 1234, "help message for flagname") + +pflag.Parse() +viper.BindPFlags(pflag.CommandLine) + +i := viper.GetInt("flagname") // retrieve values from viper instead of pflag +``` + The use of [pflag](https://github.com/spf13/pflag/) in Viper does not preclude the use of other packages that use the [flag](https://golang.org/pkg/flag/) package from the standard library. The pflag package can handle the flags @@ -263,15 +282,23 @@ import ( ) func main() { + + // using standard library "flag" package + flag.Int("flagname", 1234, "help message for flagname") + pflag.CommandLine.AddGoFlagSet(flag.CommandLine) pflag.Parse() - ... 
+ viper.BindPFlags(pflag.CommandLine) + + i := viper.GetInt("flagname") // retrieve value from viper + + ... } ``` #### Flag interfaces -Viper provides two Go interfaces to bind other flag systems if you don't use `Pflags`. +Viper provides two Go interfaces to bind other flag systems if you don’t use `Pflags`. `FlagValue` represents a single flag. This is a very simple example on how to implement this interface: @@ -351,12 +378,33 @@ how to use Consul. ### Remote Key/Value Store Example - Unencrypted +#### etcd ```go viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001","/config/hugo.json") viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop" err := viper.ReadRemoteConfig() ``` +#### Consul +You need to set a key to Consul key/value storage with JSON value containing your desired config. +For example, create a Consul key/value store key `MY_CONSUL_KEY` with value: + +```json +{ + "port": 8080, + "hostname": "myhostname.com" +} +``` + +```go +viper.AddRemoteProvider("consul", "localhost:8500", "MY_CONSUL_KEY") +viper.SetConfigType("json") // Need to explicitly set this to json +err := viper.ReadRemoteConfig() + +fmt.Println(viper.Get("port")) // 8080 +fmt.Println(viper.Get("hostname")) // myhostname.com +``` + ### Remote Key/Value Store Example - Encrypted ```go @@ -401,7 +449,7 @@ go func(){ ## Getting Values From Viper -In Viper, there are a few ways to get a value depending on the value's type. +In Viper, there are a few ways to get a value depending on the value’s type. The following functions and methods exist: * `Get(key string) : interface{}` @@ -415,6 +463,7 @@ The following functions and methods exist: * `GetTime(key string) : time.Time` * `GetDuration(key string) : time.Duration` * `IsSet(key string) : bool` + * `AllSettings() : map[string]interface{}` One important thing to recognize is that each Get function will return a zero value if it’s not found. To check if a given key exists, the `IsSet()` method @@ -531,7 +580,7 @@ func NewCache(cfg *Viper) *Cache {...} ``` which creates a cache based on config information formatted as `subv`. -Now it's easy to create these 2 caches separately as: +Now it’s easy to create these 2 caches separately as: ```go cfg1 := viper.Sub("app.cache1") @@ -568,6 +617,27 @@ if err != nil { } ``` +### Marshalling to string + +You may need to marhsal all the settings held in viper into a string rather than write them to a file. +You can use your favorite format's marshaller with the config returned by `AllSettings()`. + +```go +import ( + yaml "gopkg.in/yaml.v2" + // ... +) + +func yamlStringSettings() string { + c := viper.AllSettings() + bs, err := yaml.Marshal(c) + if err != nil { + t.Fatalf("unable to marshal config to YAML: %v", err) + } + return string(bs) +} +``` + ## Viper or Vipers? Viper comes ready to use out of the box. 
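One editorial note on the `AllSettings()` marshalling snippet above: as written it calls `t.Fatalf`, which is only available inside a test. A hedged sketch of the same idea that returns the error instead, assuming `gopkg.in/yaml.v2` is imported as `yaml`:

```go
func yamlStringSettings() (string, error) {
	bs, err := yaml.Marshal(viper.AllSettings())
	if err != nil {
		return "", fmt.Errorf("unable to marshal config to YAML: %v", err)
	}
	return string(bs), nil
}
```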
There is no configuration or diff --git a/vendor/github.com/spf13/viper/flags_test.go b/vendor/github.com/spf13/viper/flags_test.go index 5bffca3..0b976b6 100644 --- a/vendor/github.com/spf13/viper/flags_test.go +++ b/vendor/github.com/spf13/viper/flags_test.go @@ -62,5 +62,4 @@ func TestBindFlagValue(t *testing.T) { flag.Changed = true //hack for pflag usage assert.Equal(t, "testing_mutate", Get("testvalue")) - } diff --git a/vendor/github.com/spf13/viper/go.mod b/vendor/github.com/spf13/viper/go.mod new file mode 100644 index 0000000..86e801c --- /dev/null +++ b/vendor/github.com/spf13/viper/go.mod @@ -0,0 +1,24 @@ +module github.com/spf13/viper + +require ( + github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6 // indirect + github.com/coreos/etcd v3.3.10+incompatible // indirect + github.com/coreos/go-etcd v2.0.0+incompatible // indirect + github.com/coreos/go-semver v0.2.0 // indirect + github.com/fsnotify/fsnotify v1.4.7 + github.com/hashicorp/hcl v1.0.0 + github.com/magiconair/properties v1.8.0 + github.com/mitchellh/mapstructure v1.1.2 + github.com/pelletier/go-toml v1.2.0 + github.com/spf13/afero v1.1.2 + github.com/spf13/cast v1.3.0 + github.com/spf13/jwalterweatherman v1.0.0 + github.com/spf13/pflag v1.0.3 + github.com/stretchr/testify v1.2.2 + github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8 // indirect + github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77 + golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 // indirect + golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a // indirect + golang.org/x/text v0.3.0 // indirect + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/spf13/viper/go.sum b/vendor/github.com/spf13/viper/go.sum new file mode 100644 index 0000000..5c9fb7d --- /dev/null +++ b/vendor/github.com/spf13/viper/go.sum @@ -0,0 +1,35 @@ +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0 
h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/spf13/viper/remote/remote.go b/vendor/github.com/spf13/viper/remote/remote.go index f100a9c..810d070 100644 --- a/vendor/github.com/spf13/viper/remote/remote.go +++ b/vendor/github.com/spf13/viper/remote/remote.go @@ -8,10 +8,11 @@ package remote import ( "bytes" - "github.com/spf13/viper" - crypt "github.com/xordataexchange/crypt/config" "io" "os" + + "github.com/spf13/viper" + crypt "github.com/xordataexchange/crypt/config" ) type remoteConfigProvider struct{} @@ -33,13 +34,14 @@ func (rc remoteConfigProvider) Watch(rp viper.RemoteProvider) (io.Reader, error) if err != nil { return nil, err } - resp,err := cm.Get(rp.Path()) + resp, err := cm.Get(rp.Path()) if err != nil { return nil, err } return bytes.NewReader(resp), nil } + func (rc remoteConfigProvider) WatchChannel(rp viper.RemoteProvider) (<-chan *viper.RemoteResponse, chan bool) { cm, err := getConfigManager(rp) if err != nil { @@ -47,13 +49,13 @@ func (rc remoteConfigProvider) WatchChannel(rp viper.RemoteProvider) (<-chan *vi } quit := make(chan bool) quitwc := make(chan bool) - viperResponsCh := make(chan *viper.RemoteResponse) + viperResponsCh := make(chan *viper.RemoteResponse) cryptoResponseCh := cm.Watch(rp.Path(), quit) // need this function to convert the Channel response form crypt.Response to viper.Response - go func(cr <-chan *crypt.Response,vr chan<- *viper.RemoteResponse, quitwc <-chan bool, quit chan<- bool) { + go func(cr <-chan *crypt.Response, vr chan<- *viper.RemoteResponse, quitwc <-chan bool, quit chan<- bool) { for { select { - case <- quitwc: + case <-quitwc: quit <- true return case resp := <-cr: @@ -65,15 +67,12 @@ func (rc remoteConfigProvider) WatchChannel(rp viper.RemoteProvider) (<-chan *vi } } - }(cryptoResponseCh,viperResponsCh,quitwc,quit) - - return viperResponsCh,quitwc + }(cryptoResponseCh, viperResponsCh, quitwc, quit) + return viperResponsCh, quitwc } - func getConfigManager(rp viper.RemoteProvider) (crypt.ConfigManager, error) { - var 
cm crypt.ConfigManager var err error @@ -99,7 +98,6 @@ func getConfigManager(rp viper.RemoteProvider) (crypt.ConfigManager, error) { return nil, err } return cm, nil - } func init() { diff --git a/vendor/github.com/spf13/viper/util.go b/vendor/github.com/spf13/viper/util.go index 3ebada9..952cad4 100644 --- a/vendor/github.com/spf13/viper/util.go +++ b/vendor/github.com/spf13/viper/util.go @@ -11,22 +11,16 @@ package viper import ( - "bytes" - "encoding/json" "fmt" - "io" "os" "path/filepath" "runtime" "strings" "unicode" - "github.com/hashicorp/hcl" - "github.com/magiconair/properties" - toml "github.com/pelletier/go-toml" + "github.com/spf13/afero" "github.com/spf13/cast" jww "github.com/spf13/jwalterweatherman" - "gopkg.in/yaml.v2" ) // ConfigParseError denotes failing to parse configuration file. @@ -121,8 +115,8 @@ func absPathify(inPath string) string { } // Check if File / Directory Exists -func exists(path string) (bool, error) { - _, err := v.fs.Stat(path) +func exists(fs afero.Fs, path string) (bool, error) { + _, err := fs.Stat(path) if err == nil { return true, nil } @@ -152,61 +146,6 @@ func userHomeDir() string { return os.Getenv("HOME") } -func unmarshallConfigReader(in io.Reader, c map[string]interface{}, configType string) error { - buf := new(bytes.Buffer) - buf.ReadFrom(in) - - switch strings.ToLower(configType) { - case "yaml", "yml": - if err := yaml.Unmarshal(buf.Bytes(), &c); err != nil { - return ConfigParseError{err} - } - - case "json": - if err := json.Unmarshal(buf.Bytes(), &c); err != nil { - return ConfigParseError{err} - } - - case "hcl": - obj, err := hcl.Parse(string(buf.Bytes())) - if err != nil { - return ConfigParseError{err} - } - if err = hcl.DecodeObject(&c, obj); err != nil { - return ConfigParseError{err} - } - - case "toml": - tree, err := toml.LoadReader(buf) - if err != nil { - return ConfigParseError{err} - } - tmap := tree.ToMap() - for k, v := range tmap { - c[k] = v - } - - case "properties", "props", "prop": - var p *properties.Properties - var err error - if p, err = properties.Load(buf.Bytes(), properties.UTF8); err != nil { - return ConfigParseError{err} - } - for _, key := range p.Keys() { - value, _ := p.Get(key) - // recursively build nested maps - path := strings.Split(key, ".") - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(c, path[0:len(path)-1]) - // set innermost value - deepestMap[lastKey] = value - } - } - - insensitiviseMap(c) - return nil -} - func safeMul(a, b uint) uint { c := a * b if a > 1 && b > 1 && c/b != a { diff --git a/vendor/github.com/spf13/viper/util_test.go b/vendor/github.com/spf13/viper/util_test.go index 5949e09..0af80bb 100644 --- a/vendor/github.com/spf13/viper/util_test.go +++ b/vendor/github.com/spf13/viper/util_test.go @@ -16,7 +16,6 @@ import ( ) func TestCopyAndInsensitiviseMap(t *testing.T) { - var ( given = map[string]interface{}{ "Foo": 32, diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go index 39a3c06..7173c6e 100644 --- a/vendor/github.com/spf13/viper/viper.go +++ b/vendor/github.com/spf13/viper/viper.go @@ -22,6 +22,7 @@ package viper import ( "bytes" "encoding/csv" + "encoding/json" "fmt" "io" "log" @@ -29,16 +30,33 @@ import ( "path/filepath" "reflect" "strings" + "sync" "time" + yaml "gopkg.in/yaml.v2" + "github.com/fsnotify/fsnotify" + "github.com/hashicorp/hcl" + "github.com/hashicorp/hcl/hcl/printer" + "github.com/magiconair/properties" "github.com/mitchellh/mapstructure" + toml "github.com/pelletier/go-toml" 
"github.com/spf13/afero" "github.com/spf13/cast" jww "github.com/spf13/jwalterweatherman" "github.com/spf13/pflag" ) +// ConfigMarshalError happens when failing to marshal the configuration. +type ConfigMarshalError struct { + err error +} + +// Error returns the formatted configuration error. +func (e ConfigMarshalError) Error() string { + return fmt.Sprintf("While marshaling config: %s", e.err.Error()) +} + var v *Viper type RemoteResponse struct { @@ -69,8 +87,7 @@ func (str UnsupportedConfigError) Error() string { } // UnsupportedRemoteProviderError denotes encountering an unsupported remote -// provider. Currently only etcd and Consul are -// supported. +// provider. Currently only etcd and Consul are supported. type UnsupportedRemoteProviderError string // Error returns the formatted remote provider error. @@ -97,6 +114,23 @@ func (fnfe ConfigFileNotFoundError) Error() string { return fmt.Sprintf("Config File %q Not Found in %q", fnfe.name, fnfe.locations) } +// A DecoderConfigOption can be passed to viper.Unmarshal to configure +// mapstructure.DecoderConfig options +type DecoderConfigOption func(*mapstructure.DecoderConfig) + +// DecodeHook returns a DecoderConfigOption which overrides the default +// DecoderConfig.DecodeHook value, the default is: +// +// mapstructure.ComposeDecodeHookFunc( +// mapstructure.StringToTimeDurationHookFunc(), +// mapstructure.StringToSliceHookFunc(","), +// ) +func DecodeHook(hook mapstructure.DecodeHookFunc) DecoderConfigOption { + return func(c *mapstructure.DecoderConfig) { + c.DecodeHook = hook + } +} + // Viper is a prioritized configuration registry. It // maintains a set of configuration sources, fetches // values to populate those, and provides them according @@ -153,6 +187,7 @@ type Viper struct { automaticEnvApplied bool envKeyReplacer *strings.Replacer + allowEmptyEnv bool config map[string]interface{} override map[string]interface{} @@ -163,6 +198,10 @@ type Viper struct { aliases map[string]string typeByDefValue bool + // Store read properties on the object so that we can write back in order with comments. + // This will only be used if the configuration read is a properties file. + properties *properties.Properties + onConfigChange func(fsnotify.Event) } @@ -189,7 +228,7 @@ func New() *Viper { // can use it in their testing as well. 
func Reset() { v = New() - SupportedExts = []string{"json", "toml", "yaml", "yml", "hcl"} + SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl"} SupportedRemoteProviders = []string{"etcd", "consul"} } @@ -239,52 +278,77 @@ func (v *Viper) OnConfigChange(run func(in fsnotify.Event)) { } func WatchConfig() { v.WatchConfig() } + func (v *Viper) WatchConfig() { + initWG := sync.WaitGroup{} + initWG.Add(1) go func() { watcher, err := fsnotify.NewWatcher() if err != nil { log.Fatal(err) } defer watcher.Close() - // we have to watch the entire directory to pick up renames/atomic saves in a cross-platform way filename, err := v.getConfigFile() if err != nil { - log.Println("error:", err) + log.Printf("error: %v\n", err) return } configFile := filepath.Clean(filename) configDir, _ := filepath.Split(configFile) + realConfigFile, _ := filepath.EvalSymlinks(filename) - done := make(chan bool) + eventsWG := sync.WaitGroup{} + eventsWG.Add(1) go func() { for { select { - case event := <-watcher.Events: - // we only care about the config file - if filepath.Clean(event.Name) == configFile { - if event.Op&fsnotify.Write == fsnotify.Write || event.Op&fsnotify.Create == fsnotify.Create { - err := v.ReadInConfig() - if err != nil { - log.Println("error:", err) - } + case event, ok := <-watcher.Events: + if !ok { // 'Events' channel is closed + eventsWG.Done() + return + } + currentConfigFile, _ := filepath.EvalSymlinks(filename) + // we only care about the config file with the following cases: + // 1 - if the config file was modified or created + // 2 - if the real path to the config file changed (eg: k8s ConfigMap replacement) + const writeOrCreateMask = fsnotify.Write | fsnotify.Create + if (filepath.Clean(event.Name) == configFile && + event.Op&writeOrCreateMask != 0) || + (currentConfigFile != "" && currentConfigFile != realConfigFile) { + realConfigFile = currentConfigFile + err := v.ReadInConfig() + if err != nil { + log.Printf("error reading config file: %v\n", err) + } + if v.onConfigChange != nil { v.onConfigChange(event) } + } else if filepath.Clean(event.Name) == configFile && + event.Op&fsnotify.Remove&fsnotify.Remove != 0 { + eventsWG.Done() + return + } + + case err, ok := <-watcher.Errors: + if ok { // 'Errors' channel is not closed + log.Printf("watcher error: %v\n", err) } - case err := <-watcher.Errors: - log.Println("error:", err) + eventsWG.Done() + return } } }() - watcher.Add(configDir) - <-done + initWG.Done() // done initalizing the watch in this go routine, so the parent routine can move on... + eventsWG.Wait() // now, wait for event loop to end in this go-routine... }() + initWG.Wait() // make sure that the go routine above fully ended before returning } -// SetConfigFile explicitly defines the path, name and extension of the config file -// Viper will use this and not check any of the config paths +// SetConfigFile explicitly defines the path, name and extension of the config file. +// Viper will use this and not check any of the config paths. func SetConfigFile(in string) { v.SetConfigFile(in) } func (v *Viper) SetConfigFile(in string) { if in != "" { @@ -293,8 +357,8 @@ func (v *Viper) SetConfigFile(in string) { } // SetEnvPrefix defines a prefix that ENVIRONMENT variables will use. -// E.g. if your prefix is "spf", the env registry -// will look for env. variables that start with "SPF_" +// E.g. if your prefix is "spf", the env registry will look for env +// variables that start with "SPF_". 
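// Editor's sketch (not part of the vendored viper source): typical use of the
// reworked WatchConfig above. The path is hypothetical. With this change,
// WatchConfig only returns once the fsnotify watcher is installed, and a
// symlink swap of the config file (e.g. a Kubernetes ConfigMap update) also
// triggers a reload.
func exampleWatchConfigUsage() {
	SetConfigFile("/etc/example/config.yaml")
	if err := ReadInConfig(); err != nil {
		log.Println("initial read failed:", err)
	}
	OnConfigChange(func(e fsnotify.Event) {
		fmt.Println("config file changed:", e.Name)
	})
	WatchConfig() // the watch is active by the time this call returns
}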
func SetEnvPrefix(in string) { v.SetEnvPrefix(in) } func (v *Viper) SetEnvPrefix(in string) { if in != "" { @@ -310,21 +374,32 @@ func (v *Viper) mergeWithEnvPrefix(in string) string { return strings.ToUpper(in) } +// AllowEmptyEnv tells Viper to consider set, +// but empty environment variables as valid values instead of falling back. +// For backward compatibility reasons this is false by default. +func AllowEmptyEnv(allowEmptyEnv bool) { v.AllowEmptyEnv(allowEmptyEnv) } +func (v *Viper) AllowEmptyEnv(allowEmptyEnv bool) { + v.allowEmptyEnv = allowEmptyEnv +} + // TODO: should getEnv logic be moved into find(). Can generalize the use of // rewriting keys many things, Ex: Get('someKey') -> some_key -// (cammel case to snake case for JSON keys perhaps) +// (camel case to snake case for JSON keys perhaps) // getEnv is a wrapper around os.Getenv which replaces characters in the original -// key. This allows env vars which have different keys then the config object -// keys -func (v *Viper) getEnv(key string) string { +// key. This allows env vars which have different keys than the config object +// keys. +func (v *Viper) getEnv(key string) (string, bool) { if v.envKeyReplacer != nil { key = v.envKeyReplacer.Replace(key) } - return os.Getenv(key) + + val, ok := os.LookupEnv(key) + + return val, ok && (v.allowEmptyEnv || val != "") } -// ConfigFileUsed returns the file used to populate the config registry +// ConfigFileUsed returns the file used to populate the config registry. func ConfigFileUsed() string { return v.ConfigFileUsed() } func (v *Viper) ConfigFileUsed() string { return v.configFile } @@ -548,10 +623,9 @@ func (v *Viper) isPathShadowedInFlatMap(path []string, mi interface{}) string { // "foo.bar.baz" in a lower-priority map func (v *Viper) isPathShadowedInAutoEnv(path []string) string { var parentKey string - var val string for i := 1; i < len(path); i++ { parentKey = strings.Join(path[0:i], v.keyDelim) - if val = v.getEnv(v.mergeWithEnvPrefix(parentKey)); val != "" { + if _, ok := v.getEnv(v.mergeWithEnvPrefix(parentKey)); ok { return parentKey } } @@ -611,8 +685,10 @@ func (v *Viper) Get(key string) interface{} { return cast.ToBool(val) case string: return cast.ToString(val) - case int64, int32, int16, int8, int: + case int32, int16, int8, int: return cast.ToInt(val) + case int64: + return cast.ToInt64(val) case float64, float32: return cast.ToFloat64(val) case time.Time: @@ -662,6 +738,12 @@ func (v *Viper) GetInt(key string) int { return cast.ToInt(v.Get(key)) } +// GetInt32 returns the value associated with the key as an integer. +func GetInt32(key string) int32 { return v.GetInt32(key) } +func (v *Viper) GetInt32(key string) int32 { + return cast.ToInt32(v.Get(key)) +} + // GetInt64 returns the value associated with the key as an integer. func GetInt64(key string) int64 { return v.GetInt64(key) } func (v *Viper) GetInt64(key string) int64 { @@ -719,43 +801,50 @@ func (v *Viper) GetSizeInBytes(key string) uint { } // UnmarshalKey takes a single key and unmarshals it into a Struct. -func UnmarshalKey(key string, rawVal interface{}) error { return v.UnmarshalKey(key, rawVal) } -func (v *Viper) UnmarshalKey(key string, rawVal interface{}) error { - err := decode(v.Get(key), defaultDecoderConfig(rawVal)) +func UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) error { + return v.UnmarshalKey(key, rawVal, opts...) 
+} +func (v *Viper) UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) error { + err := decode(v.Get(key), defaultDecoderConfig(rawVal, opts...)) if err != nil { return err } - v.insensitiviseMaps() - return nil } // Unmarshal unmarshals the config into a Struct. Make sure that the tags // on the fields of the structure are properly set. -func Unmarshal(rawVal interface{}) error { return v.Unmarshal(rawVal) } -func (v *Viper) Unmarshal(rawVal interface{}) error { - err := decode(v.AllSettings(), defaultDecoderConfig(rawVal)) +func Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { + return v.Unmarshal(rawVal, opts...) +} +func (v *Viper) Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { + err := decode(v.AllSettings(), defaultDecoderConfig(rawVal, opts...)) if err != nil { return err } - v.insensitiviseMaps() - return nil } // defaultDecoderConfig returns default mapsstructure.DecoderConfig with suppot -// of time.Duration values -func defaultDecoderConfig(output interface{}) *mapstructure.DecoderConfig { - return &mapstructure.DecoderConfig{ +// of time.Duration values & string slices +func defaultDecoderConfig(output interface{}, opts ...DecoderConfigOption) *mapstructure.DecoderConfig { + c := &mapstructure.DecoderConfig{ Metadata: nil, Result: output, WeaklyTypedInput: true, - DecodeHook: mapstructure.StringToTimeDurationHookFunc(), + DecodeHook: mapstructure.ComposeDecodeHookFunc( + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToSliceHookFunc(","), + ), } + for _, opt := range opts { + opt(c) + } + return c } // A wrapper around mapstructure.Decode that mimics the WeakDecode functionality @@ -779,8 +868,6 @@ func (v *Viper) UnmarshalExact(rawVal interface{}) error { return err } - v.insensitiviseMaps() - return nil } @@ -815,7 +902,7 @@ func (v *Viper) BindFlagValues(flags FlagValueSet) (err error) { } // BindFlagValue binds a specific key to a FlagValue. -// Example(where serverCmd is a Cobra instance): +// Example (where serverCmd is a Cobra instance): // // serverCmd.Flags().Int("port", 1138, "Port to run Application server on") // Viper.BindFlagValue("port", serverCmd.Flags().Lookup("port")) @@ -911,7 +998,7 @@ func (v *Viper) find(lcaseKey string) interface{} { if v.automaticEnvApplied { // even if it hasn't been registered, if automaticEnv is used, // check any Get request - if val = v.getEnv(v.mergeWithEnvPrefix(lcaseKey)); val != "" { + if val, ok := v.getEnv(v.mergeWithEnvPrefix(lcaseKey)); ok { return val } if nested && v.isPathShadowedInAutoEnv(path) != "" { @@ -920,7 +1007,7 @@ func (v *Viper) find(lcaseKey string) interface{} { } envkey, exists := v.env[lcaseKey] if exists { - if val = v.getEnv(envkey); val != "" { + if val, ok := v.getEnv(envkey); ok { return val } } @@ -1085,7 +1172,7 @@ func (v *Viper) SetDefault(key string, value interface{}) { deepestMap[lastKey] = value } -// Set sets the value for the key in the override regiser. +// Set sets the value for the key in the override register. // Set is case-insensitive for a key. // Will be used instead of values obtained via // flags, config file, ENV, default, or key/value store. 
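// Editor's sketch (not part of the vendored viper source): the practical
// effect of the new defaultDecoderConfig above. Because the default decode
// hook now composes StringToSliceHookFunc(","), a plain comma-separated
// string unmarshals into a []string field without any extra options. Key,
// struct and function names are hypothetical.
func exampleDefaultSliceDecoding() {
	type appConfig struct {
		Regions []string
	}

	Set("regions", "us-east-1,eu-west-1")

	var ac appConfig
	if err := Unmarshal(&ac); err != nil {
		fmt.Println("unable to decode into struct:", err)
	}
	fmt.Println(ac.Regions) // [us-east-1 eu-west-1]
}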
@@ -1117,6 +1204,7 @@ func (v *Viper) ReadInConfig() error { return UnsupportedConfigError(v.getConfigType()) } + jww.DEBUG.Println("Reading file: ", filename) file, err := afero.ReadFile(v.fs, filename) if err != nil { return err @@ -1165,17 +1253,214 @@ func (v *Viper) ReadConfig(in io.Reader) error { // MergeConfig merges a new configuration with an existing config. func MergeConfig(in io.Reader) error { return v.MergeConfig(in) } func (v *Viper) MergeConfig(in io.Reader) error { - if v.config == nil { - v.config = make(map[string]interface{}) - } cfg := make(map[string]interface{}) if err := v.unmarshalReader(in, cfg); err != nil { return err } + return v.MergeConfigMap(cfg) +} + +// MergeConfigMap merges the configuration from the map given with an existing config. +// Note that the map given may be modified. +func MergeConfigMap(cfg map[string]interface{}) error { return v.MergeConfigMap(cfg) } +func (v *Viper) MergeConfigMap(cfg map[string]interface{}) error { + if v.config == nil { + v.config = make(map[string]interface{}) + } + insensitiviseMap(cfg) mergeMaps(cfg, v.config, nil) return nil } +// WriteConfig writes the current configuration to a file. +func WriteConfig() error { return v.WriteConfig() } +func (v *Viper) WriteConfig() error { + filename, err := v.getConfigFile() + if err != nil { + return err + } + return v.writeConfig(filename, true) +} + +// SafeWriteConfig writes current configuration to file only if the file does not exist. +func SafeWriteConfig() error { return v.SafeWriteConfig() } +func (v *Viper) SafeWriteConfig() error { + filename, err := v.getConfigFile() + if err != nil { + return err + } + return v.writeConfig(filename, false) +} + +// WriteConfigAs writes current configuration to a given filename. +func WriteConfigAs(filename string) error { return v.WriteConfigAs(filename) } +func (v *Viper) WriteConfigAs(filename string) error { + return v.writeConfig(filename, true) +} + +// SafeWriteConfigAs writes current configuration to a given filename if it does not exist. +func SafeWriteConfigAs(filename string) error { return v.SafeWriteConfigAs(filename) } +func (v *Viper) SafeWriteConfigAs(filename string) error { + return v.writeConfig(filename, false) +} + +func writeConfig(filename string, force bool) error { return v.writeConfig(filename, force) } +func (v *Viper) writeConfig(filename string, force bool) error { + jww.INFO.Println("Attempting to write configuration to file.") + ext := filepath.Ext(filename) + if len(ext) <= 1 { + return fmt.Errorf("Filename: %s requires valid extension.", filename) + } + configType := ext[1:] + if !stringInSlice(configType, SupportedExts) { + return UnsupportedConfigError(configType) + } + if v.config == nil { + v.config = make(map[string]interface{}) + } + var flags int + if force == true { + flags = os.O_CREATE | os.O_TRUNC | os.O_WRONLY + } else { + if _, err := os.Stat(filename); os.IsNotExist(err) { + flags = os.O_WRONLY + } else { + return fmt.Errorf("File: %s exists. Use WriteConfig to overwrite.", filename) + } + } + f, err := v.fs.OpenFile(filename, flags, os.FileMode(0644)) + if err != nil { + return err + } + return v.marshalWriter(f, configType) +} + +// Unmarshal a Reader into a map. +// Should probably be an unexported function. 
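// Editor's sketch (not part of the vendored viper source): the new merge and
// write entry points added above. The filename and keys are hypothetical; the
// output format is inferred from the file extension inside writeConfig.
func exampleWriteAndMergeUsage() {
	// Merge a plain map over the current configuration; MergeConfigMap
	// lower-cases the keys and then merges them into the existing config.
	_ = MergeConfigMap(map[string]interface{}{
		"feature": map[string]interface{}{"enabled": true},
	})

	// Write the resulting settings out; the ".yaml" extension selects the
	// YAML branch of marshalWriter.
	if err := WriteConfigAs("/tmp/example-config.yaml"); err != nil {
		fmt.Println("write failed:", err)
	}
}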
+func unmarshalReader(in io.Reader, c map[string]interface{}) error { + return v.unmarshalReader(in, c) +} +func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error { + buf := new(bytes.Buffer) + buf.ReadFrom(in) + + switch strings.ToLower(v.getConfigType()) { + case "yaml", "yml": + if err := yaml.Unmarshal(buf.Bytes(), &c); err != nil { + return ConfigParseError{err} + } + + case "json": + if err := json.Unmarshal(buf.Bytes(), &c); err != nil { + return ConfigParseError{err} + } + + case "hcl": + obj, err := hcl.Parse(string(buf.Bytes())) + if err != nil { + return ConfigParseError{err} + } + if err = hcl.DecodeObject(&c, obj); err != nil { + return ConfigParseError{err} + } + + case "toml": + tree, err := toml.LoadReader(buf) + if err != nil { + return ConfigParseError{err} + } + tmap := tree.ToMap() + for k, v := range tmap { + c[k] = v + } + + case "properties", "props", "prop": + v.properties = properties.NewProperties() + var err error + if v.properties, err = properties.Load(buf.Bytes(), properties.UTF8); err != nil { + return ConfigParseError{err} + } + for _, key := range v.properties.Keys() { + value, _ := v.properties.Get(key) + // recursively build nested maps + path := strings.Split(key, ".") + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(c, path[0:len(path)-1]) + // set innermost value + deepestMap[lastKey] = value + } + } + + insensitiviseMap(c) + return nil +} + +// Marshal a map into Writer. +func marshalWriter(f afero.File, configType string) error { + return v.marshalWriter(f, configType) +} +func (v *Viper) marshalWriter(f afero.File, configType string) error { + c := v.AllSettings() + switch configType { + case "json": + b, err := json.MarshalIndent(c, "", " ") + if err != nil { + return ConfigMarshalError{err} + } + _, err = f.WriteString(string(b)) + if err != nil { + return ConfigMarshalError{err} + } + + case "hcl": + b, err := json.Marshal(c) + ast, err := hcl.Parse(string(b)) + if err != nil { + return ConfigMarshalError{err} + } + err = printer.Fprint(f, ast.Node) + if err != nil { + return ConfigMarshalError{err} + } + + case "prop", "props", "properties": + if v.properties == nil { + v.properties = properties.NewProperties() + } + p := v.properties + for _, key := range v.AllKeys() { + _, _, err := p.Set(key, v.GetString(key)) + if err != nil { + return ConfigMarshalError{err} + } + } + _, err := p.WriteComment(f, "#", properties.UTF8) + if err != nil { + return ConfigMarshalError{err} + } + + case "toml": + t, err := toml.TreeFromMap(c) + if err != nil { + return ConfigMarshalError{err} + } + s := t.String() + if _, err := f.WriteString(s); err != nil { + return ConfigMarshalError{err} + } + + case "yaml", "yml": + b, err := yaml.Marshal(c) + if err != nil { + return ConfigMarshalError{err} + } + if _, err = f.WriteString(string(b)); err != nil { + return ConfigMarshalError{err} + } + } + return nil +} + func keyExists(k string, m map[string]interface{}) string { lk := strings.ToLower(k) for mk := range m { @@ -1288,23 +1573,6 @@ func (v *Viper) WatchRemoteConfigOnChannel() error { return v.watchKeyValueConfigOnChannel() } -// Unmarshall a Reader into a map. -// Should probably be an unexported function. 
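// Editor's sketch (not part of the vendored viper source): unmarshalReader and
// marshalWriter above combine to give simple format conversion. Reading YAML
// from memory and writing it back out as JSON selects the json branch of
// marshalWriter via the ".json" extension. The in-memory filesystem, YAML
// literal and function name are illustrative only.
func exampleFormatConversion() {
	v := New()
	v.SetFs(afero.NewMemMapFs())
	v.SetConfigType("yaml")

	yamlSrc := []byte("name: steve\nbeard: true\n")
	if err := v.ReadConfig(bytes.NewBuffer(yamlSrc)); err != nil {
		fmt.Println("read failed:", err)
	}
	if err := v.WriteConfigAs("converted.json"); err != nil {
		fmt.Println("write failed:", err)
	}
}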
-func unmarshalReader(in io.Reader, c map[string]interface{}) error { - return v.unmarshalReader(in, c) -} - -func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error { - return unmarshallConfigReader(in, c, v.getConfigType()) -} - -func (v *Viper) insensitiviseMaps() { - insensitiviseMap(v.config) - insensitiviseMap(v.defaults) - insensitiviseMap(v.override) - insensitiviseMap(v.kvstore) -} - // Retrieve the first found remote configuration. func (v *Viper) getKeyValueConfig() error { if RemoteConfig == nil { @@ -1517,25 +1785,21 @@ func (v *Viper) getConfigType() string { } func (v *Viper) getConfigFile() (string, error) { - // if explicitly set, then use it - if v.configFile != "" { - return v.configFile, nil - } - - cf, err := v.findConfigFile() - if err != nil { - return "", err + if v.configFile == "" { + cf, err := v.findConfigFile() + if err != nil { + return "", err + } + v.configFile = cf } - - v.configFile = cf - return v.getConfigFile() + return v.configFile, nil } func (v *Viper) searchInPath(in string) (filename string) { jww.DEBUG.Println("Searching for config in ", in) for _, ext := range SupportedExts { jww.DEBUG.Println("Checking for", filepath.Join(in, v.configName+"."+ext)) - if b, _ := exists(filepath.Join(in, v.configName+"."+ext)); b { + if b, _ := exists(v.fs, filepath.Join(in, v.configName+"."+ext)); b { jww.DEBUG.Println("Found: ", filepath.Join(in, v.configName+"."+ext)) return filepath.Join(in, v.configName+"."+ext) } @@ -1547,7 +1811,6 @@ func (v *Viper) searchInPath(in string) (filename string) { // Search all configPaths for any config file. // Returns the first path that exists (and is a config file). func (v *Viper) findConfigFile() (string, error) { - jww.INFO.Println("Searching for config in ", v.configPaths) for _, cp := range v.configPaths { diff --git a/vendor/github.com/spf13/viper/viper_test.go b/vendor/github.com/spf13/viper/viper_test.go index 774ca11..f4263d3 100644 --- a/vendor/github.com/spf13/viper/viper_test.go +++ b/vendor/github.com/spf13/viper/viper_test.go @@ -7,21 +7,29 @@ package viper import ( "bytes" + "encoding/json" "fmt" "io" "io/ioutil" "os" + "os/exec" "path" "reflect" + "runtime" "sort" "strings" + "sync" "testing" "time" + "github.com/fsnotify/fsnotify" + "github.com/mitchellh/mapstructure" + "github.com/spf13/afero" "github.com/spf13/cast" "github.com/spf13/pflag" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var yamlExample = []byte(`Hacker: true @@ -262,7 +270,7 @@ func TestDefault(t *testing.T) { assert.Equal(t, "leather", Get("clothing.jacket")) } -func TestUnmarshalling(t *testing.T) { +func TestUnmarshaling(t *testing.T) { SetConfigType("yaml") r := bytes.NewReader(yamlExample) @@ -380,6 +388,36 @@ func TestEnv(t *testing.T) { } +func TestEmptyEnv(t *testing.T) { + initJSON() + + BindEnv("type") // Empty environment variable + BindEnv("name") // Bound, but not set environment variable + + os.Clearenv() + + os.Setenv("TYPE", "") + + assert.Equal(t, "donut", Get("type")) + assert.Equal(t, "Cake", Get("name")) +} + +func TestEmptyEnv_Allowed(t *testing.T) { + initJSON() + + AllowEmptyEnv(true) + + BindEnv("type") // Empty environment variable + BindEnv("name") // Bound, but not set environment variable + + os.Clearenv() + + os.Setenv("TYPE", "") + + assert.Equal(t, "", Get("type")) + assert.Equal(t, "Cake", Get("name")) +} + func TestEnvPrefix(t *testing.T) { initJSON() @@ -417,7 +455,7 @@ func TestAutoEnvWithPrefix(t *testing.T) { assert.Equal(t, "13", Get("bar")) } 
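// Editor's sketch (not part of the vendored viper tests): the user-visible
// effect of the new AllowEmptyEnv option exercised by TestEmptyEnv and
// TestEmptyEnv_Allowed above. An empty-but-set variable normally falls through
// to lower-priority sources; with AllowEmptyEnv(true) it is honoured as an
// empty string. The env var name and default key below are hypothetical.
func exampleAllowEmptyEnv() {
	Reset()
	SetDefault("greeting", "hello")
	BindEnv("greeting", "EXAMPLE_GREETING")
	os.Setenv("EXAMPLE_GREETING", "")

	fmt.Println(Get("greeting")) // "hello" – empty env value treated as unset

	AllowEmptyEnv(true)
	fmt.Println(Get("greeting")) // "" – empty env value now wins over the default
}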
-func TestSetEnvReplacer(t *testing.T) { +func TestSetEnvKeyReplacer(t *testing.T) { Reset() AutomaticEnv() @@ -502,6 +540,42 @@ func TestUnmarshal(t *testing.T) { assert.Equal(t, &config{Name: "Steve", Port: 1234, Duration: time.Second + time.Millisecond}, &C) } +func TestUnmarshalWithDecoderOptions(t *testing.T) { + Set("credentials", "{\"foo\":\"bar\"}") + + opt := DecodeHook(mapstructure.ComposeDecodeHookFunc( + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToSliceHookFunc(","), + // Custom Decode Hook Function + func(rf reflect.Kind, rt reflect.Kind, data interface{}) (interface{}, error) { + if rf != reflect.String || rt != reflect.Map { + return data, nil + } + m := map[string]string{} + raw := data.(string) + if raw == "" { + return m, nil + } + return m, json.Unmarshal([]byte(raw), &m) + }, + )) + + type config struct { + Credentials map[string]string + } + + var C config + + err := Unmarshal(&C, opt) + if err != nil { + t.Fatalf("unable to decode into struct, %v", err) + } + + assert.Equal(t, &config{ + Credentials: map[string]string{"foo": "bar"}, + }, &C) +} + func TestBindPFlags(t *testing.T) { v := New() // create independent Viper object flagSet := pflag.NewFlagSet("test", pflag.ContinueOnError) @@ -539,24 +613,28 @@ func TestBindPFlags(t *testing.T) { } func TestBindPFlagsStringSlice(t *testing.T) { - for _, testValue := range []struct { + tests := []struct { Expected []string Value string }{ - {[]string{}, ""}, + {nil, ""}, {[]string{"jeden"}, "jeden"}, {[]string{"dwa", "trzy"}, "dwa,trzy"}, - {[]string{"cztery", "piec , szesc"}, "cztery,\"piec , szesc\""}} { + {[]string{"cztery", "piec , szesc"}, "cztery,\"piec , szesc\""}, + } + + v := New() // create independent Viper object + defaultVal := []string{"default"} + v.SetDefault("stringslice", defaultVal) + + for _, testValue := range tests { + flagSet := pflag.NewFlagSet("test", pflag.ContinueOnError) + flagSet.StringSlice("stringslice", testValue.Expected, "test") for _, changed := range []bool{true, false} { - v := New() // create independent Viper object - flagSet := pflag.NewFlagSet("test", pflag.ContinueOnError) - flagSet.StringSlice("stringslice", testValue.Expected, "test") - flagSet.Visit(func(f *pflag.Flag) { - if len(testValue.Value) > 0 { - f.Value.Set(testValue.Value) - f.Changed = changed - } + flagSet.VisitAll(func(f *pflag.Flag) { + f.Value.Set(testValue.Value) + f.Changed = changed }) err := v.BindPFlags(flagSet) @@ -571,7 +649,11 @@ func TestBindPFlagsStringSlice(t *testing.T) { if err := v.Unmarshal(val); err != nil { t.Fatalf("%+#v cannot unmarshal: %s", testValue.Value, err) } - assert.Equal(t, testValue.Expected, val.StringSlice) + if changed { + assert.Equal(t, testValue.Expected, val.StringSlice) + } else { + assert.Equal(t, defaultVal, val.StringSlice) + } } } } @@ -847,6 +929,190 @@ func TestSub(t *testing.T) { assert.Equal(t, (*Viper)(nil), subv) } +var hclWriteExpected = []byte(`"foos" = { + "foo" = { + "key" = 1 + } + + "foo" = { + "key" = 2 + } + + "foo" = { + "key" = 3 + } + + "foo" = { + "key" = 4 + } +} + +"id" = "0001" + +"name" = "Cake" + +"ppu" = 0.55 + +"type" = "donut"`) + +func TestWriteConfigHCL(t *testing.T) { + v := New() + fs := afero.NewMemMapFs() + v.SetFs(fs) + v.SetConfigName("c") + v.SetConfigType("hcl") + err := v.ReadConfig(bytes.NewBuffer(hclExample)) + if err != nil { + t.Fatal(err) + } + if err := v.WriteConfigAs("c.hcl"); err != nil { + t.Fatal(err) + } + read, err := afero.ReadFile(fs, "c.hcl") + if err != nil { + t.Fatal(err) + } + assert.Equal(t, 
hclWriteExpected, read) +} + +var jsonWriteExpected = []byte(`{ + "batters": { + "batter": [ + { + "type": "Regular" + }, + { + "type": "Chocolate" + }, + { + "type": "Blueberry" + }, + { + "type": "Devil's Food" + } + ] + }, + "id": "0001", + "name": "Cake", + "ppu": 0.55, + "type": "donut" +}`) + +func TestWriteConfigJson(t *testing.T) { + v := New() + fs := afero.NewMemMapFs() + v.SetFs(fs) + v.SetConfigName("c") + v.SetConfigType("json") + err := v.ReadConfig(bytes.NewBuffer(jsonExample)) + if err != nil { + t.Fatal(err) + } + if err := v.WriteConfigAs("c.json"); err != nil { + t.Fatal(err) + } + read, err := afero.ReadFile(fs, "c.json") + if err != nil { + t.Fatal(err) + } + assert.Equal(t, jsonWriteExpected, read) +} + +var propertiesWriteExpected = []byte(`p_id = 0001 +p_type = donut +p_name = Cake +p_ppu = 0.55 +p_batters.batter.type = Regular +`) + +func TestWriteConfigProperties(t *testing.T) { + v := New() + fs := afero.NewMemMapFs() + v.SetFs(fs) + v.SetConfigName("c") + v.SetConfigType("properties") + err := v.ReadConfig(bytes.NewBuffer(propertiesExample)) + if err != nil { + t.Fatal(err) + } + if err := v.WriteConfigAs("c.properties"); err != nil { + t.Fatal(err) + } + read, err := afero.ReadFile(fs, "c.properties") + if err != nil { + t.Fatal(err) + } + assert.Equal(t, propertiesWriteExpected, read) +} + +func TestWriteConfigTOML(t *testing.T) { + fs := afero.NewMemMapFs() + v := New() + v.SetFs(fs) + v.SetConfigName("c") + v.SetConfigType("toml") + err := v.ReadConfig(bytes.NewBuffer(tomlExample)) + if err != nil { + t.Fatal(err) + } + if err := v.WriteConfigAs("c.toml"); err != nil { + t.Fatal(err) + } + + // The TOML String method does not order the contents. + // Therefore, we must read the generated file and compare the data. + v2 := New() + v2.SetFs(fs) + v2.SetConfigName("c") + v2.SetConfigType("toml") + v2.SetConfigFile("c.toml") + err = v2.ReadInConfig() + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, v.GetString("title"), v2.GetString("title")) + assert.Equal(t, v.GetString("owner.bio"), v2.GetString("owner.bio")) + assert.Equal(t, v.GetString("owner.dob"), v2.GetString("owner.dob")) + assert.Equal(t, v.GetString("owner.organization"), v2.GetString("owner.organization")) +} + +var yamlWriteExpected = []byte(`age: 35 +beard: true +clothing: + jacket: leather + pants: + size: large + trousers: denim +eyes: brown +hacker: true +hobbies: +- skateboarding +- snowboarding +- go +name: steve +`) + +func TestWriteConfigYAML(t *testing.T) { + v := New() + fs := afero.NewMemMapFs() + v.SetFs(fs) + v.SetConfigName("c") + v.SetConfigType("yaml") + err := v.ReadConfig(bytes.NewBuffer(yamlExample)) + if err != nil { + t.Fatal(err) + } + if err := v.WriteConfigAs("c.yaml"); err != nil { + t.Fatal(err) + } + read, err := afero.ReadFile(fs, "c.yaml") + if err != nil { + t.Fatal(err) + } + assert.Equal(t, yamlWriteExpected, read) +} + var yamlMergeExampleTgt = []byte(` hello: pop: 37890 @@ -879,8 +1145,8 @@ func TestMergeConfig(t *testing.T) { t.Fatalf("pop != 37890, = %d", pop) } - if pop := v.GetInt("hello.lagrenum"); pop != 765432101234567 { - t.Fatalf("lagrenum != 765432101234567, = %d", pop) + if pop := v.GetInt32("hello.pop"); pop != int32(37890) { + t.Fatalf("pop != 37890, = %d", pop) } if pop := v.GetInt64("hello.lagrenum"); pop != int64(765432101234567) { @@ -903,8 +1169,8 @@ func TestMergeConfig(t *testing.T) { t.Fatalf("pop != 45000, = %d", pop) } - if pop := v.GetInt("hello.lagrenum"); pop != 7654321001234567 { - t.Fatalf("lagrenum != 7654321001234567, = %d", 
pop) + if pop := v.GetInt32("hello.pop"); pop != int32(45000) { + t.Fatalf("pop != 45000, = %d", pop) } if pop := v.GetInt64("hello.lagrenum"); pop != int64(7654321001234567) { @@ -964,6 +1230,48 @@ func TestMergeConfigNoMerge(t *testing.T) { } } +func TestMergeConfigMap(t *testing.T) { + v := New() + v.SetConfigType("yml") + if err := v.ReadConfig(bytes.NewBuffer(yamlMergeExampleTgt)); err != nil { + t.Fatal(err) + } + + assert := func(i int) { + large := v.GetInt("hello.lagrenum") + pop := v.GetInt("hello.pop") + if large != 765432101234567 { + t.Fatal("Got large num:", large) + } + + if pop != i { + t.Fatal("Got pop:", pop) + } + } + + assert(37890) + + update := map[string]interface{}{ + "Hello": map[string]interface{}{ + "Pop": 1234, + }, + "World": map[interface{}]interface{}{ + "Rock": 345, + }, + } + + if err := v.MergeConfigMap(update); err != nil { + t.Fatal(err) + } + + if rock := v.GetInt("world.rock"); rock != 345 { + t.Fatal("Got rock:", rock) + } + + assert(1234) + +} + func TestUnmarshalingWithAliases(t *testing.T) { v := New() v.SetDefault("ID", 1) @@ -1183,6 +1491,111 @@ func doTestCaseInsensitive(t *testing.T, typ, config string) { } +func newViperWithConfigFile(t *testing.T) (*Viper, string, func()) { + watchDir, err := ioutil.TempDir("", "") + require.Nil(t, err) + configFile := path.Join(watchDir, "config.yaml") + err = ioutil.WriteFile(configFile, []byte("foo: bar\n"), 0640) + require.Nil(t, err) + cleanup := func() { + os.RemoveAll(watchDir) + } + v := New() + v.SetConfigFile(configFile) + err = v.ReadInConfig() + require.Nil(t, err) + require.Equal(t, "bar", v.Get("foo")) + return v, configFile, cleanup +} + +func newViperWithSymlinkedConfigFile(t *testing.T) (*Viper, string, string, func()) { + watchDir, err := ioutil.TempDir("", "") + require.Nil(t, err) + dataDir1 := path.Join(watchDir, "data1") + err = os.Mkdir(dataDir1, 0777) + require.Nil(t, err) + realConfigFile := path.Join(dataDir1, "config.yaml") + t.Logf("Real config file location: %s\n", realConfigFile) + err = ioutil.WriteFile(realConfigFile, []byte("foo: bar\n"), 0640) + require.Nil(t, err) + cleanup := func() { + os.RemoveAll(watchDir) + } + // now, symlink the tm `data1` dir to `data` in the baseDir + os.Symlink(dataDir1, path.Join(watchDir, "data")) + // and link the `/datadir1/config.yaml` to `/config.yaml` + configFile := path.Join(watchDir, "config.yaml") + os.Symlink(path.Join(watchDir, "data", "config.yaml"), configFile) + t.Logf("Config file location: %s\n", path.Join(watchDir, "config.yaml")) + // init Viper + v := New() + v.SetConfigFile(configFile) + err = v.ReadInConfig() + require.Nil(t, err) + require.Equal(t, "bar", v.Get("foo")) + return v, watchDir, configFile, cleanup +} + +func TestWatchFile(t *testing.T) { + if runtime.GOOS == "linux" { + // TODO(bep) FIX ME + t.Skip("Skip test on Linux ...") + } + + t.Run("file content changed", func(t *testing.T) { + // given a `config.yaml` file being watched + v, configFile, cleanup := newViperWithConfigFile(t) + defer cleanup() + _, err := os.Stat(configFile) + require.NoError(t, err) + t.Logf("test config file: %s\n", configFile) + wg := sync.WaitGroup{} + wg.Add(1) + v.OnConfigChange(func(in fsnotify.Event) { + t.Logf("config file changed") + wg.Done() + }) + v.WatchConfig() + // when overwriting the file and waiting for the custom change notification handler to be triggered + err = ioutil.WriteFile(configFile, []byte("foo: baz\n"), 0640) + wg.Wait() + // then the config value should have changed + require.Nil(t, err) + assert.Equal(t, 
"baz", v.Get("foo")) + }) + + t.Run("link to real file changed (à la Kubernetes)", func(t *testing.T) { + // skip if not executed on Linux + if runtime.GOOS != "linux" { + t.Skipf("Skipping test as symlink replacements don't work on non-linux environment...") + } + v, watchDir, _, _ := newViperWithSymlinkedConfigFile(t) + // defer cleanup() + wg := sync.WaitGroup{} + v.WatchConfig() + v.OnConfigChange(func(in fsnotify.Event) { + t.Logf("config file changed") + wg.Done() + }) + wg.Add(1) + // when link to another `config.yaml` file + dataDir2 := path.Join(watchDir, "data2") + err := os.Mkdir(dataDir2, 0777) + require.Nil(t, err) + configFile2 := path.Join(dataDir2, "config.yaml") + err = ioutil.WriteFile(configFile2, []byte("foo: baz\n"), 0640) + require.Nil(t, err) + // change the symlink using the `ln -sfn` command + err = exec.Command("ln", "-sfn", dataDir2, path.Join(watchDir, "data")).Run() + require.Nil(t, err) + wg.Wait() + // then + require.Nil(t, err) + assert.Equal(t, "baz", v.Get("foo")) + }) + +} + func BenchmarkGetBool(b *testing.B) { key := "BenchmarkGetBool" v = New() diff --git a/vendor/github.com/trivago/tgo/CHANGELOG.md b/vendor/github.com/trivago/tgo/CHANGELOG.md new file mode 100644 index 0000000..98c918e --- /dev/null +++ b/vendor/github.com/trivago/tgo/CHANGELOG.md @@ -0,0 +1,6 @@ +# 1.0.1 + +## Fixed in 1.0.1 + +- Windows build of tos/user.go now work +- Added documentation to tos/user.go diff --git a/vendor/github.com/trivago/tgo/README.md b/vendor/github.com/trivago/tgo/README.md index 337d816..c5beab3 100644 --- a/vendor/github.com/trivago/tgo/README.md +++ b/vendor/github.com/trivago/tgo/README.md @@ -4,11 +4,9 @@ [![Go Report Card](http://goreportcard.com/badge/trivago/tgo)](http://goreportcard.com/report/trivago/tgo) [![Build Status](https://travis-ci.org/trivago/tgo.svg?branch=master)](https://travis-ci.org/trivago/tgo) -*This package is currently under heavy development and should be used with care.* - Trivago go extensions and utilities. -This is a library package containing tools that aid trivago with golang development across different projects. +This package contains utility functions and structs that aid trivago with golang development across different projects. This package and all subpackage match the golang standard library package names along with a "t" prefix. -I.e. type that would be placed in the "net" package can be found in the "tnet" package, etc.. +I.e. types that would be placed in the "net" package can be found in the "tnet" package, etc.. This prefix was chosen to allow mixing standard libary and tgo without having to rename package imports all the time. diff --git a/vendor/github.com/trivago/tgo/errorstack.go b/vendor/github.com/trivago/tgo/errorstack.go index fc1dc93..4f82c60 100644 --- a/vendor/github.com/trivago/tgo/errorstack.go +++ b/vendor/github.com/trivago/tgo/errorstack.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/errorstack_test.go b/vendor/github.com/trivago/tgo/errorstack_test.go index b1a520b..5602012 100644 --- a/vendor/github.com/trivago/tgo/errorstack_test.go +++ b/vendor/github.com/trivago/tgo/errorstack_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/metric.go b/vendor/github.com/trivago/tgo/metric.go index 66f958e..007a1a5 100644 --- a/vendor/github.com/trivago/tgo/metric.go +++ b/vendor/github.com/trivago/tgo/metric.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/metric_test.go b/vendor/github.com/trivago/tgo/metric_test.go index 4878c49..3fe124c 100644 --- a/vendor/github.com/trivago/tgo/metric_test.go +++ b/vendor/github.com/trivago/tgo/metric_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/metricserver.go b/vendor/github.com/trivago/tgo/metricserver.go index 2b5f7fc..3dc6abe 100644 --- a/vendor/github.com/trivago/tgo/metricserver.go +++ b/vendor/github.com/trivago/tgo/metricserver.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/runtime.go b/vendor/github.com/trivago/tgo/runtime.go index 0e62acb..25dd1cb 100644 --- a/vendor/github.com/trivago/tgo/runtime.go +++ b/vendor/github.com/trivago/tgo/runtime.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/runtime_test.go b/vendor/github.com/trivago/tgo/runtime_test.go index b15a0d8..de80c85 100644 --- a/vendor/github.com/trivago/tgo/runtime_test.go +++ b/vendor/github.com/trivago/tgo/runtime_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tcontainer/arrays.go b/vendor/github.com/trivago/tgo/tcontainer/arrays.go index 4aa4d03..ede9aed 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/arrays.go +++ b/vendor/github.com/trivago/tgo/tcontainer/arrays.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tcontainer/arrays_test.go b/vendor/github.com/trivago/tgo/tcontainer/arrays_test.go index 85e8d27..beebbcd 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/arrays_test.go +++ b/vendor/github.com/trivago/tgo/tcontainer/arrays_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/github.com/trivago/tgo/tcontainer/bytepool.go b/vendor/github.com/trivago/tgo/tcontainer/bytepool.go index b39409e..c1290b7 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/bytepool.go +++ b/vendor/github.com/trivago/tgo/tcontainer/bytepool.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tcontainer/bytepool_test.go b/vendor/github.com/trivago/tgo/tcontainer/bytepool_test.go index 77b2316..f75a06d 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/bytepool_test.go +++ b/vendor/github.com/trivago/tgo/tcontainer/bytepool_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tcontainer/marshalmap.go b/vendor/github.com/trivago/tgo/tcontainer/marshalmap.go index 37fb9de..20c2e0f 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/marshalmap.go +++ b/vendor/github.com/trivago/tgo/tcontainer/marshalmap.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,11 +16,12 @@ package tcontainer import ( "fmt" - "github.com/trivago/tgo/treflect" "reflect" "strconv" "strings" "time" + + "github.com/trivago/tgo/treflect" ) // MarshalMap is a wrapper type to attach converter methods to maps normally @@ -92,6 +93,44 @@ func ConvertToMarshalMap(value interface{}, formatKey func(string) string) (Mars return nil, fmt.Errorf("Root value cannot be converted to MarshalMap") } +// Clone creates a copy of the given MarshalMap. 
+func (mmap MarshalMap) Clone() MarshalMap { + clone := cloneMap(reflect.ValueOf(mmap)) + return clone.Interface().(MarshalMap) +} + +func cloneMap(mapValue reflect.Value) reflect.Value { + clone := reflect.MakeMap(mapValue.Type()) + keys := mapValue.MapKeys() + + for _, k := range keys { + v := mapValue.MapIndex(k) + switch k.Kind() { + default: + clone.SetMapIndex(k, v) + + case reflect.Array, reflect.Slice: + if v.Type().Elem().Kind() == reflect.Map { + sliceCopy := reflect.MakeSlice(v.Type(), v.Len(), v.Len()) + for i := 0; i < v.Len(); i++ { + element := v.Index(i) + sliceCopy.Index(i).Set(cloneMap(element)) + } + } else { + sliceCopy := reflect.MakeSlice(v.Type(), 0, v.Len()) + reflect.Copy(sliceCopy, v) + clone.SetMapIndex(k, sliceCopy) + } + + case reflect.Map: + vClone := cloneMap(v) + clone.SetMapIndex(k, vClone) + } + } + + return clone +} + // Bool returns a value at key that is expected to be a boolean func (mmap MarshalMap) Bool(key string) (bool, error) { val, exists := mmap.Value(key) @@ -182,6 +221,25 @@ func (mmap MarshalMap) String(key string) (string, error) { return strValue, nil } +// Bytes returns a value at key that is expected to be a []byte +func (mmap MarshalMap) Bytes(key string) ([]byte, error) { + val, exists := mmap.Value(key) + if !exists { + return []byte{}, fmt.Errorf(`"%s" is not set`, key) + } + + bytesValue, isBytes := val.([]byte) + if !isBytes { + return []byte{}, fmt.Errorf(`"%s" is expected to be a []byte`, key) + } + return bytesValue, nil +} + +// Slice is an alias for Array +func (mmap MarshalMap) Slice(key string) ([]interface{}, error) { + return mmap.Array(key) +} + // Array returns a value at key that is expected to be a []interface{} func (mmap MarshalMap) Array(key string) ([]interface{}, error) { val, exists := mmap.Value(key) @@ -237,6 +295,11 @@ func castToStringArray(key string, value interface{}) ([]string, error) { } } +// StringSlice is an alias for StringArray +func (mmap MarshalMap) StringSlice(key string) ([]string, error) { + return mmap.StringArray(key) +} + // StringArray returns a value at key that is expected to be a []string // This function supports conversion (by copy) from // * []interface{} @@ -275,7 +338,12 @@ func castToInt64Array(key string, value interface{}) ([]int64, error) { } } -// IntArray returns a value at key that is expected to be a []int64 +// Int64Slice is an alias for Int64Array +func (mmap MarshalMap) Int64Slice(key string) ([]int64, error) { + return mmap.Int64Array(key) +} + +// Int64Array returns a value at key that is expected to be a []int64 // This function supports conversion (by copy) from // * []interface{} func (mmap MarshalMap) Int64Array(key string) ([]int64, error) { @@ -327,6 +395,11 @@ func (mmap MarshalMap) StringMap(key string) (map[string]string, error) { } } +// StringSliceMap is an alias for StringArrayMap +func (mmap MarshalMap) StringSliceMap(key string) (map[string][]string, error) { + return mmap.StringArrayMap(key) +} + // StringArrayMap returns a value at key that is expected to be a // map[string][]string. 
This function supports conversion (by copy) from // * map[interface{}][]interface{} @@ -389,8 +462,30 @@ func (mmap MarshalMap) MarshalMap(key string) (MarshalMap, error) { // "key1/key2" -> mmap["key1"]["key2"] nested map // "key1[0]" -> mmap["key1"][0] nested array // "key1[0]key2" -> mmap["key1"][0]["key2"] nested array, nested map -func (mmap MarshalMap) Value(key string) (interface{}, bool) { - return mmap.resolvePath(key, mmap) +func (mmap MarshalMap) Value(key string) (val interface{}, exists bool) { + exists = mmap.resolvePath(key, mmap, func(p, k reflect.Value, v interface{}) { + val = v + }) + return val, exists +} + +// Delete a value from a given path. +// The path must point to a map key. Deleting from arrays is not supported. +func (mmap MarshalMap) Delete(key string) { + mmap.resolvePath(key, mmap, func(p, k reflect.Value, v interface{}) { + if v != nil { + println(key, "deleted") + p.SetMapIndex(k, reflect.Value{}) + } + }) +} + +// Set a value for a given path. +// The path must point to a map key. Setting array elements is not supported. +func (mmap MarshalMap) Set(key string, val interface{}) { + mmap.resolvePath(key, mmap, func(p, k reflect.Value, v interface{}) { + p.SetMapIndex(k, reflect.ValueOf(val)) + }) } func (mmap MarshalMap) resolvePathKey(key string) (int, int) { @@ -415,50 +510,57 @@ func (mmap MarshalMap) resolvePathKey(key string) (int, int) { return keyEnd, nextKeyStart } -func (mmap MarshalMap) resolvePath(key string, value interface{}) (interface{}, bool) { - if len(key) == 0 { - return value, true // ### return, found requested value ### +func (mmap MarshalMap) resolvePath(k string, v interface{}, action func(p, k reflect.Value, v interface{})) bool { + if len(k) == 0 { + action(reflect.Value{}, reflect.ValueOf(k), v) // ### return, found requested value ### + return true } - valueMeta := reflect.ValueOf(value) - switch valueMeta.Kind() { + vValue := reflect.ValueOf(v) + switch vValue.Kind() { case reflect.Array, reflect.Slice: - startIdx := strings.IndexRune(key, MarshalMapArrayBegin) // Must be first char, otherwise malformed - endIdx := strings.IndexRune(key, MarshalMapArrayEnd) // Must be > startIdx, otherwise malformed + startIdx := strings.IndexRune(k, MarshalMapArrayBegin) // Must be first char, otherwise malformed + endIdx := strings.IndexRune(k, MarshalMapArrayEnd) // Must be > startIdx, otherwise malformed if startIdx == -1 || endIdx == -1 { - return nil, false + return false } if startIdx == 0 && endIdx > startIdx { - index, err := strconv.Atoi(key[startIdx+1 : endIdx]) + index, err := strconv.Atoi(k[startIdx+1 : endIdx]) // [1] -> index: "1", remain: "" -- value // [1]a/b -> index: "1", remain: "a/b" -- nested map // [1][2] -> index: "1", remain: "[2]" -- nested array - if err == nil && index < valueMeta.Len() { - item := valueMeta.Index(index).Interface() - key := key[endIdx+1:] - return mmap.resolvePath(key, item) // ### return, nested array ### + if err == nil && index < vValue.Len() { + item := vValue.Index(index).Interface() + key := k[endIdx+1:] + return mmap.resolvePath(key, item, action) // ### return, nested array ### } } case reflect.Map: - keyMeta := reflect.ValueOf(key) - if storedValue := valueMeta.MapIndex(keyMeta); storedValue.IsValid() { - return storedValue.Interface(), true + kValue := reflect.ValueOf(k) + if storedValue := vValue.MapIndex(kValue); storedValue.IsValid() { + action(vValue, kValue, storedValue.Interface()) + return true + } + + keyEnd, nextKeyStart := mmap.resolvePathKey(k) + if keyEnd == len(k) { + 
action(vValue, kValue, nil) // call action to support setting non-existing keys + return false // ### return, key not found ### } - keyEnd, nextKeyStart := mmap.resolvePathKey(key) - pathKey := key[:keyEnd] - keyMeta = reflect.ValueOf(pathKey) + nextKey := k[:keyEnd] + nkValue := reflect.ValueOf(nextKey) - if storedValue := valueMeta.MapIndex(keyMeta); storedValue.IsValid() { - remain := key[nextKeyStart:] - return mmap.resolvePath(remain, storedValue.Interface()) // ### return, nested map ### + if storedValue := vValue.MapIndex(nkValue); storedValue.IsValid() { + remain := k[nextKeyStart:] + return mmap.resolvePath(remain, storedValue.Interface(), action) // ### return, nested map ### } } - return nil, false + return false } diff --git a/vendor/github.com/trivago/tgo/tcontainer/marshalmap_test.go b/vendor/github.com/trivago/tgo/tcontainer/marshalmap_test.go index 717ce67..604458d 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/marshalmap_test.go +++ b/vendor/github.com/trivago/tgo/tcontainer/marshalmap_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -15,9 +15,10 @@ package tcontainer import ( - "github.com/trivago/tgo/ttesting" "testing" "time" + + "github.com/trivago/tgo/ttesting" ) func TestMarshalMapBaseTypes(t *testing.T) { @@ -357,3 +358,86 @@ func TestMarshalMapConvert(t *testing.T) { _, err = ConvertToMarshalMap(mapRoot3, nil) expect.NoError(err) } + +func TestMarshalMapClone(t *testing.T) { + expect := ttesting.NewExpect(t) + testMap := NewMarshalMap() + + testMap["t1"] = 10 + testMap["t2"] = uint64(10) + testMap["t3"] = "test" + testMap["t4"] = []byte("test") + testMap["t5"] = []int{1, 2, 3} + + cloneTest1 := testMap.Clone() + expect.Equal(testMap, cloneTest1) + + testMap["nested1"] = cloneTest1 + testMap["nested2"] = map[interface{}]interface{}{ + 1: "test", + "foo": "bar", + 3.1: []int{1, 2, 3}, + } + testMap["nested3"] = []MarshalMap{cloneTest1} + + cloneTest2 := testMap.Clone() + expect.Equal(testMap, cloneTest2) +} + +func TestMarshalMapDelete(t *testing.T) { + expect := ttesting.NewExpect(t) + + nested := NewMarshalMap() + nested["t1"] = 1 + nested["t2"] = 2 + nested["t3"] = 3 + + nested2 := nested.Clone() + testMap := NewMarshalMap() + testMap["t1"] = 1 + testMap["map"] = nested + testMap["array"] = []MarshalMap{nested2} + + testMap.Delete("t1") + expect.MapNotSet(testMap, "t1") + _, exists := testMap.Value("t1") + expect.False(exists) + + testMap.Delete("map/t2") + expect.MapNotSet(nested, "t2") + _, exists = testMap.Value("map/t2") + expect.False(exists) + + testMap.Delete("array[0]t3") + expect.MapNotSet(nested2, "t3") + _, exists = testMap.Value("array[0]t3") + expect.False(exists) +} + +func TestMarshalMapSet(t *testing.T) { + expect := ttesting.NewExpect(t) + + testMap := NewMarshalMap() + testMap["t1"] = 1 + testMap["t2"] = "string" + + testMap.Set("t2", 4) + expect.MapEqual(testMap, "t2", 4) + + testMap.Set("t3", testMap.Clone()) + val, exists := testMap.Value("t3/t1") + + expect.True(exists) + expect.Equal(val, 1) + + testMap.Set("t3/t2", "test") + val, exists = testMap.Value("t3/t2") + + expect.True(exists) + expect.Equal(val, "test") + + testMap.Set("t3/t3", 3) + val, exists = testMap.Value("t3/t3") + expect.True(exists) + expect.Equal(val, 3) +} diff --git a/vendor/github.com/trivago/tgo/tcontainer/trie.go b/vendor/github.com/trivago/tgo/tcontainer/trie.go index 
288748b..f721709 100644 --- a/vendor/github.com/trivago/tgo/tcontainer/trie.go +++ b/vendor/github.com/trivago/tgo/tcontainer/trie.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/terrors/simpleerror.go b/vendor/github.com/trivago/tgo/terrors/simpleerror.go index 9e72de3..370e559 100644 --- a/vendor/github.com/trivago/tgo/terrors/simpleerror.go +++ b/vendor/github.com/trivago/tgo/terrors/simpleerror.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tflag/flag.go b/vendor/github.com/trivago/tgo/tflag/flag.go index 1499a3e..adeb015 100644 --- a/vendor/github.com/trivago/tgo/tflag/flag.go +++ b/vendor/github.com/trivago/tgo/tflag/flag.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/bufferedreader.go b/vendor/github.com/trivago/tgo/tio/bufferedreader.go index 151a57a..5b45b62 100644 --- a/vendor/github.com/trivago/tgo/tio/bufferedreader.go +++ b/vendor/github.com/trivago/tgo/tio/bufferedreader.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,8 +17,10 @@ package tio import ( "bytes" "encoding/binary" - "github.com/trivago/tgo/tstrings" "io" + "regexp" + + "github.com/trivago/tgo/tstrings" ) // BufferedReaderFlags is an enum to configure a buffered reader @@ -68,6 +70,14 @@ const ( // BufferedReaderFlagEverything will keep MLE and/or delimiters when // building a message. BufferedReaderFlagEverything = BufferedReaderFlags(16) + + // BufferedReaderFlagRegex enables reading messages delimited by regexp. + // To enable multiline reading, prefix the regex with "(?m)" + BufferedReaderFlagRegex = BufferedReaderFlags(32) + + // BufferedReaderFlagRegexStart switches regex reading to find the start + // of the next message instead of looking for the end of the current. + BufferedReaderFlagRegexStart = BufferedReaderFlags(64 + 32) ) type bufferError string @@ -89,15 +99,16 @@ var BufferDataInvalid = bufferError("Invalid data") // has been reached. The latter has to be enabled by flag and will disable the // default behavior, which is looking for a delimiter string. type BufferedReader struct { - data []byte - delimiter []byte - parse func() ([]byte, int) - paramMLE int - growSize int - end int - encoding binary.ByteOrder - flags BufferedReaderFlags - incomplete bool + data []byte + delimiter []byte + parse func() ([]byte, int) + paramMLE int + growSize int + end int + encoding binary.ByteOrder + flags BufferedReaderFlags + incomplete bool + delimiterRegexp *regexp.Regexp } // NewBufferedReader creates a new buffered reader that reads messages from a @@ -105,26 +116,34 @@ type BufferedReader struct { // Messages can be separated from the stream by using common methods such as // fixed size, encoded message length or delimiter string. 
// The internal buffer is grown statically (by its original size) if necessary. -// bufferSize defines the initial size / grow size of the buffer -// flags configures the parsing method -// offsetOrLength sets either the runlength offset or fixed message size -// delimiter defines the delimiter used for textual message parsing +// bufferSize defines the initial size / grow size of the buffer. +// flags configures the parsing method. +// offsetOrLength sets either the runlength offset or fixed message size. +// delimiter defines the delimiter used for textual message parsing, i.e. +// BufferedReaderFlagDelimiter or BufferedReaderFlagRegex. func NewBufferedReader(bufferSize int, flags BufferedReaderFlags, offsetOrLength int, delimiter string) *BufferedReader { buffer := BufferedReader{ - data: make([]byte, bufferSize), - delimiter: []byte(delimiter), - paramMLE: offsetOrLength, - encoding: binary.LittleEndian, - end: 0, - flags: flags, - growSize: bufferSize, - incomplete: true, + data: make([]byte, bufferSize), + delimiter: []byte(delimiter), + paramMLE: offsetOrLength, + encoding: binary.LittleEndian, + end: 0, + flags: flags, + growSize: bufferSize, + incomplete: true, + delimiterRegexp: nil, } if flags&BufferedReaderFlagBigEndian != 0 { buffer.encoding = binary.BigEndian } + if flags&BufferedReaderFlagRegex != 0 { + buffer.parse = buffer.parseDelimiterRegex + buffer.delimiterRegexp = regexp.MustCompile(delimiter) + return &buffer + } + if flags&BufferedReaderFlagMaskMLE == 0 { buffer.parse = buffer.parseDelimiter } else { @@ -153,6 +172,20 @@ func (buffer *BufferedReader) Reset(sequence uint64) { buffer.incomplete = true } +// ResetGetIncomplete works like Reset but returns any incomplete contents +// left in the buffer. Note that the data in the buffer returned is overwritten +// with the next read call. +func (buffer *BufferedReader) ResetGetIncomplete() []byte { + remains := buffer.data[:buffer.end] + buffer.Reset(0) + return remains +} + +// HasIncompleteData returns true if there is unparsed data left in the buffer +func (buffer *BufferedReader) HasIncompleteData() bool { + return buffer.end > 0 +} + // general message extraction part of all parser methods func (buffer *BufferedReader) extractMessage(messageLen int, msgStartIdx int) ([]byte, int) { nextMsgIdx := msgStartIdx + messageLen @@ -257,6 +290,33 @@ func (buffer *BufferedReader) parseMLE64() ([]byte, int) { return buffer.extractMessage(int(messageLen), buffer.paramMLE+8) } +// messages are separated by regexp +func (buffer *BufferedReader) parseDelimiterRegex() ([]byte, int) { + startOffset := 0 + + for { + delimiterIdx := buffer.delimiterRegexp.FindIndex(buffer.data[startOffset:buffer.end]) + if delimiterIdx == nil { + return nil, 0 // ### return, incomplete ### + } + + // If we do end of message parsing, we're done + if buffer.flags&BufferedReaderFlagRegexStart != BufferedReaderFlagRegexStart { + return buffer.extractMessage(delimiterIdx[1], 0) // ### done, end of line ### + } + + // We're done if this is the second pass (start offset > 0) + // We're done if we have data before the match (incomplete message) + if startOffset > 0 || delimiterIdx[0] > 0 { + return buffer.extractMessage(delimiterIdx[0]+startOffset, 0) // ### done, start of line ### + } + + // Found delimiter at start of data, look for the start of a second message. + // We don't need to add startOffset here as we never reach this if startOffset != 0 + startOffset = delimiterIdx[1] + } +} + // ReadAll calls ReadOne as long as there are messages in the stream. 
// Messages will be send to the given write callback. // If callback is nil, data will be read and discarded. diff --git a/vendor/github.com/trivago/tgo/tio/bufferedreader_test.go b/vendor/github.com/trivago/tgo/tio/bufferedreader_test.go index de11a1c..6f6201b 100644 --- a/vendor/github.com/trivago/tgo/tio/bufferedreader_test.go +++ b/vendor/github.com/trivago/tgo/tio/bufferedreader_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,10 +17,11 @@ package tio import ( "bytes" "fmt" - "github.com/trivago/tgo/ttesting" "io" "strings" "testing" + + "github.com/trivago/tgo/ttesting" ) type bufferedReaderTestData struct { @@ -54,6 +55,75 @@ func TestBufferedReaderDelimiter(t *testing.T) { data.expect.Nil(msg) } +func TestBufferedReaderRegexStart(t *testing.T) { + expect := ttesting.NewExpect(t) + data := []string{ + "incomplete\n", + "XXXX message\n", + "XXXX multi\nline\nmessage\n", + "XXXX trailing", + } + + parseReader := strings.NewReader(strings.Join(data, "")) + reader := NewBufferedReader(1024, BufferedReaderFlagRegexStart, 0, "(?m)^XXXX") + + parsedData := []string{} + err := reader.ReadAll(parseReader, func(m []byte) { + parsedData = append(parsedData, string(m)) + }) + + expect.Equal(io.EOF, err) + expect.Equal(len(data)-1, len(parsedData)) + + expect.True(reader.HasIncompleteData()) + parsedData = append(parsedData, string(reader.ResetGetIncomplete())) + + expect.False(reader.HasIncompleteData()) + + for i, original := range data { + expect.Equal(original, parsedData[i]) + } + + msg, more, err := reader.ReadOne(parseReader) + expect.False(more) + expect.Equal(io.EOF, err) + expect.Nil(msg) +} + +func TestBufferedReaderRegexEnd(t *testing.T) { + expect := ttesting.NewExpect(t) + data := []string{ + "message XXXX\n", + "multi\nline\nmessage XXXX\n", + "incomplete", + } + + parseReader := strings.NewReader(strings.Join(data, "")) + reader := NewBufferedReader(1024, BufferedReaderFlagRegex, 0, "XXXX\n") + + parsedData := []string{} + err := reader.ReadAll(parseReader, func(m []byte) { + parsedData = append(parsedData, string(m)) + }) + + expect.Equal(io.EOF, err) + expect.Equal(len(data)-1, len(parsedData)) + + expect.True(reader.HasIncompleteData()) + parsedData = append(parsedData, string(reader.ResetGetIncomplete())) + + expect.False(reader.HasIncompleteData()) + + for i, original := range data { + expect.Equal(original, parsedData[i]) + } + + msg, more, err := reader.ReadOne(parseReader) + expect.False(more) + expect.Equal(io.EOF, err) + expect.Nil(msg) +} + func TestBufferedReaderMLEText(t *testing.T) { data := bufferedReaderTestData{ expect: ttesting.NewExpect(t), diff --git a/vendor/github.com/trivago/tgo/tio/bytestream.go b/vendor/github.com/trivago/tgo/tio/bytestream.go index 2f76e95..f954dbf 100644 --- a/vendor/github.com/trivago/tgo/tio/bytestream.go +++ b/vendor/github.com/trivago/tgo/tio/bytestream.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
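For context, here is a minimal sketch of how the regex-delimited reading mode added above might be used, mirroring the new TestBufferedReaderRegexStart test. The package name, input data, and variable names are illustrative only; the API calls (NewBufferedReader, ReadAll, HasIncompleteData, ResetGetIncomplete) are the ones introduced or exercised in this diff.

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/trivago/tgo/tio"
)

func main() {
	// Messages begin with "XXXX"; the leading fragment has no start marker
	// and the trailing message has no following marker.
	input := strings.NewReader("incomplete\nXXXX first\nXXXX second\nXXXX trailing")

	// "(?m)" enables multiline matching so "^XXXX" marks the start of each message.
	reader := tio.NewBufferedReader(1024, tio.BufferedReaderFlagRegexStart, 0, "(?m)^XXXX")

	// ReadAll reads until EOF and hands each complete message to the callback.
	err := reader.ReadAll(input, func(msg []byte) {
		fmt.Printf("message: %q\n", msg)
	})
	if err != nil && err != io.EOF {
		panic(err)
	}

	// With RegexStart, a message is only complete once the start of the next
	// one is seen, so the trailing message stays buffered until we drain it.
	if reader.HasIncompleteData() {
		fmt.Printf("incomplete: %q\n", reader.ResetGetIncomplete())
	}
}
```

With BufferedReaderFlagRegex (without the Start variant) the match instead terminates the current message, as the TestBufferedReaderRegexEnd test below shows.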
diff --git a/vendor/github.com/trivago/tgo/tio/bytestream_test.go b/vendor/github.com/trivago/tgo/tio/bytestream_test.go index 4e9c3e3..82b65b0 100644 --- a/vendor/github.com/trivago/tgo/tio/bytestream_test.go +++ b/vendor/github.com/trivago/tgo/tio/bytestream_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/bytewriter.go b/vendor/github.com/trivago/tgo/tio/bytewriter.go index 6289e17..cff683d 100644 --- a/vendor/github.com/trivago/tgo/tio/bytewriter.go +++ b/vendor/github.com/trivago/tgo/tio/bytewriter.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/bytewriter_test.go b/vendor/github.com/trivago/tgo/tio/bytewriter_test.go index 8ef8695..2efa9df 100644 --- a/vendor/github.com/trivago/tgo/tio/bytewriter_test.go +++ b/vendor/github.com/trivago/tgo/tio/bytewriter_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/files.go b/vendor/github.com/trivago/tgo/tio/files.go index 5108a81..693830c 100644 --- a/vendor/github.com/trivago/tgo/tio/files.go +++ b/vendor/github.com/trivago/tgo/tio/files.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/files_test.go b/vendor/github.com/trivago/tgo/tio/files_test.go index 5ae3b06..cfb9bcf 100644 --- a/vendor/github.com/trivago/tgo/tio/files_test.go +++ b/vendor/github.com/trivago/tgo/tio/files_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/syscall.go b/vendor/github.com/trivago/tgo/tio/syscall.go index 0751b2a..f4f723a 100644 --- a/vendor/github.com/trivago/tgo/tio/syscall.go +++ b/vendor/github.com/trivago/tgo/tio/syscall.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tio/syscall_windows.go b/vendor/github.com/trivago/tgo/tio/syscall_windows.go index c5ee1b2..90d0073 100644 --- a/vendor/github.com/trivago/tgo/tio/syscall_windows.go +++ b/vendor/github.com/trivago/tgo/tio/syscall_windows.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/github.com/trivago/tgo/tlog/log.go b/vendor/github.com/trivago/tgo/tlog/log.go index 5e6eb89..67faa68 100644 --- a/vendor/github.com/trivago/tgo/tlog/log.go +++ b/vendor/github.com/trivago/tgo/tlog/log.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tlog/logcache.go b/vendor/github.com/trivago/tgo/tlog/logcache.go index b8db4ef..beb7721 100644 --- a/vendor/github.com/trivago/tgo/tlog/logcache.go +++ b/vendor/github.com/trivago/tgo/tlog/logcache.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tlog/loglogger.go b/vendor/github.com/trivago/tgo/tlog/loglogger.go index e26aa2d..2963d17 100644 --- a/vendor/github.com/trivago/tgo/tlog/loglogger.go +++ b/vendor/github.com/trivago/tgo/tlog/loglogger.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tlog/lognull.go b/vendor/github.com/trivago/tgo/tlog/lognull.go index 86f719c..6d9d111 100644 --- a/vendor/github.com/trivago/tgo/tlog/lognull.go +++ b/vendor/github.com/trivago/tgo/tlog/lognull.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tlog/logreferrer.go b/vendor/github.com/trivago/tgo/tlog/logreferrer.go index df13089..a8e9d63 100644 --- a/vendor/github.com/trivago/tgo/tlog/logreferrer.go +++ b/vendor/github.com/trivago/tgo/tlog/logreferrer.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tmath/bits.go b/vendor/github.com/trivago/tgo/tmath/bits.go index 4de34d3..3458a29 100644 --- a/vendor/github.com/trivago/tgo/tmath/bits.go +++ b/vendor/github.com/trivago/tgo/tmath/bits.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tmath/bits_test.go b/vendor/github.com/trivago/tgo/tmath/bits_test.go index 6abd382..a71096f 100644 --- a/vendor/github.com/trivago/tgo/tmath/bits_test.go +++ b/vendor/github.com/trivago/tgo/tmath/bits_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/github.com/trivago/tgo/tmath/math.go b/vendor/github.com/trivago/tgo/tmath/math.go index 31cf6d8..9e6a39b 100644 --- a/vendor/github.com/trivago/tgo/tmath/math.go +++ b/vendor/github.com/trivago/tgo/tmath/math.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tmath/math_test.go b/vendor/github.com/trivago/tgo/tmath/math_test.go index 57152b9..304b339 100644 --- a/vendor/github.com/trivago/tgo/tmath/math_test.go +++ b/vendor/github.com/trivago/tgo/tmath/math_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tnet/network.go b/vendor/github.com/trivago/tgo/tnet/network.go index 0b6bfd7..2a4ffe5 100644 --- a/vendor/github.com/trivago/tgo/tnet/network.go +++ b/vendor/github.com/trivago/tgo/tnet/network.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,10 +18,18 @@ import ( "github.com/trivago/tgo/tstrings" "io" "net" + "strconv" "strings" "syscall" ) +// URI stores parts parsed from an URI string. +type URI struct { + Protocol string + Address string + Port uint16 +} + // ParseAddress takes an address and tries to extract the protocol from it. // Protocols may be prepended by the "protocol://" notation. // If no protocol is given, defaultProtocol is returned. @@ -60,6 +68,15 @@ func SplitAddress(addressString string, defaultProtocol string) (protocol, host, return } +// SplitAddressToURI acts like SplitAddress but returns an URI struct instead. +func SplitAddressToURI(addressString string, defaultProtocol string) (uri URI, err error) { + portString := "" + uri.Protocol, uri.Address, portString, err = SplitAddress(addressString, defaultProtocol) + portNum, _ := strconv.ParseInt(portString, 10, 16) + uri.Port = uint16(portNum) + return uri, err +} + // IsDisconnectedError returns true if the given error is related to a // disconnected socket. func IsDisconnectedError(err error) bool { diff --git a/vendor/github.com/trivago/tgo/tnet/network_test.go b/vendor/github.com/trivago/tgo/tnet/network_test.go index 1334a99..3cdd8d1 100644 --- a/vendor/github.com/trivago/tgo/tnet/network_test.go +++ b/vendor/github.com/trivago/tgo/tnet/network_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -76,3 +76,37 @@ func TestSplitAddress(t *testing.T) { expect.Equal("", addr) expect.Equal("80", port) } + +func TestSplitAddressToURI(t *testing.T) { + expect := ttesting.NewExpect(t) + + uri, err := SplitAddressToURI("unix://test.socket", "") + expect.NoError(err) + expect.Equal("unix", uri.Protocol) + expect.Equal("test.socket", uri.Address) + expect.Equal(uint16(0), uri.Port) + + uri, err = SplitAddressToURI("udp://192.168.0.1:20", "") + expect.NoError(err) + expect.Equal("udp", uri.Protocol) + expect.Equal("192.168.0.1", uri.Address) + expect.Equal(uint16(20), uri.Port) + + uri, err = SplitAddressToURI("192.168.0.1:20", "tcp") + expect.NoError(err) + expect.Equal("tcp", uri.Protocol) + expect.Equal("192.168.0.1", uri.Address) + expect.Equal(uint16(20), uri.Port) + + uri, err = SplitAddressToURI("80", "tcp") + expect.NoError(err) + expect.Equal("tcp", uri.Protocol) + expect.Equal("", uri.Address) + expect.Equal(uint16(80), uri.Port) + + uri, err = SplitAddressToURI("tcp://80", "") + expect.NoError(err) + expect.Equal("tcp", uri.Protocol) + expect.Equal("", uri.Address) + expect.Equal(uint16(80), uri.Port) +} diff --git a/vendor/github.com/trivago/tgo/tnet/stoplistener.go b/vendor/github.com/trivago/tgo/tnet/stoplistener.go index bcf452e..4fc5513 100644 --- a/vendor/github.com/trivago/tgo/tnet/stoplistener.go +++ b/vendor/github.com/trivago/tgo/tnet/stoplistener.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tnet/stoplistener_test.go b/vendor/github.com/trivago/tgo/tnet/stoplistener_test.go index 1501259..f53da85 100644 --- a/vendor/github.com/trivago/tgo/tnet/stoplistener_test.go +++ b/vendor/github.com/trivago/tgo/tnet/stoplistener_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport.go b/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport.go index b337bf1..548c6d0 100644 --- a/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport.go +++ b/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport_test.go b/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport_test.go index 79c3c55..e59cd92 100644 --- a/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport_test.go +++ b/vendor/github.com/trivago/tgo/tnet/thttp/sockettransport_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/exit.go b/vendor/github.com/trivago/tgo/tos/exit.go index a2da676..69d2092 100644 --- a/vendor/github.com/trivago/tgo/tos/exit.go +++ b/vendor/github.com/trivago/tgo/tos/exit.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/file.go b/vendor/github.com/trivago/tgo/tos/file.go index 82bb57f..648d668 100644 --- a/vendor/github.com/trivago/tgo/tos/file.go +++ b/vendor/github.com/trivago/tgo/tos/file.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/file_test.go b/vendor/github.com/trivago/tgo/tos/file_test.go index 1d916de..0784003 100644 --- a/vendor/github.com/trivago/tgo/tos/file_test.go +++ b/vendor/github.com/trivago/tgo/tos/file_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/file_unix.go b/vendor/github.com/trivago/tgo/tos/file_unix.go index 81793f7..888e2b5 100644 --- a/vendor/github.com/trivago/tgo/tos/file_unix.go +++ b/vendor/github.com/trivago/tgo/tos/file_unix.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/file_unsupported.go b/vendor/github.com/trivago/tgo/tos/file_unsupported.go index cb4ca99..2757d48 100644 --- a/vendor/github.com/trivago/tgo/tos/file_unsupported.go +++ b/vendor/github.com/trivago/tgo/tos/file_unsupported.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/ids_darwin.go b/vendor/github.com/trivago/tgo/tos/ids_darwin.go index 9cb0d08..02bcdfe 100644 --- a/vendor/github.com/trivago/tgo/tos/ids_darwin.go +++ b/vendor/github.com/trivago/tgo/tos/ids_darwin.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/ids_freebsd.go b/vendor/github.com/trivago/tgo/tos/ids_freebsd.go index bb725d0..1a98935 100644 --- a/vendor/github.com/trivago/tgo/tos/ids_freebsd.go +++ b/vendor/github.com/trivago/tgo/tos/ids_freebsd.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/ids_linux.go b/vendor/github.com/trivago/tgo/tos/ids_linux.go index 9e8352c..d3d7c66 100644 --- a/vendor/github.com/trivago/tgo/tos/ids_linux.go +++ b/vendor/github.com/trivago/tgo/tos/ids_linux.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/github.com/trivago/tgo/tos/ids_windows.go b/vendor/github.com/trivago/tgo/tos/ids_windows.go index ae0ee41..77064ab 100644 --- a/vendor/github.com/trivago/tgo/tos/ids_windows.go +++ b/vendor/github.com/trivago/tgo/tos/ids_windows.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,8 +16,11 @@ package tos -// TODO -/* +// TODO: +// - Windows uses 128-bit UUIDs +// - Root can be mapped to administrator +// - Nobody mapping is TBD + const ( // NobodyUid represents the user id of the user nobody NobodyUid = 0 @@ -27,4 +30,4 @@ const ( RootUid = 0 // RootUid represents the group id of the root group RootGid = 0 -)*/ +) diff --git a/vendor/github.com/trivago/tgo/tos/pidfile.go b/vendor/github.com/trivago/tgo/tos/pidfile.go index 0350c54..17167e5 100644 --- a/vendor/github.com/trivago/tgo/tos/pidfile.go +++ b/vendor/github.com/trivago/tgo/tos/pidfile.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/pidfile_test.go b/vendor/github.com/trivago/tgo/tos/pidfile_test.go index 82e271c..d05d81d 100644 --- a/vendor/github.com/trivago/tgo/tos/pidfile_test.go +++ b/vendor/github.com/trivago/tgo/tos/pidfile_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tos/user.go b/vendor/github.com/trivago/tgo/tos/user.go index c34347a..fbd9a8a 100644 --- a/vendor/github.com/trivago/tgo/tos/user.go +++ b/vendor/github.com/trivago/tgo/tos/user.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ import ( "strconv" ) +// GetUid returns the user id for a given user name func GetUid(name string) (int, error) { switch name { case "nobody": @@ -37,6 +38,7 @@ func GetUid(name string) (int, error) { } } +// GetGid returns the group id for a given group name func GetGid(name string) (int, error) { switch name { case "nobody": diff --git a/vendor/github.com/trivago/tgo/treflect/clone.go b/vendor/github.com/trivago/tgo/treflect/clone.go new file mode 100644 index 0000000..b887126 --- /dev/null +++ b/vendor/github.com/trivago/tgo/treflect/clone.go @@ -0,0 +1,80 @@ +// Copyright 2015-2018 trivago N.V. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package treflect + +import ( + "reflect" +) + +// Clone does a deep copy of the given value. 
+// Please note that field have to be public in order to be copied. +// Private fields will be ignored. +func Clone(v interface{}) interface{} { + value := reflect.ValueOf(v) + copy := clone(value) + return copy.Interface() +} + +func clone(v reflect.Value) reflect.Value { + switch v.Kind() { + case reflect.Struct: + copy := reflect.New(v.Type()) + + for i := 0; i < v.Type().NumField(); i++ { + field := v.Field(i) + targetField := copy.Elem().Field(i) + if !targetField.CanSet() { + continue // ignore private fields + } + fieldCopy := clone(field) + targetField.Set(fieldCopy) + } + return copy.Elem() + + case reflect.Chan: + copy := reflect.MakeChan(v.Type(), v.Len()) + return copy + + case reflect.Map: + copy := reflect.MakeMap(v.Type()) + keys := v.MapKeys() + for _, k := range keys { + fieldCopy := clone(v.MapIndex(k)) + copy.SetMapIndex(k, fieldCopy) + } + return copy + + case reflect.Slice: + copy := reflect.MakeSlice(v.Type(), v.Len(), v.Len()) + for i := 0; i < v.Len(); i++ { + elementCopy := clone(v.Index(i)) + copy.Index(i).Set(elementCopy) + } + return copy + + case reflect.Array: + copy := reflect.New(v.Type()).Elem() + for i := 0; i < v.Len(); i++ { + elementCopy := clone(v.Index(i)) + copy.Index(i).Set(elementCopy) + } + return copy + + default: + copy := reflect.New(v.Type()) + copy.Elem().Set(v) + return copy.Elem() + } +} diff --git a/vendor/github.com/trivago/tgo/treflect/clone_test.go b/vendor/github.com/trivago/tgo/treflect/clone_test.go new file mode 100644 index 0000000..70640a0 --- /dev/null +++ b/vendor/github.com/trivago/tgo/treflect/clone_test.go @@ -0,0 +1,60 @@ +// Copyright 2015-2018 trivago N.V. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package treflect + +import ( + "testing" + + "github.com/trivago/tgo/ttesting" +) + +type CloneTest1 struct { + Number int + Text string + Slice []int + Array [3]int + private int // private fields should be ignored +} + +type cloneTest2 struct { + CloneTest1 + Slice2 []CloneTest1 + Dict map[string]CloneTest1 +} + +func TestClone(t *testing.T) { + expect := ttesting.NewExpect(t) + + test1 := CloneTest1{ + Number: 1, + Text: "test", + Slice: []int{1, 2, 3}, + Array: [3]int{1, 2, 3}, + } + + testClone1 := Clone(test1) + expect.Equal(test1, testClone1) + + test2 := cloneTest2{ + CloneTest1: test1, + Slice2: []CloneTest1{test1}, + Dict: map[string]CloneTest1{ + "foo": test1, + }, + } + + testClone2 := Clone(test2) + expect.Equal(test2, testClone2) +} diff --git a/vendor/github.com/trivago/tgo/treflect/reflection.go b/vendor/github.com/trivago/tgo/treflect/reflection.go index b28a5ae..48a11f4 100644 --- a/vendor/github.com/trivago/tgo/treflect/reflection.go +++ b/vendor/github.com/trivago/tgo/treflect/reflection.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
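A minimal sketch of the new treflect.Clone helper in use; the Settings type here is purely illustrative. As the comment above notes, only exported fields are copied, while slices and maps get their own backing storage:

package main

import (
    "fmt"

    "github.com/trivago/tgo/treflect"
)

type Settings struct {
    Name  string
    Tags  []string
    Extra map[string]int
    note  string // unexported: ignored by Clone
}

func main() {
    orig := Settings{
        Name:  "a",
        Tags:  []string{"x", "y"},
        Extra: map[string]int{"retries": 3},
        note:  "not copied",
    }

    // Clone returns interface{}, so assert back to the concrete type.
    copied := treflect.Clone(orig).(Settings)

    // The copy owns its slice and map storage; mutating it leaves the
    // original untouched.
    copied.Tags[0] = "changed"
    copied.Extra["retries"] = 9
    fmt.Println(orig.Tags[0], orig.Extra["retries"]) // "x" 3
    fmt.Println(copied.note == "")                   // true: private fields are zeroed
}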
@@ -85,8 +85,6 @@ func Int64(v interface{}) (int64, bool) { return int64(v.(float64)), true } - fmt.Printf("%t\n%#v\n%#v\n", v, v, reflect.TypeOf(v).Kind()) - return 0, false } diff --git a/vendor/github.com/trivago/tgo/treflect/reflection_test.go b/vendor/github.com/trivago/tgo/treflect/reflection_test.go index 95538c1..41c3b0c 100644 --- a/vendor/github.com/trivago/tgo/treflect/reflection_test.go +++ b/vendor/github.com/trivago/tgo/treflect/reflection_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/treflect/typeregistry.go b/vendor/github.com/trivago/tgo/treflect/typeregistry.go index e369c76..d3e3d7e 100644 --- a/vendor/github.com/trivago/tgo/treflect/typeregistry.go +++ b/vendor/github.com/trivago/tgo/treflect/typeregistry.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -34,8 +34,12 @@ func NewTypeRegistry() TypeRegistry { } // Register a plugin to the TypeRegistry by passing an uninitialized object. -// Example: var MyConsumerClassID = shared.Plugin.Register(MyConsumer{}) func (registry TypeRegistry) Register(typeInstance interface{}) { + registry.RegisterWithDepth(typeInstance, 1) +} + +// RegisterWithDepth to register a plugin to the TypeRegistry by passing an uninitialized object. +func (registry TypeRegistry) RegisterWithDepth(typeInstance interface{}, depth int) { structType := reflect.TypeOf(typeInstance) packageName := structType.PkgPath() typeName := structType.Name() @@ -46,7 +50,7 @@ func (registry TypeRegistry) Register(typeInstance interface{}) { maxDepth = len(pathTokens) } - for n := 1; n <= maxDepth; n++ { + for n := depth; n <= maxDepth; n++ { shortTypeName := strings.Join(pathTokens[len(pathTokens)-n:], ".") + "." + typeName registry.namedType[shortTypeName] = structType } diff --git a/vendor/github.com/trivago/tgo/tstrings/parser.go b/vendor/github.com/trivago/tgo/tstrings/parser.go index b531505..6a4cbb9 100644 --- a/vendor/github.com/trivago/tgo/tstrings/parser.go +++ b/vendor/github.com/trivago/tgo/tstrings/parser.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tstrings/parser_test.go b/vendor/github.com/trivago/tgo/tstrings/parser_test.go index 22e9199..c3bffe0 100644 --- a/vendor/github.com/trivago/tgo/tstrings/parser_test.go +++ b/vendor/github.com/trivago/tgo/tstrings/parser_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tstrings/strings.go b/vendor/github.com/trivago/tgo/tstrings/strings.go index 8e78831..569eafd 100644 --- a/vendor/github.com/trivago/tgo/tstrings/strings.go +++ b/vendor/github.com/trivago/tgo/tstrings/strings.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. 
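The RegisterWithDepth change above lets callers skip the shortest package-path aliases when registering plugin types. A sketch of the difference; lookup helpers such as New exist in the upstream package but are not part of this diff, so they only appear in a comment:

package main

import (
    "fmt"

    "github.com/trivago/tgo/treflect"
    "github.com/trivago/tgo/tsync"
)

func main() {
    registry := treflect.NewTypeRegistry()

    // Register indexes the type under every package-path suffix starting at
    // depth 1: "tsync.Mutex", "tgo.tsync.Mutex", "trivago.tgo.tsync.Mutex", ...
    registry.Register(tsync.Mutex{})

    // RegisterWithDepth starts at a deeper suffix, so the one-token alias is
    // skipped and the shortest registered name is "tgo.tsync.Spinner".
    registry.RegisterWithDepth(tsync.Spinner{}, 2)

    // A lookup helper (e.g. registry.New("tsync.Mutex"), assumed from the
    // upstream package) would then instantiate the type by any registered name.
    fmt.Println("types registered under their package-path suffixes")
}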
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tstrings/strings_test.go b/vendor/github.com/trivago/tgo/tstrings/strings_test.go index 2ed1144..656dea0 100644 --- a/vendor/github.com/trivago/tgo/tstrings/strings_test.go +++ b/vendor/github.com/trivago/tgo/tstrings/strings_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/errors.go b/vendor/github.com/trivago/tgo/tsync/errors.go index 26503a7..d01544d 100644 --- a/vendor/github.com/trivago/tgo/tsync/errors.go +++ b/vendor/github.com/trivago/tgo/tsync/errors.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/flow.go b/vendor/github.com/trivago/tgo/tsync/flow.go new file mode 100644 index 0000000..22943ae --- /dev/null +++ b/vendor/github.com/trivago/tgo/tsync/flow.go @@ -0,0 +1,98 @@ +package tsync + +import ( + "reflect" +) + +// Fanout receives from the given in channel and forwards the data to the first +// non-blocking out channel. Fanout returns when in has been closed. +func Fanout(in interface{}, out ...interface{}) { + cases := newSendCases(out) + inChan := reflect.ValueOf(in) + + for { + data, more := inChan.Recv() + if !more { + return + } + for i := range cases { + cases[i].Send = data + } + reflect.Select(cases) + } +} + +// Funnel receives from the first non-blocking in channel and forwards it to the +// given out channel. Funnel returns when all in channels have been closed. +func Funnel(out interface{}, in ...interface{}) { + cases := newRecvCases(in) + outChan := reflect.ValueOf(out) + + for len(cases) > 0 { + idx, val, ok := reflect.Select(cases) + if !ok { + cases = removeCase(idx, cases) + continue + } + outChan.Send(val) + } +} + +// Turnout multiplexes data between the list of in and out channels. The data of +// the first non-blocking in channel will be forwarded to the first non-blocking +// out channel. Turnout returns when all in channels have been closed. 
+func Turnout(in []interface{}, out []interface{}) { + inCases := newRecvCases(in) + outCases := newSendCases(out) + + for len(inCases) > 0 { + idx, val, ok := reflect.Select(inCases) + if !ok { + inCases = removeCase(idx, inCases) + continue + } + + for i := range outCases { + outCases[i].Send = val + } + reflect.Select(outCases) + } +} + +func newSendCases(send []interface{}) []reflect.SelectCase { + cases := make([]reflect.SelectCase, 0, len(send)) + for _, c := range send { + cases = append(cases, reflect.SelectCase{ + Dir: reflect.SelectSend, + Chan: reflect.ValueOf(c), + }) + } + return cases +} + +func newRecvCases(recv []interface{}) []reflect.SelectCase { + cases := make([]reflect.SelectCase, 0, len(recv)) + for _, c := range recv { + cases = append(cases, reflect.SelectCase{ + Dir: reflect.SelectRecv, + Chan: reflect.ValueOf(c), + }) + } + return cases +} + +func removeCase(i int, c []reflect.SelectCase) []reflect.SelectCase { + switch { + case len(c) == 0: + return []reflect.SelectCase{} + + case i == 0: + return c[1:] + + case i == len(c)-1: + return c[:i] + + default: + return append(c[:i], c[i+1:]...) + } +} diff --git a/vendor/github.com/trivago/tgo/tsync/flow_test.go b/vendor/github.com/trivago/tgo/tsync/flow_test.go new file mode 100644 index 0000000..ee565fc --- /dev/null +++ b/vendor/github.com/trivago/tgo/tsync/flow_test.go @@ -0,0 +1,130 @@ +// Copyright 2015-2018 trivago N.V. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
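The new flow helpers above shuttle values between channels via reflection. A small sketch of Funnel merging two producers into one consumer; Fanout and Turnout follow the same pattern with the roles reversed or combined:

package main

import (
    "fmt"

    "github.com/trivago/tgo/tsync"
)

func main() {
    in1 := make(chan int, 2)
    in2 := make(chan int, 2)
    out := make(chan int, 4)

    in1 <- 1
    in1 <- 2
    in2 <- 3
    in2 <- 4
    close(in1)
    close(in2)

    // Funnel forwards from whichever input is ready and returns once
    // every input channel has been closed.
    tsync.Funnel(out, in1, in2)
    close(out)

    for v := range out {
        fmt.Println(v)
    }
}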
+ +package tsync + +import ( + "github.com/trivago/tgo/ttesting" + "sync" + "testing" + "time" +) + +func TestFanout(t *testing.T) { + expect := ttesting.NewExpect(t) + + in := make(chan int) + out1 := make(chan int) + out2 := make(chan int) + out3 := make(chan int) + + fanoutFunc := new(sync.WaitGroup) + fanoutFunc.Add(1) + + go func() { + Fanout(in, out1, out2, out3) + fanoutFunc.Done() + }() + + in <- 1 + select { + case d := <-out1: + expect.Equal(1, d) + case d := <-out2: + expect.Equal(1, d) + case d := <-out3: + expect.Equal(1, d) + case <-time.After(time.Second): + expect.NotExecuted() + } + + close(in) + expect.NonBlocking(time.Second, fanoutFunc.Wait) +} + +func TestFunnel(t *testing.T) { + expect := ttesting.NewExpect(t) + + out := make(chan int) + in1 := make(chan int, 1) + in2 := make(chan int, 1) + in3 := make(chan int, 1) + + funnelFunc := new(sync.WaitGroup) + funnelFunc.Add(1) + + go func() { + Funnel(out, in1, in2, in3) + funnelFunc.Done() + }() + + in1 <- 1 + in2 <- 1 + in3 <- 1 + + for i := 0; i < 3; i++ { + select { + case d := <-out: + expect.Equal(1, d) + case <-time.After(time.Second): + expect.NotExecuted() + } + } + + close(in1) + close(in2) + close(in3) + expect.NonBlocking(time.Second, funnelFunc.Wait) +} + +func TestTurnout(t *testing.T) { + expect := ttesting.NewExpect(t) + + in1 := make(chan int, 1) + in2 := make(chan int, 1) + in3 := make(chan int, 1) + out1 := make(chan int) + out2 := make(chan int) + out3 := make(chan int) + + turnoutFunc := new(sync.WaitGroup) + turnoutFunc.Add(1) + + go func() { + Turnout([]interface{}{in1, in2, in3}, []interface{}{out1, out2, out3}) + turnoutFunc.Done() + }() + + in1 <- 1 + in2 <- 1 + in3 <- 1 + + for i := 0; i < 3; i++ { + select { + case d := <-out1: + expect.Equal(1, d) + case d := <-out2: + expect.Equal(1, d) + case d := <-out3: + expect.Equal(1, d) + case <-time.After(time.Second): + expect.NotExecuted() + } + } + + close(in1) + close(in2) + close(in3) + expect.NonBlocking(time.Second, turnoutFunc.Wait) +} diff --git a/vendor/github.com/trivago/tgo/tsync/fuse.go b/vendor/github.com/trivago/tgo/tsync/fuse.go index defbe65..02c3970 100644 --- a/vendor/github.com/trivago/tgo/tsync/fuse.go +++ b/vendor/github.com/trivago/tgo/tsync/fuse.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/fuse_test.go b/vendor/github.com/trivago/tgo/tsync/fuse_test.go index 27aad66..3bff66a 100644 --- a/vendor/github.com/trivago/tgo/tsync/fuse_test.go +++ b/vendor/github.com/trivago/tgo/tsync/fuse_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/mutex.go b/vendor/github.com/trivago/tgo/tsync/mutex.go index 73cc235..2a1faf1 100644 --- a/vendor/github.com/trivago/tgo/tsync/mutex.go +++ b/vendor/github.com/trivago/tgo/tsync/mutex.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -19,13 +19,18 @@ import ( ) // Mutex is a lightweight, spinner based mutex implementation, extending the -// standard go mutex by the possibility to query the mutexe's state. This state -// is very volatile but can come in handy sometimes. +// standard go mutex by the possibility to query the mutex' state and by adding +// a TryLock function. type Mutex struct { state *int32 priority SpinPriority } +const ( + mutexUnlocked = int32(0) + mutexLocked = int32(1) +) + // NewMutex creates a new mutex with the given spin priority used during Lock. func NewMutex(priority SpinPriority) *Mutex { return &Mutex{ @@ -37,19 +42,25 @@ func NewMutex(priority SpinPriority) *Mutex { // Lock blocks (spins) until the lock becomes available func (m *Mutex) Lock() { spin := NewSpinner(m.priority) - for !atomic.CompareAndSwapInt32(m.state, 0, 1) { + for !m.TryLock() { spin.Yield() } } +// TryLock tries to acquire a lock and returns true if it succeeds. This +// function does not block. +func (m *Mutex) TryLock() bool { + return atomic.CompareAndSwapInt32(m.state, mutexUnlocked, mutexLocked) +} + // Unlock unblocks one routine waiting on lock. func (m *Mutex) Unlock() { - atomic.StoreInt32(m.state, 0) + atomic.StoreInt32(m.state, mutexUnlocked) } // IsLocked returns the state of this mutex. The result of this function might // change directly after call so it should only be used in situations where // this fact is not considered problematic. func (m *Mutex) IsLocked() bool { - return atomic.LoadInt32(m.state) == 1 + return atomic.LoadInt32(m.state) != mutexUnlocked } diff --git a/vendor/github.com/trivago/tgo/tsync/queue.go b/vendor/github.com/trivago/tgo/tsync/queue.go index fe7362c..d13855c 100644 --- a/vendor/github.com/trivago/tgo/tsync/queue.go +++ b/vendor/github.com/trivago/tgo/tsync/queue.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/queue_test.go b/vendor/github.com/trivago/tgo/tsync/queue_test.go index 1faa489..a52d637 100644 --- a/vendor/github.com/trivago/tgo/tsync/queue_test.go +++ b/vendor/github.com/trivago/tgo/tsync/queue_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/run.go b/vendor/github.com/trivago/tgo/tsync/run.go index cbee939..79fb84a 100644 --- a/vendor/github.com/trivago/tgo/tsync/run.go +++ b/vendor/github.com/trivago/tgo/tsync/run.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/spinner.go b/vendor/github.com/trivago/tgo/tsync/spinner.go index 5c82c9a..bad4932 100644 --- a/vendor/github.com/trivago/tgo/tsync/spinner.go +++ b/vendor/github.com/trivago/tgo/tsync/spinner.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
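A sketch of the new non-blocking TryLock on the spinner-based mutex. The spin priority constant used here (SpinPriorityMedium) is assumed from the tsync package and is not part of this diff:

package main

import (
    "fmt"

    "github.com/trivago/tgo/tsync"
)

func main() {
    // SpinPriorityMedium is an assumed constant; pass whichever spin
    // priority the tsync package actually provides.
    m := tsync.NewMutex(tsync.SpinPriorityMedium)

    m.Lock()
    if !m.TryLock() {
        // TryLock never spins: it simply reports that the lock is held.
        fmt.Println("busy, doing other work instead")
    }
    m.Unlock()

    if m.TryLock() {
        fmt.Println("acquired without blocking")
        m.Unlock()
    }
}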
diff --git a/vendor/github.com/trivago/tgo/tsync/stack.go b/vendor/github.com/trivago/tgo/tsync/stack.go index 2f4a03c..214b4cd 100644 --- a/vendor/github.com/trivago/tgo/tsync/stack.go +++ b/vendor/github.com/trivago/tgo/tsync/stack.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/stack_test.go b/vendor/github.com/trivago/tgo/tsync/stack_test.go index 30390fa..e56d2b7 100644 --- a/vendor/github.com/trivago/tgo/tsync/stack_test.go +++ b/vendor/github.com/trivago/tgo/tsync/stack_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/tsync/waitgroup.go b/vendor/github.com/trivago/tgo/tsync/waitgroup.go index 1177a14..2cac12d 100644 --- a/vendor/github.com/trivago/tgo/tsync/waitgroup.go +++ b/vendor/github.com/trivago/tgo/tsync/waitgroup.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/github.com/trivago/tgo/ttesting/expect.go b/vendor/github.com/trivago/tgo/ttesting/expect.go index 76b6374..19e1fcb 100644 --- a/vendor/github.com/trivago/tgo/ttesting/expect.go +++ b/vendor/github.com/trivago/tgo/ttesting/expect.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -35,6 +35,7 @@ func init() { // Expect is a helper construct for unittesting type Expect struct { scope string + group string t *testing.T silent bool } @@ -67,6 +68,17 @@ func NewSilentExpect(t *testing.T) Expect { } } +// StartGroup starts a new test groups. Errors will be prefixed with the name of +// this group until EndGroup is called. +func (e *Expect) StartGroup(name string) { + e.group = name +} + +// EndGroup closes a test group. See StartGroup. +func (e *Expect) EndGroup() { + e.group = "" +} + func (e Expect) error(message string) { if e.silent { return @@ -76,7 +88,12 @@ func (e Expect) error(message string) { file = file[basePathIdx+len(expectBasePath):] } - fmt.Printf("\t%s:%d: %s -> %s\n", file, line, e.scope, message) + scope := e.scope + if e.group != "" { + scope += "/" + e.group + } + + fmt.Printf("\t%s:%d: %s -> %s\n", file, line, scope, message) e.t.Fail() } diff --git a/vendor/github.com/trivago/tgo/ttesting/expect_test.go b/vendor/github.com/trivago/tgo/ttesting/expect_test.go index 4cb6960..a3de929 100644 --- a/vendor/github.com/trivago/tgo/ttesting/expect_test.go +++ b/vendor/github.com/trivago/tgo/ttesting/expect_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
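The new StartGroup/EndGroup pair on ttesting.Expect prefixes failure messages with a group name, which helps when one test function checks several cases. A short sketch:

package mypkg

import (
    "testing"

    "github.com/trivago/tgo/ttesting"
)

func TestGrouped(t *testing.T) {
    expect := ttesting.NewExpect(t)

    // Failures reported between StartGroup and EndGroup are prefixed
    // with "addition", making them easy to attribute.
    expect.StartGroup("addition")
    expect.Equal(4, 2+2)
    expect.EndGroup()

    expect.StartGroup("concatenation")
    expect.Equal("ab", "a"+"b")
    expect.EndGroup()
}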
diff --git a/vendor/github.com/trivago/tgo/ttesting/expectfail_test.go b/vendor/github.com/trivago/tgo/ttesting/expectfail_test.go index e3bbc5c..45da218 100644 --- a/vendor/github.com/trivago/tgo/ttesting/expectfail_test.go +++ b/vendor/github.com/trivago/tgo/ttesting/expectfail_test.go @@ -1,4 +1,4 @@ -// Copyright 2015-2016 trivago GmbH +// Copyright 2015-2018 trivago N.V. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/golang.org/x/crypto/CONTRIBUTING.md b/vendor/golang.org/x/crypto/CONTRIBUTING.md index 88dff59..d0485e8 100644 --- a/vendor/golang.org/x/crypto/CONTRIBUTING.md +++ b/vendor/golang.org/x/crypto/CONTRIBUTING.md @@ -4,16 +4,15 @@ Go is an open source project. It is the work of hundreds of contributors. We appreciate your help! - ## Filing issues When [filing an issue](https://golang.org/issue/new), make sure to answer these five questions: -1. What version of Go are you using (`go version`)? -2. What operating system and processor architecture are you using? -3. What did you do? -4. What did you expect to see? -5. What did you see instead? +1. What version of Go are you using (`go version`)? +2. What operating system and processor architecture are you using? +3. What did you do? +4. What did you expect to see? +5. What did you see instead? General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker. The gophers there will answer or ask you to file an issue if you've tripped over a bug. @@ -23,9 +22,5 @@ The gophers there will answer or ask you to file an issue if you've tripped over Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html) before sending patches. -**We do not accept GitHub pull requests** -(we use [Gerrit](https://code.google.com/p/gerrit/) instead for code review). - Unless otherwise noted, the Go source files are distributed under the BSD-style license found in the LICENSE file. - diff --git a/vendor/golang.org/x/crypto/acme/acme.go b/vendor/golang.org/x/crypto/acme/acme.go index d6a09dd..00ee955 100644 --- a/vendor/golang.org/x/crypto/acme/acme.go +++ b/vendor/golang.org/x/crypto/acme/acme.go @@ -14,7 +14,6 @@ package acme import ( - "bytes" "context" "crypto" "crypto/ecdsa" @@ -23,6 +22,8 @@ import ( "crypto/sha256" "crypto/tls" "crypto/x509" + "crypto/x509/pkix" + "encoding/asn1" "encoding/base64" "encoding/hex" "encoding/json" @@ -33,14 +34,26 @@ import ( "io/ioutil" "math/big" "net/http" - "strconv" "strings" "sync" "time" ) -// LetsEncryptURL is the Directory endpoint of Let's Encrypt CA. -const LetsEncryptURL = "https://acme-v01.api.letsencrypt.org/directory" +const ( + // LetsEncryptURL is the Directory endpoint of Let's Encrypt CA. + LetsEncryptURL = "https://acme-v01.api.letsencrypt.org/directory" + + // ALPNProto is the ALPN protocol name used by a CA server when validating + // tls-alpn-01 challenges. + // + // Package users must ensure their servers can negotiate the ACME ALPN in + // order for tls-alpn-01 challenge verifications to succeed. + // See the crypto/tls package's Config.NextProtos field. + ALPNProto = "acme-tls/1" +) + +// idPeACMEIdentifierV1 is the OID for the ACME extension for the TLS-ALPN challenge. 
+var idPeACMEIdentifierV1 = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 30, 1} const ( maxChainLen = 5 // max depth and breadth of a certificate chain @@ -64,6 +77,10 @@ const ( type Client struct { // Key is the account key used to register with a CA and sign requests. // Key.Public() must return a *rsa.PublicKey or *ecdsa.PublicKey. + // + // The following algorithms are supported: + // RS256, ES256, ES384 and ES512. + // See RFC7518 for more details about the algorithms. Key crypto.Signer // HTTPClient optionally specifies an HTTP client to use @@ -76,6 +93,22 @@ type Client struct { // will have no effect. DirectoryURL string + // RetryBackoff computes the duration after which the nth retry of a failed request + // should occur. The value of n for the first call on failure is 1. + // The values of r and resp are the request and response of the last failed attempt. + // If the returned value is negative or zero, no more retries are done and an error + // is returned to the caller of the original method. + // + // Requests which result in a 4xx client error are not retried, + // except for 400 Bad Request due to "bad nonce" errors and 429 Too Many Requests. + // + // If RetryBackoff is nil, a truncated exponential backoff algorithm + // with the ceiling of 10 seconds is used, where each subsequent retry n + // is done after either ("Retry-After" + jitter) or (2^n seconds + jitter), + // preferring the former if "Retry-After" header is found in the resp. + // The jitter is a random value up to 1 second. + RetryBackoff func(n int, r *http.Request, resp *http.Response) time.Duration + dirMu sync.Mutex // guards writes to dir dir *Directory // cached result of Client's Discover method @@ -95,19 +128,12 @@ func (c *Client) Discover(ctx context.Context) (Directory, error) { return *c.dir, nil } - dirURL := c.DirectoryURL - if dirURL == "" { - dirURL = LetsEncryptURL - } - res, err := c.get(ctx, dirURL) + res, err := c.get(ctx, c.directoryURL(), wantStatus(http.StatusOK)) if err != nil { return Directory{}, err } defer res.Body.Close() c.addNonce(res.Header) - if res.StatusCode != http.StatusOK { - return Directory{}, responseError(res) - } var v struct { Reg string `json:"new-reg"` @@ -135,6 +161,13 @@ func (c *Client) Discover(ctx context.Context) (Directory, error) { return *c.dir, nil } +func (c *Client) directoryURL() string { + if c.DirectoryURL != "" { + return c.DirectoryURL + } + return LetsEncryptURL +} + // CreateCert requests a new certificate using the Certificate Signing Request csr encoded in DER format. // The exp argument indicates the desired certificate validity duration. CA may issue a certificate // with a different duration. @@ -142,7 +175,7 @@ func (c *Client) Discover(ctx context.Context) (Directory, error) { // // In the case where CA server does not provide the issued certificate in the response, // CreateCert will poll certURL using c.FetchCert, which will result in additional round-trips. -// In such scenario the caller can cancel the polling with ctx. +// In such a scenario, the caller can cancel the polling with ctx. // // CreateCert returns an error if the CA's response or chain was unreasonably large. // Callers are encouraged to parse the returned value to ensure the certificate is valid and has the expected features. 
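The new RetryBackoff hook replaces the package's internal sleeper/backoff helpers (removed further down). A sketch of a client with a custom policy; the account key is assumed to come from the caller:

package main

import (
    "crypto"
    "math/rand"
    "net/http"
    "time"

    "golang.org/x/crypto/acme"
)

// newACMEClient builds a client whose failed requests are retried with a
// capped exponential backoff plus up to one second of jitter. Returning a
// zero or negative duration from RetryBackoff stops retrying; leaving the
// field nil keeps the package's default truncated exponential backoff.
func newACMEClient(key crypto.Signer) *acme.Client {
    return &acme.Client{
        Key: key,
        RetryBackoff: func(n int, r *http.Request, resp *http.Response) time.Duration {
            if n > 5 {
                n = 5 // cap the exponent
            }
            d := time.Duration(1<<uint(n)) * time.Second
            return d + time.Duration(rand.Intn(1000))*time.Millisecond
        },
    }
}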
@@ -166,14 +199,11 @@ func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, req.NotAfter = now.Add(exp).Format(time.RFC3339) } - res, err := c.retryPostJWS(ctx, c.Key, c.dir.CertURL, req) + res, err := c.post(ctx, c.Key, c.dir.CertURL, req, wantStatus(http.StatusCreated)) if err != nil { return nil, "", err } defer res.Body.Close() - if res.StatusCode != http.StatusCreated { - return nil, "", responseError(res) - } curl := res.Header.Get("Location") // cert permanent URL if res.ContentLength == 0 { @@ -196,26 +226,11 @@ func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, // Callers are encouraged to parse the returned value to ensure the certificate is valid // and has expected features. func (c *Client) FetchCert(ctx context.Context, url string, bundle bool) ([][]byte, error) { - for { - res, err := c.get(ctx, url) - if err != nil { - return nil, err - } - defer res.Body.Close() - if res.StatusCode == http.StatusOK { - return c.responseCert(ctx, res, bundle) - } - if res.StatusCode > 299 { - return nil, responseError(res) - } - d := retryAfter(res.Header.Get("Retry-After"), 3*time.Second) - select { - case <-time.After(d): - // retry - case <-ctx.Done(): - return nil, ctx.Err() - } + res, err := c.get(ctx, url, wantStatus(http.StatusOK)) + if err != nil { + return nil, err } + return c.responseCert(ctx, res, bundle) } // RevokeCert revokes a previously issued certificate cert, provided in DER format. @@ -241,14 +256,11 @@ func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, if key == nil { key = c.Key } - res, err := c.retryPostJWS(ctx, key, c.dir.RevokeURL, body) + res, err := c.post(ctx, key, c.dir.RevokeURL, body, wantStatus(http.StatusOK)) if err != nil { return err } defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return responseError(res) - } return nil } @@ -257,7 +269,7 @@ func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, func AcceptTOS(tosURL string) bool { return true } // Register creates a new account registration by following the "new-reg" flow. -// It returns registered account. The a argument is not modified. +// It returns the registered account. The account is not modified. // // The registration may require the caller to agree to the CA's Terms of Service (TOS). // If so, and the account has not indicated the acceptance of the terms (see Account for details), @@ -314,6 +326,20 @@ func (c *Client) UpdateReg(ctx context.Context, a *Account) (*Account, error) { // a valid authorization (Authorization.Status is StatusValid). If so, the caller // need not fulfill any challenge and can proceed to requesting a certificate. func (c *Client) Authorize(ctx context.Context, domain string) (*Authorization, error) { + return c.authorize(ctx, "dns", domain) +} + +// AuthorizeIP is the same as Authorize but requests IP address authorization. +// Clients which successfully obtain such authorization may request to issue +// a certificate for IP addresses. +// +// See the ACME spec extension for more details about IP address identifiers: +// https://tools.ietf.org/html/draft-ietf-acme-ip. 
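For context on the simplified CreateCert path above, a sketch of the pre-RFC8555 issuance round trip: build a CSR and request issuance, letting CreateCert poll the certificate URL if the CA does not return the certificate inline. The domain and client are supplied by the caller:

package main

import (
    "context"
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rand"
    "crypto/x509"
    "crypto/x509/pkix"
    "time"

    "golang.org/x/crypto/acme"
)

// issueCert assumes the account is registered and the domain is already
// authorized; it only covers the CSR/issuance step.
func issueCert(ctx context.Context, client *acme.Client, domain string) (der [][]byte, certURL string, err error) {
    certKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    if err != nil {
        return nil, "", err
    }
    csr, err := x509.CreateCertificateRequest(rand.Reader, &x509.CertificateRequest{
        Subject:  pkix.Name{CommonName: domain},
        DNSNames: []string{domain},
    }, certKey)
    if err != nil {
        return nil, "", err
    }
    // CreateCert polls the certificate URL via FetchCert when the CA does
    // not return the certificate inline; cancelling ctx stops the polling.
    return client.CreateCert(ctx, csr, 90*24*time.Hour, true)
}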
+func (c *Client) AuthorizeIP(ctx context.Context, ipaddr string) (*Authorization, error) { + return c.authorize(ctx, "ip", ipaddr) +} + +func (c *Client) authorize(ctx context.Context, typ, val string) (*Authorization, error) { if _, err := c.Discover(ctx); err != nil { return nil, err } @@ -327,16 +353,13 @@ func (c *Client) Authorize(ctx context.Context, domain string) (*Authorization, Identifier authzID `json:"identifier"` }{ Resource: "new-authz", - Identifier: authzID{Type: "dns", Value: domain}, + Identifier: authzID{Type: typ, Value: val}, } - res, err := c.retryPostJWS(ctx, c.Key, c.dir.AuthzURL, req) + res, err := c.post(ctx, c.Key, c.dir.AuthzURL, req, wantStatus(http.StatusCreated)) if err != nil { return nil, err } defer res.Body.Close() - if res.StatusCode != http.StatusCreated { - return nil, responseError(res) - } var v wireAuthz if err := json.NewDecoder(res.Body).Decode(&v); err != nil { @@ -353,14 +376,11 @@ func (c *Client) Authorize(ctx context.Context, domain string) (*Authorization, // If a caller needs to poll an authorization until its status is final, // see the WaitAuthorization method. func (c *Client) GetAuthorization(ctx context.Context, url string) (*Authorization, error) { - res, err := c.get(ctx, url) + res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } defer res.Body.Close() - if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusAccepted { - return nil, responseError(res) - } var v wireAuthz if err := json.NewDecoder(res.Body).Decode(&v); err != nil { return nil, fmt.Errorf("acme: invalid response: %v", err) @@ -387,56 +407,58 @@ func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { Status: "deactivated", Delete: true, } - res, err := c.retryPostJWS(ctx, c.Key, url, req) + res, err := c.post(ctx, c.Key, url, req, wantStatus(http.StatusOK)) if err != nil { return err } defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return responseError(res) - } return nil } // WaitAuthorization polls an authorization at the given URL // until it is in one of the final states, StatusValid or StatusInvalid, -// or the context is done. +// the ACME CA responded with a 4xx error code, or the context is done. // // It returns a non-nil Authorization only if its Status is StatusValid. // In all other cases WaitAuthorization returns an error. // If the Status is StatusInvalid, the returned error is of type *AuthorizationError. func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorization, error) { - sleep := sleeper(ctx) for { - res, err := c.get(ctx, url) + res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } - retry := res.Header.Get("Retry-After") - if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusAccepted { - res.Body.Close() - if err := sleep(retry, 1); err != nil { - return nil, err - } - continue - } + var raw wireAuthz err = json.NewDecoder(res.Body).Decode(&raw) res.Body.Close() - if err != nil { - if err := sleep(retry, 0); err != nil { - return nil, err - } - continue - } - if raw.Status == StatusValid { + switch { + case err != nil: + // Skip and retry. + case raw.Status == StatusValid: return raw.authorization(url), nil - } - if raw.Status == StatusInvalid { + case raw.Status == StatusInvalid: return nil, raw.error(url) } - if err := sleep(retry, 0); err != nil { - return nil, err + + // Exponential backoff is implemented in c.get above. 
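A sketch of the new IP-identifier authorization flow added above, combined with the revised WaitAuthorization behaviour (4xx responses now abort instead of retrying). Fulfilling the challenge itself, i.e. serving the key authorization, is left out:

package main

import (
    "context"
    "fmt"

    "golang.org/x/crypto/acme"
)

func authorizeAddr(ctx context.Context, client *acme.Client, ipaddr string) error {
    authz, err := client.AuthorizeIP(ctx, ipaddr) // e.g. "1.2.3.4"
    if err != nil {
        return err
    }
    if authz.Status == acme.StatusValid {
        return nil // already authorized, nothing to do
    }

    var chal *acme.Challenge
    for _, c := range authz.Challenges {
        if c.Type == "http-01" {
            chal = c
            break
        }
    }
    if chal == nil {
        return fmt.Errorf("no http-01 challenge offered for %s", ipaddr)
    }

    // ... serve the challenge response here, then ask the CA to verify it.
    if _, err := client.Accept(ctx, chal); err != nil {
        return err
    }

    // WaitAuthorization polls until the authorization is valid or invalid;
    // with this change a 4xx from the CA returns immediately as an error.
    _, err = client.WaitAuthorization(ctx, authz.URI)
    return err
}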
+ // This is just to prevent continuously hitting the CA + // while waiting for a final authorization status. + d := retryAfter(res.Header.Get("Retry-After")) + if d == 0 { + // Given that the fastest challenges TLS-SNI and HTTP-01 + // require a CA to make at least 1 network round trip + // and most likely persist a challenge state, + // this default delay seems reasonable. + d = time.Second + } + t := time.NewTimer(d) + select { + case <-ctx.Done(): + t.Stop() + return nil, ctx.Err() + case <-t.C: + // Retry. } } } @@ -445,14 +467,11 @@ func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorizat // // A client typically polls a challenge status using this method. func (c *Client) GetChallenge(ctx context.Context, url string) (*Challenge, error) { - res, err := c.get(ctx, url) + res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } defer res.Body.Close() - if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusAccepted { - return nil, responseError(res) - } v := wireChallenge{URI: url} if err := json.NewDecoder(res.Body).Decode(&v); err != nil { return nil, fmt.Errorf("acme: invalid response: %v", err) @@ -479,16 +498,14 @@ func (c *Client) Accept(ctx context.Context, chal *Challenge) (*Challenge, error Type: chal.Type, Auth: auth, } - res, err := c.retryPostJWS(ctx, c.Key, chal.URI, req) + res, err := c.post(ctx, c.Key, chal.URI, req, wantStatus( + http.StatusOK, // according to the spec + http.StatusAccepted, // Let's Encrypt: see https://goo.gl/WsJ7VT (acme-divergences.md) + )) if err != nil { return nil, err } defer res.Body.Close() - // Note: the protocol specifies 200 as the expected response code, but - // letsencrypt seems to be returning 202. - if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusAccepted { - return nil, responseError(res) - } var v wireChallenge if err := json.NewDecoder(res.Body).Decode(&v); err != nil { @@ -545,7 +562,7 @@ func (c *Client) HTTP01ChallengePath(token string) string { // If no WithKey option is provided, a new ECDSA key is generated using P-256 curve. // // The returned certificate is valid for the next 24 hours and must be presented only when -// the server name of the client hello matches exactly the returned name value. +// the server name of the TLS ClientHello matches exactly the returned name value. func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { ka, err := keyAuth(c.Key.Public(), token) if err != nil { @@ -572,7 +589,7 @@ func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tl // If no WithKey option is provided, a new ECDSA key is generated using P-256 curve. // // The returned certificate is valid for the next 24 hours and must be presented only when -// the server name in the client hello matches exactly the returned name value. +// the server name in the TLS ClientHello matches exactly the returned name value. func (c *Client) TLSSNI02ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { b := sha256.Sum256([]byte(token)) h := hex.EncodeToString(b[:]) @@ -593,6 +610,52 @@ func (c *Client) TLSSNI02ChallengeCert(token string, opt ...CertOption) (cert tl return cert, sanA, nil } +// TLSALPN01ChallengeCert creates a certificate for TLS-ALPN-01 challenge response. +// Servers can present the certificate to validate the challenge and prove control +// over a domain name. 
For more details on TLS-ALPN-01 see +// https://tools.ietf.org/html/draft-shoemaker-acme-tls-alpn-00#section-3 +// +// The token argument is a Challenge.Token value. +// If a WithKey option is provided, its private part signs the returned cert, +// and the public part is used to specify the signee. +// If no WithKey option is provided, a new ECDSA key is generated using P-256 curve. +// +// The returned certificate is valid for the next 24 hours and must be presented only when +// the server name in the TLS ClientHello matches the domain, and the special acme-tls/1 ALPN protocol +// has been specified. +func (c *Client) TLSALPN01ChallengeCert(token, domain string, opt ...CertOption) (cert tls.Certificate, err error) { + ka, err := keyAuth(c.Key.Public(), token) + if err != nil { + return tls.Certificate{}, err + } + shasum := sha256.Sum256([]byte(ka)) + extValue, err := asn1.Marshal(shasum[:]) + if err != nil { + return tls.Certificate{}, err + } + acmeExtension := pkix.Extension{ + Id: idPeACMEIdentifierV1, + Critical: true, + Value: extValue, + } + + tmpl := defaultTLSChallengeCertTemplate() + + var newOpt []CertOption + for _, o := range opt { + switch o := o.(type) { + case *certOptTemplate: + t := *(*x509.Certificate)(o) // shallow copy is ok + tmpl = &t + default: + newOpt = append(newOpt, o) + } + } + tmpl.ExtraExtensions = append(tmpl.ExtraExtensions, acmeExtension) + newOpt = append(newOpt, WithTemplate(tmpl)) + return tlsChallengeCert([]string{domain}, newOpt) +} + // doReg sends all types of registration requests. // The type of request is identified by typ argument, which is a "resource" // in the ACME spec terms. @@ -612,14 +675,15 @@ func (c *Client) doReg(ctx context.Context, url string, typ string, acct *Accoun req.Contact = acct.Contact req.Agreement = acct.AgreedTerms } - res, err := c.retryPostJWS(ctx, c.Key, url, req) + res, err := c.post(ctx, c.Key, url, req, wantStatus( + http.StatusOK, // updates and deletes + http.StatusCreated, // new account creation + http.StatusAccepted, // Let's Encrypt divergent implementation + )) if err != nil { return nil, err } defer res.Body.Close() - if res.StatusCode < 200 || res.StatusCode > 299 { - return nil, responseError(res) - } var v struct { Contact []string @@ -649,66 +713,19 @@ func (c *Client) doReg(ctx context.Context, url string, typ string, acct *Accoun }, nil } -// retryPostJWS will retry calls to postJWS if there is a badNonce error, -// clearing the stored nonces after each error. -// If the response was 4XX-5XX, then responseError is called on the body, -// the body is closed, and the error returned. 
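A sketch of how a server might answer a tls-alpn-01 challenge with the new TLSALPN01ChallengeCert helper and the ALPNProto constant; token and domain come from the pending challenge, and fallback stands in for the server's normal certificate:

package main

import (
    "crypto/tls"

    "golang.org/x/crypto/acme"
)

func alpnChallengeConfig(client *acme.Client, token, domain string, fallback *tls.Certificate) (*tls.Config, error) {
    challengeCert, err := client.TLSALPN01ChallengeCert(token, domain)
    if err != nil {
        return nil, err
    }
    return &tls.Config{
        // Advertise the ACME ALPN protocol alongside the usual ones so the
        // CA's validation handshake can be negotiated.
        NextProtos: []string{acme.ALPNProto, "h2", "http/1.1"},
        GetCertificate: func(hello *tls.ClientHelloInfo) (*tls.Certificate, error) {
            // Validation requests offer only acme-tls/1; everything else
            // gets the regular certificate.
            if hello.ServerName == domain &&
                len(hello.SupportedProtos) == 1 &&
                hello.SupportedProtos[0] == acme.ALPNProto {
                return &challengeCert, nil
            }
            return fallback, nil
        },
    }, nil
}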
-func (c *Client) retryPostJWS(ctx context.Context, key crypto.Signer, url string, body interface{}) (*http.Response, error) { - sleep := sleeper(ctx) - for { - res, err := c.postJWS(ctx, key, url, body) - if err != nil { - return nil, err - } - // handle errors 4XX-5XX with responseError - if res.StatusCode >= 400 && res.StatusCode <= 599 { - err := responseError(res) - res.Body.Close() - // according to spec badNonce is urn:ietf:params:acme:error:badNonce - // however, acme servers in the wild return their version of the error - // https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-5.4 - if ae, ok := err.(*Error); ok && strings.HasSuffix(strings.ToLower(ae.ProblemType), ":badnonce") { - // clear any nonces that we might've stored that might now be - // considered bad - c.clearNonces() - retry := res.Header.Get("Retry-After") - if err := sleep(retry, 1); err != nil { - return nil, err - } - continue - } - return nil, err - } - return res, nil - } -} - -// postJWS signs the body with the given key and POSTs it to the provided url. -// The body argument must be JSON-serializable. -func (c *Client) postJWS(ctx context.Context, key crypto.Signer, url string, body interface{}) (*http.Response, error) { - nonce, err := c.popNonce(ctx, url) - if err != nil { - return nil, err - } - b, err := jwsEncodeJSON(body, key, nonce) - if err != nil { - return nil, err - } - res, err := c.post(ctx, url, "application/jose+json", bytes.NewReader(b)) - if err != nil { - return nil, err - } - c.addNonce(res.Header) - return res, nil -} - // popNonce returns a nonce value previously stored with c.addNonce -// or fetches a fresh one from the given URL. +// or fetches a fresh one from a URL by issuing a HEAD request. +// It first tries c.directoryURL() and then the provided url if the former fails. func (c *Client) popNonce(ctx context.Context, url string) (string, error) { c.noncesMu.Lock() defer c.noncesMu.Unlock() if len(c.nonces) == 0 { - return c.fetchNonce(ctx, url) + dirURL := c.directoryURL() + v, err := c.fetchNonce(ctx, dirURL) + if err != nil && url != dirURL { + v, err = c.fetchNonce(ctx, url) + } + return v, err } var nonce string for nonce = range c.nonces { @@ -742,58 +759,12 @@ func (c *Client) addNonce(h http.Header) { c.nonces[v] = struct{}{} } -func (c *Client) httpClient() *http.Client { - if c.HTTPClient != nil { - return c.HTTPClient - } - return http.DefaultClient -} - -func (c *Client) get(ctx context.Context, urlStr string) (*http.Response, error) { - req, err := http.NewRequest("GET", urlStr, nil) - if err != nil { - return nil, err - } - return c.do(ctx, req) -} - -func (c *Client) head(ctx context.Context, urlStr string) (*http.Response, error) { - req, err := http.NewRequest("HEAD", urlStr, nil) - if err != nil { - return nil, err - } - return c.do(ctx, req) -} - -func (c *Client) post(ctx context.Context, urlStr, contentType string, body io.Reader) (*http.Response, error) { - req, err := http.NewRequest("POST", urlStr, body) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", contentType) - return c.do(ctx, req) -} - -func (c *Client) do(ctx context.Context, req *http.Request) (*http.Response, error) { - res, err := c.httpClient().Do(req.WithContext(ctx)) +func (c *Client) fetchNonce(ctx context.Context, url string) (string, error) { + r, err := http.NewRequest("HEAD", url, nil) if err != nil { - select { - case <-ctx.Done(): - // Prefer the unadorned context error. 
- // (The acme package had tests assuming this, previously from ctxhttp's - // behavior, predating net/http supporting contexts natively) - // TODO(bradfitz): reconsider this in the future. But for now this - // requires no test updates. - return nil, ctx.Err() - default: - return nil, err - } + return "", err } - return res, nil -} - -func (c *Client) fetchNonce(ctx context.Context, url string) (string, error) { - resp, err := c.head(ctx, url) + resp, err := c.doNoRetry(ctx, r) if err != nil { return "", err } @@ -845,24 +816,6 @@ func (c *Client) responseCert(ctx context.Context, res *http.Response, bundle bo return cert, nil } -// responseError creates an error of Error type from resp. -func responseError(resp *http.Response) error { - // don't care if ReadAll returns an error: - // json.Unmarshal will fail in that case anyway - b, _ := ioutil.ReadAll(resp.Body) - e := &wireError{Status: resp.StatusCode} - if err := json.Unmarshal(b, e); err != nil { - // this is not a regular error response: - // populate detail with anything we received, - // e.Status will already contain HTTP response code value - e.Detail = string(b) - if e.Detail == "" { - e.Detail = resp.Status - } - } - return e.error(resp.Header) -} - // chainCert fetches CA certificate chain recursively by following "up" links. // Each recursive call increments the depth by 1, resulting in an error // if the recursion level reaches maxChainLen. @@ -873,14 +826,11 @@ func (c *Client) chainCert(ctx context.Context, url string, depth int) ([][]byte return nil, errors.New("acme: certificate chain is too deep") } - res, err := c.get(ctx, url) + res, err := c.get(ctx, url, wantStatus(http.StatusOK)) if err != nil { return nil, err } defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return nil, responseError(res) - } b, err := ioutil.ReadAll(io.LimitReader(res.Body, maxCertSize+1)) if err != nil { return nil, err @@ -925,65 +875,6 @@ func linkHeader(h http.Header, rel string) []string { return links } -// sleeper returns a function that accepts the Retry-After HTTP header value -// and an increment that's used with backoff to increasingly sleep on -// consecutive calls until the context is done. If the Retry-After header -// cannot be parsed, then backoff is used with a maximum sleep time of 10 -// seconds. -func sleeper(ctx context.Context) func(ra string, inc int) error { - var count int - return func(ra string, inc int) error { - count += inc - d := backoff(count, 10*time.Second) - d = retryAfter(ra, d) - wakeup := time.NewTimer(d) - defer wakeup.Stop() - select { - case <-ctx.Done(): - return ctx.Err() - case <-wakeup.C: - return nil - } - } -} - -// retryAfter parses a Retry-After HTTP header value, -// trying to convert v into an int (seconds) or use http.ParseTime otherwise. -// It returns d if v cannot be parsed. -func retryAfter(v string, d time.Duration) time.Duration { - if i, err := strconv.Atoi(v); err == nil { - return time.Duration(i) * time.Second - } - t, err := http.ParseTime(v) - if err != nil { - return d - } - return t.Sub(timeNow()) -} - -// backoff computes a duration after which an n+1 retry iteration should occur -// using truncated exponential backoff algorithm. -// -// The n argument is always bounded between 0 and 30. -// The max argument defines upper bound for the returned value. 
-func backoff(n int, max time.Duration) time.Duration { - if n < 0 { - n = 0 - } - if n > 30 { - n = 30 - } - var d time.Duration - if x, err := rand.Int(rand.Reader, big.NewInt(1000)); err == nil { - d = time.Duration(x.Int64()) * time.Millisecond - } - d += time.Duration(1<<uint(n)) * time.Second - if d > max { - return max - } - return d -} - // keyAuth generates a key authorization string for a given token. func keyAuth(pub crypto.PublicKey, token string) (string, error) { th, err := JWKThumbprint(pub) @@ -993,14 +884,25 @@ func keyAuth(pub crypto.PublicKey, token string) (string, error) { return fmt.Sprintf("%s.%s", token, th), nil } +// defaultTLSChallengeCertTemplate is a template used to create challenge certs for TLS challenges. +func defaultTLSChallengeCertTemplate() *x509.Certificate { + return &x509.Certificate{ + SerialNumber: big.NewInt(1), + NotBefore: time.Now(), + NotAfter: time.Now().Add(24 * time.Hour), + BasicConstraintsValid: true, + KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, + } +} + // tlsChallengeCert creates a temporary certificate for TLS-SNI challenges // with the given SANs and auto-generated public/private key pair. +// The Subject Common Name is set to the first SAN to aid debugging. // To create a cert with a custom key pair, specify WithKey option. func tlsChallengeCert(san []string, opt []CertOption) (tls.Certificate, error) { - var ( - key crypto.Signer - tmpl *x509.Certificate - ) + var key crypto.Signer + tmpl := defaultTLSChallengeCertTemplate() for _, o := range opt { switch o := o.(type) { case *certOptKey: @@ -1009,7 +911,7 @@ func tlsChallengeCert(san []string, opt []CertOption) (tls.Certificate, error) { } key = o.key case *certOptTemplate: - var t = *(*x509.Certificate)(o) // shallow copy is ok + t := *(*x509.Certificate)(o) // shallow copy is ok tmpl = &t default: // package's fault, if we let this happen: @@ -1022,17 +924,10 @@ func tlsChallengeCert(san []string, opt []CertOption) (tls.Certificate, error) { return tls.Certificate{}, err } } - if tmpl == nil { - tmpl = &x509.Certificate{ - SerialNumber: big.NewInt(1), - NotBefore: time.Now(), - NotAfter: time.Now().Add(24 * time.Hour), - BasicConstraintsValid: true, - KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, - ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, - } - } tmpl.DNSNames = san + if len(san) > 0 { + tmpl.Subject.CommonName = san[0] + } der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, key.Public(), key) if err != nil { diff --git a/vendor/golang.org/x/crypto/acme/acme_test.go b/vendor/golang.org/x/crypto/acme/acme_test.go index 14832de..99a4bf8 100644 --- a/vendor/golang.org/x/crypto/acme/acme_test.go +++ b/vendor/golang.org/x/crypto/acme/acme_test.go @@ -13,9 +13,9 @@ import ( "crypto/x509" "crypto/x509/pkix" "encoding/base64" + "encoding/hex" "encoding/json" "fmt" - "io/ioutil" "math/big" "net/http" "net/http/httptest" @@ -75,6 +75,7 @@ func TestDiscover(t *testing.T) { ) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") + w.Header().Set("Replay-Nonce", "testnonce") fmt.Fprintf(w, `{ "new-reg": %q, "new-authz": %q, @@ -100,6 +101,9 @@ func TestDiscover(t *testing.T) { if dir.RevokeURL != revoke { t.Errorf("dir.RevokeURL = %q; want %q", dir.RevokeURL, revoke) } + if _, exist := c.nonces["testnonce"]; !exist { + t.Errorf("c.nonces = %q; want 'testnonce' in the map", c.nonces) + } } func
TestRegister(t *testing.T) { @@ -147,7 +151,11 @@ func TestRegister(t *testing.T) { return false } - c := Client{Key: testKeyEC, dir: &Directory{RegURL: ts.URL}} + c := Client{ + Key: testKeyEC, + DirectoryURL: ts.URL, + dir: &Directory{RegURL: ts.URL}, + } a := &Account{Contact: contacts} var err error if a, err = c.Register(context.Background(), a, prompt); err != nil { @@ -288,106 +296,131 @@ func TestGetReg(t *testing.T) { } func TestAuthorize(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method == "HEAD" { - w.Header().Set("Replay-Nonce", "test-nonce") - return - } - if r.Method != "POST" { - t.Errorf("r.Method = %q; want POST", r.Method) - } + tt := []struct{ typ, value string }{ + {"dns", "example.com"}, + {"ip", "1.2.3.4"}, + } + for _, test := range tt { + t.Run(test.typ, func(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == "HEAD" { + w.Header().Set("Replay-Nonce", "test-nonce") + return + } + if r.Method != "POST" { + t.Errorf("r.Method = %q; want POST", r.Method) + } - var j struct { - Resource string - Identifier struct { - Type string - Value string - } - } - decodeJWSRequest(t, &j, r) + var j struct { + Resource string + Identifier struct { + Type string + Value string + } + } + decodeJWSRequest(t, &j, r) - // Test request - if j.Resource != "new-authz" { - t.Errorf("j.Resource = %q; want new-authz", j.Resource) - } - if j.Identifier.Type != "dns" { - t.Errorf("j.Identifier.Type = %q; want dns", j.Identifier.Type) - } - if j.Identifier.Value != "example.com" { - t.Errorf("j.Identifier.Value = %q; want example.com", j.Identifier.Value) - } + // Test request + if j.Resource != "new-authz" { + t.Errorf("j.Resource = %q; want new-authz", j.Resource) + } + if j.Identifier.Type != test.typ { + t.Errorf("j.Identifier.Type = %q; want %q", j.Identifier.Type, test.typ) + } + if j.Identifier.Value != test.value { + t.Errorf("j.Identifier.Value = %q; want %q", j.Identifier.Value, test.value) + } - w.Header().Set("Location", "https://ca.tld/acme/auth/1") - w.WriteHeader(http.StatusCreated) - fmt.Fprintf(w, `{ - "identifier": {"type":"dns","value":"example.com"}, - "status":"pending", - "challenges":[ - { - "type":"http-01", + w.Header().Set("Location", "https://ca.tld/acme/auth/1") + w.WriteHeader(http.StatusCreated) + fmt.Fprintf(w, `{ + "identifier": {"type":%q,"value":%q}, "status":"pending", - "uri":"https://ca.tld/acme/challenge/publickey/id1", - "token":"token1" - }, - { - "type":"tls-sni-01", - "status":"pending", - "uri":"https://ca.tld/acme/challenge/publickey/id2", - "token":"token2" - } - ], - "combinations":[[0],[1]]}`) - })) - defer ts.Close() + "challenges":[ + { + "type":"http-01", + "status":"pending", + "uri":"https://ca.tld/acme/challenge/publickey/id1", + "token":"token1" + }, + { + "type":"tls-sni-01", + "status":"pending", + "uri":"https://ca.tld/acme/challenge/publickey/id2", + "token":"token2" + } + ], + "combinations":[[0],[1]] + }`, test.typ, test.value) + })) + defer ts.Close() + + var ( + auth *Authorization + err error + ) + cl := Client{ + Key: testKeyEC, + DirectoryURL: ts.URL, + dir: &Directory{AuthzURL: ts.URL}, + } + switch test.typ { + case "dns": + auth, err = cl.Authorize(context.Background(), test.value) + case "ip": + auth, err = cl.AuthorizeIP(context.Background(), test.value) + default: + t.Fatalf("unknown identifier type: %q", test.typ) + } + if err != nil { + t.Fatal(err) + } - cl := Client{Key: testKeyEC, 
dir: &Directory{AuthzURL: ts.URL}} - auth, err := cl.Authorize(context.Background(), "example.com") - if err != nil { - t.Fatal(err) - } + if auth.URI != "https://ca.tld/acme/auth/1" { + t.Errorf("URI = %q; want https://ca.tld/acme/auth/1", auth.URI) + } + if auth.Status != "pending" { + t.Errorf("Status = %q; want pending", auth.Status) + } + if auth.Identifier.Type != test.typ { + t.Errorf("Identifier.Type = %q; want %q", auth.Identifier.Type, test.typ) + } + if auth.Identifier.Value != test.value { + t.Errorf("Identifier.Value = %q; want %q", auth.Identifier.Value, test.value) + } - if auth.URI != "https://ca.tld/acme/auth/1" { - t.Errorf("URI = %q; want https://ca.tld/acme/auth/1", auth.URI) - } - if auth.Status != "pending" { - t.Errorf("Status = %q; want pending", auth.Status) - } - if auth.Identifier.Type != "dns" { - t.Errorf("Identifier.Type = %q; want dns", auth.Identifier.Type) - } - if auth.Identifier.Value != "example.com" { - t.Errorf("Identifier.Value = %q; want example.com", auth.Identifier.Value) - } + if n := len(auth.Challenges); n != 2 { + t.Fatalf("len(auth.Challenges) = %d; want 2", n) + } - if n := len(auth.Challenges); n != 2 { - t.Fatalf("len(auth.Challenges) = %d; want 2", n) - } + c := auth.Challenges[0] + if c.Type != "http-01" { + t.Errorf("c.Type = %q; want http-01", c.Type) + } + if c.URI != "https://ca.tld/acme/challenge/publickey/id1" { + t.Errorf("c.URI = %q; want https://ca.tld/acme/challenge/publickey/id1", c.URI) + } + if c.Token != "token1" { + t.Errorf("c.Token = %q; want token1", c.Token) + } - c := auth.Challenges[0] - if c.Type != "http-01" { - t.Errorf("c.Type = %q; want http-01", c.Type) - } - if c.URI != "https://ca.tld/acme/challenge/publickey/id1" { - t.Errorf("c.URI = %q; want https://ca.tld/acme/challenge/publickey/id1", c.URI) - } - if c.Token != "token1" { - t.Errorf("c.Token = %q; want token1", c.Token) - } + c = auth.Challenges[1] + if c.Type != "tls-sni-01" { + t.Errorf("c.Type = %q; want tls-sni-01", c.Type) + } + if c.URI != "https://ca.tld/acme/challenge/publickey/id2" { + t.Errorf("c.URI = %q; want https://ca.tld/acme/challenge/publickey/id2", c.URI) + } + if c.Token != "token2" { + t.Errorf("c.Token = %q; want token2", c.Token) + } - c = auth.Challenges[1] - if c.Type != "tls-sni-01" { - t.Errorf("c.Type = %q; want tls-sni-01", c.Type) - } - if c.URI != "https://ca.tld/acme/challenge/publickey/id2" { - t.Errorf("c.URI = %q; want https://ca.tld/acme/challenge/publickey/id2", c.URI) - } - if c.Token != "token2" { - t.Errorf("c.Token = %q; want token2", c.Token) - } + combs := [][]int{{0}, {1}} + if !reflect.DeepEqual(auth.Combinations, combs) { + t.Errorf("auth.Combinations: %+v\nwant: %+v\n", auth.Combinations, combs) + } - combs := [][]int{{0}, {1}} - if !reflect.DeepEqual(auth.Combinations, combs) { - t.Errorf("auth.Combinations: %+v\nwant: %+v\n", auth.Combinations, combs) + }) } } @@ -401,7 +434,11 @@ func TestAuthorizeValid(t *testing.T) { w.Write([]byte(`{"status":"valid"}`)) })) defer ts.Close() - client := Client{Key: testKey, dir: &Directory{AuthzURL: ts.URL}} + client := Client{ + Key: testKey, + DirectoryURL: ts.URL, + dir: &Directory{AuthzURL: ts.URL}, + } _, err := client.Authorize(context.Background(), "example.com") if err != nil { t.Errorf("err = %v", err) @@ -485,95 +522,98 @@ func TestGetAuthorization(t *testing.T) { } func TestWaitAuthorization(t *testing.T) { - var count int - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - count++ - w.Header().Set("Retry-After", "0") - 
if count > 1 { - fmt.Fprintf(w, `{"status":"valid"}`) - return + t.Run("wait loop", func(t *testing.T) { + var count int + authz, err := runWaitAuthorization(context.Background(), t, func(w http.ResponseWriter, r *http.Request) { + count++ + w.Header().Set("Retry-After", "0") + if count > 1 { + fmt.Fprintf(w, `{"status":"valid"}`) + return + } + fmt.Fprintf(w, `{"status":"pending"}`) + }) + if err != nil { + t.Fatalf("non-nil error: %v", err) } - fmt.Fprintf(w, `{"status":"pending"}`) - })) - defer ts.Close() - - type res struct { - authz *Authorization - err error - } - done := make(chan res) - defer close(done) - go func() { - var client Client - a, err := client.WaitAuthorization(context.Background(), ts.URL) - done <- res{a, err} - }() - - select { - case <-time.After(5 * time.Second): - t.Fatal("WaitAuthz took too long to return") - case res := <-done: - if res.err != nil { - t.Fatalf("res.err = %v", res.err) + if authz == nil { + t.Fatal("authz is nil") + } + }) + t.Run("invalid status", func(t *testing.T) { + _, err := runWaitAuthorization(context.Background(), t, func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintf(w, `{"status":"invalid"}`) + }) + if _, ok := err.(*AuthorizationError); !ok { + t.Errorf("err is %v (%T); want non-nil *AuthorizationError", err, err) } - if res.authz == nil { - t.Fatal("res.authz is nil") + }) + t.Run("non-retriable error", func(t *testing.T) { + const code = http.StatusBadRequest + _, err := runWaitAuthorization(context.Background(), t, func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(code) + }) + res, ok := err.(*Error) + if !ok { + t.Fatalf("err is %v (%T); want a non-nil *Error", err, err) } + if res.StatusCode != code { + t.Errorf("res.StatusCode = %d; want %d", res.StatusCode, code) + } + }) + for _, code := range []int{http.StatusTooManyRequests, http.StatusInternalServerError} { + t.Run(fmt.Sprintf("retriable %d error", code), func(t *testing.T) { + var count int + authz, err := runWaitAuthorization(context.Background(), t, func(w http.ResponseWriter, r *http.Request) { + count++ + w.Header().Set("Retry-After", "0") + if count > 1 { + fmt.Fprintf(w, `{"status":"valid"}`) + return + } + w.WriteHeader(code) + }) + if err != nil { + t.Fatalf("non-nil error: %v", err) + } + if authz == nil { + t.Fatal("authz is nil") + } + }) } -} - -func TestWaitAuthorizationInvalid(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintf(w, `{"status":"invalid"}`) - })) - defer ts.Close() - - res := make(chan error) - defer close(res) - go func() { - var client Client - _, err := client.WaitAuthorization(context.Background(), ts.URL) - res <- err - }() - - select { - case <-time.After(3 * time.Second): - t.Fatal("WaitAuthz took too long to return") - case err := <-res: + t.Run("context cancel", func(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + _, err := runWaitAuthorization(ctx, t, func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Retry-After", "60") + fmt.Fprintf(w, `{"status":"pending"}`) + }) if err == nil { t.Error("err is nil") } - if _, ok := err.(*AuthorizationError); !ok { - t.Errorf("err is %T; want *AuthorizationError", err) - } - } + }) } - -func TestWaitAuthorizationCancel(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Retry-After", "60") - fmt.Fprintf(w, `{"status":"pending"}`) - })) +func 
runWaitAuthorization(ctx context.Context, t *testing.T, h http.HandlerFunc) (*Authorization, error) { + t.Helper() + ts := httptest.NewServer(h) defer ts.Close() - - res := make(chan error) - defer close(res) + type res struct { + authz *Authorization + err error + } + ch := make(chan res, 1) go func() { var client Client - ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) - defer cancel() - _, err := client.WaitAuthorization(ctx, ts.URL) - res <- err + a, err := client.WaitAuthorization(ctx, ts.URL) + ch <- res{a, err} }() - select { - case <-time.After(time.Second): - t.Fatal("WaitAuthz took too long to return") - case err := <-res: - if err == nil { - t.Error("err is nil") - } + case <-time.After(3 * time.Second): + t.Fatal("WaitAuthorization took too long to return") + case v := <-ch: + return v.authz, v.err } + panic("runWaitAuthorization: out of select") } func TestRevokeAuthorization(t *testing.T) { @@ -600,7 +640,7 @@ func TestRevokeAuthorization(t *testing.T) { t.Errorf("req.Delete is false") } case "/2": - w.WriteHeader(http.StatusInternalServerError) + w.WriteHeader(http.StatusBadRequest) } })) defer ts.Close() @@ -821,7 +861,7 @@ func TestFetchCertRetry(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if count < 1 { w.Header().Set("Retry-After", "0") - w.WriteHeader(http.StatusAccepted) + w.WriteHeader(http.StatusTooManyRequests) count++ return } @@ -946,7 +986,7 @@ func TestNonce_add(t *testing.T) { c.addNonce(http.Header{"Replay-Nonce": {}}) c.addNonce(http.Header{"Replay-Nonce": {"nonce"}}) - nonces := map[string]struct{}{"nonce": struct{}{}} + nonces := map[string]struct{}{"nonce": {}} if !reflect.DeepEqual(c.nonces, nonces) { t.Errorf("c.nonces = %q; want %q", c.nonces, nonces) } @@ -1013,6 +1053,53 @@ func TestNonce_fetchError(t *testing.T) { } } +func TestNonce_popWhenEmpty(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != "HEAD" { + t.Errorf("r.Method = %q; want HEAD", r.Method) + } + switch r.URL.Path { + case "/dir-with-nonce": + w.Header().Set("Replay-Nonce", "dirnonce") + case "/new-nonce": + w.Header().Set("Replay-Nonce", "newnonce") + case "/dir-no-nonce", "/empty": + // No nonce in the header. 
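For context on the API the refactored TestWaitAuthorization subtests above exercise: WaitAuthorization polls the authorization URI until it leaves the pending state, and the caller bounds that polling with a context. A minimal external-usage sketch, not part of this patch; client (*acme.Client) and authz (*acme.Authorization) are assumed to exist, and the imports "context", "log", "time" and "golang.org/x/crypto/acme" are assumed.

// Illustrative only: bound WaitAuthorization's polling with a deadline and
// distinguish a CA-rejected authorization from timeout or transport errors.
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
defer cancel()
if _, err := client.WaitAuthorization(ctx, authz.URI); err != nil {
	if ae, ok := err.(*acme.AuthorizationError); ok {
		log.Fatalf("CA marked the authorization invalid: %v", ae)
	}
	log.Fatalf("WaitAuthorization: %v", err) // cancellation, deadline, or transport error
}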
+ default: + t.Errorf("Unknown URL: %s", r.URL) + } + })) + defer ts.Close() + ctx := context.Background() + + tt := []struct { + dirURL, popURL, nonce string + wantOK bool + }{ + {ts.URL + "/dir-with-nonce", ts.URL + "/new-nonce", "dirnonce", true}, + {ts.URL + "/dir-no-nonce", ts.URL + "/new-nonce", "newnonce", true}, + {ts.URL + "/dir-no-nonce", ts.URL + "/empty", "", false}, + } + for _, test := range tt { + t.Run(fmt.Sprintf("nonce:%s wantOK:%v", test.nonce, test.wantOK), func(t *testing.T) { + c := Client{DirectoryURL: test.dirURL} + v, err := c.popNonce(ctx, test.popURL) + if !test.wantOK { + if err == nil { + t.Fatalf("c.popNonce(%q) returned nil error", test.popURL) + } + return + } + if err != nil { + t.Fatalf("c.popNonce(%q): %v", test.popURL, err) + } + if v != test.nonce { + t.Errorf("c.popNonce(%q) = %q; want %q", test.popURL, v, test.nonce) + } + }) + } +} + func TestNonce_postJWS(t *testing.T) { var count int seen := make(map[string]bool) @@ -1046,7 +1133,11 @@ func TestNonce_postJWS(t *testing.T) { })) defer ts.Close() - client := Client{Key: testKey, dir: &Directory{AuthzURL: ts.URL}} + client := Client{ + Key: testKey, + DirectoryURL: ts.URL, // nonces are fetched from here first + dir: &Directory{AuthzURL: ts.URL}, + } if _, err := client.Authorize(context.Background(), "example.com"); err != nil { t.Errorf("client.Authorize 1: %v", err) } @@ -1068,44 +1159,6 @@ func TestNonce_postJWS(t *testing.T) { } } -func TestRetryPostJWS(t *testing.T) { - var count int - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - count++ - w.Header().Set("Replay-Nonce", fmt.Sprintf("nonce%d", count)) - if r.Method == "HEAD" { - // We expect the client to do 2 head requests to fetch - // nonces, one to start and another after getting badNonce - return - } - - head, err := decodeJWSHead(r) - if err != nil { - t.Errorf("decodeJWSHead: %v", err) - } else if head.Nonce == "" { - t.Error("head.Nonce is empty") - } else if head.Nonce == "nonce1" { - // return a badNonce error to force the call to retry - w.WriteHeader(http.StatusBadRequest) - w.Write([]byte(`{"type":"urn:ietf:params:acme:error:badNonce"}`)) - return - } - // Make client.Authorize happy; we're not testing its result. 
- w.WriteHeader(http.StatusCreated) - w.Write([]byte(`{"status":"valid"}`)) - })) - defer ts.Close() - - client := Client{Key: testKey, dir: &Directory{AuthzURL: ts.URL}} - // This call will fail with badNonce, causing a retry - if _, err := client.Authorize(context.Background(), "example.com"); err != nil { - t.Errorf("client.Authorize 1: %v", err) - } - if count != 4 { - t.Errorf("total requests count: %d; want 4", count) - } -} - func TestLinkHeader(t *testing.T) { h := http.Header{"Link": { `;rel="next"`, @@ -1129,37 +1182,6 @@ func TestLinkHeader(t *testing.T) { } } -func TestErrorResponse(t *testing.T) { - s := `{ - "status": 400, - "type": "urn:acme:error:xxx", - "detail": "text" - }` - res := &http.Response{ - StatusCode: 400, - Status: "400 Bad Request", - Body: ioutil.NopCloser(strings.NewReader(s)), - Header: http.Header{"X-Foo": {"bar"}}, - } - err := responseError(res) - v, ok := err.(*Error) - if !ok { - t.Fatalf("err = %+v (%T); want *Error type", err, err) - } - if v.StatusCode != 400 { - t.Errorf("v.StatusCode = %v; want 400", v.StatusCode) - } - if v.ProblemType != "urn:acme:error:xxx" { - t.Errorf("v.ProblemType = %q; want urn:acme:error:xxx", v.ProblemType) - } - if v.Detail != "text" { - t.Errorf("v.Detail = %q; want text", v.Detail) - } - if !reflect.DeepEqual(v.Header, res.Header) { - t.Errorf("v.Header = %+v; want %+v", v.Header, res.Header) - } -} - func TestTLSSNI01ChallengeCert(t *testing.T) { const ( token = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA" @@ -1186,6 +1208,9 @@ func TestTLSSNI01ChallengeCert(t *testing.T) { if cert.DNSNames[0] != name { t.Errorf("cert.DNSNames[0] != name: %q vs %q", cert.DNSNames[0], name) } + if cn := cert.Subject.CommonName; cn != san { + t.Errorf("cert.Subject.CommonName = %q; want %q", cn, san) + } } func TestTLSSNI02ChallengeCert(t *testing.T) { @@ -1219,6 +1244,61 @@ func TestTLSSNI02ChallengeCert(t *testing.T) { if i >= len(cert.DNSNames) || cert.DNSNames[i] != name { t.Errorf("%v doesn't have %q", cert.DNSNames, name) } + if cn := cert.Subject.CommonName; cn != sanA { + t.Errorf("CommonName = %q; want %q", cn, sanA) + } +} + +func TestTLSALPN01ChallengeCert(t *testing.T) { + const ( + token = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA" + keyAuth = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA." 
+ testKeyECThumbprint + // echo -n | shasum -a 256 + h = "0420dbbd5eefe7b4d06eb9d1d9f5acb4c7cda27d320e4b30332f0b6cb441734ad7b0" + domain = "example.com" + ) + + extValue, err := hex.DecodeString(h) + if err != nil { + t.Fatal(err) + } + + client := &Client{Key: testKeyEC} + tlscert, err := client.TLSALPN01ChallengeCert(token, domain) + if err != nil { + t.Fatal(err) + } + + if n := len(tlscert.Certificate); n != 1 { + t.Fatalf("len(tlscert.Certificate) = %d; want 1", n) + } + cert, err := x509.ParseCertificate(tlscert.Certificate[0]) + if err != nil { + t.Fatal(err) + } + names := []string{domain} + if !reflect.DeepEqual(cert.DNSNames, names) { + t.Fatalf("cert.DNSNames = %v;\nwant %v", cert.DNSNames, names) + } + if cn := cert.Subject.CommonName; cn != domain { + t.Errorf("CommonName = %q; want %q", cn, domain) + } + acmeExts := []pkix.Extension{} + for _, ext := range cert.Extensions { + if idPeACMEIdentifierV1.Equal(ext.Id) { + acmeExts = append(acmeExts, ext) + } + } + if len(acmeExts) != 1 { + t.Errorf("acmeExts = %v; want exactly one", acmeExts) + } + if !acmeExts[0].Critical { + t.Errorf("acmeExt.Critical = %v; want true", acmeExts[0].Critical) + } + if bytes.Compare(acmeExts[0].Value, extValue) != 0 { + t.Errorf("acmeExt.Value = %v; want %v", acmeExts[0].Value, extValue) + } + } func TestTLSChallengeCertOpt(t *testing.T) { @@ -1319,28 +1399,3 @@ func TestDNS01ChallengeRecord(t *testing.T) { t.Errorf("val = %q; want %q", val, value) } } - -func TestBackoff(t *testing.T) { - tt := []struct{ min, max time.Duration }{ - {time.Second, 2 * time.Second}, - {2 * time.Second, 3 * time.Second}, - {4 * time.Second, 5 * time.Second}, - {8 * time.Second, 9 * time.Second}, - } - for i, test := range tt { - d := backoff(i, time.Minute) - if d < test.min || test.max < d { - t.Errorf("%d: d = %v; want between %v and %v", i, d, test.min, test.max) - } - } - - min, max := time.Second, 2*time.Second - if d := backoff(-1, time.Minute); d < min || max < d { - t.Errorf("d = %v; want between %v and %v", d, min, max) - } - - bound := 10 * time.Second - if d := backoff(100, bound); d != bound { - t.Errorf("d = %v; want %v", d, bound) - } -} diff --git a/vendor/golang.org/x/crypto/acme/autocert/autocert.go b/vendor/golang.org/x/crypto/acme/autocert/autocert.go index a478eff..a50d9bf 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/autocert.go +++ b/vendor/golang.org/x/crypto/acme/autocert/autocert.go @@ -24,8 +24,9 @@ import ( "fmt" "io" mathrand "math/rand" + "net" "net/http" - "strconv" + "path" "strings" "sync" "time" @@ -43,7 +44,7 @@ var createCertRetryAfter = time.Minute var pseudoRand *lockedMathRand func init() { - src := mathrand.NewSource(timeNow().UnixNano()) + src := mathrand.NewSource(time.Now().UnixNano()) pseudoRand = &lockedMathRand{rnd: mathrand.New(src)} } @@ -68,7 +69,7 @@ func HostWhitelist(hosts ...string) HostPolicy { } return func(_ context.Context, host string) error { if !whitelist[host] { - return errors.New("acme/autocert: host not configured") + return fmt.Errorf("acme/autocert: host %q not configured in HostWhitelist", host) } return nil } @@ -80,11 +81,14 @@ func defaultHostPolicy(context.Context, string) error { } // Manager is a stateful certificate manager built on top of acme.Client. -// It obtains and refreshes certificates automatically, +// It obtains and refreshes certificates automatically using "tls-alpn-01", +// "tls-sni-01", "tls-sni-02" and "http-01" challenge types, // as well as providing them to a TLS server via tls.Config. 
// -// To preserve issued certificates and improve overall performance, -// use a cache implementation of Cache. For instance, DirCache. +// You must specify a cache implementation, such as DirCache, +// to reuse obtained certificates across program restarts. +// Otherwise your server is very likely to exceed the certificate +// issuer's request rate limits. type Manager struct { // Prompt specifies a callback function to conditionally accept a CA's Terms of Service (TOS). // The registration may require the caller to agree to the CA's TOS. @@ -94,11 +98,11 @@ type Manager struct { // To always accept the terms, the callers can use AcceptTOS. Prompt func(tosURL string) bool - // Cache optionally stores and retrieves previously-obtained certificates. - // If nil, certs will only be cached for the lifetime of the Manager. + // Cache optionally stores and retrieves previously-obtained certificates + // and other state. If nil, certs will only be cached for the lifetime of + // the Manager. Multiple Managers can share the same Cache. // - // Manager passes the Cache certificates data encoded in PEM, with private/public - // parts combined in a single Cache.Put call, private key first. + // Using a persistent Cache, such as DirCache, is strongly recommended. Cache Cache // HostPolicy controls which domains the Manager will attempt @@ -123,8 +127,10 @@ type Manager struct { // Client is used to perform low-level operations, such as account registration // and requesting new certificates. + // // If Client is nil, a zero-value acme.Client is used with acme.LetsEncryptURL - // directory endpoint and a newly-generated ECDSA P-256 key. + // as directory endpoint. If the Client.Key is nil, a new ECDSA P-256 key is + // generated and, if Cache is not nil, stored in cache. // // Mutating the field after the first call of GetCertificate method will have no effect. Client *acme.Client @@ -136,37 +142,95 @@ type Manager struct { // If the Client's account key is already registered, Email is not used. Email string - // ForceRSA makes the Manager generate certificates with 2048-bit RSA keys. + // ForceRSA used to make the Manager generate RSA certificates. It is now ignored. // - // If false, a default is used. Currently the default - // is EC-based keys using the P-256 curve. + // Deprecated: the Manager will request the correct type of certificate based + // on what each client supports. ForceRSA bool + // ExtraExtensions are used when generating a new CSR (Certificate Request), + // thus allowing customization of the resulting certificate. + // For instance, TLS Feature Extension (RFC 7633) can be used + // to prevent an OCSP downgrade attack. + // + // The field value is passed to crypto/x509.CreateCertificateRequest + // in the template's ExtraExtensions field as is. + ExtraExtensions []pkix.Extension + clientMu sync.Mutex client *acme.Client // initialized by acmeClient method stateMu sync.Mutex - state map[string]*certState // keyed by domain name - - // tokenCert is keyed by token domain name, which matches server name - // of ClientHello. Keys always have ".acme.invalid" suffix. - tokenCertMu sync.RWMutex - tokenCert map[string]*tls.Certificate + state map[certKey]*certState // renewal tracks the set of domains currently running renewal timers. - // It is keyed by domain name. renewalMu sync.Mutex - renewal map[string]*domainRenewal + renewal map[certKey]*domainRenewal + + // tokensMu guards the rest of the fields: tryHTTP01, certTokens and httpTokens. 
+ tokensMu sync.RWMutex + // tryHTTP01 indicates whether the Manager should try "http-01" challenge type + // during the authorization flow. + tryHTTP01 bool + // httpTokens contains response body values for http-01 challenges + // and is keyed by the URL path at which a challenge response is expected + // to be provisioned. + // The entries are stored for the duration of the authorization flow. + httpTokens map[string][]byte + // certTokens contains temporary certificates for tls-sni and tls-alpn challenges + // and is keyed by token domain name, which matches server name of ClientHello. + // Keys always have ".acme.invalid" suffix for tls-sni. Otherwise, they are domain names + // for tls-alpn. + // The entries are stored for the duration of the authorization flow. + certTokens map[string]*tls.Certificate + // nowFunc, if not nil, returns the current time. This may be set for + // testing purposes. + nowFunc func() time.Time +} + +// certKey is the key by which certificates are tracked in state, renewal and cache. +type certKey struct { + domain string // without trailing dot + isRSA bool // RSA cert for legacy clients (as opposed to default ECDSA) + isToken bool // tls-based challenge token cert; key type is undefined regardless of isRSA +} + +func (c certKey) String() string { + if c.isToken { + return c.domain + "+token" + } + if c.isRSA { + return c.domain + "+rsa" + } + return c.domain +} + +// TLSConfig creates a new TLS config suitable for net/http.Server servers, +// supporting HTTP/2 and the tls-alpn-01 ACME challenge type. +func (m *Manager) TLSConfig() *tls.Config { + return &tls.Config{ + GetCertificate: m.GetCertificate, + NextProtos: []string{ + "h2", "http/1.1", // enable HTTP/2 + acme.ALPNProto, // enable tls-alpn ACME challenges + }, + } } // GetCertificate implements the tls.Config.GetCertificate hook. // It provides a TLS certificate for hello.ServerName host, including answering -// *.acme.invalid (TLS-SNI) challenges. All other fields of hello are ignored. +// tls-alpn-01 and *.acme.invalid (tls-sni-01 and tls-sni-02) challenges. +// All other fields of hello are ignored. // // If m.HostPolicy is non-nil, GetCertificate calls the policy before requesting // a new cert. A non-nil error returned from m.HostPolicy halts TLS negotiation. // The error is propagated back to the caller of GetCertificate and is user-visible. // This does not affect cached certs. See HostPolicy field description for more details. +// +// If GetCertificate is used directly, instead of via Manager.TLSConfig, package users will +// also have to add acme.ALPNProto to NextProtos for tls-alpn-01, or use HTTPHandler +// for http-01. (The tls-sni-* challenges have been deprecated by popular ACME providers +// due to security issues in the ecosystem.) func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, error) { if m.Prompt == nil { return nil, errors.New("acme/autocert: Manager.Prompt not set") @@ -179,21 +243,26 @@ func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, if !strings.Contains(strings.Trim(name, "."), ".") { return nil, errors.New("acme/autocert: server name component count invalid") } - if strings.ContainsAny(name, `/\`) { + if strings.ContainsAny(name, `+/\`) { return nil, errors.New("acme/autocert: server name contains invalid character") } + // In the worst-case scenario, the timeout needs to account for caching, host policy, + // domain ownership verification and certificate issuance. 
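The certKey and TLSConfig plumbing added above is what enables the zero-configuration HTTPS server pattern. The following is an illustrative sketch of intended usage, not part of this patch; the domain name and cache directory are placeholders.

package main

import (
	"log"
	"net/http"

	"golang.org/x/crypto/acme/autocert"
)

func main() {
	m := &autocert.Manager{
		Prompt:     autocert.AcceptTOS,
		HostPolicy: autocert.HostWhitelist("example.org"),     // placeholder domain
		Cache:      autocert.DirCache("/var/cache/autocert"),  // placeholder path
	}
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("hello, TLS\n"))
	})
	s := &http.Server{
		Addr:      ":https",
		Handler:   mux,
		TLSConfig: m.TLSConfig(), // advertises acme.ALPNProto so tls-alpn-01 can be answered
	}
	// The cert and key arguments are empty: certificates come from the Manager
	// through the GetCertificate callback installed by TLSConfig.
	log.Fatal(s.ListenAndServeTLS("", ""))
}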
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() - // check whether this is a token cert requested for TLS-SNI challenge - if strings.HasSuffix(name, ".acme.invalid") { - m.tokenCertMu.RLock() - defer m.tokenCertMu.RUnlock() - if cert := m.tokenCert[name]; cert != nil { + // Check whether this is a token cert requested for TLS-SNI or TLS-ALPN challenge. + if wantsTokenCert(hello) { + m.tokensMu.RLock() + defer m.tokensMu.RUnlock() + // It's ok to use the same token cert key for both tls-sni and tls-alpn + // because there's always at most 1 token cert per on-going domain authorization. + // See m.verify for details. + if cert := m.certTokens[name]; cert != nil { return cert, nil } - if cert, err := m.cacheGet(ctx, name); err == nil { + if cert, err := m.cacheGet(ctx, certKey{domain: name, isToken: true}); err == nil { return cert, nil } // TODO: cache error results? @@ -201,8 +270,11 @@ func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, } // regular domain - name = strings.TrimSuffix(name, ".") // golang.org/issue/18114 - cert, err := m.cert(ctx, name) + ck := certKey{ + domain: strings.TrimSuffix(name, "."), // golang.org/issue/18114 + isRSA: !supportsECDSA(hello), + } + cert, err := m.cert(ctx, ck) if err == nil { return cert, nil } @@ -214,27 +286,146 @@ func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, if err := m.hostPolicy()(ctx, name); err != nil { return nil, err } - cert, err = m.createCert(ctx, name) + cert, err = m.createCert(ctx, ck) if err != nil { return nil, err } - m.cachePut(ctx, name, cert) + m.cachePut(ctx, ck, cert) return cert, nil } +// wantsTokenCert reports whether a TLS request with SNI is made by a CA server +// for a challenge verification. +func wantsTokenCert(hello *tls.ClientHelloInfo) bool { + // tls-alpn-01 + if len(hello.SupportedProtos) == 1 && hello.SupportedProtos[0] == acme.ALPNProto { + return true + } + // tls-sni-xx + return strings.HasSuffix(hello.ServerName, ".acme.invalid") +} + +func supportsECDSA(hello *tls.ClientHelloInfo) bool { + // The "signature_algorithms" extension, if present, limits the key exchange + // algorithms allowed by the cipher suites. See RFC 5246, section 7.4.1.4.1. + if hello.SignatureSchemes != nil { + ecdsaOK := false + schemeLoop: + for _, scheme := range hello.SignatureSchemes { + const tlsECDSAWithSHA1 tls.SignatureScheme = 0x0203 // constant added in Go 1.10 + switch scheme { + case tlsECDSAWithSHA1, tls.ECDSAWithP256AndSHA256, + tls.ECDSAWithP384AndSHA384, tls.ECDSAWithP521AndSHA512: + ecdsaOK = true + break schemeLoop + } + } + if !ecdsaOK { + return false + } + } + if hello.SupportedCurves != nil { + ecdsaOK := false + for _, curve := range hello.SupportedCurves { + if curve == tls.CurveP256 { + ecdsaOK = true + break + } + } + if !ecdsaOK { + return false + } + } + for _, suite := range hello.CipherSuites { + switch suite { + case tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA, + tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305: + return true + } + } + return false +} + +// HTTPHandler configures the Manager to provision ACME "http-01" challenge responses. +// It returns an http.Handler that responds to the challenges and must be +// running on port 80. 
If it receives a request that is not an ACME challenge, +// it delegates the request to the optional fallback handler. +// +// If fallback is nil, the returned handler redirects all GET and HEAD requests +// to the default TLS port 443 with 302 Found status code, preserving the original +// request path and query. It responds with 400 Bad Request to all other HTTP methods. +// The fallback is not protected by the optional HostPolicy. +// +// Because the fallback handler is run with unencrypted port 80 requests, +// the fallback should not serve TLS-only requests. +// +// If HTTPHandler is never called, the Manager will only use the "tls-alpn-01" +// challenge for domain verification. +func (m *Manager) HTTPHandler(fallback http.Handler) http.Handler { + m.tokensMu.Lock() + defer m.tokensMu.Unlock() + m.tryHTTP01 = true + + if fallback == nil { + fallback = http.HandlerFunc(handleHTTPRedirect) + } + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if !strings.HasPrefix(r.URL.Path, "/.well-known/acme-challenge/") { + fallback.ServeHTTP(w, r) + return + } + // A reasonable context timeout for cache and host policy only, + // because we don't wait for a new certificate issuance here. + ctx, cancel := context.WithTimeout(r.Context(), time.Minute) + defer cancel() + if err := m.hostPolicy()(ctx, r.Host); err != nil { + http.Error(w, err.Error(), http.StatusForbidden) + return + } + data, err := m.httpToken(ctx, r.URL.Path) + if err != nil { + http.Error(w, err.Error(), http.StatusNotFound) + return + } + w.Write(data) + }) +} + +func handleHTTPRedirect(w http.ResponseWriter, r *http.Request) { + if r.Method != "GET" && r.Method != "HEAD" { + http.Error(w, "Use HTTPS", http.StatusBadRequest) + return + } + target := "https://" + stripPort(r.Host) + r.URL.RequestURI() + http.Redirect(w, r, target, http.StatusFound) +} + +func stripPort(hostport string) string { + host, _, err := net.SplitHostPort(hostport) + if err != nil { + return hostport + } + return net.JoinHostPort(host, "443") +} + // cert returns an existing certificate either from m.state or cache. // If a certificate is found in cache but not in m.state, the latter will be filled // with the cached value. -func (m *Manager) cert(ctx context.Context, name string) (*tls.Certificate, error) { +func (m *Manager) cert(ctx context.Context, ck certKey) (*tls.Certificate, error) { m.stateMu.Lock() - if s, ok := m.state[name]; ok { + if s, ok := m.state[ck]; ok { m.stateMu.Unlock() s.RLock() defer s.RUnlock() return s.tlscert() } defer m.stateMu.Unlock() - cert, err := m.cacheGet(ctx, name) + cert, err := m.cacheGet(ctx, ck) if err != nil { return nil, err } @@ -243,25 +434,25 @@ func (m *Manager) cert(ctx context.Context, name string) (*tls.Certificate, erro return nil, errors.New("acme/autocert: private key cannot sign") } if m.state == nil { - m.state = make(map[string]*certState) + m.state = make(map[certKey]*certState) } s := &certState{ key: signer, cert: cert.Certificate, leaf: cert.Leaf, } - m.state[name] = s - go m.renew(name, s.key, s.leaf.NotAfter) + m.state[ck] = s + go m.renew(ck, s.key, s.leaf.NotAfter) return cert, nil } // cacheGet always returns a valid certificate, or an error otherwise. -// If a cached certficate exists but is not valid, ErrCacheMiss is returned. -func (m *Manager) cacheGet(ctx context.Context, domain string) (*tls.Certificate, error) { +// If a cached certificate exists but is not valid, ErrCacheMiss is returned. 
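Extending the sketch above: opting in to the "http-01" challenge only requires serving the handler returned by HTTPHandler on port 80. With a nil fallback, non-challenge GET and HEAD requests are redirected to HTTPS by handleHTTPRedirect, and other methods receive 400 Bad Request. Illustrative only, not part of this patch.

// Serve ACME "http-01" challenge responses and redirect everything else to HTTPS.
// Pass a non-nil handler instead of nil to serve real content over plain HTTP.
go func() {
	log.Fatal(http.ListenAndServe(":http", m.HTTPHandler(nil)))
}()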
+func (m *Manager) cacheGet(ctx context.Context, ck certKey) (*tls.Certificate, error) { if m.Cache == nil { return nil, ErrCacheMiss } - data, err := m.Cache.Get(ctx, domain) + data, err := m.Cache.Get(ctx, ck.String()) if err != nil { return nil, err } @@ -292,7 +483,7 @@ func (m *Manager) cacheGet(ctx context.Context, domain string) (*tls.Certificate } // verify and create TLS cert - leaf, err := validCert(domain, pubDER, privKey) + leaf, err := validCert(ck, pubDER, privKey, m.now()) if err != nil { return nil, ErrCacheMiss } @@ -304,7 +495,7 @@ func (m *Manager) cacheGet(ctx context.Context, domain string) (*tls.Certificate return tlscert, nil } -func (m *Manager) cachePut(ctx context.Context, domain string, tlscert *tls.Certificate) error { +func (m *Manager) cachePut(ctx context.Context, ck certKey, tlscert *tls.Certificate) error { if m.Cache == nil { return nil } @@ -336,7 +527,7 @@ func (m *Manager) cachePut(ctx context.Context, domain string, tlscert *tls.Cert } } - return m.Cache.Put(ctx, domain, buf.Bytes()) + return m.Cache.Put(ctx, ck.String(), buf.Bytes()) } func encodeECDSAKey(w io.Writer, key *ecdsa.PrivateKey) error { @@ -353,9 +544,9 @@ func encodeECDSAKey(w io.Writer, key *ecdsa.PrivateKey) error { // // If the domain is already being verified, it waits for the existing verification to complete. // Either way, createCert blocks for the duration of the whole process. -func (m *Manager) createCert(ctx context.Context, domain string) (*tls.Certificate, error) { +func (m *Manager) createCert(ctx context.Context, ck certKey) (*tls.Certificate, error) { // TODO: maybe rewrite this whole piece using sync.Once - state, err := m.certState(domain) + state, err := m.certState(ck) if err != nil { return nil, err } @@ -369,48 +560,48 @@ func (m *Manager) createCert(ctx context.Context, domain string) (*tls.Certifica // We are the first; state is locked. // Unblock the readers when domain ownership is verified - // and the we got the cert or the process failed. + // and we got the cert or the process failed. defer state.Unlock() state.locked = false - der, leaf, err := m.authorizedCert(ctx, state.key, domain) + der, leaf, err := m.authorizedCert(ctx, state.key, ck) if err != nil { // Remove the failed state after some time, // making the manager call createCert again on the following TLS hello. time.AfterFunc(createCertRetryAfter, func() { - defer testDidRemoveState(domain) + defer testDidRemoveState(ck) m.stateMu.Lock() defer m.stateMu.Unlock() // Verify the state hasn't changed and it's still invalid // before deleting. - s, ok := m.state[domain] + s, ok := m.state[ck] if !ok { return } - if _, err := validCert(domain, s.cert, s.key); err == nil { + if _, err := validCert(ck, s.cert, s.key, m.now()); err == nil { return } - delete(m.state, domain) + delete(m.state, ck) }) return nil, err } state.cert = der state.leaf = leaf - go m.renew(domain, state.key, state.leaf.NotAfter) + go m.renew(ck, state.key, state.leaf.NotAfter) return state.tlscert() } // certState returns a new or existing certState. // If a new certState is returned, state.exist is false and the state is locked. // The returned error is non-nil only in the case where a new state could not be created. 
-func (m *Manager) certState(domain string) (*certState, error) { +func (m *Manager) certState(ck certKey) (*certState, error) { m.stateMu.Lock() defer m.stateMu.Unlock() if m.state == nil { - m.state = make(map[string]*certState) + m.state = make(map[certKey]*certState) } // existing state - if state, ok := m.state[domain]; ok { + if state, ok := m.state[ck]; ok { return state, nil } @@ -419,7 +610,7 @@ func (m *Manager) certState(domain string) (*certState, error) { err error key crypto.Signer ) - if m.ForceRSA { + if ck.isRSA { key, err = rsa.GenerateKey(rand.Reader, 2048) } else { key, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) @@ -433,21 +624,22 @@ func (m *Manager) certState(domain string) (*certState, error) { locked: true, } state.Lock() // will be unlocked by m.certState caller - m.state[domain] = state + m.state[ck] = state return state, nil } -// authorizedCert starts domain ownership verification process and requests a new cert upon success. +// authorizedCert starts the domain ownership verification process and requests a new cert upon success. // The key argument is the certificate private key. -func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, domain string) (der [][]byte, leaf *x509.Certificate, err error) { - if err := m.verify(ctx, domain); err != nil { - return nil, nil, err - } +func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, ck certKey) (der [][]byte, leaf *x509.Certificate, err error) { client, err := m.acmeClient(ctx) if err != nil { return nil, nil, err } - csr, err := certRequest(key, domain) + + if err := m.verify(ctx, client, ck.domain); err != nil { + return nil, nil, err + } + csr, err := certRequest(key, ck.domain, m.ExtraExtensions) if err != nil { return nil, nil, err } @@ -455,105 +647,227 @@ func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, domain if err != nil { return nil, nil, err } - leaf, err = validCert(domain, der, key) + leaf, err = validCert(ck, der, key, m.now()) if err != nil { return nil, nil, err } return der, leaf, nil } -// verify starts a new identifier (domain) authorization flow. -// It prepares a challenge response and then blocks until the authorization -// is marked as "completed" by the CA (either succeeded or failed). -// -// verify returns nil iff the verification was successful. -func (m *Manager) verify(ctx context.Context, domain string) error { +// revokePendingAuthz revokes all authorizations idenfied by the elements of uri slice. +// It ignores revocation errors. +func (m *Manager) revokePendingAuthz(ctx context.Context, uri []string) { client, err := m.acmeClient(ctx) if err != nil { - return err + return } - - // start domain authorization and get the challenge - authz, err := client.Authorize(ctx, domain) - if err != nil { - return err + for _, u := range uri { + client.RevokeAuthorization(ctx, u) } - // maybe don't need to at all - if authz.Status == acme.StatusValid { - return nil +} + +// verify runs the identifier (domain) authorization flow +// using each applicable ACME challenge type. +func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string) error { + // The list of challenge types we'll try to fulfill + // in this specific order. 
+ challengeTypes := []string{"tls-alpn-01", "tls-sni-02", "tls-sni-01"} + m.tokensMu.RLock() + if m.tryHTTP01 { + challengeTypes = append(challengeTypes, "http-01") } + m.tokensMu.RUnlock() - // pick a challenge: prefer tls-sni-02 over tls-sni-01 - // TODO: consider authz.Combinations - var chal *acme.Challenge - for _, c := range authz.Challenges { - if c.Type == "tls-sni-02" { - chal = c - break + // Keep track of pending authzs and revoke the ones that did not validate. + pendingAuthzs := make(map[string]bool) + defer func() { + var uri []string + for k, pending := range pendingAuthzs { + if pending { + uri = append(uri, k) + } } - if c.Type == "tls-sni-01" { - chal = c + if len(uri) > 0 { + // Use "detached" background context. + // The revocations need not happen in the current verification flow. + go m.revokePendingAuthz(context.Background(), uri) } + }() + + // errs accumulates challenge failure errors, printed if all fail + errs := make(map[*acme.Challenge]error) + var nextTyp int // challengeType index of the next challenge type to try + for { + // Start domain authorization and get the challenge. + authz, err := client.Authorize(ctx, domain) + if err != nil { + return err + } + // No point in accepting challenges if the authorization status + // is in a final state. + switch authz.Status { + case acme.StatusValid: + return nil // already authorized + case acme.StatusInvalid: + return fmt.Errorf("acme/autocert: invalid authorization %q", authz.URI) + } + + pendingAuthzs[authz.URI] = true + + // Pick the next preferred challenge. + var chal *acme.Challenge + for chal == nil && nextTyp < len(challengeTypes) { + chal = pickChallenge(challengeTypes[nextTyp], authz.Challenges) + nextTyp++ + } + if chal == nil { + errorMsg := fmt.Sprintf("acme/autocert: unable to authorize %q", domain) + for chal, err := range errs { + errorMsg += fmt.Sprintf("; challenge %q failed with error: %v", chal.Type, err) + } + return errors.New(errorMsg) + } + cleanup, err := m.fulfill(ctx, client, chal, domain) + if err != nil { + errs[chal] = err + continue + } + defer cleanup() + if _, err := client.Accept(ctx, chal); err != nil { + errs[chal] = err + continue + } + + // A challenge is fulfilled and accepted: wait for the CA to validate. + if _, err := client.WaitAuthorization(ctx, authz.URI); err != nil { + errs[chal] = err + continue + } + delete(pendingAuthzs, authz.URI) + return nil } - if chal == nil { - return errors.New("acme/autocert: no supported challenge type found") - } +} - // create a token cert for the challenge response - var ( - cert tls.Certificate - name string - ) +// fulfill provisions a response to the challenge chal. +// The cleanup is non-nil only if provisioning succeeded. 
+func (m *Manager) fulfill(ctx context.Context, client *acme.Client, chal *acme.Challenge, domain string) (cleanup func(), err error) { switch chal.Type { + case "tls-alpn-01": + cert, err := client.TLSALPN01ChallengeCert(chal.Token, domain) + if err != nil { + return nil, err + } + m.putCertToken(ctx, domain, &cert) + return func() { go m.deleteCertToken(domain) }, nil case "tls-sni-01": - cert, name, err = client.TLSSNI01ChallengeCert(chal.Token) + cert, name, err := client.TLSSNI01ChallengeCert(chal.Token) + if err != nil { + return nil, err + } + m.putCertToken(ctx, name, &cert) + return func() { go m.deleteCertToken(name) }, nil case "tls-sni-02": - cert, name, err = client.TLSSNI02ChallengeCert(chal.Token) - default: - err = fmt.Errorf("acme/autocert: unknown challenge type %q", chal.Type) + cert, name, err := client.TLSSNI02ChallengeCert(chal.Token) + if err != nil { + return nil, err + } + m.putCertToken(ctx, name, &cert) + return func() { go m.deleteCertToken(name) }, nil + case "http-01": + resp, err := client.HTTP01ChallengeResponse(chal.Token) + if err != nil { + return nil, err + } + p := client.HTTP01ChallengePath(chal.Token) + m.putHTTPToken(ctx, p, resp) + return func() { go m.deleteHTTPToken(p) }, nil } - if err != nil { - return err + return nil, fmt.Errorf("acme/autocert: unknown challenge type %q", chal.Type) +} + +func pickChallenge(typ string, chal []*acme.Challenge) *acme.Challenge { + for _, c := range chal { + if c.Type == typ { + return c + } } - m.putTokenCert(ctx, name, &cert) - defer func() { - // verification has ended at this point - // don't need token cert anymore - go m.deleteTokenCert(name) - }() + return nil +} - // ready to fulfill the challenge - if _, err := client.Accept(ctx, chal); err != nil { - return err +// putCertToken stores the token certificate with the specified name +// in both m.certTokens map and m.Cache. +func (m *Manager) putCertToken(ctx context.Context, name string, cert *tls.Certificate) { + m.tokensMu.Lock() + defer m.tokensMu.Unlock() + if m.certTokens == nil { + m.certTokens = make(map[string]*tls.Certificate) } - // wait for the CA to validate - _, err = client.WaitAuthorization(ctx, authz.URI) - return err + m.certTokens[name] = cert + m.cachePut(ctx, certKey{domain: name, isToken: true}, cert) } -// putTokenCert stores the cert under the named key in both m.tokenCert map -// and m.Cache. -func (m *Manager) putTokenCert(ctx context.Context, name string, cert *tls.Certificate) { - m.tokenCertMu.Lock() - defer m.tokenCertMu.Unlock() - if m.tokenCert == nil { - m.tokenCert = make(map[string]*tls.Certificate) +// deleteCertToken removes the token certificate with the specified name +// from both m.certTokens map and m.Cache. +func (m *Manager) deleteCertToken(name string) { + m.tokensMu.Lock() + defer m.tokensMu.Unlock() + delete(m.certTokens, name) + if m.Cache != nil { + ck := certKey{domain: name, isToken: true} + m.Cache.Delete(context.Background(), ck.String()) } - m.tokenCert[name] = cert - m.cachePut(ctx, name, cert) } -// deleteTokenCert removes the token certificate for the specified domain name -// from both m.tokenCert map and m.Cache. -func (m *Manager) deleteTokenCert(name string) { - m.tokenCertMu.Lock() - defer m.tokenCertMu.Unlock() - delete(m.tokenCert, name) +// httpToken retrieves an existing http-01 token value from an in-memory map +// or the optional cache. 
+func (m *Manager) httpToken(ctx context.Context, tokenPath string) ([]byte, error) { + m.tokensMu.RLock() + defer m.tokensMu.RUnlock() + if v, ok := m.httpTokens[tokenPath]; ok { + return v, nil + } + if m.Cache == nil { + return nil, fmt.Errorf("acme/autocert: no token at %q", tokenPath) + } + return m.Cache.Get(ctx, httpTokenCacheKey(tokenPath)) +} + +// putHTTPToken stores an http-01 token value using tokenPath as key +// in both in-memory map and the optional Cache. +// +// It ignores any error returned from Cache.Put. +func (m *Manager) putHTTPToken(ctx context.Context, tokenPath, val string) { + m.tokensMu.Lock() + defer m.tokensMu.Unlock() + if m.httpTokens == nil { + m.httpTokens = make(map[string][]byte) + } + b := []byte(val) + m.httpTokens[tokenPath] = b if m.Cache != nil { - m.Cache.Delete(context.Background(), name) + m.Cache.Put(ctx, httpTokenCacheKey(tokenPath), b) } } +// deleteHTTPToken removes an http-01 token value from both in-memory map +// and the optional Cache, ignoring any error returned from the latter. +// +// If m.Cache is non-nil, it blocks until Cache.Delete returns without a timeout. +func (m *Manager) deleteHTTPToken(tokenPath string) { + m.tokensMu.Lock() + defer m.tokensMu.Unlock() + delete(m.httpTokens, tokenPath) + if m.Cache != nil { + m.Cache.Delete(context.Background(), httpTokenCacheKey(tokenPath)) + } +} + +// httpTokenCacheKey returns a key at which an http-01 token value may be stored +// in the Manager's optional Cache. +func httpTokenCacheKey(tokenPath string) string { + return path.Base(tokenPath) + "+http-01" +} + // renew starts a cert renewal timer loop, one per domain. // // The loop is scheduled in two cases: @@ -562,18 +876,18 @@ func (m *Manager) deleteTokenCert(name string) { // // The key argument is a certificate private key. // The exp argument is the cert expiration time (NotAfter). -func (m *Manager) renew(domain string, key crypto.Signer, exp time.Time) { +func (m *Manager) renew(ck certKey, key crypto.Signer, exp time.Time) { m.renewalMu.Lock() defer m.renewalMu.Unlock() - if m.renewal[domain] != nil { + if m.renewal[ck] != nil { // another goroutine is already on it return } if m.renewal == nil { - m.renewal = make(map[string]*domainRenewal) + m.renewal = make(map[certKey]*domainRenewal) } - dr := &domainRenewal{m: m, domain: domain, key: key} - m.renewal[domain] = dr + dr := &domainRenewal{m: m, ck: ck, key: key} + m.renewal[ck] = dr dr.start(exp) } @@ -589,7 +903,10 @@ func (m *Manager) stopRenew() { } func (m *Manager) accountKey(ctx context.Context) (crypto.Signer, error) { - const keyName = "acme_account.key" + const keyName = "acme_account+key" + + // Previous versions of autocert stored the value under a different key. + const legacyKeyName = "acme_account.key" genKey := func() (*ecdsa.PrivateKey, error) { return ecdsa.GenerateKey(elliptic.P256(), rand.Reader) @@ -600,6 +917,9 @@ func (m *Manager) accountKey(ctx context.Context) (crypto.Signer, error) { } data, err := m.Cache.Get(ctx, keyName) + if err == ErrCacheMiss { + data, err = m.Cache.Get(ctx, legacyKeyName) + } if err == ErrCacheMiss { key, err := genKey() if err != nil { @@ -671,6 +991,13 @@ func (m *Manager) renewBefore() time.Duration { return 720 * time.Hour // 30 days } +func (m *Manager) now() time.Time { + if m.nowFunc != nil { + return m.nowFunc() + } + return time.Now() +} + // certState is ready when its mutex is unlocked for reading. 
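Taken together, the helpers above give the cache a small, filename-safe naming scheme (the "+" separator is also why GetCertificate now rejects server names containing "+"). The following restates what the code produces; the concrete domain and token names are illustrative.

// Within package autocert (these identifiers are unexported):
//
//	certKey{domain: "example.org"}.String()                  // "example.org"
//	certKey{domain: "example.org", isRSA: true}.String()     // "example.org+rsa"
//	certKey{domain: "example.org", isToken: true}.String()   // "example.org+token"
//	httpTokenCacheKey("/.well-known/acme-challenge/tok123")  // "tok123+http-01"
//
// The ACME account key itself is stored under "acme_account+key", with a
// one-time fallback read of the legacy "acme_account.key" entry.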
type certState struct { sync.RWMutex @@ -696,12 +1023,12 @@ func (s *certState) tlscert() (*tls.Certificate, error) { }, nil } -// certRequest creates a certificate request for the given common name cn -// and optional SANs. -func certRequest(key crypto.Signer, cn string, san ...string) ([]byte, error) { +// certRequest generates a CSR for the given common name cn and optional SANs. +func certRequest(key crypto.Signer, cn string, ext []pkix.Extension, san ...string) ([]byte, error) { req := &x509.CertificateRequest{ - Subject: pkix.Name{CommonName: cn}, - DNSNames: san, + Subject: pkix.Name{CommonName: cn}, + DNSNames: san, + ExtraExtensions: ext, } return x509.CreateCertificateRequest(rand.Reader, req, key) } @@ -732,12 +1059,12 @@ func parsePrivateKey(der []byte) (crypto.Signer, error) { return nil, errors.New("acme/autocert: failed to parse private key") } -// validCert parses a cert chain provided as der argument and verifies the leaf, der[0], -// corresponds to the private key, as well as the domain match and expiration dates. -// It doesn't do any revocation checking. +// validCert parses a cert chain provided as der argument and verifies the leaf and der[0] +// correspond to the private key, the domain and key type match, and expiration dates +// are valid. It doesn't do any revocation checking. // // The returned value is the verified leaf cert. -func validCert(domain string, der [][]byte, key crypto.Signer) (leaf *x509.Certificate, err error) { +func validCert(ck certKey, der [][]byte, key crypto.Signer, now time.Time) (leaf *x509.Certificate, err error) { // parse public part(s) var n int for _, b := range der { @@ -749,22 +1076,21 @@ func validCert(domain string, der [][]byte, key crypto.Signer) (leaf *x509.Certi n += copy(pub[n:], b) } x509Cert, err := x509.ParseCertificates(pub) - if len(x509Cert) == 0 { + if err != nil || len(x509Cert) == 0 { return nil, errors.New("acme/autocert: no public key found") } // verify the leaf is not expired and matches the domain name leaf = x509Cert[0] - now := timeNow() if now.Before(leaf.NotBefore) { return nil, errors.New("acme/autocert: certificate is not valid yet") } if now.After(leaf.NotAfter) { return nil, errors.New("acme/autocert: expired certificate") } - if err := leaf.VerifyHostname(domain); err != nil { + if err := leaf.VerifyHostname(ck.domain); err != nil { return nil, err } - // ensure the leaf corresponds to the private key + // ensure the leaf corresponds to the private key and matches the certKey type switch pub := leaf.PublicKey.(type) { case *rsa.PublicKey: prv, ok := key.(*rsa.PrivateKey) @@ -774,6 +1100,9 @@ func validCert(domain string, der [][]byte, key crypto.Signer) (leaf *x509.Certi if pub.N.Cmp(prv.N) != 0 { return nil, errors.New("acme/autocert: private key does not match public key") } + if !ck.isRSA && !ck.isToken { + return nil, errors.New("acme/autocert: key type does not match expected value") + } case *ecdsa.PublicKey: prv, ok := key.(*ecdsa.PrivateKey) if !ok { @@ -782,22 +1111,15 @@ func validCert(domain string, der [][]byte, key crypto.Signer) (leaf *x509.Certi if pub.X.Cmp(prv.X) != 0 || pub.Y.Cmp(prv.Y) != 0 { return nil, errors.New("acme/autocert: private key does not match public key") } + if ck.isRSA && !ck.isToken { + return nil, errors.New("acme/autocert: key type does not match expected value") + } default: return nil, errors.New("acme/autocert: unknown public key algorithm") } return leaf, nil } -func retryAfter(v string) time.Duration { - if i, err := strconv.Atoi(v); err == nil { - return 
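The extra parameter threaded into certRequest above is what backs the new Manager.ExtraExtensions field, whose doc comment cites the RFC 7633 TLS Feature ("must-staple") extension as the motivating use. A hedged sketch of such a configuration follows; the OID and DER bytes are my additions rather than part of this patch, whether the CA honors the extension is up to the CA, and the imports "crypto/x509/pkix", "encoding/asn1" and "golang.org/x/crypto/acme/autocert" are assumed.

// Fragment: request the status_request ("must-staple") TLS feature in issued certs.
mustStaple := pkix.Extension{
	// id-pe-tlsfeature (RFC 7633); value is the DER encoding of SEQUENCE { INTEGER 5 }.
	Id:    asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 24},
	Value: []byte{0x30, 0x03, 0x02, 0x01, 0x05},
}
m := &autocert.Manager{
	Prompt:          autocert.AcceptTOS,
	Cache:           autocert.DirCache("/var/cache/autocert"), // placeholder path
	ExtraExtensions: []pkix.Extension{mustStaple},
}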
time.Duration(i) * time.Second - } - if t, err := http.ParseTime(v); err == nil { - return t.Sub(timeNow()) - } - return time.Second -} - type lockedMathRand struct { sync.Mutex rnd *mathrand.Rand @@ -812,8 +1134,6 @@ func (r *lockedMathRand) int63n(max int64) int64 { // For easier testing. var ( - timeNow = time.Now - // Called when a state is removed. - testDidRemoveState = func(domain string) {} + testDidRemoveState = func(certKey) {} ) diff --git a/vendor/golang.org/x/crypto/acme/autocert/autocert_test.go b/vendor/golang.org/x/crypto/acme/autocert/autocert_test.go index 43a6201..95e12e1 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/autocert_test.go +++ b/vendor/golang.org/x/crypto/acme/autocert/autocert_test.go @@ -5,6 +5,7 @@ package autocert import ( + "bytes" "context" "crypto" "crypto/ecdsa" @@ -14,20 +15,30 @@ import ( "crypto/tls" "crypto/x509" "crypto/x509/pkix" + "encoding/asn1" "encoding/base64" "encoding/json" "fmt" "html/template" "io" + "io/ioutil" "math/big" "net/http" "net/http/httptest" "reflect" + "strings" "sync" "testing" "time" "golang.org/x/crypto/acme" + "golang.org/x/crypto/acme/autocert/internal/acmetest" +) + +var ( + exampleDomain = "example.org" + exampleCertKey = certKey{domain: exampleDomain} + exampleCertKeyRSA = certKey{domain: exampleDomain, isRSA: true} ) var discoTmpl = template.Must(template.New("disco").Parse(`{ @@ -48,11 +59,22 @@ var authzTmpl = template.Must(template.New("authz").Parse(`{ "uri": "{{.}}/challenge/2", "type": "tls-sni-02", "token": "token-02" + }, + { + "uri": "{{.}}/challenge/dns-01", + "type": "dns-01", + "token": "token-dns-01" + }, + { + "uri": "{{.}}/challenge/http-01", + "type": "http-01", + "token": "token-http-01" } ] }`)) type memCache struct { + t *testing.T mu sync.Mutex keyData map[string][]byte } @@ -68,7 +90,26 @@ func (m *memCache) Get(ctx context.Context, key string) ([]byte, error) { return v, nil } +// filenameSafe returns whether all characters in s are printable ASCII +// and safe to use in a filename on most filesystems. +func filenameSafe(s string) bool { + for _, c := range s { + if c < 0x20 || c > 0x7E { + return false + } + switch c { + case '\\', '/', ':', '*', '?', '"', '<', '>', '|': + return false + } + } + return true +} + func (m *memCache) Put(ctx context.Context, key string, data []byte) error { + if !filenameSafe(key) { + m.t.Errorf("invalid characters in cache key %q", key) + } + m.mu.Lock() defer m.mu.Unlock() @@ -84,12 +125,29 @@ func (m *memCache) Delete(ctx context.Context, key string) error { return nil } -func newMemCache() *memCache { +func newMemCache(t *testing.T) *memCache { return &memCache{ + t: t, keyData: make(map[string][]byte), } } +func (m *memCache) numCerts() int { + m.mu.Lock() + defer m.mu.Unlock() + + res := 0 + for key := range m.keyData { + if strings.HasSuffix(key, "+token") || + strings.HasSuffix(key, "+key") || + strings.HasSuffix(key, "+http-01") { + continue + } + res++ + } + return res +} + func dummyCert(pub interface{}, san ...string) ([]byte, error) { return dateDummyCert(pub, time.Now(), time.Now().Add(90*24*time.Hour), san...) 
} @@ -126,53 +184,58 @@ func decodePayload(v interface{}, r io.Reader) error { return json.Unmarshal(payload, v) } +func clientHelloInfo(sni string, ecdsaSupport bool) *tls.ClientHelloInfo { + hello := &tls.ClientHelloInfo{ + ServerName: sni, + CipherSuites: []uint16{tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305}, + } + if ecdsaSupport { + hello.CipherSuites = append(hello.CipherSuites, tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305) + } + return hello +} + func TestGetCertificate(t *testing.T) { man := &Manager{Prompt: AcceptTOS} defer man.stopRenew() - hello := &tls.ClientHelloInfo{ServerName: "example.org"} + hello := clientHelloInfo("example.org", true) testGetCertificate(t, man, "example.org", hello) } func TestGetCertificate_trailingDot(t *testing.T) { man := &Manager{Prompt: AcceptTOS} defer man.stopRenew() - hello := &tls.ClientHelloInfo{ServerName: "example.org."} + hello := clientHelloInfo("example.org.", true) testGetCertificate(t, man, "example.org", hello) } func TestGetCertificate_ForceRSA(t *testing.T) { man := &Manager{ Prompt: AcceptTOS, - Cache: newMemCache(), + Cache: newMemCache(t), ForceRSA: true, } defer man.stopRenew() - hello := &tls.ClientHelloInfo{ServerName: "example.org"} - testGetCertificate(t, man, "example.org", hello) + hello := clientHelloInfo(exampleDomain, true) + testGetCertificate(t, man, exampleDomain, hello) - cert, err := man.cacheGet(context.Background(), "example.org") + // ForceRSA was deprecated and is now ignored. + cert, err := man.cacheGet(context.Background(), exampleCertKey) if err != nil { t.Fatalf("man.cacheGet: %v", err) } - if _, ok := cert.PrivateKey.(*rsa.PrivateKey); !ok { - t.Errorf("cert.PrivateKey is %T; want *rsa.PrivateKey", cert.PrivateKey) + if _, ok := cert.PrivateKey.(*ecdsa.PrivateKey); !ok { + t.Errorf("cert.PrivateKey is %T; want *ecdsa.PrivateKey", cert.PrivateKey) } } func TestGetCertificate_nilPrompt(t *testing.T) { man := &Manager{} defer man.stopRenew() - url, finish := startACMEServerStub(t, man, "example.org") + url, finish := startACMEServerStub(t, getCertificateFromManager(man, true), "example.org") defer finish() - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - t.Fatal(err) - } - man.Client = &acme.Client{ - Key: key, - DirectoryURL: url, - } - hello := &tls.ClientHelloInfo{ServerName: "example.org"} + man.Client = &acme.Client{DirectoryURL: url} + hello := clientHelloInfo("example.org", true) if _, err := man.GetCertificate(hello); err == nil { t.Error("got certificate for example.org; wanted error") } @@ -186,7 +249,7 @@ func TestGetCertificate_expiredCache(t *testing.T) { } tmpl := &x509.Certificate{ SerialNumber: big.NewInt(1), - Subject: pkix.Name{CommonName: "example.org"}, + Subject: pkix.Name{CommonName: exampleDomain}, NotAfter: time.Now(), } pub, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &pk.PublicKey, pk) @@ -198,16 +261,16 @@ func TestGetCertificate_expiredCache(t *testing.T) { PrivateKey: pk, } - man := &Manager{Prompt: AcceptTOS, Cache: newMemCache()} + man := &Manager{Prompt: AcceptTOS, Cache: newMemCache(t)} defer man.stopRenew() - if err := man.cachePut(context.Background(), "example.org", tlscert); err != nil { + if err := man.cachePut(context.Background(), exampleCertKey, tlscert); err != nil { t.Fatalf("man.cachePut: %v", err) } // The expired cached cert should trigger a new cert issuance // and return without an error. 
- hello := &tls.ClientHelloInfo{ServerName: "example.org"} - testGetCertificate(t, man, "example.org", hello) + hello := clientHelloInfo(exampleDomain, true) + testGetCertificate(t, man, exampleDomain, hello) } func TestGetCertificate_failedAttempt(t *testing.T) { @@ -216,7 +279,6 @@ func TestGetCertificate_failedAttempt(t *testing.T) { })) defer ts.Close() - const example = "example.org" d := createCertRetryAfter f := testDidRemoveState defer func() { @@ -225,51 +287,168 @@ func TestGetCertificate_failedAttempt(t *testing.T) { }() createCertRetryAfter = 0 done := make(chan struct{}) - testDidRemoveState = func(domain string) { - if domain != example { - t.Errorf("testDidRemoveState: domain = %q; want %q", domain, example) + testDidRemoveState = func(ck certKey) { + if ck != exampleCertKey { + t.Errorf("testDidRemoveState: domain = %v; want %v", ck, exampleCertKey) } close(done) } - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - t.Fatal(err) - } man := &Manager{ Prompt: AcceptTOS, Client: &acme.Client{ - Key: key, DirectoryURL: ts.URL, }, } defer man.stopRenew() - hello := &tls.ClientHelloInfo{ServerName: example} + hello := clientHelloInfo(exampleDomain, true) if _, err := man.GetCertificate(hello); err == nil { t.Error("GetCertificate: err is nil") } select { case <-time.After(5 * time.Second): - t.Errorf("took too long to remove the %q state", example) + t.Errorf("took too long to remove the %q state", exampleCertKey) case <-done: man.stateMu.Lock() defer man.stateMu.Unlock() - if v, exist := man.state[example]; exist { - t.Errorf("state exists for %q: %+v", example, v) + if v, exist := man.state[exampleCertKey]; exist { + t.Errorf("state exists for %v: %+v", exampleCertKey, v) } } } +// testGetCertificate_tokenCache tests the fallback of token certificate fetches +// to cache when Manager.certTokens misses. ecdsaSupport refers to the CA when +// verifying the certificate token. +func testGetCertificate_tokenCache(t *testing.T, ecdsaSupport bool) { + man1 := &Manager{ + Cache: newMemCache(t), + Prompt: AcceptTOS, + } + defer man1.stopRenew() + man2 := &Manager{ + Cache: man1.Cache, + Prompt: AcceptTOS, + } + defer man2.stopRenew() + + // Send the verification request to a different Manager from the one that + // initiated the authorization, when they share caches. 
+ url, finish := startACMEServerStub(t, getCertificateFromManager(man2, ecdsaSupport), "example.org") + defer finish() + man1.Client = &acme.Client{DirectoryURL: url} + hello := clientHelloInfo("example.org", true) + if _, err := man1.GetCertificate(hello); err != nil { + t.Error(err) + } + if _, err := man2.GetCertificate(hello); err != nil { + t.Error(err) + } +} + +func TestGetCertificate_tokenCache(t *testing.T) { + t.Run("ecdsaSupport=true", func(t *testing.T) { + testGetCertificate_tokenCache(t, true) + }) + t.Run("ecdsaSupport=false", func(t *testing.T) { + testGetCertificate_tokenCache(t, false) + }) +} + +func TestGetCertificate_ecdsaVsRSA(t *testing.T) { + cache := newMemCache(t) + man := &Manager{Prompt: AcceptTOS, Cache: cache} + defer man.stopRenew() + url, finish := startACMEServerStub(t, getCertificateFromManager(man, true), "example.org") + defer finish() + man.Client = &acme.Client{DirectoryURL: url} + + cert, err := man.GetCertificate(clientHelloInfo("example.org", true)) + if err != nil { + t.Error(err) + } + if _, ok := cert.Leaf.PublicKey.(*ecdsa.PublicKey); !ok { + t.Error("an ECDSA client was served a non-ECDSA certificate") + } + + cert, err = man.GetCertificate(clientHelloInfo("example.org", false)) + if err != nil { + t.Error(err) + } + if _, ok := cert.Leaf.PublicKey.(*rsa.PublicKey); !ok { + t.Error("a RSA client was served a non-RSA certificate") + } + + if _, err := man.GetCertificate(clientHelloInfo("example.org", true)); err != nil { + t.Error(err) + } + if _, err := man.GetCertificate(clientHelloInfo("example.org", false)); err != nil { + t.Error(err) + } + if numCerts := cache.numCerts(); numCerts != 2 { + t.Errorf("found %d certificates in cache; want %d", numCerts, 2) + } +} + +func TestGetCertificate_wrongCacheKeyType(t *testing.T) { + cache := newMemCache(t) + man := &Manager{Prompt: AcceptTOS, Cache: cache} + defer man.stopRenew() + url, finish := startACMEServerStub(t, getCertificateFromManager(man, true), exampleDomain) + defer finish() + man.Client = &acme.Client{DirectoryURL: url} + + // Make an RSA cert and cache it without suffix. + pk, err := rsa.GenerateKey(rand.Reader, 512) + if err != nil { + t.Fatal(err) + } + tmpl := &x509.Certificate{ + SerialNumber: big.NewInt(1), + Subject: pkix.Name{CommonName: exampleDomain}, + NotAfter: time.Now().Add(90 * 24 * time.Hour), + } + pub, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &pk.PublicKey, pk) + if err != nil { + t.Fatal(err) + } + rsaCert := &tls.Certificate{ + Certificate: [][]byte{pub}, + PrivateKey: pk, + } + if err := man.cachePut(context.Background(), exampleCertKey, rsaCert); err != nil { + t.Fatalf("man.cachePut: %v", err) + } + + // The RSA cached cert should be silently ignored and replaced. + cert, err := man.GetCertificate(clientHelloInfo(exampleDomain, true)) + if err != nil { + t.Error(err) + } + if _, ok := cert.Leaf.PublicKey.(*ecdsa.PublicKey); !ok { + t.Error("an ECDSA client was served a non-ECDSA certificate") + } + if numCerts := cache.numCerts(); numCerts != 1 { + t.Errorf("found %d certificates in cache; want %d", numCerts, 1) + } +} + +func getCertificateFromManager(man *Manager, ecdsaSupport bool) func(string) error { + return func(sni string) error { + _, err := man.GetCertificate(clientHelloInfo(sni, ecdsaSupport)) + return err + } +} + // startACMEServerStub runs an ACME server // The domain argument is the expected domain name of a certificate request. 
-func startACMEServerStub(t *testing.T, man *Manager, domain string) (url string, finish func()) { +// TODO: Drop this in favour of x/crypto/acme/autocert/internal/acmetest. +func startACMEServerStub(t *testing.T, getCertificate func(string) error, domain string) (url string, finish func()) { // echo token-02 | shasum -a 256 // then divide result in 2 parts separated by dot tokenCertName := "4e8eb87631187e9ff2153b56b13a4dec.13a35d002e485d60ff37354b32f665d9.token.acme.invalid" verifyTokenCert := func() { - hello := &tls.ClientHelloInfo{ServerName: tokenCertName} - _, err := man.GetCertificate(hello) - if err != nil { + if err := getCertificate(tokenCertName); err != nil { t.Errorf("verifyTokenCert: GetCertificate(%q): %v", tokenCertName, err) return } @@ -351,8 +530,7 @@ func startACMEServerStub(t *testing.T, man *Manager, domain string) (url string, tick := time.NewTicker(100 * time.Millisecond) defer tick.Stop() for { - hello := &tls.ClientHelloInfo{ServerName: tokenCertName} - if _, err := man.GetCertificate(hello); err != nil { + if err := getCertificate(tokenCertName); err != nil { return } select { @@ -376,21 +554,13 @@ func startACMEServerStub(t *testing.T, man *Manager, domain string) (url string, // tests man.GetCertificate flow using the provided hello argument. // The domain argument is the expected domain name of a certificate request. func testGetCertificate(t *testing.T, man *Manager, domain string, hello *tls.ClientHelloInfo) { - url, finish := startACMEServerStub(t, man, domain) + url, finish := startACMEServerStub(t, getCertificateFromManager(man, true), domain) defer finish() - - // use EC key to run faster on 386 - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - t.Fatal(err) - } - man.Client = &acme.Client{ - Key: key, - DirectoryURL: url, - } + man.Client = &acme.Client{DirectoryURL: url} // simulate tls.Config.GetCertificate var tlscert *tls.Certificate + var err error done := make(chan struct{}) go func() { tlscert, err = man.GetCertificate(hello) @@ -419,8 +589,253 @@ func testGetCertificate(t *testing.T, man *Manager, domain string, hello *tls.Cl } +func TestVerifyHTTP01(t *testing.T) { + var ( + http01 http.Handler + + authzCount int // num. of created authorizations + didAcceptHTTP01 bool + ) + + verifyHTTPToken := func() { + r := httptest.NewRequest("GET", "/.well-known/acme-challenge/token-http-01", nil) + w := httptest.NewRecorder() + http01.ServeHTTP(w, r) + if w.Code != http.StatusOK { + t.Errorf("http token: w.Code = %d; want %d", w.Code, http.StatusOK) + } + if v := w.Body.String(); !strings.HasPrefix(v, "token-http-01.") { + t.Errorf("http token value = %q; want 'token-http-01.' prefix", v) + } + } + + // ACME CA server stub, only the needed bits. + // TODO: Replace this with x/crypto/acme/autocert/internal/acmetest. + var ca *httptest.Server + ca = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Replay-Nonce", "nonce") + if r.Method == "HEAD" { + // a nonce request + return + } + + switch r.URL.Path { + // Discovery. + case "/": + if err := discoTmpl.Execute(w, ca.URL); err != nil { + t.Errorf("discoTmpl: %v", err) + } + // Client key registration. + case "/new-reg": + w.Write([]byte("{}")) + // New domain authorization. 
+ case "/new-authz": + authzCount++ + w.Header().Set("Location", fmt.Sprintf("%s/authz/%d", ca.URL, authzCount)) + w.WriteHeader(http.StatusCreated) + if err := authzTmpl.Execute(w, ca.URL); err != nil { + t.Errorf("authzTmpl: %v", err) + } + // Accept tls-sni-02. + case "/challenge/2": + w.Write([]byte("{}")) + // Reject tls-sni-01. + case "/challenge/1": + http.Error(w, "won't accept tls-sni-01", http.StatusBadRequest) + // Should not accept dns-01. + case "/challenge/dns-01": + t.Errorf("dns-01 challenge was accepted") + http.Error(w, "won't accept dns-01", http.StatusBadRequest) + // Accept http-01. + case "/challenge/http-01": + didAcceptHTTP01 = true + verifyHTTPToken() + w.Write([]byte("{}")) + // Authorization statuses. + // Make tls-sni-xxx invalid. + case "/authz/1", "/authz/2": + w.Write([]byte(`{"status": "invalid"}`)) + case "/authz/3", "/authz/4": + w.Write([]byte(`{"status": "valid"}`)) + default: + http.NotFound(w, r) + t.Errorf("unrecognized r.URL.Path: %s", r.URL.Path) + } + })) + defer ca.Close() + + m := &Manager{ + Client: &acme.Client{ + DirectoryURL: ca.URL, + }, + } + http01 = m.HTTPHandler(nil) + ctx := context.Background() + client, err := m.acmeClient(ctx) + if err != nil { + t.Fatalf("m.acmeClient: %v", err) + } + if err := m.verify(ctx, client, "example.org"); err != nil { + t.Errorf("m.verify: %v", err) + } + // Only tls-sni-01, tls-sni-02 and http-01 must be accepted + // The dns-01 challenge is unsupported. + if authzCount != 3 { + t.Errorf("authzCount = %d; want 3", authzCount) + } + if !didAcceptHTTP01 { + t.Error("did not accept http-01 challenge") + } +} + +func TestRevokeFailedAuthz(t *testing.T) { + // Prefill authorization URIs expected to be revoked. + // The challenges are selected in a specific order, + // each tried within a newly created authorization. + // This means each authorization URI corresponds to a different challenge type. + revokedAuthz := map[string]bool{ + "/authz/0": false, // tls-sni-02 + "/authz/1": false, // tls-sni-01 + "/authz/2": false, // no viable challenge, but authz is created + } + + var authzCount int // num. of created authorizations + var revokeCount int // num. of revoked authorizations + done := make(chan struct{}) // closed when revokeCount is 3 + + // ACME CA server stub, only the needed bits. + // TODO: Replace this with x/crypto/acme/autocert/internal/acmetest. + var ca *httptest.Server + ca = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Replay-Nonce", "nonce") + if r.Method == "HEAD" { + // a nonce request + return + } + + switch r.URL.Path { + // Discovery. + case "/": + if err := discoTmpl.Execute(w, ca.URL); err != nil { + t.Errorf("discoTmpl: %v", err) + } + // Client key registration. + case "/new-reg": + w.Write([]byte("{}")) + // New domain authorization. + case "/new-authz": + w.Header().Set("Location", fmt.Sprintf("%s/authz/%d", ca.URL, authzCount)) + w.WriteHeader(http.StatusCreated) + if err := authzTmpl.Execute(w, ca.URL); err != nil { + t.Errorf("authzTmpl: %v", err) + } + authzCount++ + // tls-sni-02 challenge "accept" request. + case "/challenge/2": + // Refuse. + http.Error(w, "won't accept tls-sni-02 challenge", http.StatusBadRequest) + // tls-sni-01 challenge "accept" request. + case "/challenge/1": + // Accept but the authorization will be "expired". + w.Write([]byte("{}")) + // Authorization requests. + case "/authz/0", "/authz/1", "/authz/2": + // Revocation requests. 
+ if r.Method == "POST" { + var req struct{ Status string } + if err := decodePayload(&req, r.Body); err != nil { + t.Errorf("%s: decodePayload: %v", r.URL, err) + } + switch req.Status { + case "deactivated": + revokedAuthz[r.URL.Path] = true + revokeCount++ + if revokeCount >= 3 { + // Last authorization is revoked. + defer close(done) + } + default: + t.Errorf("%s: req.Status = %q; want 'deactivated'", r.URL, req.Status) + } + w.Write([]byte(`{"status": "invalid"}`)) + return + } + // Authorization status requests. + // Simulate abandoned authorization, deleted by the CA. + w.WriteHeader(http.StatusNotFound) + default: + http.NotFound(w, r) + t.Errorf("unrecognized r.URL.Path: %s", r.URL.Path) + } + })) + defer ca.Close() + + m := &Manager{ + Client: &acme.Client{DirectoryURL: ca.URL}, + } + // Should fail and revoke 3 authorizations. + // The first 2 are tsl-sni-02 and tls-sni-01 challenges. + // The third time an authorization is created but no viable challenge is found. + // See revokedAuthz above for more explanation. + if _, err := m.createCert(context.Background(), exampleCertKey); err == nil { + t.Errorf("m.createCert returned nil error") + } + select { + case <-time.After(3 * time.Second): + t.Error("revocations took too long") + case <-done: + // revokeCount is at least 3. + } + for uri, ok := range revokedAuthz { + if !ok { + t.Errorf("%q authorization was not revoked", uri) + } + } +} + +func TestHTTPHandlerDefaultFallback(t *testing.T) { + tt := []struct { + method, url string + wantCode int + wantLocation string + }{ + {"GET", "http://example.org", 302, "https://example.org/"}, + {"GET", "http://example.org/foo", 302, "https://example.org/foo"}, + {"GET", "http://example.org/foo/bar/", 302, "https://example.org/foo/bar/"}, + {"GET", "http://example.org/?a=b", 302, "https://example.org/?a=b"}, + {"GET", "http://example.org/foo?a=b", 302, "https://example.org/foo?a=b"}, + {"GET", "http://example.org:80/foo?a=b", 302, "https://example.org:443/foo?a=b"}, + {"GET", "http://example.org:80/foo%20bar", 302, "https://example.org:443/foo%20bar"}, + {"GET", "http://[2602:d1:xxxx::c60a]:1234", 302, "https://[2602:d1:xxxx::c60a]:443/"}, + {"GET", "http://[2602:d1:xxxx::c60a]", 302, "https://[2602:d1:xxxx::c60a]/"}, + {"GET", "http://[2602:d1:xxxx::c60a]/foo?a=b", 302, "https://[2602:d1:xxxx::c60a]/foo?a=b"}, + {"HEAD", "http://example.org", 302, "https://example.org/"}, + {"HEAD", "http://example.org/foo", 302, "https://example.org/foo"}, + {"HEAD", "http://example.org/foo/bar/", 302, "https://example.org/foo/bar/"}, + {"HEAD", "http://example.org/?a=b", 302, "https://example.org/?a=b"}, + {"HEAD", "http://example.org/foo?a=b", 302, "https://example.org/foo?a=b"}, + {"POST", "http://example.org", 400, ""}, + {"PUT", "http://example.org", 400, ""}, + {"GET", "http://example.org/.well-known/acme-challenge/x", 404, ""}, + } + var m Manager + h := m.HTTPHandler(nil) + for i, test := range tt { + r := httptest.NewRequest(test.method, test.url, nil) + w := httptest.NewRecorder() + h.ServeHTTP(w, r) + if w.Code != test.wantCode { + t.Errorf("%d: w.Code = %d; want %d", i, w.Code, test.wantCode) + t.Errorf("%d: body: %s", i, w.Body.Bytes()) + } + if v := w.Header().Get("Location"); v != test.wantLocation { + t.Errorf("%d: Location = %q; want %q", i, v, test.wantLocation) + } + } +} + func TestAccountKeyCache(t *testing.T) { - m := Manager{Cache: newMemCache()} + m := Manager{Cache: newMemCache(t)} ctx := context.Background() k1, err := m.accountKey(ctx) if err != nil { @@ -436,36 +851,57 @@ func 
TestAccountKeyCache(t *testing.T) { } func TestCache(t *testing.T) { - privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + ecdsaKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) if err != nil { t.Fatal(err) } - tmpl := &x509.Certificate{ - SerialNumber: big.NewInt(1), - Subject: pkix.Name{CommonName: "example.org"}, - NotAfter: time.Now().Add(time.Hour), + cert, err := dummyCert(ecdsaKey.Public(), exampleDomain) + if err != nil { + t.Fatal(err) } - pub, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &privKey.PublicKey, privKey) + ecdsaCert := &tls.Certificate{ + Certificate: [][]byte{cert}, + PrivateKey: ecdsaKey, + } + + rsaKey, err := rsa.GenerateKey(rand.Reader, 512) if err != nil { t.Fatal(err) } - tlscert := &tls.Certificate{ - Certificate: [][]byte{pub}, - PrivateKey: privKey, + cert, err = dummyCert(rsaKey.Public(), exampleDomain) + if err != nil { + t.Fatal(err) + } + rsaCert := &tls.Certificate{ + Certificate: [][]byte{cert}, + PrivateKey: rsaKey, } - man := &Manager{Cache: newMemCache()} + man := &Manager{Cache: newMemCache(t)} defer man.stopRenew() ctx := context.Background() - if err := man.cachePut(ctx, "example.org", tlscert); err != nil { + + if err := man.cachePut(ctx, exampleCertKey, ecdsaCert); err != nil { t.Fatalf("man.cachePut: %v", err) } - res, err := man.cacheGet(ctx, "example.org") + if err := man.cachePut(ctx, exampleCertKeyRSA, rsaCert); err != nil { + t.Fatalf("man.cachePut: %v", err) + } + + res, err := man.cacheGet(ctx, exampleCertKey) if err != nil { t.Fatalf("man.cacheGet: %v", err) } - if res == nil { - t.Fatal("res is nil") + if res == nil || !bytes.Equal(res.Certificate[0], ecdsaCert.Certificate[0]) { + t.Errorf("man.cacheGet = %+v; want %+v", res, ecdsaCert) + } + + res, err = man.cacheGet(ctx, exampleCertKeyRSA) + if err != nil { + t.Fatalf("man.cacheGet: %v", err) + } + if res == nil || !bytes.Equal(res.Certificate[0], rsaCert.Certificate[0]) { + t.Errorf("man.cacheGet = %+v; want %+v", res, rsaCert) } } @@ -529,26 +965,28 @@ func TestValidCert(t *testing.T) { } tt := []struct { - domain string - key crypto.Signer - cert [][]byte - ok bool + ck certKey + key crypto.Signer + cert [][]byte + ok bool }{ - {"example.org", key1, [][]byte{cert1}, true}, - {"example.org", key3, [][]byte{cert3}, true}, - {"example.org", key1, [][]byte{cert1, cert2, cert3}, true}, - {"example.org", key1, [][]byte{cert1, {1}}, false}, - {"example.org", key1, [][]byte{{1}}, false}, - {"example.org", key1, [][]byte{cert2}, false}, - {"example.org", key2, [][]byte{cert1}, false}, - {"example.org", key1, [][]byte{cert3}, false}, - {"example.org", key3, [][]byte{cert1}, false}, - {"example.net", key1, [][]byte{cert1}, false}, - {"example.org", key1, [][]byte{early}, false}, - {"example.org", key1, [][]byte{expired}, false}, + {certKey{domain: "example.org"}, key1, [][]byte{cert1}, true}, + {certKey{domain: "example.org", isRSA: true}, key3, [][]byte{cert3}, true}, + {certKey{domain: "example.org"}, key1, [][]byte{cert1, cert2, cert3}, true}, + {certKey{domain: "example.org"}, key1, [][]byte{cert1, {1}}, false}, + {certKey{domain: "example.org"}, key1, [][]byte{{1}}, false}, + {certKey{domain: "example.org"}, key1, [][]byte{cert2}, false}, + {certKey{domain: "example.org"}, key2, [][]byte{cert1}, false}, + {certKey{domain: "example.org"}, key1, [][]byte{cert3}, false}, + {certKey{domain: "example.org"}, key3, [][]byte{cert1}, false}, + {certKey{domain: "example.net"}, key1, [][]byte{cert1}, false}, + {certKey{domain: "example.org"}, key1, 
[][]byte{early}, false}, + {certKey{domain: "example.org"}, key1, [][]byte{expired}, false}, + {certKey{domain: "example.org", isRSA: true}, key1, [][]byte{cert1}, false}, + {certKey{domain: "example.org"}, key3, [][]byte{cert3}, false}, } for i, test := range tt { - leaf, err := validCert(test.domain, test.cert, test.key) + leaf, err := validCert(test.ck, test.cert, test.key, now) if err != nil && test.ok { t.Errorf("%d: err = %v", i, err) } @@ -597,10 +1035,155 @@ func TestManagerGetCertificateBogusSNI(t *testing.T) { {"fo.o", "cache.Get of fo.o"}, } for _, tt := range tests { - _, err := m.GetCertificate(&tls.ClientHelloInfo{ServerName: tt.name}) + _, err := m.GetCertificate(clientHelloInfo(tt.name, true)) got := fmt.Sprint(err) if got != tt.wantErr { t.Errorf("GetCertificate(SNI = %q) = %q; want %q", tt.name, got, tt.wantErr) } } } + +func TestCertRequest(t *testing.T) { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + t.Fatal(err) + } + // An extension from RFC7633. Any will do. + ext := pkix.Extension{ + Id: asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1}, + Value: []byte("dummy"), + } + b, err := certRequest(key, "example.org", []pkix.Extension{ext}, "san.example.org") + if err != nil { + t.Fatalf("certRequest: %v", err) + } + r, err := x509.ParseCertificateRequest(b) + if err != nil { + t.Fatalf("ParseCertificateRequest: %v", err) + } + var found bool + for _, v := range r.Extensions { + if v.Id.Equal(ext.Id) { + found = true + break + } + } + if !found { + t.Errorf("want %v in Extensions: %v", ext, r.Extensions) + } +} + +func TestSupportsECDSA(t *testing.T) { + tests := []struct { + CipherSuites []uint16 + SignatureSchemes []tls.SignatureScheme + SupportedCurves []tls.CurveID + ecdsaOk bool + }{ + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + }, nil, nil, false}, + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + }, nil, nil, true}, + + // SignatureSchemes limits, not extends, CipherSuites + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + }, []tls.SignatureScheme{ + tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256, + }, nil, false}, + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + }, []tls.SignatureScheme{ + tls.PKCS1WithSHA256, + }, nil, false}, + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + }, []tls.SignatureScheme{ + tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256, + }, nil, true}, + + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + }, []tls.SignatureScheme{ + tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256, + }, []tls.CurveID{ + tls.CurveP521, + }, false}, + {[]uint16{ + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + }, []tls.SignatureScheme{ + tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256, + }, []tls.CurveID{ + tls.CurveP256, + tls.CurveP521, + }, true}, + } + for i, tt := range tests { + result := supportsECDSA(&tls.ClientHelloInfo{ + CipherSuites: tt.CipherSuites, + SignatureSchemes: tt.SignatureSchemes, + SupportedCurves: tt.SupportedCurves, + }) + if result != tt.ecdsaOk { + t.Errorf("%d: supportsECDSA = %v; want %v", i, result, tt.ecdsaOk) + } + } +} + +// TODO: add same end-to-end for http-01 challenge type. 
+func TestEndToEnd(t *testing.T) { + const domain = "example.org" + + // ACME CA server + ca := acmetest.NewCAServer([]string{"tls-alpn-01"}, []string{domain}) + defer ca.Close() + + // User dummy server. + m := &Manager{ + Prompt: AcceptTOS, + Client: &acme.Client{DirectoryURL: ca.URL}, + } + us := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte("OK")) + })) + us.TLS = &tls.Config{ + NextProtos: []string{"http/1.1", acme.ALPNProto}, + GetCertificate: func(hello *tls.ClientHelloInfo) (*tls.Certificate, error) { + cert, err := m.GetCertificate(hello) + if err != nil { + t.Errorf("m.GetCertificate: %v", err) + } + return cert, err + }, + } + us.StartTLS() + defer us.Close() + // In TLS-ALPN challenge verification, CA connects to the domain:443 in question. + // Because the domain won't resolve in tests, we need to tell the CA + // where to dial to instead. + ca.Resolve(domain, strings.TrimPrefix(us.URL, "https://")) + + // A client visiting user dummy server. + tr := &http.Transport{ + TLSClientConfig: &tls.Config{ + RootCAs: ca.Roots, + ServerName: domain, + }, + } + client := &http.Client{Transport: tr} + res, err := client.Get(us.URL) + if err != nil { + t.Logf("CA errors: %v", ca.Errors()) + t.Fatal(err) + } + defer res.Body.Close() + b, err := ioutil.ReadAll(res.Body) + if err != nil { + t.Fatal(err) + } + if v := string(b); v != "OK" { + t.Errorf("user server response: %q; want 'OK'", v) + } +} diff --git a/vendor/golang.org/x/crypto/acme/autocert/cache.go b/vendor/golang.org/x/crypto/acme/autocert/cache.go index 61a5fd2..aa9aa84 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/cache.go +++ b/vendor/golang.org/x/crypto/acme/autocert/cache.go @@ -16,10 +16,10 @@ import ( var ErrCacheMiss = errors.New("acme/autocert: certificate cache miss") // Cache is used by Manager to store and retrieve previously obtained certificates -// as opaque data. +// and other account data as opaque blobs. // -// The key argument of the methods refers to a domain name but need not be an FQDN. -// Cache implementations should not rely on the key naming pattern. +// Cache implementations should not rely on the key naming pattern. Keys can +// include any printable ASCII characters, except the following: \/:*?"<>| type Cache interface { // Get returns a certificate data for the specified key. // If there's no such key, Get returns ErrCacheMiss. 
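The cache.go change above broadens the Cache contract: a key is now an opaque string that may refer to a domain certificate, an RSA or token-certificate variant of it, or ACME account data, and implementations must not try to parse it. As a rough sketch of what a conforming implementation looks like, assuming only the exported autocert.Cache interface and autocert.ErrCacheMiss (the type and constructor names below are illustrative, not part of this patch), an in-memory cache in the spirit of the tests' newMemCache helper could be:

// Sketch only: a minimal in-memory autocert.Cache that treats keys as opaque
// blobs. Names (memoryCache, newMemoryCache) are invented for illustration.
package cacheexample

import (
	"context"
	"sync"

	"golang.org/x/crypto/acme/autocert"
)

type memoryCache struct {
	mu   sync.Mutex
	data map[string][]byte
}

func newMemoryCache() *memoryCache {
	return &memoryCache{data: make(map[string][]byte)}
}

// Get returns the blob stored under key, or autocert.ErrCacheMiss if absent.
func (m *memoryCache) Get(ctx context.Context, key string) ([]byte, error) {
	m.mu.Lock()
	defer m.mu.Unlock()
	b, ok := m.data[key]
	if !ok {
		return nil, autocert.ErrCacheMiss
	}
	return b, nil
}

// Put stores data under key, overwriting any previous value.
func (m *memoryCache) Put(ctx context.Context, key string, data []byte) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.data[key] = data
	return nil
}

// Delete removes key; deleting a missing key is not an error.
func (m *memoryCache) Delete(ctx context.Context, key string) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	delete(m.data, key)
	return nil
}

// Compile-time check that memoryCache satisfies autocert.Cache.
var _ autocert.Cache = (*memoryCache)(nil)

A real deployment would more likely use autocert.DirCache, as in the example_test.go hunk that follows; the point is only that a conforming Cache stores and returns blobs by key without interpreting the key.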
diff --git a/vendor/golang.org/x/crypto/acme/autocert/example_test.go b/vendor/golang.org/x/crypto/acme/autocert/example_test.go index c6267b8..d4225e5 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/example_test.go +++ b/vendor/golang.org/x/crypto/acme/autocert/example_test.go @@ -5,7 +5,6 @@ package autocert_test import ( - "crypto/tls" "fmt" "log" "net/http" @@ -22,13 +21,14 @@ func ExampleNewListener() { } func ExampleManager() { - m := autocert.Manager{ + m := &autocert.Manager{ + Cache: autocert.DirCache("secret-dir"), Prompt: autocert.AcceptTOS, - HostPolicy: autocert.HostWhitelist("example.org"), + HostPolicy: autocert.HostWhitelist("example.org", "www.example.org"), } s := &http.Server{ Addr: ":https", - TLSConfig: &tls.Config{GetCertificate: m.GetCertificate}, + TLSConfig: m.TLSConfig(), } s.ListenAndServeTLS("", "") } diff --git a/vendor/golang.org/x/crypto/acme/autocert/internal/acmetest/ca.go b/vendor/golang.org/x/crypto/acme/autocert/internal/acmetest/ca.go new file mode 100644 index 0000000..acc486a --- /dev/null +++ b/vendor/golang.org/x/crypto/acme/autocert/internal/acmetest/ca.go @@ -0,0 +1,416 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package acmetest provides types for testing acme and autocert packages. +// +// TODO: Consider moving this to x/crypto/acme/internal/acmetest for acme tests as well. +package acmetest + +import ( + "crypto" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/tls" + "crypto/x509" + "crypto/x509/pkix" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "math/big" + "net/http" + "net/http/httptest" + "sort" + "strings" + "sync" + "time" +) + +// CAServer is a simple test server which implements ACME spec bits needed for testing. +type CAServer struct { + URL string // server URL after it has been started + Roots *x509.CertPool // CA root certificates; initialized in NewCAServer + + rootKey crypto.Signer + rootCert []byte // DER encoding + rootTemplate *x509.Certificate + + server *httptest.Server + challengeTypes []string // supported challenge types + domainsWhitelist []string // only these domains are valid for issuing, unless empty + + mu sync.Mutex + certCount int // number of issued certs + domainAddr map[string]string // domain name to addr:port resolution + authorizations map[string]*authorization // keyed by domain name + errors []error // encountered client errors +} + +// NewCAServer creates a new ACME test server and starts serving requests. +// The returned CAServer issues certs signed with the CA roots +// available in the Roots field. +// +// The challengeTypes argument defines the supported ACME challenge types +// sent to a client in a response for a domain authorization. +// If domainsWhitelist is non-empty, the certs will be issued only for the specified +// list of domains. Otherwise, any domain name is allowed. 
+func NewCAServer(challengeTypes []string, domainsWhitelist []string) *CAServer { + var whitelist []string + for _, name := range domainsWhitelist { + whitelist = append(whitelist, name) + } + sort.Strings(whitelist) + ca := &CAServer{ + challengeTypes: challengeTypes, + domainsWhitelist: whitelist, + domainAddr: make(map[string]string), + authorizations: make(map[string]*authorization), + } + + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + panic(fmt.Sprintf("ecdsa.GenerateKey: %v", err)) + } + tmpl := &x509.Certificate{ + SerialNumber: big.NewInt(1), + Subject: pkix.Name{ + Organization: []string{"Test Acme Co"}, + CommonName: "Root CA", + }, + NotBefore: time.Now(), + NotAfter: time.Now().Add(365 * 24 * time.Hour), + KeyUsage: x509.KeyUsageCertSign, + BasicConstraintsValid: true, + IsCA: true, + } + der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key) + if err != nil { + panic(fmt.Sprintf("x509.CreateCertificate: %v", err)) + } + cert, err := x509.ParseCertificate(der) + if err != nil { + panic(fmt.Sprintf("x509.ParseCertificate: %v", err)) + } + ca.Roots = x509.NewCertPool() + ca.Roots.AddCert(cert) + ca.rootKey = key + ca.rootCert = der + ca.rootTemplate = tmpl + + ca.server = httptest.NewServer(http.HandlerFunc(ca.handle)) + ca.URL = ca.server.URL + return ca +} + +// Close shuts down the server and blocks until all outstanding +// requests on this server have completed. +func (ca *CAServer) Close() { + ca.server.Close() +} + +// Errors returns all client errors. +func (ca *CAServer) Errors() []error { + ca.mu.Lock() + defer ca.mu.Unlock() + return ca.errors +} + +// Resolve adds a domain to address resolution for the ca to dial to +// when validating challenges for the domain authorization. +func (ca *CAServer) Resolve(domain, addr string) { + ca.mu.Lock() + defer ca.mu.Unlock() + ca.domainAddr[domain] = addr +} + +type discovery struct { + NewReg string `json:"new-reg"` + NewAuthz string `json:"new-authz"` + NewCert string `json:"new-cert"` +} + +type challenge struct { + URI string `json:"uri"` + Type string `json:"type"` + Token string `json:"token"` +} + +type authorization struct { + Status string `json:"status"` + Challenges []challenge `json:"challenges"` + + id int + domain string +} + +func (ca *CAServer) handle(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Replay-Nonce", "nonce") + if r.Method == "HEAD" { + // a nonce request + return + } + + // TODO: Verify nonce header for all POST requests. + + switch { + default: + err := fmt.Errorf("unrecognized r.URL.Path: %s", r.URL.Path) + ca.addError(err) + http.Error(w, err.Error(), http.StatusBadRequest) + + // Discovery request. + case r.URL.Path == "/": + resp := &discovery{ + NewReg: ca.serverURL("/new-reg"), + NewAuthz: ca.serverURL("/new-authz"), + NewCert: ca.serverURL("/new-cert"), + } + if err := json.NewEncoder(w).Encode(resp); err != nil { + panic(fmt.Sprintf("discovery response: %v", err)) + } + + // Client key registration request. + case r.URL.Path == "/new-reg": + // TODO: Check the user account key against a ca.accountKeys? + w.Write([]byte("{}")) + + // Domain authorization request. 
+ case r.URL.Path == "/new-authz": + var req struct { + Identifier struct{ Value string } + } + if err := decodePayload(&req, r.Body); err != nil { + ca.addError(err) + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + ca.mu.Lock() + defer ca.mu.Unlock() + authz, ok := ca.authorizations[req.Identifier.Value] + if !ok { + authz = &authorization{ + domain: req.Identifier.Value, + Status: "pending", + } + for _, typ := range ca.challengeTypes { + authz.Challenges = append(authz.Challenges, challenge{ + Type: typ, + URI: ca.serverURL("/challenge/%s/%s", typ, authz.domain), + Token: challengeToken(authz.domain, typ), + }) + } + ca.authorizations[authz.domain] = authz + } + w.Header().Set("Location", ca.serverURL("/authz/%s", authz.domain)) + w.WriteHeader(http.StatusCreated) + if err := json.NewEncoder(w).Encode(authz); err != nil { + panic(fmt.Sprintf("new authz response: %v", err)) + } + + // Accept tls-alpn-01 challenge type requests. + // TODO: Add http-01 and dns-01 handlers. + case strings.HasPrefix(r.URL.Path, "/challenge/tls-alpn-01/"): + domain := strings.TrimPrefix(r.URL.Path, "/challenge/tls-alpn-01/") + ca.mu.Lock() + defer ca.mu.Unlock() + if _, ok := ca.authorizations[domain]; !ok { + err := fmt.Errorf("challenge accept: no authz for %q", domain) + ca.addError(err) + http.Error(w, err.Error(), http.StatusNotFound) + return + } + go func(domain string) { + err := ca.verifyALPNChallenge(domain) + ca.mu.Lock() + defer ca.mu.Unlock() + authz := ca.authorizations[domain] + if err != nil { + authz.Status = "invalid" + return + } + authz.Status = "valid" + + }(domain) + w.Write([]byte("{}")) + + // Get authorization status requests. + case strings.HasPrefix(r.URL.Path, "/authz/"): + domain := strings.TrimPrefix(r.URL.Path, "/authz/") + ca.mu.Lock() + defer ca.mu.Unlock() + authz, ok := ca.authorizations[domain] + if !ok { + http.Error(w, fmt.Sprintf("no authz for %q", domain), http.StatusNotFound) + return + } + if err := json.NewEncoder(w).Encode(authz); err != nil { + panic(fmt.Sprintf("get authz for %q response: %v", domain, err)) + } + + // Cert issuance request. + case r.URL.Path == "/new-cert": + var req struct { + CSR string `json:"csr"` + } + decodePayload(&req, r.Body) + b, _ := base64.RawURLEncoding.DecodeString(req.CSR) + csr, err := x509.ParseCertificateRequest(b) + if err != nil { + ca.addError(err) + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + names := unique(append(csr.DNSNames, csr.Subject.CommonName)) + if err := ca.matchWhitelist(names); err != nil { + ca.addError(err) + http.Error(w, err.Error(), http.StatusUnauthorized) + return + } + if err := ca.authorized(names); err != nil { + ca.addError(err) + http.Error(w, err.Error(), http.StatusUnauthorized) + return + } + der, err := ca.leafCert(csr) + if err != nil { + err = fmt.Errorf("new-cert response: ca.leafCert: %v", err) + ca.addError(err) + http.Error(w, err.Error(), http.StatusBadRequest) + } + w.Header().Set("Link", fmt.Sprintf("<%s>; rel=up", ca.serverURL("/ca-cert"))) + w.WriteHeader(http.StatusCreated) + w.Write(der) + + // CA chain cert request. + case r.URL.Path == "/ca-cert": + w.Write(ca.rootCert) + } +} + +func (ca *CAServer) addError(err error) { + ca.mu.Lock() + defer ca.mu.Unlock() + ca.errors = append(ca.errors, err) +} + +func (ca *CAServer) serverURL(format string, arg ...interface{}) string { + return ca.server.URL + fmt.Sprintf(format, arg...) 
+} + +func (ca *CAServer) matchWhitelist(dnsNames []string) error { + if len(ca.domainsWhitelist) == 0 { + return nil + } + var nomatch []string + for _, name := range dnsNames { + i := sort.SearchStrings(ca.domainsWhitelist, name) + if i == len(ca.domainsWhitelist) || ca.domainsWhitelist[i] != name { + nomatch = append(nomatch, name) + } + } + if len(nomatch) > 0 { + return fmt.Errorf("matchWhitelist: some domains don't match: %q", nomatch) + } + return nil +} + +func (ca *CAServer) authorized(dnsNames []string) error { + ca.mu.Lock() + defer ca.mu.Unlock() + var noauthz []string + for _, name := range dnsNames { + authz, ok := ca.authorizations[name] + if !ok || authz.Status != "valid" { + noauthz = append(noauthz, name) + } + } + if len(noauthz) > 0 { + return fmt.Errorf("CAServer: no authz for %q", noauthz) + } + return nil +} + +func (ca *CAServer) leafCert(csr *x509.CertificateRequest) (der []byte, err error) { + ca.mu.Lock() + defer ca.mu.Unlock() + ca.certCount++ // next leaf cert serial number + leaf := &x509.Certificate{ + SerialNumber: big.NewInt(int64(ca.certCount)), + Subject: pkix.Name{Organization: []string{"Test Acme Co"}}, + NotBefore: time.Now(), + NotAfter: time.Now().Add(90 * 24 * time.Hour), + KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, + DNSNames: csr.DNSNames, + BasicConstraintsValid: true, + } + if len(csr.DNSNames) == 0 { + leaf.DNSNames = []string{csr.Subject.CommonName} + } + return x509.CreateCertificate(rand.Reader, leaf, ca.rootTemplate, csr.PublicKey, ca.rootKey) +} + +func (ca *CAServer) addr(domain string) (string, error) { + ca.mu.Lock() + defer ca.mu.Unlock() + addr, ok := ca.domainAddr[domain] + if !ok { + return "", fmt.Errorf("CAServer: no addr resolution for %q", domain) + } + return addr, nil +} + +func (ca *CAServer) verifyALPNChallenge(domain string) error { + const acmeALPNProto = "acme-tls/1" + + addr, err := ca.addr(domain) + if err != nil { + return err + } + conn, err := tls.Dial("tcp", addr, &tls.Config{ + ServerName: domain, + InsecureSkipVerify: true, + NextProtos: []string{acmeALPNProto}, + }) + if err != nil { + return err + } + if v := conn.ConnectionState().NegotiatedProtocol; v != acmeALPNProto { + return fmt.Errorf("CAServer: verifyALPNChallenge: negotiated proto is %q; want %q", v, acmeALPNProto) + } + if n := len(conn.ConnectionState().PeerCertificates); n != 1 { + return fmt.Errorf("len(PeerCertificates) = %d; want 1", n) + } + // TODO: verify conn.ConnectionState().PeerCertificates[0] + return nil +} + +func decodePayload(v interface{}, r io.Reader) error { + var req struct{ Payload string } + if err := json.NewDecoder(r).Decode(&req); err != nil { + return err + } + payload, err := base64.RawURLEncoding.DecodeString(req.Payload) + if err != nil { + return err + } + return json.Unmarshal(payload, v) +} + +func challengeToken(domain, challType string) string { + return fmt.Sprintf("token-%s-%s", domain, challType) +} + +func unique(a []string) []string { + seen := make(map[string]bool) + var res []string + for _, s := range a { + if s != "" && !seen[s] { + seen[s] = true + res = append(res, s) + } + } + return res +} diff --git a/vendor/golang.org/x/crypto/acme/autocert/listener.go b/vendor/golang.org/x/crypto/acme/autocert/listener.go index d744df0..1e06981 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/listener.go +++ b/vendor/golang.org/x/crypto/acme/autocert/listener.go @@ -72,11 +72,8 @@ func NewListener(domains ...string) 
net.Listener { // the Manager m's Prompt, Cache, HostPolicy, and other desired options. func (m *Manager) Listener() net.Listener { ln := &listener{ - m: m, - conf: &tls.Config{ - GetCertificate: m.GetCertificate, // bonus: panic on nil m - NextProtos: []string{"h2", "http/1.1"}, // Enable HTTP/2 - }, + m: m, + conf: m.TLSConfig(), } ln.tcpListener, ln.tcpListenErr = net.Listen("tcp", ":443") return ln diff --git a/vendor/golang.org/x/crypto/acme/autocert/renewal.go b/vendor/golang.org/x/crypto/acme/autocert/renewal.go index 6c5da2b..665f870 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/renewal.go +++ b/vendor/golang.org/x/crypto/acme/autocert/renewal.go @@ -17,9 +17,9 @@ const renewJitter = time.Hour // domainRenewal tracks the state used by the periodic timers // renewing a single domain's cert. type domainRenewal struct { - m *Manager - domain string - key crypto.Signer + m *Manager + ck certKey + key crypto.Signer timerMu sync.Mutex timer *time.Timer @@ -71,25 +71,43 @@ func (dr *domainRenewal) renew() { testDidRenewLoop(next, err) } +// updateState locks and replaces the relevant Manager.state item with the given +// state. It additionally updates dr.key with the given state's key. +func (dr *domainRenewal) updateState(state *certState) { + dr.m.stateMu.Lock() + defer dr.m.stateMu.Unlock() + dr.key = state.key + dr.m.state[dr.ck] = state +} + // do is similar to Manager.createCert but it doesn't lock a Manager.state item. // Instead, it requests a new certificate independently and, upon success, // replaces dr.m.state item with a new one and updates cache for the given domain. // -// It may return immediately if the expiration date of the currently cached cert -// is far enough in the future. +// It may lock and update the Manager.state if the expiration date of the currently +// cached cert is far enough in the future. // // The returned value is a time interval after which the renewal should occur again. 
func (dr *domainRenewal) do(ctx context.Context) (time.Duration, error) { // a race is likely unavoidable in a distributed environment // but we try nonetheless - if tlscert, err := dr.m.cacheGet(ctx, dr.domain); err == nil { + if tlscert, err := dr.m.cacheGet(ctx, dr.ck); err == nil { next := dr.next(tlscert.Leaf.NotAfter) if next > dr.m.renewBefore()+renewJitter { - return next, nil + signer, ok := tlscert.PrivateKey.(crypto.Signer) + if ok { + state := &certState{ + key: signer, + cert: tlscert.Certificate, + leaf: tlscert.Leaf, + } + dr.updateState(state) + return next, nil + } } } - der, leaf, err := dr.m.authorizedCert(ctx, dr.key, dr.domain) + der, leaf, err := dr.m.authorizedCert(ctx, dr.key, dr.ck) if err != nil { return 0, err } @@ -102,16 +120,15 @@ func (dr *domainRenewal) do(ctx context.Context) (time.Duration, error) { if err != nil { return 0, err } - dr.m.cachePut(ctx, dr.domain, tlscert) - dr.m.stateMu.Lock() - defer dr.m.stateMu.Unlock() - // m.state is guaranteed to be non-nil at this point - dr.m.state[dr.domain] = state + if err := dr.m.cachePut(ctx, dr.ck, tlscert); err != nil { + return 0, err + } + dr.updateState(state) return dr.next(leaf.NotAfter), nil } func (dr *domainRenewal) next(expiry time.Time) time.Duration { - d := expiry.Sub(timeNow()) - dr.m.renewBefore() + d := expiry.Sub(dr.m.now()) - dr.m.renewBefore() // add a bit of randomness to renew deadline n := pseudoRand.int63n(int64(renewJitter)) d -= time.Duration(n) diff --git a/vendor/golang.org/x/crypto/acme/autocert/renewal_test.go b/vendor/golang.org/x/crypto/acme/autocert/renewal_test.go index 11d40ff..5d1c63f 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/renewal_test.go +++ b/vendor/golang.org/x/crypto/acme/autocert/renewal_test.go @@ -23,10 +23,10 @@ import ( func TestRenewalNext(t *testing.T) { now := time.Now() - timeNow = func() time.Time { return now } - defer func() { timeNow = time.Now }() - - man := &Manager{RenewBefore: 7 * 24 * time.Hour} + man := &Manager{ + RenewBefore: 7 * 24 * time.Hour, + nowFunc: func() time.Time { return now }, + } defer man.stopRenew() tt := []struct { expiry time.Time @@ -48,8 +48,6 @@ func TestRenewalNext(t *testing.T) { } func TestRenewFromCache(t *testing.T) { - const domain = "example.org" - // ACME CA server stub var ca *httptest.Server ca = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -84,7 +82,7 @@ func TestRenewFromCache(t *testing.T) { if err != nil { t.Fatalf("new-cert: CSR: %v", err) } - der, err := dummyCert(csr.PublicKey, domain) + der, err := dummyCert(csr.PublicKey, exampleDomain) if err != nil { t.Fatalf("new-cert: dummyCert: %v", err) } @@ -105,30 +103,28 @@ func TestRenewFromCache(t *testing.T) { })) defer ca.Close() - // use EC key to run faster on 386 - key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - if err != nil { - t.Fatal(err) - } man := &Manager{ Prompt: AcceptTOS, - Cache: newMemCache(), + Cache: newMemCache(t), RenewBefore: 24 * time.Hour, Client: &acme.Client{ - Key: key, DirectoryURL: ca.URL, }, } defer man.stopRenew() // cache an almost expired cert + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + t.Fatal(err) + } now := time.Now() - cert, err := dateDummyCert(key.Public(), now.Add(-2*time.Hour), now.Add(time.Minute), domain) + cert, err := dateDummyCert(key.Public(), now.Add(-2*time.Hour), now.Add(time.Minute), exampleDomain) if err != nil { t.Fatal(err) } tlscert := &tls.Certificate{PrivateKey: key, Certificate: [][]byte{cert}} - if err := 
man.cachePut(context.Background(), domain, tlscert); err != nil { + if err := man.cachePut(context.Background(), exampleCertKey, tlscert); err != nil { t.Fatal(err) } @@ -152,7 +148,7 @@ func TestRenewFromCache(t *testing.T) { // ensure the new cert is cached after := time.Now().Add(future) - tlscert, err := man.cacheGet(context.Background(), domain) + tlscert, err := man.cacheGet(context.Background(), exampleCertKey) if err != nil { t.Fatalf("man.cacheGet: %v", err) } @@ -163,9 +159,9 @@ func TestRenewFromCache(t *testing.T) { // verify the old cert is also replaced in memory man.stateMu.Lock() defer man.stateMu.Unlock() - s := man.state[domain] + s := man.state[exampleCertKey] if s == nil { - t.Fatalf("m.state[%q] is nil", domain) + t.Fatalf("m.state[%q] is nil", exampleCertKey) } tlscert, err = s.tlscert() if err != nil { @@ -177,7 +173,7 @@ func TestRenewFromCache(t *testing.T) { } // trigger renew - hello := &tls.ClientHelloInfo{ServerName: domain} + hello := clientHelloInfo(exampleDomain, true) if _, err := man.GetCertificate(hello); err != nil { t.Fatal(err) } @@ -189,3 +185,145 @@ func TestRenewFromCache(t *testing.T) { case <-done: } } + +func TestRenewFromCacheAlreadyRenewed(t *testing.T) { + man := &Manager{ + Prompt: AcceptTOS, + Cache: newMemCache(t), + RenewBefore: 24 * time.Hour, + Client: &acme.Client{ + DirectoryURL: "invalid", + }, + } + defer man.stopRenew() + + // cache a recently renewed cert with a different private key + newKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + t.Fatal(err) + } + now := time.Now() + newCert, err := dateDummyCert(newKey.Public(), now.Add(-2*time.Hour), now.Add(time.Hour*24*90), exampleDomain) + if err != nil { + t.Fatal(err) + } + newLeaf, err := validCert(exampleCertKey, [][]byte{newCert}, newKey, now) + if err != nil { + t.Fatal(err) + } + newTLSCert := &tls.Certificate{PrivateKey: newKey, Certificate: [][]byte{newCert}, Leaf: newLeaf} + if err := man.cachePut(context.Background(), exampleCertKey, newTLSCert); err != nil { + t.Fatal(err) + } + + // set internal state to an almost expired cert + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + t.Fatal(err) + } + oldCert, err := dateDummyCert(key.Public(), now.Add(-2*time.Hour), now.Add(time.Minute), exampleDomain) + if err != nil { + t.Fatal(err) + } + oldLeaf, err := validCert(exampleCertKey, [][]byte{oldCert}, key, now) + if err != nil { + t.Fatal(err) + } + man.stateMu.Lock() + if man.state == nil { + man.state = make(map[certKey]*certState) + } + s := &certState{ + key: key, + cert: [][]byte{oldCert}, + leaf: oldLeaf, + } + man.state[exampleCertKey] = s + man.stateMu.Unlock() + + // veriy the renewal accepted the newer cached cert + defer func() { + testDidRenewLoop = func(next time.Duration, err error) {} + }() + done := make(chan struct{}) + testDidRenewLoop = func(next time.Duration, err error) { + defer close(done) + if err != nil { + t.Errorf("testDidRenewLoop: %v", err) + } + // Next should be about 90 days + // Previous expiration was within 1 min. 
+ future := 88 * 24 * time.Hour + if next < future { + t.Errorf("testDidRenewLoop: next = %v; want >= %v", next, future) + } + + // ensure the cached cert was not modified + tlscert, err := man.cacheGet(context.Background(), exampleCertKey) + if err != nil { + t.Fatalf("man.cacheGet: %v", err) + } + if !tlscert.Leaf.NotAfter.Equal(newLeaf.NotAfter) { + t.Errorf("cache leaf.NotAfter = %v; want == %v", tlscert.Leaf.NotAfter, newLeaf.NotAfter) + } + + // verify the old cert is also replaced in memory + man.stateMu.Lock() + defer man.stateMu.Unlock() + s := man.state[exampleCertKey] + if s == nil { + t.Fatalf("m.state[%q] is nil", exampleCertKey) + } + stateKey := s.key.Public().(*ecdsa.PublicKey) + if stateKey.X.Cmp(newKey.X) != 0 || stateKey.Y.Cmp(newKey.Y) != 0 { + t.Fatalf("state key was not updated from cache x: %v y: %v; want x: %v y: %v", stateKey.X, stateKey.Y, newKey.X, newKey.Y) + } + tlscert, err = s.tlscert() + if err != nil { + t.Fatalf("s.tlscert: %v", err) + } + if !tlscert.Leaf.NotAfter.Equal(newLeaf.NotAfter) { + t.Errorf("state leaf.NotAfter = %v; want == %v", tlscert.Leaf.NotAfter, newLeaf.NotAfter) + } + + // verify the private key is replaced in the renewal state + r := man.renewal[exampleCertKey] + if r == nil { + t.Fatalf("m.renewal[%q] is nil", exampleCertKey) + } + renewalKey := r.key.Public().(*ecdsa.PublicKey) + if renewalKey.X.Cmp(newKey.X) != 0 || renewalKey.Y.Cmp(newKey.Y) != 0 { + t.Fatalf("renewal private key was not updated from cache x: %v y: %v; want x: %v y: %v", renewalKey.X, renewalKey.Y, newKey.X, newKey.Y) + } + + } + + // assert the expiring cert is returned from state + hello := clientHelloInfo(exampleDomain, true) + tlscert, err := man.GetCertificate(hello) + if err != nil { + t.Fatal(err) + } + if !oldLeaf.NotAfter.Equal(tlscert.Leaf.NotAfter) { + t.Errorf("state leaf.NotAfter = %v; want == %v", tlscert.Leaf.NotAfter, oldLeaf.NotAfter) + } + + // trigger renew + go man.renew(exampleCertKey, s.key, s.leaf.NotAfter) + + // wait for renew loop + select { + case <-time.After(10 * time.Second): + t.Fatal("renew took too long to occur") + case <-done: + // assert the new cert is returned from state after renew + hello := clientHelloInfo(exampleDomain, true) + tlscert, err := man.GetCertificate(hello) + if err != nil { + t.Fatal(err) + } + if !newTLSCert.Leaf.NotAfter.Equal(tlscert.Leaf.NotAfter) { + t.Errorf("state leaf.NotAfter = %v; want == %v", tlscert.Leaf.NotAfter, newTLSCert.Leaf.NotAfter) + } + } +} diff --git a/vendor/golang.org/x/crypto/acme/http.go b/vendor/golang.org/x/crypto/acme/http.go new file mode 100644 index 0000000..a43ce6a --- /dev/null +++ b/vendor/golang.org/x/crypto/acme/http.go @@ -0,0 +1,281 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package acme + +import ( + "bytes" + "context" + "crypto" + "crypto/rand" + "encoding/json" + "fmt" + "io/ioutil" + "math/big" + "net/http" + "strconv" + "strings" + "time" +) + +// retryTimer encapsulates common logic for retrying unsuccessful requests. +// It is not safe for concurrent use. +type retryTimer struct { + // backoffFn provides backoff delay sequence for retries. + // See Client.RetryBackoff doc comment. + backoffFn func(n int, r *http.Request, res *http.Response) time.Duration + // n is the current retry attempt. + n int +} + +func (t *retryTimer) inc() { + t.n++ +} + +// backoff pauses the current goroutine as described in Client.RetryBackoff. 
+func (t *retryTimer) backoff(ctx context.Context, r *http.Request, res *http.Response) error {
+ d := t.backoffFn(t.n, r, res)
+ if d <= 0 {
+ return fmt.Errorf("acme: no more retries for %s; tried %d time(s)", r.URL, t.n)
+ }
+ wakeup := time.NewTimer(d)
+ defer wakeup.Stop()
+ select {
+ case <-ctx.Done():
+ return ctx.Err()
+ case <-wakeup.C:
+ return nil
+ }
+}
+
+func (c *Client) retryTimer() *retryTimer {
+ f := c.RetryBackoff
+ if f == nil {
+ f = defaultBackoff
+ }
+ return &retryTimer{backoffFn: f}
+}
+
+// defaultBackoff provides default Client.RetryBackoff implementation
+// using a truncated exponential backoff algorithm,
+// as described in Client.RetryBackoff.
+//
+// The n argument is always bounded between 1 and 30.
+// The returned value is always greater than 0.
+func defaultBackoff(n int, r *http.Request, res *http.Response) time.Duration {
+ const max = 10 * time.Second
+ var jitter time.Duration
+ if x, err := rand.Int(rand.Reader, big.NewInt(1000)); err == nil {
+ // Set the minimum to 1ms to avoid a case where
+ // an invalid Retry-After value is parsed into 0 below,
+ // resulting in the 0 returned value which would unintentionally
+ // stop the retries.
+ jitter = (1 + time.Duration(x.Int64())) * time.Millisecond
+ }
+ if v, ok := res.Header["Retry-After"]; ok {
+ return retryAfter(v[0]) + jitter
+ }
+
+ if n < 1 {
+ n = 1
+ }
+ if n > 30 {
+ n = 30
+ }
+ d := time.Duration(1<<uint(n-1))*time.Second + jitter
+ if d > max {
+ return max
+ }
+ return d
+}
+
+// retryAfter parses a Retry-After HTTP header value,
+// trying to convert v into an int (seconds) or use http.ParseTime otherwise.
+// It returns zero value if v cannot be parsed.
+func retryAfter(v string) time.Duration {
+ if i, err := strconv.Atoi(v); err == nil {
+ return time.Duration(i) * time.Second
+ }
+ t, err := http.ParseTime(v)
+ if err != nil {
+ return 0
+ }
+ return t.Sub(timeNow())
+}
+
+// resOkay is a function that reports whether the provided response is okay.
+// It is expected to keep the response body unread.
+type resOkay func(*http.Response) bool
+
+// wantStatus returns a function which reports whether the code
+// matches the status code of a response.
+func wantStatus(codes ...int) resOkay {
+ return func(res *http.Response) bool {
+ for _, code := range codes {
+ if code == res.StatusCode {
+ return true
+ }
+ }
+ return false
+ }
+}
+
+// get issues an unsigned GET request to the specified URL.
+// It returns a non-error value only when ok reports true.
+//
+// get retries unsuccessful attempts according to c.RetryBackoff
+// until the context is done or a non-retriable error is received.
+func (c *Client) get(ctx context.Context, url string, ok resOkay) (*http.Response, error) {
+ retry := c.retryTimer()
+ for {
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ res, err := c.doNoRetry(ctx, req)
+ switch {
+ case err != nil:
+ return nil, err
+ case ok(res):
+ return res, nil
+ case isRetriable(res.StatusCode):
+ retry.inc()
+ resErr := responseError(res)
+ res.Body.Close()
+ // Ignore the error value from retry.backoff
+ // and return the one from last retry, as received from the CA.
+ if retry.backoff(ctx, req, res) != nil {
+ return nil, resErr
+ }
+ default:
+ defer res.Body.Close()
+ return nil, responseError(res)
+ }
+ }
+}
+
+// post issues a signed POST request in JWS format using the provided key
+// to the specified URL.
+// It returns a non-error value only when ok reports true. 
+// +// post retries unsuccessful attempts according to c.RetryBackoff +// until the context is done or a non-retriable error is received. +// It uses postNoRetry to make individual requests. +func (c *Client) post(ctx context.Context, key crypto.Signer, url string, body interface{}, ok resOkay) (*http.Response, error) { + retry := c.retryTimer() + for { + res, req, err := c.postNoRetry(ctx, key, url, body) + if err != nil { + return nil, err + } + if ok(res) { + return res, nil + } + resErr := responseError(res) + res.Body.Close() + switch { + // Check for bad nonce before isRetriable because it may have been returned + // with an unretriable response code such as 400 Bad Request. + case isBadNonce(resErr): + // Consider any previously stored nonce values to be invalid. + c.clearNonces() + case !isRetriable(res.StatusCode): + return nil, resErr + } + retry.inc() + // Ignore the error value from retry.backoff + // and return the one from last retry, as received from the CA. + if err := retry.backoff(ctx, req, res); err != nil { + return nil, resErr + } + } +} + +// postNoRetry signs the body with the given key and POSTs it to the provided url. +// The body argument must be JSON-serializable. +// It is used by c.post to retry unsuccessful attempts. +func (c *Client) postNoRetry(ctx context.Context, key crypto.Signer, url string, body interface{}) (*http.Response, *http.Request, error) { + nonce, err := c.popNonce(ctx, url) + if err != nil { + return nil, nil, err + } + b, err := jwsEncodeJSON(body, key, nonce) + if err != nil { + return nil, nil, err + } + req, err := http.NewRequest("POST", url, bytes.NewReader(b)) + if err != nil { + return nil, nil, err + } + req.Header.Set("Content-Type", "application/jose+json") + res, err := c.doNoRetry(ctx, req) + if err != nil { + return nil, nil, err + } + c.addNonce(res.Header) + return res, req, nil +} + +// doNoRetry issues a request req, replacing its context (if any) with ctx. +func (c *Client) doNoRetry(ctx context.Context, req *http.Request) (*http.Response, error) { + res, err := c.httpClient().Do(req.WithContext(ctx)) + if err != nil { + select { + case <-ctx.Done(): + // Prefer the unadorned context error. + // (The acme package had tests assuming this, previously from ctxhttp's + // behavior, predating net/http supporting contexts natively) + // TODO(bradfitz): reconsider this in the future. But for now this + // requires no test updates. + return nil, ctx.Err() + default: + return nil, err + } + } + return res, nil +} + +func (c *Client) httpClient() *http.Client { + if c.HTTPClient != nil { + return c.HTTPClient + } + return http.DefaultClient +} + +// isBadNonce reports whether err is an ACME "badnonce" error. +func isBadNonce(err error) bool { + // According to the spec badNonce is urn:ietf:params:acme:error:badNonce. + // However, ACME servers in the wild return their versions of the error. + // See https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-5.4 + // and https://github.com/letsencrypt/boulder/blob/0e07eacb/docs/acme-divergences.md#section-66. + ae, ok := err.(*Error) + return ok && strings.HasSuffix(strings.ToLower(ae.ProblemType), ":badnonce") +} + +// isRetriable reports whether a request can be retried +// based on the response status code. +// +// Note that a "bad nonce" error is returned with a non-retriable 400 Bad Request code. +// Callers should parse the response and check with isBadNonce. 
+func isRetriable(code int) bool { + return code <= 399 || code >= 500 || code == http.StatusTooManyRequests +} + +// responseError creates an error of Error type from resp. +func responseError(resp *http.Response) error { + // don't care if ReadAll returns an error: + // json.Unmarshal will fail in that case anyway + b, _ := ioutil.ReadAll(resp.Body) + e := &wireError{Status: resp.StatusCode} + if err := json.Unmarshal(b, e); err != nil { + // this is not a regular error response: + // populate detail with anything we received, + // e.Status will already contain HTTP response code value + e.Detail = string(b) + if e.Detail == "" { + e.Detail = resp.Status + } + } + return e.error(resp.Header) +} diff --git a/vendor/golang.org/x/crypto/acme/http_test.go b/vendor/golang.org/x/crypto/acme/http_test.go new file mode 100644 index 0000000..2748995 --- /dev/null +++ b/vendor/golang.org/x/crypto/acme/http_test.go @@ -0,0 +1,213 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package acme + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "net/http/httptest" + "reflect" + "strings" + "testing" + "time" +) + +func TestDefaultBackoff(t *testing.T) { + tt := []struct { + nretry int + retryAfter string // Retry-After header + out time.Duration // expected min; max = min + jitter + }{ + {-1, "", time.Second}, // verify the lower bound is 1 + {0, "", time.Second}, // verify the lower bound is 1 + {100, "", 10 * time.Second}, // verify the ceiling + {1, "3600", time.Hour}, // verify the header value is used + {1, "", 1 * time.Second}, + {2, "", 2 * time.Second}, + {3, "", 4 * time.Second}, + {4, "", 8 * time.Second}, + } + for i, test := range tt { + r := httptest.NewRequest("GET", "/", nil) + resp := &http.Response{Header: http.Header{}} + if test.retryAfter != "" { + resp.Header.Set("Retry-After", test.retryAfter) + } + d := defaultBackoff(test.nretry, r, resp) + max := test.out + time.Second // + max jitter + if d < test.out || max < d { + t.Errorf("%d: defaultBackoff(%v) = %v; want between %v and %v", i, test.nretry, d, test.out, max) + } + } +} + +func TestErrorResponse(t *testing.T) { + s := `{ + "status": 400, + "type": "urn:acme:error:xxx", + "detail": "text" + }` + res := &http.Response{ + StatusCode: 400, + Status: "400 Bad Request", + Body: ioutil.NopCloser(strings.NewReader(s)), + Header: http.Header{"X-Foo": {"bar"}}, + } + err := responseError(res) + v, ok := err.(*Error) + if !ok { + t.Fatalf("err = %+v (%T); want *Error type", err, err) + } + if v.StatusCode != 400 { + t.Errorf("v.StatusCode = %v; want 400", v.StatusCode) + } + if v.ProblemType != "urn:acme:error:xxx" { + t.Errorf("v.ProblemType = %q; want urn:acme:error:xxx", v.ProblemType) + } + if v.Detail != "text" { + t.Errorf("v.Detail = %q; want text", v.Detail) + } + if !reflect.DeepEqual(v.Header, res.Header) { + t.Errorf("v.Header = %+v; want %+v", v.Header, res.Header) + } +} + +func TestPostWithRetries(t *testing.T) { + var count int + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + count++ + w.Header().Set("Replay-Nonce", fmt.Sprintf("nonce%d", count)) + if r.Method == "HEAD" { + // We expect the client to do 2 head requests to fetch + // nonces, one to start and another after getting badNonce + return + } + + head, err := decodeJWSHead(r) + switch { + case err != nil: + t.Errorf("decodeJWSHead: %v", err) + case head.Nonce == "": + t.Error("head.Nonce is 
empty") + case head.Nonce == "nonce1": + // Return a badNonce error to force the call to retry. + w.Header().Set("Retry-After", "0") + w.WriteHeader(http.StatusBadRequest) + w.Write([]byte(`{"type":"urn:ietf:params:acme:error:badNonce"}`)) + return + } + // Make client.Authorize happy; we're not testing its result. + w.WriteHeader(http.StatusCreated) + w.Write([]byte(`{"status":"valid"}`)) + })) + defer ts.Close() + + client := &Client{ + Key: testKey, + DirectoryURL: ts.URL, + dir: &Directory{AuthzURL: ts.URL}, + } + // This call will fail with badNonce, causing a retry + if _, err := client.Authorize(context.Background(), "example.com"); err != nil { + t.Errorf("client.Authorize 1: %v", err) + } + if count != 4 { + t.Errorf("total requests count: %d; want 4", count) + } +} + +func TestRetryErrorType(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Replay-Nonce", "nonce") + w.WriteHeader(http.StatusTooManyRequests) + w.Write([]byte(`{"type":"rateLimited"}`)) + })) + defer ts.Close() + + client := &Client{ + Key: testKey, + RetryBackoff: func(n int, r *http.Request, res *http.Response) time.Duration { + // Do no retries. + return 0 + }, + dir: &Directory{AuthzURL: ts.URL}, + } + + t.Run("post", func(t *testing.T) { + testRetryErrorType(t, func() error { + _, err := client.Authorize(context.Background(), "example.com") + return err + }) + }) + t.Run("get", func(t *testing.T) { + testRetryErrorType(t, func() error { + _, err := client.GetAuthorization(context.Background(), ts.URL) + return err + }) + }) +} + +func testRetryErrorType(t *testing.T, callClient func() error) { + t.Helper() + err := callClient() + if err == nil { + t.Fatal("client.Authorize returned nil error") + } + acmeErr, ok := err.(*Error) + if !ok { + t.Fatalf("err is %v (%T); want *Error", err, err) + } + if acmeErr.StatusCode != http.StatusTooManyRequests { + t.Errorf("acmeErr.StatusCode = %d; want %d", acmeErr.StatusCode, http.StatusTooManyRequests) + } + if acmeErr.ProblemType != "rateLimited" { + t.Errorf("acmeErr.ProblemType = %q; want 'rateLimited'", acmeErr.ProblemType) + } +} + +func TestRetryBackoffArgs(t *testing.T) { + const resCode = http.StatusInternalServerError + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Replay-Nonce", "test-nonce") + w.WriteHeader(resCode) + })) + defer ts.Close() + + // Canceled in backoff. 
+ ctx, cancel := context.WithCancel(context.Background()) + + var nretry int + backoff := func(n int, r *http.Request, res *http.Response) time.Duration { + nretry++ + if n != nretry { + t.Errorf("n = %d; want %d", n, nretry) + } + if nretry == 3 { + cancel() + } + + if r == nil { + t.Error("r is nil") + } + if res.StatusCode != resCode { + t.Errorf("res.StatusCode = %d; want %d", res.StatusCode, resCode) + } + return time.Millisecond + } + + client := &Client{ + Key: testKey, + RetryBackoff: backoff, + dir: &Directory{AuthzURL: ts.URL}, + } + if _, err := client.Authorize(ctx, "example.com"); err == nil { + t.Error("err is nil") + } + if nretry != 3 { + t.Errorf("nretry = %d; want 3", nretry) + } +} diff --git a/vendor/golang.org/x/crypto/acme/jws.go b/vendor/golang.org/x/crypto/acme/jws.go index 6cbca25..1093b50 100644 --- a/vendor/golang.org/x/crypto/acme/jws.go +++ b/vendor/golang.org/x/crypto/acme/jws.go @@ -25,7 +25,7 @@ func jwsEncodeJSON(claimset interface{}, key crypto.Signer, nonce string) ([]byt if err != nil { return nil, err } - alg, sha := jwsHasher(key) + alg, sha := jwsHasher(key.Public()) if alg == "" || !sha.Available() { return nil, ErrUnsupportedKey } @@ -97,13 +97,16 @@ func jwkEncode(pub crypto.PublicKey) (string, error) { } // jwsSign signs the digest using the given key. -// It returns ErrUnsupportedKey if the key type is unknown. -// The hash is used only for RSA keys. +// The hash is unused for ECDSA keys. +// +// Note: non-stdlib crypto.Signer implementations are expected to return +// the signature in the format as specified in RFC7518. +// See https://tools.ietf.org/html/rfc7518 for more details. func jwsSign(key crypto.Signer, hash crypto.Hash, digest []byte) ([]byte, error) { - switch key := key.(type) { - case *rsa.PrivateKey: - return key.Sign(rand.Reader, digest, hash) - case *ecdsa.PrivateKey: + if key, ok := key.(*ecdsa.PrivateKey); ok { + // The key.Sign method of ecdsa returns ASN1-encoded signature. + // So, we use the package Sign function instead + // to get R and S values directly and format the result accordingly. r, s, err := ecdsa.Sign(rand.Reader, key, digest) if err != nil { return nil, err @@ -118,18 +121,18 @@ func jwsSign(key crypto.Signer, hash crypto.Hash, digest []byte) ([]byte, error) copy(sig[size*2-len(sb):], sb) return sig, nil } - return nil, ErrUnsupportedKey + return key.Sign(rand.Reader, digest, hash) } // jwsHasher indicates suitable JWS algorithm name and a hash function // to use for signing a digest with the provided key. // It returns ("", 0) if the key is not supported. 
-func jwsHasher(key crypto.Signer) (string, crypto.Hash) { - switch key := key.(type) { - case *rsa.PrivateKey: +func jwsHasher(pub crypto.PublicKey) (string, crypto.Hash) { + switch pub := pub.(type) { + case *rsa.PublicKey: return "RS256", crypto.SHA256 - case *ecdsa.PrivateKey: - switch key.Params().Name { + case *ecdsa.PublicKey: + switch pub.Params().Name { case "P-256": return "ES256", crypto.SHA256 case "P-384": diff --git a/vendor/golang.org/x/crypto/acme/jws_test.go b/vendor/golang.org/x/crypto/acme/jws_test.go index 0ff0fb5..ee30b1e 100644 --- a/vendor/golang.org/x/crypto/acme/jws_test.go +++ b/vendor/golang.org/x/crypto/acme/jws_test.go @@ -5,6 +5,7 @@ package acme import ( + "crypto" "crypto/ecdsa" "crypto/elliptic" "crypto/rsa" @@ -13,6 +14,7 @@ import ( "encoding/json" "encoding/pem" "fmt" + "io" "math/big" "testing" ) @@ -241,6 +243,79 @@ func TestJWSEncodeJSONEC(t *testing.T) { } } +type customTestSigner struct { + sig []byte + pub crypto.PublicKey +} + +func (s *customTestSigner) Public() crypto.PublicKey { return s.pub } +func (s *customTestSigner) Sign(io.Reader, []byte, crypto.SignerOpts) ([]byte, error) { + return s.sig, nil +} + +func TestJWSEncodeJSONCustom(t *testing.T) { + claims := struct{ Msg string }{"hello"} + const ( + // printf '{"Msg":"hello"}' | base64 | tr -d '=' | tr '/+' '_-' + payload = "eyJNc2ciOiJoZWxsbyJ9" + // printf 'testsig' | base64 | tr -d '=' + testsig = "dGVzdHNpZw" + + // printf '{"alg":"ES256","jwk":{"crv":"P-256","kty":"EC","x":,"y":,"nonce":"nonce"}' | \ + // base64 | tr -d '=' | tr '/+' '_-' + es256phead = "eyJhbGciOiJFUzI1NiIsImp3ayI6eyJjcnYiOiJQLTI1NiIsImt0eSI6IkVDIiwieCI6IjVsaEV1" + + "ZzV4SzR4QkRaMm5BYmF4THRhTGl2ODVieEo3ZVBkMWRrTzIzSFEiLCJ5IjoiNGFpSzcyc0JlVUFH" + + "a3YwVGFMc213b2tZVVl5TnhHc1M1RU1JS3dzTklLayJ9LCJub25jZSI6Im5vbmNlIn0" + + // {"alg":"RS256","jwk":{"e":"AQAB","kty":"RSA","n":"..."},"nonce":"nonce"} + rs256phead = "eyJhbGciOiJSUzI1NiIsImp3ayI6eyJlIjoiQVFBQiIsImt0eSI6" + + "IlJTQSIsIm4iOiI0eGdaM2VSUGt3b1J2eTdxZVJVYm1NRGUwVi14" + + "SDllV0xkdTBpaGVlTGxybUQybXFXWGZQOUllU0tBcGJuMzRnOFR1" + + "QVM5ZzV6aHE4RUxRM2ttanItS1Y4NkdBTWdJNlZBY0dscTNRcnpw" + + "VENmXzMwQWI3LXphd3JmUmFGT05hMUh3RXpQWTFLSG5HVmt4SmM4" + + "NWdOa3dZSTlTWTJSSFh0dmxuM3pzNXdJVE5yZG9zcUVYZWFJa1ZZ" + + "QkVoYmhOdTU0cHAza3hvNlR1V0xpOWU2cFhlV2V0RXdtbEJ3dFda" + + "bFBvaWIyajNUeExCa3NLWmZveUZ5ZWszODBtSGdKQXVtUV9JMmZq" + + "ajk4Xzk3bWszaWhPWTRBZ1ZkQ0RqMXpfR0NvWmtHNVJxN25iQ0d5" + + "b3N5S1d5RFgwMFpzLW5OcVZob0xlSXZYQzRubldkSk1aNnJvZ3h5" + + "UVEifSwibm9uY2UiOiJub25jZSJ9" + ) + + tt := []struct { + alg, phead string + pub crypto.PublicKey + }{ + {"RS256", rs256phead, testKey.Public()}, + {"ES256", es256phead, testKeyEC.Public()}, + } + for _, tc := range tt { + tc := tc + t.Run(tc.alg, func(t *testing.T) { + signer := &customTestSigner{ + sig: []byte("testsig"), + pub: tc.pub, + } + b, err := jwsEncodeJSON(claims, signer, "nonce") + if err != nil { + t.Fatal(err) + } + var j struct{ Protected, Payload, Signature string } + if err := json.Unmarshal(b, &j); err != nil { + t.Fatal(err) + } + if j.Protected != tc.phead { + t.Errorf("j.Protected = %q\nwant %q", j.Protected, tc.phead) + } + if j.Payload != payload { + t.Errorf("j.Payload = %q\nwant %q", j.Payload, payload) + } + if j.Signature != testsig { + t.Errorf("j.Signature = %q\nwant %q", j.Signature, testsig) + } + }) + } +} + func TestJWKThumbprintRSA(t *testing.T) { // Key example from RFC 7638 const base64N = "0vx7agoebGcQSuuPiLJXZptN9nndrQmbXEps2aiAFbWhM78LhWx4cbbfAAt" + diff --git 
a/vendor/golang.org/x/crypto/acme/types.go b/vendor/golang.org/x/crypto/acme/types.go index 3e19974..54792c0 100644 --- a/vendor/golang.org/x/crypto/acme/types.go +++ b/vendor/golang.org/x/crypto/acme/types.go @@ -104,7 +104,7 @@ func RateLimit(err error) (time.Duration, bool) { if e.Header == nil { return 0, true } - return retryAfter(e.Header.Get("Retry-After"), 0), true + return retryAfter(e.Header.Get("Retry-After")), true } // Account is a user account. It is associated with a private key. @@ -296,8 +296,8 @@ func (e *wireError) error(h http.Header) *Error { } } -// CertOption is an optional argument type for the TLSSNIxChallengeCert methods for -// customizing a temporary certificate for TLS-SNI challenges. +// CertOption is an optional argument type for the TLS ChallengeCert methods for +// customizing a temporary certificate for TLS-based challenges. type CertOption interface { privateCertOpt() } @@ -317,7 +317,7 @@ func (*certOptKey) privateCertOpt() {} // WithTemplate creates an option for specifying a certificate template. // See x509.CreateCertificate for template usage details. // -// In TLSSNIxChallengeCert methods, the template is also used as parent, +// In TLS ChallengeCert methods, the template is also used as parent, // resulting in a self-signed certificate. // The DNSNames field of t is always overwritten for tls-sni challenge certs. func WithTemplate(t *x509.Certificate) CertOption { diff --git a/vendor/golang.org/x/crypto/argon2/argon2.go b/vendor/golang.org/x/crypto/argon2/argon2.go new file mode 100644 index 0000000..b423fea --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/argon2.go @@ -0,0 +1,285 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package argon2 implements the key derivation function Argon2. +// Argon2 was selected as the winner of the Password Hashing Competition and can +// be used to derive cryptographic keys from passwords. +// +// For a detailed specification of Argon2 see [1]. +// +// If you aren't sure which function you need, use Argon2id (IDKey) and +// the parameter recommendations for your scenario. +// +// +// Argon2i +// +// Argon2i (implemented by Key) is the side-channel resistant version of Argon2. +// It uses data-independent memory access, which is preferred for password +// hashing and password-based key derivation. Argon2i requires more passes over +// memory than Argon2id to protect from trade-off attacks. The recommended +// parameters (taken from [2]) for non-interactive operations are time=3 and to +// use the maximum available memory. +// +// +// Argon2id +// +// Argon2id (implemented by IDKey) is a hybrid version of Argon2 combining +// Argon2i and Argon2d. It uses data-independent memory access for the first +// half of the first iteration over the memory and data-dependent memory access +// for the rest. Argon2id is side-channel resistant and provides better brute- +// force cost savings due to time-memory tradeoffs than Argon2i. The recommended +// parameters for non-interactive operations (taken from [2]) are time=1 and to +// use the maximum available memory. +// +// [1] https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf +// [2] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-03#section-9.3 +package argon2 + +import ( + "encoding/binary" + "sync" + + "golang.org/x/crypto/blake2b" +) + +// The Argon2 version implemented by this package. 
+const Version = 0x13 + +const ( + argon2d = iota + argon2i + argon2id +) + +// Key derives a key from the password, salt, and cost parameters using Argon2i +// returning a byte slice of length keyLen that can be used as cryptographic +// key. The CPU cost and parallelism degree must be greater than zero. +// +// For example, you can get a derived key for e.g. AES-256 (which needs a +// 32-byte key) by doing: +// +// key := argon2.Key([]byte("some password"), salt, 3, 32*1024, 4, 32) +// +// The draft RFC recommends[2] time=3, and memory=32*1024 is a sensible number. +// If using that amount of memory (32 MB) is not possible in some contexts then +// the time parameter can be increased to compensate. +// +// The time parameter specifies the number of passes over the memory and the +// memory parameter specifies the size of the memory in KiB. For example +// memory=32*1024 sets the memory cost to ~32 MB. The number of threads can be +// adjusted to the number of available CPUs. The cost parameters should be +// increased as memory latency and CPU parallelism increases. Remember to get a +// good random salt. +func Key(password, salt []byte, time, memory uint32, threads uint8, keyLen uint32) []byte { + return deriveKey(argon2i, password, salt, nil, nil, time, memory, threads, keyLen) +} + +// IDKey derives a key from the password, salt, and cost parameters using +// Argon2id returning a byte slice of length keyLen that can be used as +// cryptographic key. The CPU cost and parallelism degree must be greater than +// zero. +// +// For example, you can get a derived key for e.g. AES-256 (which needs a +// 32-byte key) by doing: +// +// key := argon2.IDKey([]byte("some password"), salt, 1, 64*1024, 4, 32) +// +// The draft RFC recommends[2] time=1, and memory=64*1024 is a sensible number. +// If using that amount of memory (64 MB) is not possible in some contexts then +// the time parameter can be increased to compensate. +// +// The time parameter specifies the number of passes over the memory and the +// memory parameter specifies the size of the memory in KiB. For example +// memory=64*1024 sets the memory cost to ~64 MB. The number of threads can be +// adjusted to the numbers of available CPUs. The cost parameters should be +// increased as memory latency and CPU parallelism increases. Remember to get a +// good random salt. 
+func IDKey(password, salt []byte, time, memory uint32, threads uint8, keyLen uint32) []byte { + return deriveKey(argon2id, password, salt, nil, nil, time, memory, threads, keyLen) +} + +func deriveKey(mode int, password, salt, secret, data []byte, time, memory uint32, threads uint8, keyLen uint32) []byte { + if time < 1 { + panic("argon2: number of rounds too small") + } + if threads < 1 { + panic("argon2: parallelism degree too low") + } + h0 := initHash(password, salt, secret, data, time, memory, uint32(threads), keyLen, mode) + + memory = memory / (syncPoints * uint32(threads)) * (syncPoints * uint32(threads)) + if memory < 2*syncPoints*uint32(threads) { + memory = 2 * syncPoints * uint32(threads) + } + B := initBlocks(&h0, memory, uint32(threads)) + processBlocks(B, time, memory, uint32(threads), mode) + return extractKey(B, memory, uint32(threads), keyLen) +} + +const ( + blockLength = 128 + syncPoints = 4 +) + +type block [blockLength]uint64 + +func initHash(password, salt, key, data []byte, time, memory, threads, keyLen uint32, mode int) [blake2b.Size + 8]byte { + var ( + h0 [blake2b.Size + 8]byte + params [24]byte + tmp [4]byte + ) + + b2, _ := blake2b.New512(nil) + binary.LittleEndian.PutUint32(params[0:4], threads) + binary.LittleEndian.PutUint32(params[4:8], keyLen) + binary.LittleEndian.PutUint32(params[8:12], memory) + binary.LittleEndian.PutUint32(params[12:16], time) + binary.LittleEndian.PutUint32(params[16:20], uint32(Version)) + binary.LittleEndian.PutUint32(params[20:24], uint32(mode)) + b2.Write(params[:]) + binary.LittleEndian.PutUint32(tmp[:], uint32(len(password))) + b2.Write(tmp[:]) + b2.Write(password) + binary.LittleEndian.PutUint32(tmp[:], uint32(len(salt))) + b2.Write(tmp[:]) + b2.Write(salt) + binary.LittleEndian.PutUint32(tmp[:], uint32(len(key))) + b2.Write(tmp[:]) + b2.Write(key) + binary.LittleEndian.PutUint32(tmp[:], uint32(len(data))) + b2.Write(tmp[:]) + b2.Write(data) + b2.Sum(h0[:0]) + return h0 +} + +func initBlocks(h0 *[blake2b.Size + 8]byte, memory, threads uint32) []block { + var block0 [1024]byte + B := make([]block, memory) + for lane := uint32(0); lane < threads; lane++ { + j := lane * (memory / threads) + binary.LittleEndian.PutUint32(h0[blake2b.Size+4:], lane) + + binary.LittleEndian.PutUint32(h0[blake2b.Size:], 0) + blake2bHash(block0[:], h0[:]) + for i := range B[j+0] { + B[j+0][i] = binary.LittleEndian.Uint64(block0[i*8:]) + } + + binary.LittleEndian.PutUint32(h0[blake2b.Size:], 1) + blake2bHash(block0[:], h0[:]) + for i := range B[j+1] { + B[j+1][i] = binary.LittleEndian.Uint64(block0[i*8:]) + } + } + return B +} + +func processBlocks(B []block, time, memory, threads uint32, mode int) { + lanes := memory / threads + segments := lanes / syncPoints + + processSegment := func(n, slice, lane uint32, wg *sync.WaitGroup) { + var addresses, in, zero block + if mode == argon2i || (mode == argon2id && n == 0 && slice < syncPoints/2) { + in[0] = uint64(n) + in[1] = uint64(lane) + in[2] = uint64(slice) + in[3] = uint64(memory) + in[4] = uint64(time) + in[5] = uint64(mode) + } + + index := uint32(0) + if n == 0 && slice == 0 { + index = 2 // we have already generated the first two blocks + if mode == argon2i || mode == argon2id { + in[6]++ + processBlock(&addresses, &in, &zero) + processBlock(&addresses, &addresses, &zero) + } + } + + offset := lane*lanes + slice*segments + index + var random uint64 + for index < segments { + prev := offset - 1 + if index == 0 && slice == 0 { + prev += lanes // last block in lane + } + if mode == argon2i || (mode == 
argon2id && n == 0 && slice < syncPoints/2) { + if index%blockLength == 0 { + in[6]++ + processBlock(&addresses, &in, &zero) + processBlock(&addresses, &addresses, &zero) + } + random = addresses[index%blockLength] + } else { + random = B[prev][0] + } + newOffset := indexAlpha(random, lanes, segments, threads, n, slice, lane, index) + processBlockXOR(&B[offset], &B[prev], &B[newOffset]) + index, offset = index+1, offset+1 + } + wg.Done() + } + + for n := uint32(0); n < time; n++ { + for slice := uint32(0); slice < syncPoints; slice++ { + var wg sync.WaitGroup + for lane := uint32(0); lane < threads; lane++ { + wg.Add(1) + go processSegment(n, slice, lane, &wg) + } + wg.Wait() + } + } + +} + +func extractKey(B []block, memory, threads, keyLen uint32) []byte { + lanes := memory / threads + for lane := uint32(0); lane < threads-1; lane++ { + for i, v := range B[(lane*lanes)+lanes-1] { + B[memory-1][i] ^= v + } + } + + var block [1024]byte + for i, v := range B[memory-1] { + binary.LittleEndian.PutUint64(block[i*8:], v) + } + key := make([]byte, keyLen) + blake2bHash(key, block[:]) + return key +} + +func indexAlpha(rand uint64, lanes, segments, threads, n, slice, lane, index uint32) uint32 { + refLane := uint32(rand>>32) % threads + if n == 0 && slice == 0 { + refLane = lane + } + m, s := 3*segments, ((slice+1)%syncPoints)*segments + if lane == refLane { + m += index + } + if n == 0 { + m, s = slice*segments, 0 + if slice == 0 || lane == refLane { + m += index + } + } + if index == 0 || lane == refLane { + m-- + } + return phi(rand, uint64(m), uint64(s), refLane, lanes) +} + +func phi(rand, m, s uint64, lane, lanes uint32) uint32 { + p := rand & 0xFFFFFFFF + p = (p * p) >> 32 + p = (p * m) >> 32 + return lane*lanes + uint32((s+m-(p+1))%uint64(lanes)) +} diff --git a/vendor/golang.org/x/crypto/argon2/argon2_test.go b/vendor/golang.org/x/crypto/argon2/argon2_test.go new file mode 100644 index 0000000..775b97a --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/argon2_test.go @@ -0,0 +1,233 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package argon2 + +import ( + "bytes" + "encoding/hex" + "testing" +) + +var ( + genKatPassword = []byte{ + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + } + genKatSalt = []byte{0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02} + genKatSecret = []byte{0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03} + genKatAAD = []byte{0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04} +) + +func TestArgon2(t *testing.T) { + defer func(sse4 bool) { useSSE4 = sse4 }(useSSE4) + + if useSSE4 { + t.Log("SSE4.1 version") + testArgon2i(t) + testArgon2d(t) + testArgon2id(t) + useSSE4 = false + } + t.Log("generic version") + testArgon2i(t) + testArgon2d(t) + testArgon2id(t) +} + +func testArgon2d(t *testing.T) { + want := []byte{ + 0x51, 0x2b, 0x39, 0x1b, 0x6f, 0x11, 0x62, 0x97, + 0x53, 0x71, 0xd3, 0x09, 0x19, 0x73, 0x42, 0x94, + 0xf8, 0x68, 0xe3, 0xbe, 0x39, 0x84, 0xf3, 0xc1, + 0xa1, 0x3a, 0x4d, 0xb9, 0xfa, 0xbe, 0x4a, 0xcb, + } + hash := deriveKey(argon2d, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32) + if !bytes.Equal(hash, want) { + t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want)) + } +} + +func testArgon2i(t *testing.T) { + want := []byte{ + 0xc8, 0x14, 0xd9, 0xd1, 0xdc, 0x7f, 0x37, 0xaa, + 0x13, 0xf0, 0xd7, 0x7f, 0x24, 0x94, 0xbd, 0xa1, + 0xc8, 0xde, 0x6b, 0x01, 0x6d, 0xd3, 0x88, 0xd2, + 0x99, 0x52, 0xa4, 0xc4, 0x67, 0x2b, 0x6c, 0xe8, + } + hash := deriveKey(argon2i, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32) + if !bytes.Equal(hash, want) { + t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want)) + } +} + +func testArgon2id(t *testing.T) { + want := []byte{ + 0x0d, 0x64, 0x0d, 0xf5, 0x8d, 0x78, 0x76, 0x6c, + 0x08, 0xc0, 0x37, 0xa3, 0x4a, 0x8b, 0x53, 0xc9, + 0xd0, 0x1e, 0xf0, 0x45, 0x2d, 0x75, 0xb6, 0x5e, + 0xb5, 0x25, 0x20, 0xe9, 0x6b, 0x01, 0xe6, 0x59, + } + hash := deriveKey(argon2id, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32) + if !bytes.Equal(hash, want) { + t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want)) + } +} + +func TestVectors(t *testing.T) { + password, salt := []byte("password"), []byte("somesalt") + for i, v := range testVectors { + want, err := hex.DecodeString(v.hash) + if err != nil { + t.Fatalf("Test %d: failed to decode hash: %v", i, err) + } + hash := deriveKey(v.mode, password, salt, nil, nil, v.time, v.memory, v.threads, uint32(len(want))) + if !bytes.Equal(hash, want) { + t.Errorf("Test %d - got: %s want: %s", i, hex.EncodeToString(hash), hex.EncodeToString(want)) + } + } +} + +func benchmarkArgon2(mode int, time, memory uint32, threads uint8, keyLen uint32, b *testing.B) { + password := []byte("password") + salt := []byte("choosing random salts is hard") + b.ReportAllocs() + for i := 0; i < b.N; i++ { + deriveKey(mode, password, salt, nil, nil, time, memory, threads, keyLen) + } +} + +func BenchmarkArgon2i(b *testing.B) { + b.Run(" Time: 3 Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2i, 3, 32*1024, 1, 32, b) }) + b.Run(" Time: 4 Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2i, 4, 32*1024, 1, 32, b) }) + b.Run(" Time: 5 Memory: 32 MB, Threads: 1", func(b *testing.B) { 
benchmarkArgon2(argon2i, 5, 32*1024, 1, 32, b) }) + b.Run(" Time: 3 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 3, 64*1024, 4, 32, b) }) + b.Run(" Time: 4 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 4, 64*1024, 4, 32, b) }) + b.Run(" Time: 5 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 5, 64*1024, 4, 32, b) }) +} + +func BenchmarkArgon2d(b *testing.B) { + b.Run(" Time: 3, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 3, 32*1024, 1, 32, b) }) + b.Run(" Time: 4, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 4, 32*1024, 1, 32, b) }) + b.Run(" Time: 5, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 5, 32*1024, 1, 32, b) }) + b.Run(" Time: 3, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 3, 64*1024, 4, 32, b) }) + b.Run(" Time: 4, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 4, 64*1024, 4, 32, b) }) + b.Run(" Time: 5, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 5, 64*1024, 4, 32, b) }) +} + +func BenchmarkArgon2id(b *testing.B) { + b.Run(" Time: 3, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 3, 32*1024, 1, 32, b) }) + b.Run(" Time: 4, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 4, 32*1024, 1, 32, b) }) + b.Run(" Time: 5, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 5, 32*1024, 1, 32, b) }) + b.Run(" Time: 3, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 3, 64*1024, 4, 32, b) }) + b.Run(" Time: 4, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 4, 64*1024, 4, 32, b) }) + b.Run(" Time: 5, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 5, 64*1024, 4, 32, b) }) +} + +// Generated with the CLI of https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf +var testVectors = []struct { + mode int + time, memory uint32 + threads uint8 + hash string +}{ + { + mode: argon2i, time: 1, memory: 64, threads: 1, + hash: "b9c401d1844a67d50eae3967dc28870b22e508092e861a37", + }, + { + mode: argon2d, time: 1, memory: 64, threads: 1, + hash: "8727405fd07c32c78d64f547f24150d3f2e703a89f981a19", + }, + { + mode: argon2id, time: 1, memory: 64, threads: 1, + hash: "655ad15eac652dc59f7170a7332bf49b8469be1fdb9c28bb", + }, + { + mode: argon2i, time: 2, memory: 64, threads: 1, + hash: "8cf3d8f76a6617afe35fac48eb0b7433a9a670ca4a07ed64", + }, + { + mode: argon2d, time: 2, memory: 64, threads: 1, + hash: "3be9ec79a69b75d3752acb59a1fbb8b295a46529c48fbb75", + }, + { + mode: argon2id, time: 2, memory: 64, threads: 1, + hash: "068d62b26455936aa6ebe60060b0a65870dbfa3ddf8d41f7", + }, + { + mode: argon2i, time: 2, memory: 64, threads: 2, + hash: "2089f3e78a799720f80af806553128f29b132cafe40d059f", + }, + { + mode: argon2d, time: 2, memory: 64, threads: 2, + hash: "68e2462c98b8bc6bb60ec68db418ae2c9ed24fc6748a40e9", + }, + { + mode: argon2id, time: 2, memory: 64, threads: 2, + hash: "350ac37222f436ccb5c0972f1ebd3bf6b958bf2071841362", + }, + { + mode: argon2i, time: 3, memory: 256, threads: 2, + hash: "f5bbf5d4c3836af13193053155b73ec7476a6a2eb93fd5e6", + }, + { + mode: argon2d, time: 3, memory: 256, threads: 2, + hash: "f4f0669218eaf3641f39cc97efb915721102f4b128211ef2", + }, + { + mode: argon2id, time: 3, memory: 256, threads: 2, + hash: "4668d30ac4187e6878eedeacf0fd83c5a0a30db2cc16ef0b", + }, + { 
+ mode: argon2i, time: 4, memory: 4096, threads: 4, + hash: "a11f7b7f3f93f02ad4bddb59ab62d121e278369288a0d0e7", + }, + { + mode: argon2d, time: 4, memory: 4096, threads: 4, + hash: "935598181aa8dc2b720914aa6435ac8d3e3a4210c5b0fb2d", + }, + { + mode: argon2id, time: 4, memory: 4096, threads: 4, + hash: "145db9733a9f4ee43edf33c509be96b934d505a4efb33c5a", + }, + { + mode: argon2i, time: 4, memory: 1024, threads: 8, + hash: "0cdd3956aa35e6b475a7b0c63488822f774f15b43f6e6e17", + }, + { + mode: argon2d, time: 4, memory: 1024, threads: 8, + hash: "83604fc2ad0589b9d055578f4d3cc55bc616df3578a896e9", + }, + { + mode: argon2id, time: 4, memory: 1024, threads: 8, + hash: "8dafa8e004f8ea96bf7c0f93eecf67a6047476143d15577f", + }, + { + mode: argon2i, time: 2, memory: 64, threads: 3, + hash: "5cab452fe6b8479c8661def8cd703b611a3905a6d5477fe6", + }, + { + mode: argon2d, time: 2, memory: 64, threads: 3, + hash: "22474a423bda2ccd36ec9afd5119e5c8949798cadf659f51", + }, + { + mode: argon2id, time: 2, memory: 64, threads: 3, + hash: "4a15b31aec7c2590b87d1f520be7d96f56658172deaa3079", + }, + { + mode: argon2i, time: 3, memory: 1024, threads: 6, + hash: "d236b29c2b2a09babee842b0dec6aa1e83ccbdea8023dced", + }, + { + mode: argon2d, time: 3, memory: 1024, threads: 6, + hash: "a3351b0319a53229152023d9206902f4ef59661cdca89481", + }, + { + mode: argon2id, time: 3, memory: 1024, threads: 6, + hash: "1640b932f4b60e272f5d2207b9a9c626ffa1bd88d2349016", + }, +} diff --git a/vendor/golang.org/x/crypto/argon2/blake2b.go b/vendor/golang.org/x/crypto/argon2/blake2b.go new file mode 100644 index 0000000..10f4694 --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/blake2b.go @@ -0,0 +1,53 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package argon2 + +import ( + "encoding/binary" + "hash" + + "golang.org/x/crypto/blake2b" +) + +// blake2bHash computes an arbitrary long hash value of in +// and writes the hash to out. +func blake2bHash(out []byte, in []byte) { + var b2 hash.Hash + if n := len(out); n < blake2b.Size { + b2, _ = blake2b.New(n, nil) + } else { + b2, _ = blake2b.New512(nil) + } + + var buffer [blake2b.Size]byte + binary.LittleEndian.PutUint32(buffer[:4], uint32(len(out))) + b2.Write(buffer[:4]) + b2.Write(in) + + if len(out) <= blake2b.Size { + b2.Sum(out[:0]) + return + } + + outLen := len(out) + b2.Sum(buffer[:0]) + b2.Reset() + copy(out, buffer[:32]) + out = out[32:] + for len(out) > blake2b.Size { + b2.Write(buffer[:]) + b2.Sum(buffer[:0]) + copy(out, buffer[:32]) + out = out[32:] + b2.Reset() + } + + if outLen%blake2b.Size > 0 { // outLen > 64 + r := ((outLen + 31) / 32) - 2 // ⌈τ /32⌉-2 + b2, _ = blake2b.New(outLen-32*r, nil) + } + b2.Write(buffer[:]) + b2.Sum(out[:0]) +} diff --git a/vendor/golang.org/x/crypto/argon2/blamka_amd64.go b/vendor/golang.org/x/crypto/argon2/blamka_amd64.go new file mode 100644 index 0000000..2fc1ec0 --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/blamka_amd64.go @@ -0,0 +1,60 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build amd64,!gccgo,!appengine + +package argon2 + +import "golang.org/x/sys/cpu" + +func init() { + useSSE4 = cpu.X86.HasSSE41 +} + +//go:noescape +func mixBlocksSSE2(out, a, b, c *block) + +//go:noescape +func xorBlocksSSE2(out, a, b, c *block) + +//go:noescape +func blamkaSSE4(b *block) + +func processBlockSSE(out, in1, in2 *block, xor bool) { + var t block + mixBlocksSSE2(&t, in1, in2, &t) + if useSSE4 { + blamkaSSE4(&t) + } else { + for i := 0; i < blockLength; i += 16 { + blamkaGeneric( + &t[i+0], &t[i+1], &t[i+2], &t[i+3], + &t[i+4], &t[i+5], &t[i+6], &t[i+7], + &t[i+8], &t[i+9], &t[i+10], &t[i+11], + &t[i+12], &t[i+13], &t[i+14], &t[i+15], + ) + } + for i := 0; i < blockLength/8; i += 2 { + blamkaGeneric( + &t[i], &t[i+1], &t[16+i], &t[16+i+1], + &t[32+i], &t[32+i+1], &t[48+i], &t[48+i+1], + &t[64+i], &t[64+i+1], &t[80+i], &t[80+i+1], + &t[96+i], &t[96+i+1], &t[112+i], &t[112+i+1], + ) + } + } + if xor { + xorBlocksSSE2(out, in1, in2, &t) + } else { + mixBlocksSSE2(out, in1, in2, &t) + } +} + +func processBlock(out, in1, in2 *block) { + processBlockSSE(out, in1, in2, false) +} + +func processBlockXOR(out, in1, in2 *block) { + processBlockSSE(out, in1, in2, true) +} diff --git a/vendor/golang.org/x/crypto/argon2/blamka_amd64.s b/vendor/golang.org/x/crypto/argon2/blamka_amd64.s new file mode 100644 index 0000000..74a6e73 --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/blamka_amd64.s @@ -0,0 +1,243 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build amd64,!gccgo,!appengine + +#include "textflag.h" + +DATA ·c40<>+0x00(SB)/8, $0x0201000706050403 +DATA ·c40<>+0x08(SB)/8, $0x0a09080f0e0d0c0b +GLOBL ·c40<>(SB), (NOPTR+RODATA), $16 + +DATA ·c48<>+0x00(SB)/8, $0x0100070605040302 +DATA ·c48<>+0x08(SB)/8, $0x09080f0e0d0c0b0a +GLOBL ·c48<>(SB), (NOPTR+RODATA), $16 + +#define SHUFFLE(v2, v3, v4, v5, v6, v7, t1, t2) \ + MOVO v4, t1; \ + MOVO v5, v4; \ + MOVO t1, v5; \ + MOVO v6, t1; \ + PUNPCKLQDQ v6, t2; \ + PUNPCKHQDQ v7, v6; \ + PUNPCKHQDQ t2, v6; \ + PUNPCKLQDQ v7, t2; \ + MOVO t1, v7; \ + MOVO v2, t1; \ + PUNPCKHQDQ t2, v7; \ + PUNPCKLQDQ v3, t2; \ + PUNPCKHQDQ t2, v2; \ + PUNPCKLQDQ t1, t2; \ + PUNPCKHQDQ t2, v3 + +#define SHUFFLE_INV(v2, v3, v4, v5, v6, v7, t1, t2) \ + MOVO v4, t1; \ + MOVO v5, v4; \ + MOVO t1, v5; \ + MOVO v2, t1; \ + PUNPCKLQDQ v2, t2; \ + PUNPCKHQDQ v3, v2; \ + PUNPCKHQDQ t2, v2; \ + PUNPCKLQDQ v3, t2; \ + MOVO t1, v3; \ + MOVO v6, t1; \ + PUNPCKHQDQ t2, v3; \ + PUNPCKLQDQ v7, t2; \ + PUNPCKHQDQ t2, v6; \ + PUNPCKLQDQ t1, t2; \ + PUNPCKHQDQ t2, v7 + +#define HALF_ROUND(v0, v1, v2, v3, v4, v5, v6, v7, t0, c40, c48) \ + MOVO v0, t0; \ + PMULULQ v2, t0; \ + PADDQ v2, v0; \ + PADDQ t0, v0; \ + PADDQ t0, v0; \ + PXOR v0, v6; \ + PSHUFD $0xB1, v6, v6; \ + MOVO v4, t0; \ + PMULULQ v6, t0; \ + PADDQ v6, v4; \ + PADDQ t0, v4; \ + PADDQ t0, v4; \ + PXOR v4, v2; \ + PSHUFB c40, v2; \ + MOVO v0, t0; \ + PMULULQ v2, t0; \ + PADDQ v2, v0; \ + PADDQ t0, v0; \ + PADDQ t0, v0; \ + PXOR v0, v6; \ + PSHUFB c48, v6; \ + MOVO v4, t0; \ + PMULULQ v6, t0; \ + PADDQ v6, v4; \ + PADDQ t0, v4; \ + PADDQ t0, v4; \ + PXOR v4, v2; \ + MOVO v2, t0; \ + PADDQ v2, t0; \ + PSRLQ $63, v2; \ + PXOR t0, v2; \ + MOVO v1, t0; \ + PMULULQ v3, t0; \ + PADDQ v3, v1; \ + PADDQ t0, v1; \ + PADDQ t0, v1; \ + PXOR v1, v7; \ + PSHUFD $0xB1, v7, v7; \ + MOVO v5, t0; \ + PMULULQ v7, t0; \ + PADDQ v7, v5; \ + PADDQ t0, v5; \ + PADDQ t0, v5; \ + PXOR v5, v3; \ + PSHUFB c40, v3; \ + 
MOVO v1, t0; \ + PMULULQ v3, t0; \ + PADDQ v3, v1; \ + PADDQ t0, v1; \ + PADDQ t0, v1; \ + PXOR v1, v7; \ + PSHUFB c48, v7; \ + MOVO v5, t0; \ + PMULULQ v7, t0; \ + PADDQ v7, v5; \ + PADDQ t0, v5; \ + PADDQ t0, v5; \ + PXOR v5, v3; \ + MOVO v3, t0; \ + PADDQ v3, t0; \ + PSRLQ $63, v3; \ + PXOR t0, v3 + +#define LOAD_MSG_0(block, off) \ + MOVOU 8*(off+0)(block), X0; \ + MOVOU 8*(off+2)(block), X1; \ + MOVOU 8*(off+4)(block), X2; \ + MOVOU 8*(off+6)(block), X3; \ + MOVOU 8*(off+8)(block), X4; \ + MOVOU 8*(off+10)(block), X5; \ + MOVOU 8*(off+12)(block), X6; \ + MOVOU 8*(off+14)(block), X7 + +#define STORE_MSG_0(block, off) \ + MOVOU X0, 8*(off+0)(block); \ + MOVOU X1, 8*(off+2)(block); \ + MOVOU X2, 8*(off+4)(block); \ + MOVOU X3, 8*(off+6)(block); \ + MOVOU X4, 8*(off+8)(block); \ + MOVOU X5, 8*(off+10)(block); \ + MOVOU X6, 8*(off+12)(block); \ + MOVOU X7, 8*(off+14)(block) + +#define LOAD_MSG_1(block, off) \ + MOVOU 8*off+0*8(block), X0; \ + MOVOU 8*off+16*8(block), X1; \ + MOVOU 8*off+32*8(block), X2; \ + MOVOU 8*off+48*8(block), X3; \ + MOVOU 8*off+64*8(block), X4; \ + MOVOU 8*off+80*8(block), X5; \ + MOVOU 8*off+96*8(block), X6; \ + MOVOU 8*off+112*8(block), X7 + +#define STORE_MSG_1(block, off) \ + MOVOU X0, 8*off+0*8(block); \ + MOVOU X1, 8*off+16*8(block); \ + MOVOU X2, 8*off+32*8(block); \ + MOVOU X3, 8*off+48*8(block); \ + MOVOU X4, 8*off+64*8(block); \ + MOVOU X5, 8*off+80*8(block); \ + MOVOU X6, 8*off+96*8(block); \ + MOVOU X7, 8*off+112*8(block) + +#define BLAMKA_ROUND_0(block, off, t0, t1, c40, c48) \ + LOAD_MSG_0(block, off); \ + HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \ + SHUFFLE(X2, X3, X4, X5, X6, X7, t0, t1); \ + HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \ + SHUFFLE_INV(X2, X3, X4, X5, X6, X7, t0, t1); \ + STORE_MSG_0(block, off) + +#define BLAMKA_ROUND_1(block, off, t0, t1, c40, c48) \ + LOAD_MSG_1(block, off); \ + HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \ + SHUFFLE(X2, X3, X4, X5, X6, X7, t0, t1); \ + HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \ + SHUFFLE_INV(X2, X3, X4, X5, X6, X7, t0, t1); \ + STORE_MSG_1(block, off) + +// func blamkaSSE4(b *block) +TEXT ·blamkaSSE4(SB), 4, $0-8 + MOVQ b+0(FP), AX + + MOVOU ·c40<>(SB), X10 + MOVOU ·c48<>(SB), X11 + + BLAMKA_ROUND_0(AX, 0, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 16, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 32, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 48, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 64, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 80, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 96, X8, X9, X10, X11) + BLAMKA_ROUND_0(AX, 112, X8, X9, X10, X11) + + BLAMKA_ROUND_1(AX, 0, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 2, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 4, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 6, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 8, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 10, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 12, X8, X9, X10, X11) + BLAMKA_ROUND_1(AX, 14, X8, X9, X10, X11) + RET + +// func mixBlocksSSE2(out, a, b, c *block) +TEXT ·mixBlocksSSE2(SB), 4, $0-32 + MOVQ out+0(FP), DX + MOVQ a+8(FP), AX + MOVQ b+16(FP), BX + MOVQ a+24(FP), CX + MOVQ $128, BP + +loop: + MOVOU 0(AX), X0 + MOVOU 0(BX), X1 + MOVOU 0(CX), X2 + PXOR X1, X0 + PXOR X2, X0 + MOVOU X0, 0(DX) + ADDQ $16, AX + ADDQ $16, BX + ADDQ $16, CX + ADDQ $16, DX + SUBQ $2, BP + JA loop + RET + +// func xorBlocksSSE2(out, a, b, c *block) +TEXT ·xorBlocksSSE2(SB), 4, $0-32 + MOVQ out+0(FP), DX + MOVQ a+8(FP), AX + MOVQ b+16(FP), BX + MOVQ a+24(FP), CX + MOVQ $128, BP + +loop: + MOVOU 0(AX), X0 + MOVOU 
0(BX), X1 + MOVOU 0(CX), X2 + MOVOU 0(DX), X3 + PXOR X1, X0 + PXOR X2, X0 + PXOR X3, X0 + MOVOU X0, 0(DX) + ADDQ $16, AX + ADDQ $16, BX + ADDQ $16, CX + ADDQ $16, DX + SUBQ $2, BP + JA loop + RET diff --git a/vendor/golang.org/x/crypto/argon2/blamka_generic.go b/vendor/golang.org/x/crypto/argon2/blamka_generic.go new file mode 100644 index 0000000..a481b22 --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/blamka_generic.go @@ -0,0 +1,163 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package argon2 + +var useSSE4 bool + +func processBlockGeneric(out, in1, in2 *block, xor bool) { + var t block + for i := range t { + t[i] = in1[i] ^ in2[i] + } + for i := 0; i < blockLength; i += 16 { + blamkaGeneric( + &t[i+0], &t[i+1], &t[i+2], &t[i+3], + &t[i+4], &t[i+5], &t[i+6], &t[i+7], + &t[i+8], &t[i+9], &t[i+10], &t[i+11], + &t[i+12], &t[i+13], &t[i+14], &t[i+15], + ) + } + for i := 0; i < blockLength/8; i += 2 { + blamkaGeneric( + &t[i], &t[i+1], &t[16+i], &t[16+i+1], + &t[32+i], &t[32+i+1], &t[48+i], &t[48+i+1], + &t[64+i], &t[64+i+1], &t[80+i], &t[80+i+1], + &t[96+i], &t[96+i+1], &t[112+i], &t[112+i+1], + ) + } + if xor { + for i := range t { + out[i] ^= in1[i] ^ in2[i] ^ t[i] + } + } else { + for i := range t { + out[i] = in1[i] ^ in2[i] ^ t[i] + } + } +} + +func blamkaGeneric(t00, t01, t02, t03, t04, t05, t06, t07, t08, t09, t10, t11, t12, t13, t14, t15 *uint64) { + v00, v01, v02, v03 := *t00, *t01, *t02, *t03 + v04, v05, v06, v07 := *t04, *t05, *t06, *t07 + v08, v09, v10, v11 := *t08, *t09, *t10, *t11 + v12, v13, v14, v15 := *t12, *t13, *t14, *t15 + + v00 += v04 + 2*uint64(uint32(v00))*uint64(uint32(v04)) + v12 ^= v00 + v12 = v12>>32 | v12<<32 + v08 += v12 + 2*uint64(uint32(v08))*uint64(uint32(v12)) + v04 ^= v08 + v04 = v04>>24 | v04<<40 + + v00 += v04 + 2*uint64(uint32(v00))*uint64(uint32(v04)) + v12 ^= v00 + v12 = v12>>16 | v12<<48 + v08 += v12 + 2*uint64(uint32(v08))*uint64(uint32(v12)) + v04 ^= v08 + v04 = v04>>63 | v04<<1 + + v01 += v05 + 2*uint64(uint32(v01))*uint64(uint32(v05)) + v13 ^= v01 + v13 = v13>>32 | v13<<32 + v09 += v13 + 2*uint64(uint32(v09))*uint64(uint32(v13)) + v05 ^= v09 + v05 = v05>>24 | v05<<40 + + v01 += v05 + 2*uint64(uint32(v01))*uint64(uint32(v05)) + v13 ^= v01 + v13 = v13>>16 | v13<<48 + v09 += v13 + 2*uint64(uint32(v09))*uint64(uint32(v13)) + v05 ^= v09 + v05 = v05>>63 | v05<<1 + + v02 += v06 + 2*uint64(uint32(v02))*uint64(uint32(v06)) + v14 ^= v02 + v14 = v14>>32 | v14<<32 + v10 += v14 + 2*uint64(uint32(v10))*uint64(uint32(v14)) + v06 ^= v10 + v06 = v06>>24 | v06<<40 + + v02 += v06 + 2*uint64(uint32(v02))*uint64(uint32(v06)) + v14 ^= v02 + v14 = v14>>16 | v14<<48 + v10 += v14 + 2*uint64(uint32(v10))*uint64(uint32(v14)) + v06 ^= v10 + v06 = v06>>63 | v06<<1 + + v03 += v07 + 2*uint64(uint32(v03))*uint64(uint32(v07)) + v15 ^= v03 + v15 = v15>>32 | v15<<32 + v11 += v15 + 2*uint64(uint32(v11))*uint64(uint32(v15)) + v07 ^= v11 + v07 = v07>>24 | v07<<40 + + v03 += v07 + 2*uint64(uint32(v03))*uint64(uint32(v07)) + v15 ^= v03 + v15 = v15>>16 | v15<<48 + v11 += v15 + 2*uint64(uint32(v11))*uint64(uint32(v15)) + v07 ^= v11 + v07 = v07>>63 | v07<<1 + + v00 += v05 + 2*uint64(uint32(v00))*uint64(uint32(v05)) + v15 ^= v00 + v15 = v15>>32 | v15<<32 + v10 += v15 + 2*uint64(uint32(v10))*uint64(uint32(v15)) + v05 ^= v10 + v05 = v05>>24 | v05<<40 + + v00 += v05 + 2*uint64(uint32(v00))*uint64(uint32(v05)) + v15 ^= v00 + v15 = v15>>16 | v15<<48 + v10 += v15 + 
2*uint64(uint32(v10))*uint64(uint32(v15)) + v05 ^= v10 + v05 = v05>>63 | v05<<1 + + v01 += v06 + 2*uint64(uint32(v01))*uint64(uint32(v06)) + v12 ^= v01 + v12 = v12>>32 | v12<<32 + v11 += v12 + 2*uint64(uint32(v11))*uint64(uint32(v12)) + v06 ^= v11 + v06 = v06>>24 | v06<<40 + + v01 += v06 + 2*uint64(uint32(v01))*uint64(uint32(v06)) + v12 ^= v01 + v12 = v12>>16 | v12<<48 + v11 += v12 + 2*uint64(uint32(v11))*uint64(uint32(v12)) + v06 ^= v11 + v06 = v06>>63 | v06<<1 + + v02 += v07 + 2*uint64(uint32(v02))*uint64(uint32(v07)) + v13 ^= v02 + v13 = v13>>32 | v13<<32 + v08 += v13 + 2*uint64(uint32(v08))*uint64(uint32(v13)) + v07 ^= v08 + v07 = v07>>24 | v07<<40 + + v02 += v07 + 2*uint64(uint32(v02))*uint64(uint32(v07)) + v13 ^= v02 + v13 = v13>>16 | v13<<48 + v08 += v13 + 2*uint64(uint32(v08))*uint64(uint32(v13)) + v07 ^= v08 + v07 = v07>>63 | v07<<1 + + v03 += v04 + 2*uint64(uint32(v03))*uint64(uint32(v04)) + v14 ^= v03 + v14 = v14>>32 | v14<<32 + v09 += v14 + 2*uint64(uint32(v09))*uint64(uint32(v14)) + v04 ^= v09 + v04 = v04>>24 | v04<<40 + + v03 += v04 + 2*uint64(uint32(v03))*uint64(uint32(v04)) + v14 ^= v03 + v14 = v14>>16 | v14<<48 + v09 += v14 + 2*uint64(uint32(v09))*uint64(uint32(v14)) + v04 ^= v09 + v04 = v04>>63 | v04<<1 + + *t00, *t01, *t02, *t03 = v00, v01, v02, v03 + *t04, *t05, *t06, *t07 = v04, v05, v06, v07 + *t08, *t09, *t10, *t11 = v08, v09, v10, v11 + *t12, *t13, *t14, *t15 = v12, v13, v14, v15 +} diff --git a/vendor/golang.org/x/crypto/argon2/blamka_ref.go b/vendor/golang.org/x/crypto/argon2/blamka_ref.go new file mode 100644 index 0000000..baf7b55 --- /dev/null +++ b/vendor/golang.org/x/crypto/argon2/blamka_ref.go @@ -0,0 +1,15 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !amd64 appengine gccgo + +package argon2 + +func processBlock(out, in1, in2 *block) { + processBlockGeneric(out, in1, in2, false) +} + +func processBlockXOR(out, in1, in2 *block) { + processBlockGeneric(out, in1, in2, true) +} diff --git a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go index 202fa8a..aeb73f8 100644 --- a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go +++ b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go @@ -241,11 +241,11 @@ func (p *hashed) Hash() []byte { n = 3 } arr[n] = '$' - n += 1 + n++ copy(arr[n:], []byte(fmt.Sprintf("%02d", p.cost))) n += 2 arr[n] = '$' - n += 1 + n++ copy(arr[n:], p.salt) n += encodedSaltSize copy(arr[n:], p.hash) diff --git a/vendor/golang.org/x/crypto/bcrypt/bcrypt_test.go b/vendor/golang.org/x/crypto/bcrypt/bcrypt_test.go index aecf759..b7162d8 100644 --- a/vendor/golang.org/x/crypto/bcrypt/bcrypt_test.go +++ b/vendor/golang.org/x/crypto/bcrypt/bcrypt_test.go @@ -209,19 +209,19 @@ func TestMinorNotRequired(t *testing.T) { func BenchmarkEqual(b *testing.B) { b.StopTimer() passwd := []byte("somepasswordyoulike") - hash, _ := GenerateFromPassword(passwd, 10) + hash, _ := GenerateFromPassword(passwd, DefaultCost) b.StartTimer() for i := 0; i < b.N; i++ { CompareHashAndPassword(hash, passwd) } } -func BenchmarkGeneration(b *testing.B) { +func BenchmarkDefaultCost(b *testing.B) { b.StopTimer() passwd := []byte("mylongpassword1234") b.StartTimer() for i := 0; i < b.N; i++ { - GenerateFromPassword(passwd, 10) + GenerateFromPassword(passwd, DefaultCost) } } diff --git a/vendor/golang.org/x/crypto/blake2b/blake2b.go b/vendor/golang.org/x/crypto/blake2b/blake2b.go index 7f0a86e..c160e1a 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2b.go +++ b/vendor/golang.org/x/crypto/blake2b/blake2b.go @@ -39,7 +39,10 @@ var ( useSSE4 bool ) -var errKeySize = errors.New("blake2b: invalid key size") +var ( + errKeySize = errors.New("blake2b: invalid key size") + errHashSize = errors.New("blake2b: invalid hash size") +) var iv = [8]uint64{ 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, @@ -72,18 +75,31 @@ func Sum256(data []byte) [Size256]byte { } // New512 returns a new hash.Hash computing the BLAKE2b-512 checksum. A non-nil -// key turns the hash into a MAC. The key must between zero and 64 bytes long. +// key turns the hash into a MAC. The key must be between zero and 64 bytes long. func New512(key []byte) (hash.Hash, error) { return newDigest(Size, key) } // New384 returns a new hash.Hash computing the BLAKE2b-384 checksum. A non-nil -// key turns the hash into a MAC. The key must between zero and 64 bytes long. +// key turns the hash into a MAC. The key must be between zero and 64 bytes long. func New384(key []byte) (hash.Hash, error) { return newDigest(Size384, key) } // New256 returns a new hash.Hash computing the BLAKE2b-256 checksum. A non-nil -// key turns the hash into a MAC. The key must between zero and 64 bytes long. +// key turns the hash into a MAC. The key must be between zero and 64 bytes long. func New256(key []byte) (hash.Hash, error) { return newDigest(Size256, key) } +// New returns a new hash.Hash computing the BLAKE2b checksum with a custom length. +// A non-nil key turns the hash into a MAC. The key must be between zero and 64 bytes long. +// The hash size can be a value between 1 and 64 but it is highly recommended to use +// values equal or greater than: +// - 32 if BLAKE2b is used as a hash function (The key is zero bytes long). 
+// - 16 if BLAKE2b is used as a MAC function (The key is at least 16 bytes long). +// When the key is nil, the returned hash.Hash implements BinaryMarshaler +// and BinaryUnmarshaler for state (de)serialization as documented by hash.Hash. +func New(size int, key []byte) (hash.Hash, error) { return newDigest(size, key) } + func newDigest(hashSize int, key []byte) (*digest, error) { + if hashSize < 1 || hashSize > Size { + return nil, errHashSize + } if len(key) > Size { return nil, errKeySize } @@ -136,6 +152,50 @@ type digest struct { keyLen int } +const ( + magic = "b2b" + marshaledSize = len(magic) + 8*8 + 2*8 + 1 + BlockSize + 1 +) + +func (d *digest) MarshalBinary() ([]byte, error) { + if d.keyLen != 0 { + return nil, errors.New("crypto/blake2b: cannot marshal MACs") + } + b := make([]byte, 0, marshaledSize) + b = append(b, magic...) + for i := 0; i < 8; i++ { + b = appendUint64(b, d.h[i]) + } + b = appendUint64(b, d.c[0]) + b = appendUint64(b, d.c[1]) + // Maximum value for size is 64 + b = append(b, byte(d.size)) + b = append(b, d.block[:]...) + b = append(b, byte(d.offset)) + return b, nil +} + +func (d *digest) UnmarshalBinary(b []byte) error { + if len(b) < len(magic) || string(b[:len(magic)]) != magic { + return errors.New("crypto/blake2b: invalid hash state identifier") + } + if len(b) != marshaledSize { + return errors.New("crypto/blake2b: invalid hash state size") + } + b = b[len(magic):] + for i := 0; i < 8; i++ { + b, d.h[i] = consumeUint64(b) + } + b, d.c[0] = consumeUint64(b) + b, d.c[1] = consumeUint64(b) + d.size = int(b[0]) + b = b[1:] + copy(d.block[:], b[:BlockSize]) + b = b[BlockSize:] + d.offset = int(b[0]) + return nil +} + func (d *digest) BlockSize() int { return BlockSize } func (d *digest) Size() int { return d.size } @@ -205,3 +265,25 @@ func (d *digest) finalize(hash *[Size]byte) { binary.LittleEndian.PutUint64(hash[8*i:], v) } } + +func appendUint64(b []byte, x uint64) []byte { + var a [8]byte + binary.BigEndian.PutUint64(a[:], x) + return append(b, a[:]...) +} + +func appendUint32(b []byte, x uint32) []byte { + var a [4]byte + binary.BigEndian.PutUint32(a[:], x) + return append(b, a[:]...) 
+} + +func consumeUint64(b []byte) ([]byte, uint64) { + x := binary.BigEndian.Uint64(b) + return b[8:], x +} + +func consumeUint32(b []byte) ([]byte, uint32) { + x := binary.BigEndian.Uint32(b) + return b[4:], x +} diff --git a/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.go b/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.go index 8c41cf6..4d31dd0 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.go +++ b/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.go @@ -6,21 +6,14 @@ package blake2b +import "golang.org/x/sys/cpu" + func init() { - useAVX2 = supportsAVX2() - useAVX = supportsAVX() - useSSE4 = supportsSSE4() + useAVX2 = cpu.X86.HasAVX2 + useAVX = cpu.X86.HasAVX + useSSE4 = cpu.X86.HasSSE41 } -//go:noescape -func supportsSSE4() bool - -//go:noescape -func supportsAVX() bool - -//go:noescape -func supportsAVX2() bool - //go:noescape func hashBlocksAVX2(h *[8]uint64, c *[2]uint64, flag uint64, blocks []byte) @@ -31,13 +24,14 @@ func hashBlocksAVX(h *[8]uint64, c *[2]uint64, flag uint64, blocks []byte) func hashBlocksSSE4(h *[8]uint64, c *[2]uint64, flag uint64, blocks []byte) func hashBlocks(h *[8]uint64, c *[2]uint64, flag uint64, blocks []byte) { - if useAVX2 { + switch { + case useAVX2: hashBlocksAVX2(h, c, flag, blocks) - } else if useAVX { + case useAVX: hashBlocksAVX(h, c, flag, blocks) - } else if useSSE4 { + case useSSE4: hashBlocksSSE4(h, c, flag, blocks) - } else { + default: hashBlocksGeneric(h, c, flag, blocks) } } diff --git a/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.s b/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.s index 784bce6..5593b1b 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.s +++ b/vendor/golang.org/x/crypto/blake2b/blake2bAVX2_amd64.s @@ -748,15 +748,3 @@ noinc: MOVQ BP, SP RET - -// func supportsAVX2() bool -TEXT ·supportsAVX2(SB), 4, $0-1 - MOVQ runtime·support_avx2(SB), AX - MOVB AX, ret+0(FP) - RET - -// func supportsAVX() bool -TEXT ·supportsAVX(SB), 4, $0-1 - MOVQ runtime·support_avx(SB), AX - MOVB AX, ret+0(FP) - RET diff --git a/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.go b/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.go index 2ab7c30..30e2fcd 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.go +++ b/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.go @@ -6,13 +6,12 @@ package blake2b +import "golang.org/x/sys/cpu" + func init() { - useSSE4 = supportsSSE4() + useSSE4 = cpu.X86.HasSSE41 } -//go:noescape -func supportsSSE4() bool - //go:noescape func hashBlocksSSE4(h *[8]uint64, c *[2]uint64, flag uint64, blocks []byte) diff --git a/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.s b/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.s index 6453074..578e947 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.s +++ b/vendor/golang.org/x/crypto/blake2b/blake2b_amd64.s @@ -279,12 +279,3 @@ noinc: MOVQ BP, SP RET - -// func supportsSSE4() bool -TEXT ·supportsSSE4(SB), 4, $0-1 - MOVL $1, AX - CPUID - SHRL $19, CX // Bit 19 indicates SSE4 support - ANDL $1, CX // CX != 0 if support SSE4 - MOVB CX, ret+0(FP) - RET diff --git a/vendor/golang.org/x/crypto/blake2b/blake2b_test.go b/vendor/golang.org/x/crypto/blake2b/blake2b_test.go index 4aa4957..723327a 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2b_test.go +++ b/vendor/golang.org/x/crypto/blake2b/blake2b_test.go @@ -6,6 +6,7 @@ package blake2b import ( "bytes" + "encoding" "encoding/hex" "fmt" "hash" @@ -69,6 +70,54 @@ func TestHashes2X(t *testing.T) { testHashes2X(t) } +func TestMarshal(t 
*testing.T) { + input := make([]byte, 255) + for i := range input { + input[i] = byte(i) + } + for _, size := range []int{Size, Size256, Size384, 12, 25, 63} { + for i := 0; i < 256; i++ { + h, err := New(size, nil) + if err != nil { + t.Fatalf("size=%d, len(input)=%d: error from New(%v, nil): %v", size, i, size, err) + } + h2, err := New(size, nil) + if err != nil { + t.Fatalf("size=%d, len(input)=%d: error from New(%v, nil): %v", size, i, size, err) + } + + h.Write(input[:i/2]) + halfstate, err := h.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + t.Fatalf("size=%d, len(input)=%d: could not marshal: %v", size, i, err) + } + err = h2.(encoding.BinaryUnmarshaler).UnmarshalBinary(halfstate) + if err != nil { + t.Fatalf("size=%d, len(input)=%d: could not unmarshal: %v", size, i, err) + } + + h.Write(input[i/2 : i]) + sum := h.Sum(nil) + h2.Write(input[i/2 : i]) + sum2 := h2.Sum(nil) + + if !bytes.Equal(sum, sum2) { + t.Fatalf("size=%d, len(input)=%d: results do not match; sum = %v, sum2 = %v", size, i, sum, sum2) + } + + h3, err := New(size, nil) + if err != nil { + t.Fatalf("size=%d, len(input)=%d: error from New(%v, nil): %v", size, i, size, err) + } + h3.Write(input[:i]) + sum3 := h3.Sum(nil) + if !bytes.Equal(sum, sum3) { + t.Fatalf("size=%d, len(input)=%d: sum = %v, want %v", size, i, sum, sum3) + } + } + } +} + func testHashes(t *testing.T) { key, _ := hex.DecodeString("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f") @@ -126,7 +175,7 @@ func testHashes2X(t *testing.T) { t.Fatalf("#%d (single write): error from Read: %v", i, err) } if n, err := h.Read(sum); n != 0 || err != io.EOF { - t.Fatalf("#%d (single write): Read did not return (0, os.EOF) after exhaustion, got (%v, %v)", i, n, err) + t.Fatalf("#%d (single write): Read did not return (0, io.EOF) after exhaustion, got (%v, %v)", i, n, err) } if gotHex := fmt.Sprintf("%x", sum); gotHex != expectedHex { t.Fatalf("#%d (single write): got %s, wanted %s", i, gotHex, expectedHex) diff --git a/vendor/golang.org/x/crypto/blake2b/blake2x.go b/vendor/golang.org/x/crypto/blake2b/blake2x.go index c814496..52c414d 100644 --- a/vendor/golang.org/x/crypto/blake2b/blake2x.go +++ b/vendor/golang.org/x/crypto/blake2b/blake2x.go @@ -29,7 +29,7 @@ type XOF interface { } // OutputLengthUnknown can be used as the size argument to NewXOF to indicate -// the the length of the output is not known in advance. +// the length of the output is not known in advance. const OutputLengthUnknown = 0 // magicUnknownOutputLength is a magic value for the output size that indicates diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s.go b/vendor/golang.org/x/crypto/blake2s/blake2s.go index ae0dc92..5fb4a9e 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s.go +++ b/vendor/golang.org/x/crypto/blake2s/blake2s.go @@ -49,6 +49,8 @@ func Sum256(data []byte) [Size]byte { // New256 returns a new hash.Hash computing the BLAKE2s-256 checksum. A non-nil // key turns the hash into a MAC. The key must between zero and 32 bytes long. +// When the key is nil, the returned hash.Hash implements BinaryMarshaler +// and BinaryUnmarshaler for state (de)serialization as documented by hash.Hash. 
func New256(key []byte) (hash.Hash, error) { return newDigest(Size, key) } // New128 returns a new hash.Hash computing the BLAKE2s-128 checksum given a @@ -120,6 +122,50 @@ type digest struct { keyLen int } +const ( + magic = "b2s" + marshaledSize = len(magic) + 8*4 + 2*4 + 1 + BlockSize + 1 +) + +func (d *digest) MarshalBinary() ([]byte, error) { + if d.keyLen != 0 { + return nil, errors.New("crypto/blake2s: cannot marshal MACs") + } + b := make([]byte, 0, marshaledSize) + b = append(b, magic...) + for i := 0; i < 8; i++ { + b = appendUint32(b, d.h[i]) + } + b = appendUint32(b, d.c[0]) + b = appendUint32(b, d.c[1]) + // Maximum value for size is 32 + b = append(b, byte(d.size)) + b = append(b, d.block[:]...) + b = append(b, byte(d.offset)) + return b, nil +} + +func (d *digest) UnmarshalBinary(b []byte) error { + if len(b) < len(magic) || string(b[:len(magic)]) != magic { + return errors.New("crypto/blake2s: invalid hash state identifier") + } + if len(b) != marshaledSize { + return errors.New("crypto/blake2s: invalid hash state size") + } + b = b[len(magic):] + for i := 0; i < 8; i++ { + b, d.h[i] = consumeUint32(b) + } + b, d.c[0] = consumeUint32(b) + b, d.c[1] = consumeUint32(b) + d.size = int(b[0]) + b = b[1:] + copy(d.block[:], b[:BlockSize]) + b = b[BlockSize:] + d.offset = int(b[0]) + return nil +} + func (d *digest) BlockSize() int { return BlockSize } func (d *digest) Size() int { return d.size } @@ -185,3 +231,14 @@ func (d *digest) finalize(hash *[Size]byte) { binary.LittleEndian.PutUint32(hash[4*i:], v) } } + +func appendUint32(b []byte, x uint32) []byte { + var a [4]byte + binary.BigEndian.PutUint32(a[:], x) + return append(b, a[:]...) +} + +func consumeUint32(b []byte) ([]byte, uint32) { + x := binary.BigEndian.Uint32(b) + return b[4:], x +} diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s_386.go b/vendor/golang.org/x/crypto/blake2s/blake2s_386.go index 45ae546..d8f9cea 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s_386.go +++ b/vendor/golang.org/x/crypto/blake2s/blake2s_386.go @@ -6,18 +6,14 @@ package blake2s +import "golang.org/x/sys/cpu" + var ( useSSE4 = false - useSSSE3 = supportSSSE3() - useSSE2 = supportSSE2() + useSSSE3 = cpu.X86.HasSSSE3 + useSSE2 = cpu.X86.HasSSE2 ) -//go:noescape -func supportSSE2() bool - -//go:noescape -func supportSSSE3() bool - //go:noescape func hashBlocksSSE2(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) @@ -25,11 +21,12 @@ func hashBlocksSSE2(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) func hashBlocksSSSE3(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) func hashBlocks(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) { - if useSSSE3 { + switch { + case useSSSE3: hashBlocksSSSE3(h, c, flag, blocks) - } else if useSSE2 { + case useSSE2: hashBlocksSSE2(h, c, flag, blocks) - } else { + default: hashBlocksGeneric(h, c, flag, blocks) } } diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s_386.s b/vendor/golang.org/x/crypto/blake2s/blake2s_386.s index 0bb65c7..c123e5d 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s_386.s +++ b/vendor/golang.org/x/crypto/blake2s/blake2s_386.s @@ -433,28 +433,3 @@ loop: MOVL BP, SP RET - -// func supportSSSE3() bool -TEXT ·supportSSSE3(SB), 4, $0-1 - MOVL $1, AX - CPUID - MOVL CX, BX - ANDL $0x1, BX // supports SSE3 - JZ FALSE - ANDL $0x200, CX // supports SSSE3 - JZ FALSE - MOVB $1, ret+0(FP) - RET - -FALSE: - MOVB $0, ret+0(FP) - RET - -// func supportSSE2() bool -TEXT ·supportSSE2(SB), 4, $0-1 - MOVL $1, AX - CPUID - SHRL $26, DX - ANDL $1, DX // 
DX != 0 if support SSE2 - MOVB DX, ret+0(FP) - RET diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.go b/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.go index a925e6b..4e8d2d7 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.go +++ b/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.go @@ -6,18 +6,14 @@ package blake2s +import "golang.org/x/sys/cpu" + var ( - useSSE4 = supportSSE4() - useSSSE3 = supportSSSE3() - useSSE2 = true // Always available on amd64 + useSSE4 = cpu.X86.HasSSE41 + useSSSE3 = cpu.X86.HasSSSE3 + useSSE2 = cpu.X86.HasSSE2 ) -//go:noescape -func supportSSSE3() bool - -//go:noescape -func supportSSE4() bool - //go:noescape func hashBlocksSSE2(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) @@ -28,13 +24,14 @@ func hashBlocksSSSE3(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) func hashBlocksSSE4(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) func hashBlocks(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) { - if useSSE4 { + switch { + case useSSE4: hashBlocksSSE4(h, c, flag, blocks) - } else if useSSSE3 { + case useSSSE3: hashBlocksSSSE3(h, c, flag, blocks) - } else if useSSE2 { + case useSSE2: hashBlocksSSE2(h, c, flag, blocks) - } else { + default: hashBlocksGeneric(h, c, flag, blocks) } } diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.s b/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.s index 6cdf5a9..8da2802 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.s +++ b/vendor/golang.org/x/crypto/blake2s/blake2s_amd64.s @@ -436,28 +436,3 @@ TEXT ·hashBlocksSSSE3(SB), 0, $672-48 // frame = 656 + 16 byte alignment TEXT ·hashBlocksSSE4(SB), 0, $32-48 // frame = 16 + 16 byte alignment HASH_BLOCKS(h+0(FP), c+8(FP), flag+16(FP), blocks_base+24(FP), blocks_len+32(FP), BLAKE2s_SSE4) RET - -// func supportSSE4() bool -TEXT ·supportSSE4(SB), 4, $0-1 - MOVL $1, AX - CPUID - SHRL $19, CX // Bit 19 indicates SSE4.1. - ANDL $1, CX - MOVB CX, ret+0(FP) - RET - -// func supportSSSE3() bool -TEXT ·supportSSSE3(SB), 4, $0-1 - MOVL $1, AX - CPUID - MOVL CX, BX - ANDL $0x1, BX // Bit zero indicates SSE3 support. - JZ FALSE - ANDL $0x200, CX // Bit nine indicates SSSE3 support. 
- JZ FALSE - MOVB $1, ret+0(FP) - RET - -FALSE: - MOVB $0, ret+0(FP) - RET diff --git a/vendor/golang.org/x/crypto/blake2s/blake2s_test.go b/vendor/golang.org/x/crypto/blake2s/blake2s_test.go index 938fef3..cde79fb 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2s_test.go +++ b/vendor/golang.org/x/crypto/blake2s/blake2s_test.go @@ -5,6 +5,8 @@ package blake2s import ( + "bytes" + "encoding" "encoding/hex" "fmt" "testing" @@ -64,6 +66,52 @@ func TestHashes2X(t *testing.T) { testHashes2X(t) } +func TestMarshal(t *testing.T) { + input := make([]byte, 255) + for i := range input { + input[i] = byte(i) + } + for i := 0; i < 256; i++ { + h, err := New256(nil) + if err != nil { + t.Fatalf("len(input)=%d: error from New256(nil): %v", i, err) + } + h2, err := New256(nil) + if err != nil { + t.Fatalf("len(input)=%d: error from New256(nil): %v", i, err) + } + + h.Write(input[:i/2]) + halfstate, err := h.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + t.Fatalf("len(input)=%d: could not marshal: %v", i, err) + } + err = h2.(encoding.BinaryUnmarshaler).UnmarshalBinary(halfstate) + if err != nil { + t.Fatalf("len(input)=%d: could not unmarshal: %v", i, err) + } + + h.Write(input[i/2 : i]) + sum := h.Sum(nil) + h2.Write(input[i/2 : i]) + sum2 := h2.Sum(nil) + + if !bytes.Equal(sum, sum2) { + t.Fatalf("len(input)=%d: results do not match; sum = %v, sum2 = %v", i, sum, sum2) + } + + h3, err := New256(nil) + if err != nil { + t.Fatalf("len(input)=%d: error from New256(nil): %v", i, err) + } + h3.Write(input[:i]) + sum3 := h3.Sum(nil) + if !bytes.Equal(sum, sum3) { + t.Fatalf("len(input)=%d: sum = %v, want %v", i, sum, sum3) + } + } +} + func testHashes(t *testing.T) { key, _ := hex.DecodeString("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") @@ -185,7 +233,7 @@ func testHashes2X(t *testing.T) { if n, err := h.Read(result[:]); err != nil { t.Fatalf("#unknown length: error from Read: %v", err) } else if n != len(result) { - t.Fatalf("#unknown length: Read returned %d bytes, want %d: %v", n, len(result)) + t.Fatalf("#unknown length: Read returned %d bytes, want %d", n, len(result)) } const expected = "2a9a6977d915a2c4dd07dbcafe1918bf1682e56d9c8e567ecd19bfd7cd93528833c764d12b34a5e2a219c9fd463dab45e972c5574d73f45de5b2e23af72530d8" diff --git a/vendor/golang.org/x/crypto/blake2s/blake2x.go b/vendor/golang.org/x/crypto/blake2s/blake2x.go index eaff2a7..828749f 100644 --- a/vendor/golang.org/x/crypto/blake2s/blake2x.go +++ b/vendor/golang.org/x/crypto/blake2s/blake2x.go @@ -29,7 +29,7 @@ type XOF interface { } // OutputLengthUnknown can be used as the size argument to NewXOF to indicate -// the the length of the output is not known in advance. +// the length of the output is not known in advance. const OutputLengthUnknown = 0 // magicUnknownOutputLength is a magic value for the output size that indicates diff --git a/vendor/golang.org/x/crypto/blowfish/cipher.go b/vendor/golang.org/x/crypto/blowfish/cipher.go index 2641dad..213bf20 100644 --- a/vendor/golang.org/x/crypto/blowfish/cipher.go +++ b/vendor/golang.org/x/crypto/blowfish/cipher.go @@ -3,6 +3,14 @@ // license that can be found in the LICENSE file. // Package blowfish implements Bruce Schneier's Blowfish encryption algorithm. +// +// Blowfish is a legacy cipher and its short block size makes it vulnerable to +// birthday bound attacks (see https://sweet32.info). It should only be used +// where compatibility with legacy systems, not security, is the goal. 
+// +// Deprecated: any new system should use AES (from crypto/aes, if necessary in +// an AEAD mode like crypto/cipher.NewGCM) or XChaCha20-Poly1305 (from +// golang.org/x/crypto/chacha20poly1305). package blowfish // import "golang.org/x/crypto/blowfish" // The code is a port of Bruce Schneier's C implementation. diff --git a/vendor/golang.org/x/crypto/bn256/bn256.go b/vendor/golang.org/x/crypto/bn256/bn256.go index 014f8b3..9c99fcd 100644 --- a/vendor/golang.org/x/crypto/bn256/bn256.go +++ b/vendor/golang.org/x/crypto/bn256/bn256.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package bn256 implements a particular bilinear group at the 128-bit security level. +// Package bn256 implements a particular bilinear group. // // Bilinear groups are the basis of many of the new cryptographic protocols // that have been proposed over the past decade. They consist of a triplet of @@ -14,6 +14,15 @@ // Barreto-Naehrig curve as described in // http://cryptojedi.org/papers/dclxvi-20100714.pdf. Its output is compatible // with the implementation described in that paper. +// +// This package previously claimed to operate at a 128-bit security level. +// However, recent improvements in attacks mean that is no longer true. See +// https://moderncrypto.org/mail-archive/curves/2016/000740.html. +// +// Deprecated: due to its weakened security, new systems should not rely on this +// elliptic curve. This package is frozen, and not implemented in constant time. +// There is a more complete implementation at github.com/cloudflare/bn256, but +// note that it suffers from the same security issues of the underlying curve. package bn256 // import "golang.org/x/crypto/bn256" import ( @@ -22,9 +31,6 @@ import ( "math/big" ) -// BUG(agl): this implementation is not constant time. -// TODO(agl): keep GF(p²) elements in Mongomery form. - // G1 is an abstract cyclic group. The zero value is suitable for use as the // output of an operation, but cannot be used as an input. type G1 struct { @@ -49,8 +55,11 @@ func RandomG1(r io.Reader) (*big.Int, *G1, error) { return k, new(G1).ScalarBaseMult(k), nil } -func (g *G1) String() string { - return "bn256.G1" + g.p.String() +func (e *G1) String() string { + if e.p == nil { + return "bn256.G1" + newCurvePoint(nil).String() + } + return "bn256.G1" + e.p.String() } // ScalarBaseMult sets e to g*k where g is the generator of the group and @@ -73,7 +82,8 @@ func (e *G1) ScalarMult(a *G1, k *big.Int) *G1 { } // Add sets e to a+b and then returns e. -// BUG(agl): this function is not complete: a==b fails. +// +// Warning: this function is not complete, it fails for a equal to b. func (e *G1) Add(a, b *G1) *G1 { if e.p == nil { e.p = newCurvePoint(nil) @@ -92,15 +102,19 @@ func (e *G1) Neg(a *G1) *G1 { } // Marshal converts n to a byte slice. -func (n *G1) Marshal() []byte { - n.p.MakeAffine(nil) - - xBytes := new(big.Int).Mod(n.p.x, p).Bytes() - yBytes := new(big.Int).Mod(n.p.y, p).Bytes() - +func (e *G1) Marshal() []byte { // Each value is a 256-bit number. 
const numBytes = 256 / 8 + if e.p.IsInfinity() { + return make([]byte, numBytes*2) + } + + e.p.MakeAffine(nil) + + xBytes := new(big.Int).Mod(e.p.x, p).Bytes() + yBytes := new(big.Int).Mod(e.p.y, p).Bytes() + ret := make([]byte, numBytes*2) copy(ret[1*numBytes-len(xBytes):], xBytes) copy(ret[2*numBytes-len(yBytes):], yBytes) @@ -166,8 +180,11 @@ func RandomG2(r io.Reader) (*big.Int, *G2, error) { return k, new(G2).ScalarBaseMult(k), nil } -func (g *G2) String() string { - return "bn256.G2" + g.p.String() +func (e *G2) String() string { + if e.p == nil { + return "bn256.G2" + newTwistPoint(nil).String() + } + return "bn256.G2" + e.p.String() } // ScalarBaseMult sets e to g*k where g is the generator of the group and @@ -190,7 +207,8 @@ func (e *G2) ScalarMult(a *G2, k *big.Int) *G2 { } // Add sets e to a+b and then returns e. -// BUG(agl): this function is not complete: a==b fails. +// +// Warning: this function is not complete, it fails for a equal to b. func (e *G2) Add(a, b *G2) *G2 { if e.p == nil { e.p = newTwistPoint(nil) @@ -201,6 +219,13 @@ func (e *G2) Add(a, b *G2) *G2 { // Marshal converts n into a byte slice. func (n *G2) Marshal() []byte { + // Each value is a 256-bit number. + const numBytes = 256 / 8 + + if n.p.IsInfinity() { + return make([]byte, numBytes*4) + } + n.p.MakeAffine(nil) xxBytes := new(big.Int).Mod(n.p.x.x, p).Bytes() @@ -208,9 +233,6 @@ func (n *G2) Marshal() []byte { yxBytes := new(big.Int).Mod(n.p.y.x, p).Bytes() yyBytes := new(big.Int).Mod(n.p.y.y, p).Bytes() - // Each value is a 256-bit number. - const numBytes = 256 / 8 - ret := make([]byte, numBytes*4) copy(ret[1*numBytes-len(xxBytes):], xxBytes) copy(ret[2*numBytes-len(xyBytes):], xyBytes) @@ -265,8 +287,11 @@ type GT struct { p *gfP12 } -func (g *GT) String() string { - return "bn256.GT" + g.p.String() +func (e *GT) String() string { + if e.p == nil { + return "bn256.GT" + newGFp12(nil).String() + } + return "bn256.GT" + e.p.String() } // ScalarMult sets e to a*k and then returns e. diff --git a/vendor/golang.org/x/crypto/bn256/curve.go b/vendor/golang.org/x/crypto/bn256/curve.go index 55b7063..63c052b 100644 --- a/vendor/golang.org/x/crypto/bn256/curve.go +++ b/vendor/golang.org/x/crypto/bn256/curve.go @@ -245,10 +245,19 @@ func (c *curvePoint) Mul(a *curvePoint, scalar *big.Int, pool *bnPool) *curvePoi return c } +// MakeAffine converts c to affine form and returns c. If c is ∞, then it sets +// c to 0 : 1 : 0. 
func (c *curvePoint) MakeAffine(pool *bnPool) *curvePoint { if words := c.z.Bits(); len(words) == 1 && words[0] == 1 { return c } + if c.IsInfinity() { + c.x.SetInt64(0) + c.y.SetInt64(1) + c.z.SetInt64(0) + c.t.SetInt64(0) + return c + } zInv := pool.Get().ModInverse(c.z, p) t := pool.Get().Mul(c.y, zInv) diff --git a/vendor/golang.org/x/crypto/bn256/gfp12.go b/vendor/golang.org/x/crypto/bn256/gfp12.go index f084edd..2b0151e 100644 --- a/vendor/golang.org/x/crypto/bn256/gfp12.go +++ b/vendor/golang.org/x/crypto/bn256/gfp12.go @@ -125,8 +125,8 @@ func (e *gfP12) Mul(a, b *gfP12, pool *bnPool) *gfP12 { } func (e *gfP12) MulScalar(a *gfP12, b *gfP6, pool *bnPool) *gfP12 { - e.x.Mul(e.x, b, pool) - e.y.Mul(e.y, b, pool) + e.x.Mul(a.x, b, pool) + e.y.Mul(a.y, b, pool) return e } diff --git a/vendor/golang.org/x/crypto/bn256/twist.go b/vendor/golang.org/x/crypto/bn256/twist.go index 4f8b3fe..056d80f 100644 --- a/vendor/golang.org/x/crypto/bn256/twist.go +++ b/vendor/golang.org/x/crypto/bn256/twist.go @@ -219,10 +219,19 @@ func (c *twistPoint) Mul(a *twistPoint, scalar *big.Int, pool *bnPool) *twistPoi return c } +// MakeAffine converts c to affine form and returns c. If c is ∞, then it sets +// c to 0 : 1 : 0. func (c *twistPoint) MakeAffine(pool *bnPool) *twistPoint { if c.z.IsOne() { return c } + if c.IsInfinity() { + c.x.SetZero() + c.y.SetOne() + c.z.SetZero() + c.t.SetZero() + return c + } zInv := newGFp2(pool).Invert(c.z, pool) t := newGFp2(pool).Mul(c.y, zInv, pool) diff --git a/vendor/golang.org/x/crypto/cast5/cast5.go b/vendor/golang.org/x/crypto/cast5/cast5.go index 0b4af37..ddcbeb6 100644 --- a/vendor/golang.org/x/crypto/cast5/cast5.go +++ b/vendor/golang.org/x/crypto/cast5/cast5.go @@ -2,8 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package cast5 implements CAST5, as defined in RFC 2144. CAST5 is a common -// OpenPGP cipher. +// Package cast5 implements CAST5, as defined in RFC 2144. +// +// CAST5 is a legacy cipher and its short block size makes it vulnerable to +// birthday bound attacks (see https://sweet32.info). It should only be used +// where compatibility with legacy systems, not security, is the goal. +// +// Deprecated: any new system should use AES (from crypto/aes, if necessary in +// an AEAD mode like crypto/cipher.NewGCM) or XChaCha20-Poly1305 (from +// golang.org/x/crypto/chacha20poly1305). package cast5 // import "golang.org/x/crypto/cast5" import "errors" diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305.go b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305.go index 3f0dcb9..bbb86ef 100644 --- a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305.go +++ b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305.go @@ -2,32 +2,50 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package chacha20poly1305 implements the ChaCha20-Poly1305 AEAD as specified in RFC 7539. +// Package chacha20poly1305 implements the ChaCha20-Poly1305 AEAD as specified in RFC 7539, +// and its extended nonce variant XChaCha20-Poly1305. package chacha20poly1305 // import "golang.org/x/crypto/chacha20poly1305" import ( "crypto/cipher" + "encoding/binary" "errors" ) const ( // KeySize is the size of the key used by this AEAD, in bytes. KeySize = 32 - // NonceSize is the size of the nonce used with this AEAD, in bytes. 
+ + // NonceSize is the size of the nonce used with the standard variant of this + // AEAD, in bytes. + // + // Note that this is too short to be safely generated at random if the same + // key is reused more than 2³² times. NonceSize = 12 + + // NonceSizeX is the size of the nonce used with the XChaCha20-Poly1305 + // variant of this AEAD, in bytes. + NonceSizeX = 24 ) type chacha20poly1305 struct { - key [32]byte + key [8]uint32 } -// New returns a ChaCha20-Poly1305 AEAD that uses the given, 256-bit key. +// New returns a ChaCha20-Poly1305 AEAD that uses the given 256-bit key. func New(key []byte) (cipher.AEAD, error) { if len(key) != KeySize { return nil, errors.New("chacha20poly1305: bad key length") } ret := new(chacha20poly1305) - copy(ret.key[:], key) + ret.key[0] = binary.LittleEndian.Uint32(key[0:4]) + ret.key[1] = binary.LittleEndian.Uint32(key[4:8]) + ret.key[2] = binary.LittleEndian.Uint32(key[8:12]) + ret.key[3] = binary.LittleEndian.Uint32(key[12:16]) + ret.key[4] = binary.LittleEndian.Uint32(key[16:20]) + ret.key[5] = binary.LittleEndian.Uint32(key[20:24]) + ret.key[6] = binary.LittleEndian.Uint32(key[24:28]) + ret.key[7] = binary.LittleEndian.Uint32(key[28:32]) return ret, nil } diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_amd64.go b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_amd64.go index 7cd7ad8..2aa4fd8 100644 --- a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_amd64.go +++ b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_amd64.go @@ -6,7 +6,12 @@ package chacha20poly1305 -import "encoding/binary" +import ( + "encoding/binary" + + "golang.org/x/crypto/internal/subtle" + "golang.org/x/sys/cpu" +) //go:noescape func chacha20Poly1305Open(dst []byte, key []uint32, src, ad []byte) bool @@ -14,78 +19,26 @@ func chacha20Poly1305Open(dst []byte, key []uint32, src, ad []byte) bool //go:noescape func chacha20Poly1305Seal(dst []byte, key []uint32, src, ad []byte) -// cpuid is implemented in chacha20poly1305_amd64.s. -func cpuid(eaxArg, ecxArg uint32) (eax, ebx, ecx, edx uint32) - -// xgetbv with ecx = 0 is implemented in chacha20poly1305_amd64.s. -func xgetbv() (eax, edx uint32) - var ( - useASM bool - useAVX2 bool + useAVX2 = cpu.X86.HasAVX2 && cpu.X86.HasBMI2 ) -func init() { - detectCPUFeatures() -} - -// detectCPUFeatures is used to detect if cpu instructions -// used by the functions implemented in assembler in -// chacha20poly1305_amd64.s are supported. -func detectCPUFeatures() { - maxID, _, _, _ := cpuid(0, 0) - if maxID < 1 { - return - } - - _, _, ecx1, _ := cpuid(1, 0) - - haveSSSE3 := isSet(9, ecx1) - useASM = haveSSSE3 - - haveOSXSAVE := isSet(27, ecx1) - - osSupportsAVX := false - // For XGETBV, OSXSAVE bit is required and sufficient. - if haveOSXSAVE { - eax, _ := xgetbv() - // Check if XMM and YMM registers have OS support. - osSupportsAVX = isSet(1, eax) && isSet(2, eax) - } - haveAVX := isSet(28, ecx1) && osSupportsAVX - - if maxID < 7 { - return - } - - _, ebx7, _, _ := cpuid(7, 0) - haveAVX2 := isSet(5, ebx7) && haveAVX - haveBMI2 := isSet(8, ebx7) - - useAVX2 = haveAVX2 && haveBMI2 -} - -// isSet checks if bit at bitpos is set in value. 
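The new constant comments spell out the nonce contract: the 12-byte NonceSize is only safe with deterministic (for example counter-based) nonces, while NonceSizeX exists for randomly generated ones. A hedged sketch of the counter approach with the standard variant (the helper below is illustrative and not part of the diff):

package main

import (
	"crypto/rand"
	"encoding/binary"
	"fmt"

	"golang.org/x/crypto/chacha20poly1305"
)

func main() {
	key := make([]byte, chacha20poly1305.KeySize)
	if _, err := rand.Read(key); err != nil {
		panic(err)
	}
	aead, err := chacha20poly1305.New(key)
	if err != nil {
		panic(err)
	}

	var counter uint64
	seal := func(plaintext, ad []byte) []byte {
		// 96-bit nonce: 4 zero bytes plus a 64-bit message counter.
		// Each counter value must be used at most once per key.
		nonce := make([]byte, chacha20poly1305.NonceSize)
		binary.LittleEndian.PutUint64(nonce[4:], counter)
		counter++
		// Prepend the nonce so the receiver can pass it to Open.
		return append(nonce, aead.Seal(nil, nonce, plaintext, ad)...)
	}

	fmt.Printf("%x\n", seal([]byte("hello"), nil))
}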
-func isSet(bitpos uint, value uint32) bool { - return value&(1< 0 { - alterAdIdx := mr.Intn(len(ad)) + alterAdIdx := mathrand.Intn(len(ad)) ad[alterAdIdx] ^= 0x80 if _, err := aead.Open(nil, nonce, ct, ad); err == nil { t.Errorf("#%d: Open was successful after altering additional data", i) @@ -50,14 +65,14 @@ func TestVectors(t *testing.T) { ad[alterAdIdx] ^= 0x80 } - alterNonceIdx := mr.Intn(aead.NonceSize()) + alterNonceIdx := mathrand.Intn(aead.NonceSize()) nonce[alterNonceIdx] ^= 0x80 if _, err := aead.Open(nil, nonce, ct, ad); err == nil { t.Errorf("#%d: Open was successful after altering nonce", i) } nonce[alterNonceIdx] ^= 0x80 - alterCtIdx := mr.Intn(len(ct)) + alterCtIdx := mathrand.Intn(len(ct)) ct[alterCtIdx] ^= 0x80 if _, err := aead.Open(nil, nonce, ct, ad); err == nil { t.Errorf("#%d: Open was successful after altering ciphertext", i) @@ -68,87 +83,117 @@ func TestVectors(t *testing.T) { func TestRandom(t *testing.T) { // Some random tests to verify Open(Seal) == Plaintext - for i := 0; i < 256; i++ { - var nonce [12]byte - var key [32]byte - - al := mr.Intn(128) - pl := mr.Intn(16384) - ad := make([]byte, al) - plaintext := make([]byte, pl) - cr.Read(key[:]) - cr.Read(nonce[:]) - cr.Read(ad) - cr.Read(plaintext) - - aead, err := New(key[:]) - if err != nil { - t.Fatal(err) - } + f := func(t *testing.T, nonceSize int) { + for i := 0; i < 256; i++ { + var nonce = make([]byte, nonceSize) + var key [32]byte + + al := mathrand.Intn(128) + pl := mathrand.Intn(16384) + ad := make([]byte, al) + plaintext := make([]byte, pl) + cryptorand.Read(key[:]) + cryptorand.Read(nonce[:]) + cryptorand.Read(ad) + cryptorand.Read(plaintext) + + var ( + aead cipher.AEAD + err error + ) + switch len(nonce) { + case NonceSize: + aead, err = New(key[:]) + case NonceSizeX: + aead, err = NewX(key[:]) + default: + t.Fatalf("#%d: wrong nonce length: %d", i, len(nonce)) + } + if err != nil { + t.Fatal(err) + } - ct := aead.Seal(nil, nonce[:], plaintext, ad) + ct := aead.Seal(nil, nonce[:], plaintext, ad) - plaintext2, err := aead.Open(nil, nonce[:], ct, ad) - if err != nil { - t.Errorf("Random #%d: Open failed", i) - continue - } + plaintext2, err := aead.Open(nil, nonce[:], ct, ad) + if err != nil { + t.Errorf("Random #%d: Open failed", i) + continue + } - if !bytes.Equal(plaintext, plaintext2) { - t.Errorf("Random #%d: plaintext's don't match: got %x vs %x", i, plaintext2, plaintext) - continue - } + if !bytes.Equal(plaintext, plaintext2) { + t.Errorf("Random #%d: plaintext's don't match: got %x vs %x", i, plaintext2, plaintext) + continue + } - if len(ad) > 0 { - alterAdIdx := mr.Intn(len(ad)) - ad[alterAdIdx] ^= 0x80 - if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { - t.Errorf("Random #%d: Open was successful after altering additional data", i) + if len(ad) > 0 { + alterAdIdx := mathrand.Intn(len(ad)) + ad[alterAdIdx] ^= 0x80 + if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { + t.Errorf("Random #%d: Open was successful after altering additional data", i) + } + ad[alterAdIdx] ^= 0x80 } - ad[alterAdIdx] ^= 0x80 - } - alterNonceIdx := mr.Intn(aead.NonceSize()) - nonce[alterNonceIdx] ^= 0x80 - if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { - t.Errorf("Random #%d: Open was successful after altering nonce", i) - } - nonce[alterNonceIdx] ^= 0x80 + alterNonceIdx := mathrand.Intn(aead.NonceSize()) + nonce[alterNonceIdx] ^= 0x80 + if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { + t.Errorf("Random #%d: Open was successful after altering nonce", i) + } + 
nonce[alterNonceIdx] ^= 0x80 - alterCtIdx := mr.Intn(len(ct)) - ct[alterCtIdx] ^= 0x80 - if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { - t.Errorf("Random #%d: Open was successful after altering ciphertext", i) + alterCtIdx := mathrand.Intn(len(ct)) + ct[alterCtIdx] ^= 0x80 + if _, err := aead.Open(nil, nonce[:], ct, ad); err == nil { + t.Errorf("Random #%d: Open was successful after altering ciphertext", i) + } + ct[alterCtIdx] ^= 0x80 } - ct[alterCtIdx] ^= 0x80 } + t.Run("Standard", func(t *testing.T) { f(t, NonceSize) }) + t.Run("X", func(t *testing.T) { f(t, NonceSizeX) }) } -func benchamarkChaCha20Poly1305Seal(b *testing.B, buf []byte) { +func benchamarkChaCha20Poly1305Seal(b *testing.B, buf []byte, nonceSize int) { + b.ReportAllocs() b.SetBytes(int64(len(buf))) var key [32]byte - var nonce [12]byte + var nonce = make([]byte, nonceSize) var ad [13]byte var out []byte - aead, _ := New(key[:]) + var aead cipher.AEAD + switch len(nonce) { + case NonceSize: + aead, _ = New(key[:]) + case NonceSizeX: + aead, _ = NewX(key[:]) + } + b.ResetTimer() for i := 0; i < b.N; i++ { out = aead.Seal(out[:0], nonce[:], buf[:], ad[:]) } } -func benchamarkChaCha20Poly1305Open(b *testing.B, buf []byte) { +func benchamarkChaCha20Poly1305Open(b *testing.B, buf []byte, nonceSize int) { + b.ReportAllocs() b.SetBytes(int64(len(buf))) var key [32]byte - var nonce [12]byte + var nonce = make([]byte, nonceSize) var ad [13]byte var ct []byte var out []byte - aead, _ := New(key[:]) + var aead cipher.AEAD + switch len(nonce) { + case NonceSize: + aead, _ = New(key[:]) + case NonceSizeX: + aead, _ = NewX(key[:]) + } ct = aead.Seal(ct[:0], nonce[:], buf[:], ad[:]) b.ResetTimer() @@ -157,26 +202,54 @@ func benchamarkChaCha20Poly1305Open(b *testing.B, buf []byte) { } } -func BenchmarkChacha20Poly1305Open_64(b *testing.B) { - benchamarkChaCha20Poly1305Open(b, make([]byte, 64)) +func BenchmarkChacha20Poly1305(b *testing.B) { + for _, length := range []int{64, 1350, 8 * 1024} { + b.Run("Open-"+strconv.Itoa(length), func(b *testing.B) { + benchamarkChaCha20Poly1305Open(b, make([]byte, length), NonceSize) + }) + b.Run("Seal-"+strconv.Itoa(length), func(b *testing.B) { + benchamarkChaCha20Poly1305Seal(b, make([]byte, length), NonceSize) + }) + + b.Run("Open-"+strconv.Itoa(length)+"-X", func(b *testing.B) { + benchamarkChaCha20Poly1305Open(b, make([]byte, length), NonceSizeX) + }) + b.Run("Seal-"+strconv.Itoa(length)+"-X", func(b *testing.B) { + benchamarkChaCha20Poly1305Seal(b, make([]byte, length), NonceSizeX) + }) + } } -func BenchmarkChacha20Poly1305Seal_64(b *testing.B) { - benchamarkChaCha20Poly1305Seal(b, make([]byte, 64)) -} +var key = make([]byte, KeySize) -func BenchmarkChacha20Poly1305Open_1350(b *testing.B) { - benchamarkChaCha20Poly1305Open(b, make([]byte, 1350)) -} +func ExampleNewX() { + aead, err := NewX(key) + if err != nil { + log.Fatalln("Failed to instantiate XChaCha20-Poly1305:", err) + } -func BenchmarkChacha20Poly1305Seal_1350(b *testing.B) { - benchamarkChaCha20Poly1305Seal(b, make([]byte, 1350)) -} + for _, msg := range []string{ + "Attack at dawn.", + "The eagle has landed.", + "Gophers, gophers, gophers everywhere!", + } { + // Encryption. + nonce := make([]byte, NonceSizeX) + if _, err := cryptorand.Read(nonce); err != nil { + panic(err) + } + ciphertext := aead.Seal(nil, nonce, []byte(msg), nil) -func BenchmarkChacha20Poly1305Open_8K(b *testing.B) { - benchamarkChaCha20Poly1305Open(b, make([]byte, 8*1024)) -} + // Decryption. 
+ plaintext, err := aead.Open(nil, nonce, ciphertext, nil) + if err != nil { + log.Fatalln("Failed to decrypt or authenticate message:", err) + } + + fmt.Printf("%s\n", plaintext) + } -func BenchmarkChacha20Poly1305Seal_8K(b *testing.B) { - benchamarkChaCha20Poly1305Seal(b, make([]byte, 8*1024)) + // Output: Attack at dawn. + // The eagle has landed. + // Gophers, gophers, gophers everywhere! } diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_vectors_test.go b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_vectors_test.go index 49f0da6..fa3607e 100644 --- a/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_vectors_test.go +++ b/vendor/golang.org/x/crypto/chacha20poly1305/chacha20poly1305_vectors_test.go @@ -7,6 +7,13 @@ package chacha20poly1305 var chacha20Poly1305Tests = []struct { plaintext, aad, key, nonce, out string }{ + { + "", + "", + "808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f", + "070000004041424344454647", + "a0784d7a4716f3feb4f64e7f4b39bf04", + }, { "4c616469657320616e642047656e746c656d656e206f662074686520636c617373206f66202739393a204966204920636f756c64206f6666657220796f75206f6e6c79206f6e652074697020666f7220746865206675747572652c2073756e73637265656e20776f756c642062652069742e", "50515253c0c1c2c3c4c5c6c7", @@ -329,4 +336,391 @@ var chacha20Poly1305Tests = []struct { "129039b5572e8a7a8131f76a", "2c125232a59879aee36cacc4aca5085a4688c4f776667a8fbd86862b5cfb1d57c976688fdd652eafa2b88b1b8e358aa2110ff6ef13cdc1ceca9c9f087c35c38d89d6fbd8de89538070f17916ecb19ca3ef4a1c834f0bdaa1df62aaabef2e117106787056c909e61ecd208357dd5c363f11c5d6cf24992cc873cf69f59360a820fcf290bd90b2cab24c47286acb4e1033962b6d41e562a206a94796a8ab1c6b8bade804ff9bdf5ba6062d2c1f8fe0f4dfc05720bd9a612b92c26789f9f6a7ce43f5e8e3aee99a9cd7d6c11eaa611983c36935b0dda57d898a60a0ab7c4b54", }, + + // XChaCha20-Poly1305 vectors + { + "000000000000000000000000000000", + "", + "0000000000000000000000000000000000000000000000000000000000000000", + "000000000000000000000000000000000000000000000000", + "789e9689e5208d7fd9e1f3c5b5341fb2f7033812ac9ebd3745e2c99c7bbfeb", + }, + { + "02dc819b71875e49f5e1e5a768141cfd3f14307ae61a34d81decd9a3367c00c7", + "", + "b7bbfe61b8041658ddc95d5cbdc01bbe7626d24f3a043b70ddee87541234cff7", + "e293239d4c0a07840c5f83cb515be7fd59c333933027e99c", + "7a51f271bd2e547943c7be3316c05519a5d16803712289aa2369950b1504dd8267222e47b13280077ecada7b8795d535", + }, + { + "7afc5f3f24155002e17dc176a8f1f3a097ff5a991b02ff4640f70b90db0c15c328b696d6998ea7988edfe3b960e47824e4ae002fbe589be57896a9b7bf5578599c6ba0153c7c", + "d499bb9758debe59a93783c61974b7", + "4ea8fab44a07f7ffc0329b2c2f8f994efdb6d505aec32113ae324def5d929ba1", + "404d5086271c58bf27b0352a205d21ce4367d7b6a7628961", + "26d2b46ad58b6988e2dcf1d09ba8ab6f532dc7e0847cdbc0ed00284225c02bbdb278ee8381ebd127a06926107d1b731cfb1521b267168926492e8f77219ad922257a5be2c5e52e6183ca4dfd0ad3912d7bd1ec968065", + }, + { + "", + "", + "48d8bd02c2e9947eae58327114d35e055407b5519c8019535efcb4fc875b5e2b", + "cc0a587a475caba06f8dbc09afec1462af081fe1908c2cba", + "fc3322d0a9d6fac3eb4a9e09b00b361e", + }, + { + "e0862731e5", + "", + "6579e7ee96151131a1fcd06fe0d52802c0021f214960ecceec14b2b8591f62cd", + "e2230748649bc22e2b71e46a7814ecabe3a7005e949bd491", + "e991efb85d8b1cfa3f92cb72b8d3c882e88f4529d9", + }, + { + "00c7dd8f440af1530b44", + "", + "ffb733657c849d50ab4ab40c4ae18f8ee2f0acf7c907afefdc04dff3537fdff3", + "02c6fd8032a8d89edbedcd1db024c09d29f08b1e74325085", + "13dbcdb8c60c3ed28449a57688edfaea89e309ab4faa6d51e532", + }, + { + 
"7422f311ea476cf819cb8b3c77369f", + "", + "ef0d05d028d6abdd5e99d1761d2028de75ee6eb376ff0dc8036e9a8e10743876", + "f772745200b0f92e38f1d8dae79bf8138e84b301f0be74df", + "d5f992f9834df1be86b580ac59c7eae063a68072829c51bc8a26970dd3d310", + }, + { + "ba09ca69450e6c7bece31a7a3f216e3b9ed0e536", + "", + "8d93e31abfe22a63faf45cbea91877050718f13fef6e2664a1892d7f23007ccf", + "260b7b3554a7e6ff8aae7dd6234077ca539689a20c1610a8", + "c99e9a768eb2ec8569bdff8a37295069552faebcafb1a76e98bc7c5b6b778b3d1b6291f0", + }, + { + "424ec5f98a0fdc5a7388532d11ab0edb26733505627b7f2d1f", + "", + "b68d5e6c46cdbb0060445522bdc5c562ae803b6aaaf1e103c146e93527a59299", + "80bb5dc1dd44a35ec4f91307f1a95b4ca31183a1a596fb7c", + "29d4eed0fff0050d4bb40de3b055d836206e7cbd62de1a63904f0cf731129ba3f9c2b9d46251a6de89", + }, + { + "e7e4515cc0a6ef0491af983eaac4f862d6e726758a3c657f4ec444841e42", + "", + "e31a1d3af650e8e2848bd78432d89ecd1fdece9842dc2792e7bda080f537b17b", + "f3f09905e9a871e757348834f483ed71be9c0f437c8d74b0", + "f5c69528963e17db725a28885d30a45194f12848b8b7644c7bded47a2ee83e6d4ef34006305cfdf82effdced461d", + }, + { + "0f5ca45a54875d1d19e952e53caeaa19389342f776dab11723535503338d6f77202a37", + "", + "1031bc920d4fcb4434553b1bf2d25ab375200643bf523ff037bf8914297e8dca", + "4cc77e2ef5445e07b5f44de2dc5bf62d35b8c6f69502d2bf", + "7aa8669e1bfe8b0688899cdddbb8cee31265928c66a69a5090478da7397573b1cc0f64121e7d8bff8db0ddd3c17460d7f29a12", + }, + { + "c45578c04c194994e89025c7ffb015e5f138be3cd1a93640af167706aee2ad25ad38696df41ad805", + "", + "ac8648b7c94328419c668ce1c57c71893adf73abbb98892a4fc8da17400e3a5e", + "4ad637facf97af5fc03207ae56219da9972858b7430b3611", + "49e093fcd074fb67a755669119b8bd430d98d9232ca988882deeb3508bde7c00160c35cea89092db864dcb6d440aefa5aacb8aa7b9c04cf0", + }, + { + "b877bfa192ea7e4c7569b9ee973f89924d45f9d8ed03c7098ad0cad6e7880906befedcaf6417bb43efabca7a2f", + "", + "125e331d5da423ecabc8adf693cdbc2fc3d3589740d40a3894f914db86c02492", + "913f8b2f08006e6260de41ec3ee01d938a3e68fb12dc44c4", + "1be334253423c90fc8ea885ee5cd3a54268c035ba8a2119e5bd4f7822cd7bf9cb4cec568d5b6d6292606d32979e044df3504e6eb8c0b2fc7e2a0e17d62", + }, + { + "d946484a1df5f85ff72c92ff9e192660cde5074bd0ddd5de900c35eb10ed991113b1b19884631bc8ceb386bcd83908061ce9", + "", + "b7e83276373dcf8929b6a6ea80314c9de871f5f241c9144189ee4caf62726332", + "f59f9d6e3e6c00720dc20dc21586e8330431ebf42cf9180e", + "a38a662b18c2d15e1b7b14443cc23267a10bee23556b084b6254226389c414069b694159a4d0b5abbe34de381a0e2c88b947b4cfaaebf50c7a1ad6c656e386280ad7", + }, + { + "d266927ca40b2261d5a4722f3b4da0dd5bec74e103fab431702309fd0d0f1a259c767b956aa7348ca923d64c04f0a2e898b0670988b15e", + "", + "a60e09cd0bea16f26e54b62b2908687aa89722c298e69a3a22cf6cf1c46b7f8a", + "92da9d67854c53597fc099b68d955be32df2f0d9efe93614", + "9dd6d05832f6b4d7f555a5a83930d6aed5423461d85f363efb6c474b6c4c8261b680dea393e24c2a3c8d1cc9db6df517423085833aa21f9ab5b42445b914f2313bcd205d179430", + }, + { + "f7e11b4d372ed7cb0c0e157f2f9488d8efea0f9bbe089a345f51bdc77e30d1392813c5d22ca7e2c7dfc2e2d0da67efb2a559058d4de7a11bd2a2915e", + "", + "194b1190fa31d483c222ec475d2d6117710dd1ac19a6f1a1e8e894885b7fa631", + "6b07ea26bb1f2d92e04207b447f2fd1dd2086b442a7b6852", + "25ae14585790d71d39a6e88632228a70b1f6a041839dc89a74701c06bfa7c4de3288b7772cb2919818d95777ab58fe5480d6e49958f5d2481431014a8f88dab8f7e08d2a9aebbe691430011d", + }, + { + "", + "1e2b11e3", + "70cd96817da85ede0efdf03a358103a84561b25453dee73735e5fb0161b0d493", + "5ddeba49f7266d11827a43931d1c300dd47a3c33f9f8bf9b", + "592fc4c19f3cddec517b2a00f9df9665", + }, + { + "81b3cb7eb3", + "efcfd0cf", + 
"a977412f889281a6d75c24186f1bfaa00dcc5132f0929f20ef15bbf9e63c4c91", + "3f26ca997fb9166d9c615babe3e543ca43ab7cab20634ac5", + "8e4ade3e254cf52e93eace5c46667f150832725594", + }, + { + "556f97f2ebdb4e949923", + "f7cee2e0", + "787b3e86546a51028501c801dadf8d5b996fd6f6f2363d5d0f900c44f6a2f4c2", + "7fa6af59a779657d1cada847439ea5b92a1337cfbebbc3b1", + "608ec22dae5f48b89d6f0d2a940d5a7661e0a8e68aaee4ad2d96", + }, + { + "c06847a36ad031595b60edd44dc245", + "d4175e1f", + "16de31e534dd5af32801b1acd0ec541d1f8d82bcbc3af25ec815f3575b7aca73", + "29f6656972838f56c1684f6a278f9e4e207b51d68706fc25", + "836082cc51303e500fceade0b1a18f1d97d64ff41cc81754c07d6231b9fd1b", + }, + { + "0d03c22ced7b29c6741e72166cd61792028dfc80", + "e505dad0", + "ac2b426e5c5c8e00666180a3410e8a2f6e52247a43aecea9622163e8433c93b2", + "c1123430468228625967bbc0fbd0f963e674372259ff2deb", + "bf09979bf4fed2eec6c97f6e1bcfac35eeffc6d54a55cc1d83d8767ae74db2d7cdfbc371", + }, + { + "05bf00e1707cffe7ccbd06a9f846d0fd471a700ed43b4facb8", + "d863bebe", + "66c121f0f84b95ba1e6d29e7d81900bc96a642421b9b6105ae5eb5f2e7b07577", + "8ed6ae211a661e967995b71f7316ba88f44322bb62b4187b", + "b2c5c85d087e0305e9058fba52b661fb3d7f21cb4d4915ae048bc9e5d66a2f921dd4a1c1b030f442c9", + }, + { + "5f2b91a9be8bfaa21451ddc6c5cf28d1cc00b046b76270b95cda3c280c83", + "a8750275", + "39592eb276877fca9dd11e2181c0b23127328407e3cc11e315e5d748f43529cc", + "1084bebd756f193d9eea608b3a0193a5028f8ced19684821", + "eaee1f49ac8468154c601a5dd8b84d597602e5a73534b5fad5664f97d0f017dd114752be969679cf610340c6a312", + }, + { + "01e8e269b5376943f3b2d245483a76461dc8b7634868b559165f5dbb20839029fae9bb", + "a1e96da0", + "b8386123b87e50d9d046242cf1bf141fce7f65aff0fba76861a2bc72582d6ff0", + "0fbe2a13a89bea031de96d78f9f11358ba7b6a5e724b4392", + "705ec3f910ec85c6005baa99641de6ca43332ff52b5466df6af4ffbe4ef2a376a8f871d1eae503b5896601fee005cdc1f4c1c6", + }, + { + "706daba66e2edb1f828f3c0051e3cc214b12210bde0587bba02580f741a4c83e84d4e9fe961120cd", + "87663c5a", + "d519d82ba8a3f0c3af9efe36682b62e285167be101a526c1d73000f169c2a486", + "ad651aac536978e2bc1a54816345ac5e9a9b43b3d9cc0bfc", + "07051b5e72da9c4811beb07ff9f95aece67eae18420eb3f0e8bb8a5e26d4b483fa40eb063a2354842d0c8a41d981cc2b77c530b496db01c8", + }, + { + "1f6b24f2f0d9eb460d726bed953d66fcc4ecc29da6ed2fd711358eac3b2609d74ba3e21885156cde3cbe6d9b6f", + "f5efbc4e", + "86068a00544f749ad4ad15bb8e427ae78577ae22f4ca9778efff828ba10f6b20", + "c8420412c9626dcd34ece14593730f6aa2d01ec51cacd59f", + "a99f6c88eac35bb34439e34b292fe9db8192446dcdc81e2192060ec36d98b47de2bee12bf0f67cb24fb0949c07733a6781cd9455cdc61123f506886b04", + }, + { + "d69389d83362be8c0ddb738659a6cc4bd65d88cb5b525232f4d59a7d4751a7203c254923ecb6873e803220aab19664789a63", + "bc35fb1c", + "835855b326a98682b3075b4d7f1b89059c3cdfc547d4296c80ce7a77ba6434e3", + "c27cb75fc319ba431cbaeb120341d0c4745d883eb47e92bc", + "db6dc3f9a0f4f1a6df2495a88910550c2c6205478bfc1e81282e34b5b36d984c72c0509c522c987c61d2e640ced69402a6d33aa10d3d0b81e680b3c19bc142e81923", + }, + { + "a66a7f089115ed9e2d5bb5d33d7282a7afe401269b00f2a233a59c04b794a42901d862140b61d18d7c7f0ad5da040613e557f8abc74219", + "2c060aaf", + "99758aa7714fd707931f71803eefe04a06955041308a0b2a1104313b270ccf34", + "63f690d8926408c7a34fe8ddd505a8dc58769dc74e8d5da6", + "92b21ee85afcd8996ac28f3aed1047ad814d6e4ffbca3159af16f26eded83e4abda9e4275eb3ff0ad90dffe09f2d443b628f824f680b46527ce0128e8de1920f7c44350ebe7913", + }, + { + "f955183b1f762d4536d3f6885ea7f5ac27414caf46c2e24a2fd3bd56b91c53d840fb657224565e0a6f686f8ba320e04a401057399d9a3d995ab17c13", + "c372ddc5", + 
"a188be3795b2ca2e69b6aa263244f0963c492d694cf6c9b705a1d7045f3f2a26", + "51bb484ea094ee140474681e1c838e4442fd148de2cc345a", + "48759a5ddfdd829d11de8e0c538ce4a9c475faab6912039b568ad92d737d172fc1eb0c00c3793de6dddbfacfdbbc7f44aeba33684e18005aa982b6fc6c556e63bb90ff7a1dde8153a63eabe0", + }, + { + "", + "e013cd0bfafd486d", + "af3d3ba094d38299ecb91c17bfe3d085da5bd42e11acf8acb5bc26a4be9a7583", + "7dd63c14173831f109761b1c1abe18f6ba937d825957011b", + "8bc685a7d9d501952295cd25d8c92517", + }, + { + "284b64597e", + "31d013e53aa3ea79", + "93c77409d7f805f97fe683b2dd6ee06152a5e918b3eed5b731acccffdcb2cc04", + "3d331e90c4597cf0c30d1b7cfbd07bcb6ab927eda056873c", + "3538a449d6c18d148a8c6cb76f1bc288657ac7036a", + }, + { + "9fe67f5c78180ede8274", + "188608d230d75860", + "b7cca89a82640aea6f80b458c9e633d88594fb498959d39787be87030892d48f", + "ef891d50e8c08958f814590fdb7a9f16c61cc2aae1682109", + "bbb40c30f3d1391a5b38df480cbbf964b71e763e8140751f4e28", + }, + { + "3a2826b6f7e3d542e4ded8f23c9aa4", + "260033e789c4676a", + "7fe2731214f2b4b42f93217d43f1776498413725e4f6cfe62b756e5a52df10ea", + "888728219ebf761547f5e2218532714403020e5a8b7a49d0", + "fe0328f883fcd88930ae017c0f54ed90f883041efc020e959125af370c1d47", + }, + { + "91858bf7b969005d7164acbd5678052b651c53e0", + "f3cc53ecafcbadb3", + "d69c04e9726b22d51f97bc9da0f0fda86736e6b78e8ef9f6f0000f79890d6d43", + "6de3c45161b434e05445cf6bf69eef7bddf595fc6d8836bd", + "a8869dd578c0835e120c843bb7dedc7a1e9eae24ffd742be6bf5b74088a8a2c550976fcb", + }, + { + "b3b1a4d6b2a2b9c5a1ca6c1efaec34dcfa1acbe7074d5e10cc", + "d0f72bd16cda3bae", + "2b317857b089c9305c49b83019f6e158bc4ecc3339b39ade02ee10c37c268da0", + "cb5fa6d1e14a0b4bdf350cd10c8a7bd638102911ec74be09", + "e6372f77c14343650074e07a2b7223c37b29242224b722b24d63b5956f27aa64ce7ce4e39cd14a2787", + }, + { + "057d3e9f865be7dff774938cab6d080e50cf9a1593f53c0063201e0bb7ae", + "fd3881e505c8b12d", + "36e42b1ef1ee8d068f09b5fad3ee43d98d34aa3e3f994f2055aee139da71de9d", + "24124da36473d01bdca30297c9eef4fe61955525a453da17", + "a8b28139524c98c1f8776f442eac4c22766fe6aac83224641c58bf021fc9cb709ec4706f49c2d0c1828acf2bfe8d", + }, + { + "bd8f13e928c34d67a6c70c3c7efdf2982ecc31d8cee68f9cbddc75912cd828ac93d28b", + "193206c8fcc5b19b", + "6e47c40c9d7b757c2efca4d73890e4c73f3c859aab4fdc64b564b8480dd84e72", + "ca31340ae20d30fe488be355cb36652c5db7c9d6265a3e95", + "a121efc5e1843deade4b8adbfef1808de4eda222f176630ad34fb476fca19e0299e4a13668e53cf13882035ba4f04f47c8b4e3", + }, + { + "23067a196e977d10039c14ff358061c918d2148d31961bb3e12c27c5122383cb25c4d1d79c775720", + "62338d02fff78a00", + "2c5c79c92d91fb40ef7d0a77e8033f7b265e3bab998b8116d17b2e62bb4f8a09", + "024736adb1d5c01006dffd8158b57936d158d5b42054336d", + "46d0905473a995d38c7cdbb8ef3da96ecc82a22c5b3c6c9d1c4a61ae7a17db53cb88c5f7eccf2da1d0c417c300f989b4273470e36f03542f", + }, + { + "252e966c680329eb687bff813b78fea3bfd3505333f106c6f9f45ba69896723c41bb763793d9b266e897d05557", + "1e93e0cfe6523380", + "9ec6fd1baa13ee16aec3fac16718a2baccf18a403cec467c25b7448e9b321110", + "e7120b1018ab363a36e61102eedbcbe9847a6cbacaa9c328", + "2934f034587d4144bb11182679cd2cd1c99c8088d18e233379e9bc9c41107a1f57a2723ecc7b9ba4e6ee198adf0fd766738e828827dc73136fc5b996e9", + }, + { + "6744aefcb318f12bc6eeb59d4d62f7eb95f347cea14bd5158415f07f84e4e3baa3de07512d9b76095ac1312cfcb1bb77f499", + "608d2a33ce5d0b04", + "0f665cbdaaa40f4f5a00c53d951b0a98aac2342be259a52670f650a783be7aab", + "378bdb57e957b8c2e1500c9513052a3b02ff5b7edbd4a3a7", + 
"341c60fcb374b394f1b01a4a80aedef49ab0b67ec963675e6eec43ef106f7003be87dbf4a8976709583dccc55abc7f979c4721837e8664a69804ea31736aa2af615a", + }, + { + "bcf1004f988220b7ce063ef2ec4e276ffd074f0a90aa807de1532679d2a1505568eaa4192d9a6ea52cc500322343ce9f8e68cc2c606d83", + "e64bd00126c8792c", + "58e65150d6a15dcefbc14a171998987ad0d709fb06a17d68d6a778759681c308", + "106d2bd120b06e4eb10bc674fe55c77a3742225268319303", + "a28052a6686a1e9435fee8702f7da563a7b3d7b5d3e9e27f11abf73db309cd1f39a34756258c1c5c7f2fb12cf15eb20175c2a08fc93dd19c5e482ef3fbef3d8404a3cfd54a7baf", + }, + { + "acd08d4938a224b4cb2d723bf75420f3ea27b698fadd815bb7db9548a05651398644354334e69f8e4e5503bf1a6f92b38e860044a7edca6874038ce1", + "28a137808d0225b8", + "a031203b963a395b08be55844d81af39d19b23b7cc24b21afa31edc1eea6edd6", + "e8b31c52b6690f10f4ae62ba9d50ba39fb5edcfb78400e35", + "35cf39ba31da95ac9b661cdbd5e9c9655d13b8ff065c4ec10c810833a47a87d8057dd1948a7801bfe6904b49fed0aabfb3cd755a1a262d372786908ddcf64cae9f71cb9ed199c3ddacc50116", + }, + { + "", + "cda7ee2857e09e9054ef6806", + "d91dffb18132d8dd3d144a2f10ba28bc5df36cb60369f3b19893ec91db3cf904", + "ee56f19c62b0438da6a0d9e01844313902be44f84a6a4ce7", + "ccd48b61a5683c195d4424009eb1d147", + }, + { + "350f4c7ac2", + "7c104b539c1d2ae022434cd6", + "cbb61e369117f9250f68fa707240c554359262a4d66c757f80e3aeb6920894fb", + "fbb14c9943444eac5413c6f5c8095451eddece02c9461043", + "b5c6a35865ed8e5216ff6c77339ee1ab570de50e51", + }, + { + "4f0d61d3ea03a44a8df0", + "51c20a8ae9e9794da931fe23", + "ba6ced943aa62f9261d7513b822e02054e099acafb5360f0d850064da48b5a4f", + "04c68cb50cdbb0ec03f8381cf59b886e64c40548bf8e3f82", + "ea45a73957e2a853655623f2a3bb58791f7ea36dd2957ed66ffa", + }, + { + "4fbdd4d4293a8f34fdbc8f3ad44cf6", + "8212f315e3759c3253c588bb", + "5354791bc2370415811818e913e310dd12e6a0cf5dcab2b6424816eecccf4b65", + "7ee6353c2fbc73c9ebc652270bc86e4008e09583e623e679", + "50a354811a918e1801fb567621a8924baf8dd79da6d36702855d3753f1319c", + }, + { + "5a6f68b5a9a9920ca9c6edf5be7c0af150a063c4", + "9a524aa62938fb7a1e50ed06", + "fd91605a6ad85d8ba7a71b08dce1032aa9992bf4f28d407a53ddda04c043cada", + "46791d99d6de33e79025bf9e97c198e7cf409614c6284b4d", + "648033c1eb615467e90b7d3ac24202d8b849549141f9bab03e9e910c29b8eab3d4fb3f2c", + }, + { + "d9318c2c0d9ed89e35d242a6b1d496e7e0c5bbdf77eba14c56", + "a16053c35fbe8dc93c14a81f", + "f21406aec83134ebf7bc48c6d0f45acb5f341fbc7d3b5a9bff3ea1333c916af7", + "de6b977be450d5efa7777e006802ddbb10814a22da1c3cd9", + "8d3dad487d5161663da830b71c3e24ec5cdb74d858cbb73b084ed0902198532aad3a18416966bff223", + }, + { + "68d0ee08d38cb4bcc9268fee3030666e70e41fcabf6fe06536eeec43eec5", + "11e09447d40b22dc98070eec", + "da5ee1ec02eab13220fcb94f16efec848a8dd57c0f4d67955423f5d17fde5aa3", + "8f13e61d773a250810f75d46bf163a3f9205be5751f6049a", + "92a103b03764c1ad1f88500d22eeae5c0fe1044c872987c0b97affc5e8c3d783f8cc28a11dc91990ea22dd1bad74", + }, + { + "a1d960bda08efcf19e136dc1e8b05b6b381c820eda5f9a8047e1a2dd1803a1e4d11a7f", + "aa73d8d4aaa0cfd9d80a9ae8", + "08028833d617c28ba75b48f177cb5da87189189abb68dcb8974eca9230c25945", + "f7b6f34a910fd11588f567de8555932291f7df05f6e2b193", + "99cfc4cca193998bae153b744e6c94a82a2867780aa0f43acddb7c433fcb297311313ec2199f00d7ca7da0646b40113c60e935", + }, + { + "3b4ae39a745b6247ce5baf675ec36c5065b1bf76c8379eab4b769961d43a753896d068938017777e", + "128c017a985052f8cdbc6b28", + "4683d5caff613187a9b16af897253848e9c54fc0ec319de62452a86961d3cbb2", + "5612a13c2da003b91188921cbac3fa093eba99d8cbbb51ff", + 
"91a98b93b2174257175f7c882b45cc252e0db8667612bd270c1c12fe28b6bf209760bf8f370318f92ae3f88a5d4773b05714132cc28dddb8", + }, + { + "22ccf680d2995ef6563de281cff76882a036a59ad73f250e710b3040590d69bccde8a8411abe8b0d3cb728ca82", + "13a97d0a167a61aa21e531ec", + "9e140762eed274948b66de25e6e8f36ab65dc730b0cb096ef15aaba900a5588c", + "d0e9594cfd42ab72553bf34062a263f588bb8f1fc86a19f5", + "f194fc866dfba30e42c4508b7d90b3fa3f8983831ede713334563e36aa861f2f885b40be1dbe20ba2d10958a12823588d4bbbefb81a87d87315204f5e3", + }, + { + "a65f5d10c482b3381af296e631eb605eba6a11ccec6ceab021460d0bd35feb676ec6dbba5d4ad6c9f4d683ea541035bc80fa", + "f15ae71ffed50a8fcc4996b0", + "f535d60e8b75ac7e526041eed86eb4d65ae7e315eff15dba6c0133acc2a6a4bf", + "01ba61691ebb3c66d2f94c1b1c597ecd7b5ff7d2a30be405", + "d79e7c3893df5a5879c2f0a3f7ca619f08e4540f3ac7db35790b4211b9d47ae735adadf35fd47252a4763e3fd2b2cd8157f6ea7986108a53437962670a97d68ee281", + }, + { + "8c014655b97f6da76b0b168b565fd62de874c164fd7e227346a0ec22c908bed1e2a0b429620e6f3a68dd518f13a2c0250608a1cb08a7c3", + "10a7eff999029c5040c1b3bd", + "bf11af23e88c350a443493f6fa0eb34f234f4daa2676e26f0701bce5642d13f4", + "f14c97392afd2e32e2c625910ca029f9b6e81676c79cc42f", + "78d5226f372d5d60681dbfc749d12df74249f196b0cbf14fa65a3a59dc65ae458455ec39baa1df3397afe752bb06f6f13bf03c99abda7a95c1d0b73fd92d5f888a5f6f889a9aea", + }, + { + "66234d7a5b71eef134d60eccf7d5096ee879a33983d6f7a575e3a5e3a4022edccffe7865dde20b5b0a37252e31cb9a3650c63e35b057a1bc200a5b5b", + "ccc2406f997bcae737ddd0f5", + "d009eeb5b9b029577b14d200b7687b655eedb7d74add488f092681787999d66d", + "99319712626b400f9458dbb7a9abc9f5810f25b47fc90b39", + "543a2bbf52fd999027ae7c297353f3ce986f810bc2382583d0a81fda5939e4c87b6e8d262790cd614d6f753d8035b32adf43acc7f6d4c2c44289538928564b6587c2fcb99de1d8e34ffff323", + }, } diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_generic.go b/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_generic.go deleted file mode 100644 index f9e8a3a..0000000 --- a/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_generic.go +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ChaCha20 implements the core ChaCha20 function as specified in https://tools.ietf.org/html/rfc7539#section-2.3. -package chacha20 - -import "encoding/binary" - -const rounds = 20 - -// core applies the ChaCha20 core function to 16-byte input in, 32-byte key k, -// and 16-byte constant c, and puts the result into 64-byte array out. 
-func core(out *[64]byte, in *[16]byte, k *[32]byte) { - j0 := uint32(0x61707865) - j1 := uint32(0x3320646e) - j2 := uint32(0x79622d32) - j3 := uint32(0x6b206574) - j4 := binary.LittleEndian.Uint32(k[0:4]) - j5 := binary.LittleEndian.Uint32(k[4:8]) - j6 := binary.LittleEndian.Uint32(k[8:12]) - j7 := binary.LittleEndian.Uint32(k[12:16]) - j8 := binary.LittleEndian.Uint32(k[16:20]) - j9 := binary.LittleEndian.Uint32(k[20:24]) - j10 := binary.LittleEndian.Uint32(k[24:28]) - j11 := binary.LittleEndian.Uint32(k[28:32]) - j12 := binary.LittleEndian.Uint32(in[0:4]) - j13 := binary.LittleEndian.Uint32(in[4:8]) - j14 := binary.LittleEndian.Uint32(in[8:12]) - j15 := binary.LittleEndian.Uint32(in[12:16]) - - x0, x1, x2, x3, x4, x5, x6, x7 := j0, j1, j2, j3, j4, j5, j6, j7 - x8, x9, x10, x11, x12, x13, x14, x15 := j8, j9, j10, j11, j12, j13, j14, j15 - - for i := 0; i < rounds; i += 2 { - x0 += x4 - x12 ^= x0 - x12 = (x12 << 16) | (x12 >> (16)) - x8 += x12 - x4 ^= x8 - x4 = (x4 << 12) | (x4 >> (20)) - x0 += x4 - x12 ^= x0 - x12 = (x12 << 8) | (x12 >> (24)) - x8 += x12 - x4 ^= x8 - x4 = (x4 << 7) | (x4 >> (25)) - x1 += x5 - x13 ^= x1 - x13 = (x13 << 16) | (x13 >> 16) - x9 += x13 - x5 ^= x9 - x5 = (x5 << 12) | (x5 >> 20) - x1 += x5 - x13 ^= x1 - x13 = (x13 << 8) | (x13 >> 24) - x9 += x13 - x5 ^= x9 - x5 = (x5 << 7) | (x5 >> 25) - x2 += x6 - x14 ^= x2 - x14 = (x14 << 16) | (x14 >> 16) - x10 += x14 - x6 ^= x10 - x6 = (x6 << 12) | (x6 >> 20) - x2 += x6 - x14 ^= x2 - x14 = (x14 << 8) | (x14 >> 24) - x10 += x14 - x6 ^= x10 - x6 = (x6 << 7) | (x6 >> 25) - x3 += x7 - x15 ^= x3 - x15 = (x15 << 16) | (x15 >> 16) - x11 += x15 - x7 ^= x11 - x7 = (x7 << 12) | (x7 >> 20) - x3 += x7 - x15 ^= x3 - x15 = (x15 << 8) | (x15 >> 24) - x11 += x15 - x7 ^= x11 - x7 = (x7 << 7) | (x7 >> 25) - x0 += x5 - x15 ^= x0 - x15 = (x15 << 16) | (x15 >> 16) - x10 += x15 - x5 ^= x10 - x5 = (x5 << 12) | (x5 >> 20) - x0 += x5 - x15 ^= x0 - x15 = (x15 << 8) | (x15 >> 24) - x10 += x15 - x5 ^= x10 - x5 = (x5 << 7) | (x5 >> 25) - x1 += x6 - x12 ^= x1 - x12 = (x12 << 16) | (x12 >> 16) - x11 += x12 - x6 ^= x11 - x6 = (x6 << 12) | (x6 >> 20) - x1 += x6 - x12 ^= x1 - x12 = (x12 << 8) | (x12 >> 24) - x11 += x12 - x6 ^= x11 - x6 = (x6 << 7) | (x6 >> 25) - x2 += x7 - x13 ^= x2 - x13 = (x13 << 16) | (x13 >> 16) - x8 += x13 - x7 ^= x8 - x7 = (x7 << 12) | (x7 >> 20) - x2 += x7 - x13 ^= x2 - x13 = (x13 << 8) | (x13 >> 24) - x8 += x13 - x7 ^= x8 - x7 = (x7 << 7) | (x7 >> 25) - x3 += x4 - x14 ^= x3 - x14 = (x14 << 16) | (x14 >> 16) - x9 += x14 - x4 ^= x9 - x4 = (x4 << 12) | (x4 >> 20) - x3 += x4 - x14 ^= x3 - x14 = (x14 << 8) | (x14 >> 24) - x9 += x14 - x4 ^= x9 - x4 = (x4 << 7) | (x4 >> 25) - } - - x0 += j0 - x1 += j1 - x2 += j2 - x3 += j3 - x4 += j4 - x5 += j5 - x6 += j6 - x7 += j7 - x8 += j8 - x9 += j9 - x10 += j10 - x11 += j11 - x12 += j12 - x13 += j13 - x14 += j14 - x15 += j15 - - binary.LittleEndian.PutUint32(out[0:4], x0) - binary.LittleEndian.PutUint32(out[4:8], x1) - binary.LittleEndian.PutUint32(out[8:12], x2) - binary.LittleEndian.PutUint32(out[12:16], x3) - binary.LittleEndian.PutUint32(out[16:20], x4) - binary.LittleEndian.PutUint32(out[20:24], x5) - binary.LittleEndian.PutUint32(out[24:28], x6) - binary.LittleEndian.PutUint32(out[28:32], x7) - binary.LittleEndian.PutUint32(out[32:36], x8) - binary.LittleEndian.PutUint32(out[36:40], x9) - binary.LittleEndian.PutUint32(out[40:44], x10) - binary.LittleEndian.PutUint32(out[44:48], x11) - binary.LittleEndian.PutUint32(out[48:52], x12) - binary.LittleEndian.PutUint32(out[52:56], x13) - 
binary.LittleEndian.PutUint32(out[56:60], x14) - binary.LittleEndian.PutUint32(out[60:64], x15) -} - -// XORKeyStream crypts bytes from in to out using the given key and counters. -// In and out may be the same slice but otherwise should not overlap. Counter -// contains the raw ChaCha20 counter bytes (i.e. block counter followed by -// nonce). -func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { - var block [64]byte - var counterCopy [16]byte - copy(counterCopy[:], counter[:]) - - for len(in) >= 64 { - core(&block, &counterCopy, key) - for i, x := range block { - out[i] = in[i] ^ x - } - u := uint32(1) - for i := 0; i < 4; i++ { - u += uint32(counterCopy[i]) - counterCopy[i] = byte(u) - u >>= 8 - } - in = in[64:] - out = out[64:] - } - - if len(in) > 0 { - core(&block, &counterCopy, key) - for i, v := range in { - out[i] = v ^ block[i] - } - } -} diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_test.go b/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_test.go deleted file mode 100644 index b80d34c..0000000 --- a/vendor/golang.org/x/crypto/chacha20poly1305/internal/chacha20/chacha_test.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package chacha20 - -import ( - "encoding/hex" - "testing" -) - -func TestCore(t *testing.T) { - // This is just a smoke test that checks the example from - // https://tools.ietf.org/html/rfc7539#section-2.3.2. The - // chacha20poly1305 package contains much more extensive tests of this - // code. - var key [32]byte - for i := range key { - key[i] = byte(i) - } - - var input [16]byte - input[0] = 1 - input[7] = 9 - input[11] = 0x4a - - var out [64]byte - XORKeyStream(out[:], out[:], &input, &key) - const expected = "10f1e7e4d13b5915500fdd1fa32071c4c7d1f4c733c068030422aa9ac3d46c4ed2826446079faa0914c2d705d98b02a2b5129cd1de164eb9cbd083e8a2503c4e" - if result := hex.EncodeToString(out[:]); result != expected { - t.Errorf("wanted %x but got %x", expected, result) - } -} diff --git a/vendor/golang.org/x/crypto/chacha20poly1305/xchacha20poly1305.go b/vendor/golang.org/x/crypto/chacha20poly1305/xchacha20poly1305.go new file mode 100644 index 0000000..a02fa57 --- /dev/null +++ b/vendor/golang.org/x/crypto/chacha20poly1305/xchacha20poly1305.go @@ -0,0 +1,104 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package chacha20poly1305 + +import ( + "crypto/cipher" + "encoding/binary" + "errors" + + "golang.org/x/crypto/internal/chacha20" +) + +type xchacha20poly1305 struct { + key [8]uint32 +} + +// NewX returns a XChaCha20-Poly1305 AEAD that uses the given 256-bit key. +// +// XChaCha20-Poly1305 is a ChaCha20-Poly1305 variant that takes a longer nonce, +// suitable to be generated randomly without risk of collisions. It should be +// preferred when nonce uniqueness cannot be trivially ensured, or whenever +// nonces are randomly generated. 
+func NewX(key []byte) (cipher.AEAD, error) { + if len(key) != KeySize { + return nil, errors.New("chacha20poly1305: bad key length") + } + ret := new(xchacha20poly1305) + ret.key[0] = binary.LittleEndian.Uint32(key[0:4]) + ret.key[1] = binary.LittleEndian.Uint32(key[4:8]) + ret.key[2] = binary.LittleEndian.Uint32(key[8:12]) + ret.key[3] = binary.LittleEndian.Uint32(key[12:16]) + ret.key[4] = binary.LittleEndian.Uint32(key[16:20]) + ret.key[5] = binary.LittleEndian.Uint32(key[20:24]) + ret.key[6] = binary.LittleEndian.Uint32(key[24:28]) + ret.key[7] = binary.LittleEndian.Uint32(key[28:32]) + return ret, nil +} + +func (*xchacha20poly1305) NonceSize() int { + return NonceSizeX +} + +func (*xchacha20poly1305) Overhead() int { + return 16 +} + +func (x *xchacha20poly1305) Seal(dst, nonce, plaintext, additionalData []byte) []byte { + if len(nonce) != NonceSizeX { + panic("chacha20poly1305: bad nonce length passed to Seal") + } + + // XChaCha20-Poly1305 technically supports a 64-bit counter, so there is no + // size limit. However, since we reuse the ChaCha20-Poly1305 implementation, + // the second half of the counter is not available. This is unlikely to be + // an issue because the cipher.AEAD API requires the entire message to be in + // memory, and the counter overflows at 256 GB. + if uint64(len(plaintext)) > (1<<38)-64 { + panic("chacha20poly1305: plaintext too large") + } + + hNonce := [4]uint32{ + binary.LittleEndian.Uint32(nonce[0:4]), + binary.LittleEndian.Uint32(nonce[4:8]), + binary.LittleEndian.Uint32(nonce[8:12]), + binary.LittleEndian.Uint32(nonce[12:16]), + } + c := &chacha20poly1305{ + key: chacha20.HChaCha20(&x.key, &hNonce), + } + // The first 4 bytes of the final nonce are unused counter space. + cNonce := make([]byte, NonceSize) + copy(cNonce[4:12], nonce[16:24]) + + return c.seal(dst, cNonce[:], plaintext, additionalData) +} + +func (x *xchacha20poly1305) Open(dst, nonce, ciphertext, additionalData []byte) ([]byte, error) { + if len(nonce) != NonceSizeX { + panic("chacha20poly1305: bad nonce length passed to Open") + } + if len(ciphertext) < 16 { + return nil, errOpen + } + if uint64(len(ciphertext)) > (1<<38)-48 { + panic("chacha20poly1305: ciphertext too large") + } + + hNonce := [4]uint32{ + binary.LittleEndian.Uint32(nonce[0:4]), + binary.LittleEndian.Uint32(nonce[4:8]), + binary.LittleEndian.Uint32(nonce[8:12]), + binary.LittleEndian.Uint32(nonce[12:16]), + } + c := &chacha20poly1305{ + key: chacha20.HChaCha20(&x.key, &hNonce), + } + // The first 4 bytes of the final nonce are unused counter space. + cNonce := make([]byte, NonceSize) + copy(cNonce[4:12], nonce[16:24]) + + return c.open(dst, cNonce[:], ciphertext, additionalData) +} diff --git a/vendor/golang.org/x/crypto/cryptobyte/asn1.go b/vendor/golang.org/x/crypto/cryptobyte/asn1.go index 166e22d..528b9bf 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/asn1.go +++ b/vendor/golang.org/x/crypto/cryptobyte/asn1.go @@ -5,40 +5,36 @@ package cryptobyte import ( - "encoding/asn1" + encoding_asn1 "encoding/asn1" "fmt" "math/big" "reflect" "time" + + "golang.org/x/crypto/cryptobyte/asn1" ) // This file contains ASN.1-related methods for String and Builder. -// Tag represents an ASN.1 tag number and class (together also referred to as -// identifier octets). Methods in this package only support the low-tag-number -// form, i.e. a single identifier octet with bits 7-8 encoding the class and -// bits 1-6 encoding the tag number. -type Tag uint8 - -// Contructed returns t with the context-specific class bit set. 
-func (t Tag) ContextSpecific() Tag { return t | 0x80 } - -// Contructed returns t with the constructed class bit set. -func (t Tag) Constructed() Tag { return t | 0x20 } - // Builder // AddASN1Int64 appends a DER-encoded ASN.1 INTEGER. func (b *Builder) AddASN1Int64(v int64) { - b.addASN1Signed(asn1.TagInteger, v) + b.addASN1Signed(asn1.INTEGER, v) +} + +// AddASN1Int64WithTag appends a DER-encoded ASN.1 INTEGER with the +// given tag. +func (b *Builder) AddASN1Int64WithTag(v int64, tag asn1.Tag) { + b.addASN1Signed(tag, v) } // AddASN1Enum appends a DER-encoded ASN.1 ENUMERATION. func (b *Builder) AddASN1Enum(v int64) { - b.addASN1Signed(asn1.TagEnum, v) + b.addASN1Signed(asn1.ENUM, v) } -func (b *Builder) addASN1Signed(tag Tag, v int64) { +func (b *Builder) addASN1Signed(tag asn1.Tag, v int64) { b.AddASN1(tag, func(c *Builder) { length := 1 for i := v; i >= 0x80 || i < -0x80; i >>= 8 { @@ -54,7 +50,7 @@ func (b *Builder) addASN1Signed(tag Tag, v int64) { // AddASN1Uint64 appends a DER-encoded ASN.1 INTEGER. func (b *Builder) AddASN1Uint64(v uint64) { - b.AddASN1(asn1.TagInteger, func(c *Builder) { + b.AddASN1(asn1.INTEGER, func(c *Builder) { length := 1 for i := v; i >= 0x80; i >>= 8 { length++ @@ -73,7 +69,7 @@ func (b *Builder) AddASN1BigInt(n *big.Int) { return } - b.AddASN1(asn1.TagInteger, func(c *Builder) { + b.AddASN1(asn1.INTEGER, func(c *Builder) { if n.Sign() < 0 { // A negative number has to be converted to two's-complement form. So we // invert and subtract 1. If the most-significant-bit isn't set then @@ -103,7 +99,7 @@ func (b *Builder) AddASN1BigInt(n *big.Int) { // AddASN1OctetString appends a DER-encoded ASN.1 OCTET STRING. func (b *Builder) AddASN1OctetString(bytes []byte) { - b.AddASN1(asn1.TagOctetString, func(c *Builder) { + b.AddASN1(asn1.OCTET_STRING, func(c *Builder) { c.AddBytes(bytes) }) } @@ -116,27 +112,97 @@ func (b *Builder) AddASN1GeneralizedTime(t time.Time) { b.err = fmt.Errorf("cryptobyte: cannot represent %v as a GeneralizedTime", t) return } - b.AddASN1(asn1.TagGeneralizedTime, func(c *Builder) { + b.AddASN1(asn1.GeneralizedTime, func(c *Builder) { c.AddBytes([]byte(t.Format(generalizedTimeFormatStr))) }) } -// AddASN1BitString appends a DER-encoded ASN.1 BIT STRING. -func (b *Builder) AddASN1BitString(s asn1.BitString) { - // TODO(martinkr): Implement. - b.MarshalASN1(s) +// AddASN1BitString appends a DER-encoded ASN.1 BIT STRING. This does not +// support BIT STRINGs that are not a whole number of bytes. 
+func (b *Builder) AddASN1BitString(data []byte) { + b.AddASN1(asn1.BIT_STRING, func(b *Builder) { + b.AddUint8(0) + b.AddBytes(data) + }) +} + +func (b *Builder) addBase128Int(n int64) { + var length int + if n == 0 { + length = 1 + } else { + for i := n; i > 0; i >>= 7 { + length++ + } + } + + for i := length - 1; i >= 0; i-- { + o := byte(n >> uint(i*7)) + o &= 0x7f + if i != 0 { + o |= 0x80 + } + + b.add(o) + } +} + +func isValidOID(oid encoding_asn1.ObjectIdentifier) bool { + if len(oid) < 2 { + return false + } + + if oid[0] > 2 || (oid[0] <= 1 && oid[1] >= 40) { + return false + } + + for _, v := range oid { + if v < 0 { + return false + } + } + + return true +} + +func (b *Builder) AddASN1ObjectIdentifier(oid encoding_asn1.ObjectIdentifier) { + b.AddASN1(asn1.OBJECT_IDENTIFIER, func(b *Builder) { + if !isValidOID(oid) { + b.err = fmt.Errorf("cryptobyte: invalid OID: %v", oid) + return + } + + b.addBase128Int(int64(oid[0])*40 + int64(oid[1])) + for _, v := range oid[2:] { + b.addBase128Int(int64(v)) + } + }) +} + +func (b *Builder) AddASN1Boolean(v bool) { + b.AddASN1(asn1.BOOLEAN, func(b *Builder) { + if v { + b.AddUint8(0xff) + } else { + b.AddUint8(0) + } + }) } -// MarshalASN1 calls asn1.Marshal on its input and appends the result if +func (b *Builder) AddASN1NULL() { + b.add(uint8(asn1.NULL), 0) +} + +// MarshalASN1 calls encoding_asn1.Marshal on its input and appends the result if // successful or records an error if one occurred. func (b *Builder) MarshalASN1(v interface{}) { // NOTE(martinkr): This is somewhat of a hack to allow propagation of - // asn1.Marshal errors into Builder.err. N.B. if you call MarshalASN1 with a + // encoding_asn1.Marshal errors into Builder.err. N.B. if you call MarshalASN1 with a // value embedded into a struct, its tag information is lost. if b.err != nil { return } - bytes, err := asn1.Marshal(v) + bytes, err := encoding_asn1.Marshal(v) if err != nil { b.err = err return @@ -148,7 +214,7 @@ func (b *Builder) MarshalASN1(v interface{}) { // Tags greater than 30 are not supported and result in an error (i.e. // low-tag-number form only). The child builder passed to the // BuilderContinuation can be used to build the content of the ASN.1 object. -func (b *Builder) AddASN1(tag Tag, f BuilderContinuation) { +func (b *Builder) AddASN1(tag asn1.Tag, f BuilderContinuation) { if b.err != nil { return } @@ -164,11 +230,32 @@ func (b *Builder) AddASN1(tag Tag, f BuilderContinuation) { // String +// ReadASN1Boolean decodes an ASN.1 INTEGER and converts it to a boolean +// representation into out and advances. It reports whether the read +// was successful. +func (s *String) ReadASN1Boolean(out *bool) bool { + var bytes String + if !s.ReadASN1(&bytes, asn1.INTEGER) || len(bytes) != 1 { + return false + } + + switch bytes[0] { + case 0: + *out = false + case 0xff: + *out = true + default: + return false + } + + return true +} + var bigIntType = reflect.TypeOf((*big.Int)(nil)).Elem() // ReadASN1Integer decodes an ASN.1 INTEGER into out and advances. If out does -// not point to an integer or to a big.Int, it panics. It returns true on -// success and false on error. +// not point to an integer or to a big.Int, it panics. It reports whether the +// read was successful. 
func (s *String) ReadASN1Integer(out interface{}) bool { if reflect.TypeOf(out).Kind() != reflect.Ptr { panic("out is not a pointer") @@ -215,7 +302,7 @@ var bigOne = big.NewInt(1) func (s *String) readASN1BigInt(out *big.Int) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagInteger) || !checkASN1Integer(bytes) { + if !s.ReadASN1(&bytes, asn1.INTEGER) || !checkASN1Integer(bytes) { return false } if bytes[0]&0x80 == 0x80 { @@ -235,7 +322,7 @@ func (s *String) readASN1BigInt(out *big.Int) bool { func (s *String) readASN1Int64(out *int64) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagInteger) || !checkASN1Integer(bytes) || !asn1Signed(out, bytes) { + if !s.ReadASN1(&bytes, asn1.INTEGER) || !checkASN1Integer(bytes) || !asn1Signed(out, bytes) { return false } return true @@ -258,7 +345,7 @@ func asn1Signed(out *int64, n []byte) bool { func (s *String) readASN1Uint64(out *uint64) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagInteger) || !checkASN1Integer(bytes) || !asn1Unsigned(out, bytes) { + if !s.ReadASN1(&bytes, asn1.INTEGER) || !checkASN1Integer(bytes) || !asn1Unsigned(out, bytes) { return false } return true @@ -281,12 +368,20 @@ func asn1Unsigned(out *uint64, n []byte) bool { return true } -// ReadASN1Enum decodes an ASN.1 ENUMERATION into out and advances. It returns -// true on success and false on error. +// ReadASN1Int64WithTag decodes an ASN.1 INTEGER with the given tag into out +// and advances. It reports whether the read was successful and resulted in a +// value that can be represented in an int64. +func (s *String) ReadASN1Int64WithTag(out *int64, tag asn1.Tag) bool { + var bytes String + return s.ReadASN1(&bytes, tag) && checkASN1Integer(bytes) && asn1Signed(out, bytes) +} + +// ReadASN1Enum decodes an ASN.1 ENUMERATION into out and advances. It reports +// whether the read was successful. func (s *String) ReadASN1Enum(out *int) bool { var bytes String var i int64 - if !s.ReadASN1(&bytes, asn1.TagEnum) || !checkASN1Integer(bytes) || !asn1Signed(&i, bytes) { + if !s.ReadASN1(&bytes, asn1.ENUM) || !checkASN1Integer(bytes) || !asn1Signed(&i, bytes) { return false } if int64(int(i)) != i { @@ -314,10 +409,10 @@ func (s *String) readBase128Int(out *int) bool { } // ReadASN1ObjectIdentifier decodes an ASN.1 OBJECT IDENTIFIER into out and -// advances. It returns true on success and false on error. -func (s *String) ReadASN1ObjectIdentifier(out *asn1.ObjectIdentifier) bool { +// advances. It reports whether the read was successful. +func (s *String) ReadASN1ObjectIdentifier(out *encoding_asn1.ObjectIdentifier) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagOID) || len(bytes) == 0 { + if !s.ReadASN1(&bytes, asn1.OBJECT_IDENTIFIER) || len(bytes) == 0 { return false } @@ -353,10 +448,10 @@ func (s *String) ReadASN1ObjectIdentifier(out *asn1.ObjectIdentifier) bool { } // ReadASN1GeneralizedTime decodes an ASN.1 GENERALIZEDTIME into out and -// advances. It returns true on success and false on error. +// advances. It reports whether the read was successful. func (s *String) ReadASN1GeneralizedTime(out *time.Time) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagGeneralizedTime) { + if !s.ReadASN1(&bytes, asn1.GeneralizedTime) { return false } t := string(bytes) @@ -371,11 +466,11 @@ func (s *String) ReadASN1GeneralizedTime(out *time.Time) bool { return true } -// ReadASN1BitString decodes an ASN.1 BIT STRING into out and advances. It -// returns true on success and false on error. 
-func (s *String) ReadASN1BitString(out *asn1.BitString) bool { +// ReadASN1BitString decodes an ASN.1 BIT STRING into out and advances. +// It reports whether the read was successful. +func (s *String) ReadASN1BitString(out *encoding_asn1.BitString) bool { var bytes String - if !s.ReadASN1(&bytes, asn1.TagBitString) || len(bytes) == 0 { + if !s.ReadASN1(&bytes, asn1.BIT_STRING) || len(bytes) == 0 { return false } @@ -392,20 +487,37 @@ func (s *String) ReadASN1BitString(out *asn1.BitString) bool { return true } +// ReadASN1BitString decodes an ASN.1 BIT STRING into out and advances. It is +// an error if the BIT STRING is not a whole number of bytes. It reports +// whether the read was successful. +func (s *String) ReadASN1BitStringAsBytes(out *[]byte) bool { + var bytes String + if !s.ReadASN1(&bytes, asn1.BIT_STRING) || len(bytes) == 0 { + return false + } + + paddingBits := uint8(bytes[0]) + if paddingBits != 0 { + return false + } + *out = bytes[1:] + return true +} + // ReadASN1Bytes reads the contents of a DER-encoded ASN.1 element (not including // tag and length bytes) into out, and advances. The element must match the -// given tag. It returns true on success and false on error. -func (s *String) ReadASN1Bytes(out *[]byte, tag Tag) bool { +// given tag. It reports whether the read was successful. +func (s *String) ReadASN1Bytes(out *[]byte, tag asn1.Tag) bool { return s.ReadASN1((*String)(out), tag) } // ReadASN1 reads the contents of a DER-encoded ASN.1 element (not including // tag and length bytes) into out, and advances. The element must match the -// given tag. It returns true on success and false on error. +// given tag. It reports whether the read was successful. // // Tags greater than 30 are not supported (i.e. low-tag-number format only). -func (s *String) ReadASN1(out *String, tag Tag) bool { - var t Tag +func (s *String) ReadASN1(out *String, tag asn1.Tag) bool { + var t asn1.Tag if !s.ReadAnyASN1(out, &t) || t != tag { return false } @@ -414,11 +526,11 @@ func (s *String) ReadASN1(out *String, tag Tag) bool { // ReadASN1Element reads the contents of a DER-encoded ASN.1 element (including // tag and length bytes) into out, and advances. The element must match the -// given tag. It returns true on success and false on error. +// given tag. It reports whether the read was successful. // // Tags greater than 30 are not supported (i.e. low-tag-number format only). -func (s *String) ReadASN1Element(out *String, tag Tag) bool { - var t Tag +func (s *String) ReadASN1Element(out *String, tag asn1.Tag) bool { + var t asn1.Tag if !s.ReadAnyASN1Element(out, &t) || t != tag { return false } @@ -426,37 +538,44 @@ func (s *String) ReadASN1Element(out *String, tag Tag) bool { } // ReadAnyASN1 reads the contents of a DER-encoded ASN.1 element (not including -// tag and length bytes) into out, sets outTag to its tag, and advances. It -// returns true on success and false on error. +// tag and length bytes) into out, sets outTag to its tag, and advances. +// It reports whether the read was successful. // // Tags greater than 30 are not supported (i.e. low-tag-number format only). -func (s *String) ReadAnyASN1(out *String, outTag *Tag) bool { +func (s *String) ReadAnyASN1(out *String, outTag *asn1.Tag) bool { return s.readASN1(out, outTag, true /* skip header */) } // ReadAnyASN1Element reads the contents of a DER-encoded ASN.1 element // (including tag and length bytes) into out, sets outTag to is tag, and -// advances. It returns true on success and false on error. +// advances. 
It reports whether the read was successful. // // Tags greater than 30 are not supported (i.e. low-tag-number format only). -func (s *String) ReadAnyASN1Element(out *String, outTag *Tag) bool { +func (s *String) ReadAnyASN1Element(out *String, outTag *asn1.Tag) bool { return s.readASN1(out, outTag, false /* include header */) } -// PeekASN1Tag returns true if the next ASN.1 value on the string starts with +// PeekASN1Tag reports whether the next ASN.1 value on the string starts with // the given tag. -func (s String) PeekASN1Tag(tag Tag) bool { +func (s String) PeekASN1Tag(tag asn1.Tag) bool { if len(s) == 0 { return false } - return Tag(s[0]) == tag + return asn1.Tag(s[0]) == tag +} + +// SkipASN1 reads and discards an ASN.1 element with the given tag. It +// reports whether the operation was successful. +func (s *String) SkipASN1(tag asn1.Tag) bool { + var unused String + return s.ReadASN1(&unused, tag) } -// ReadOptionalASN1 attempts to read the contents of a DER-encoded ASN.Element -// (not including tag and length bytes) tagged with the given tag into out. It -// stores whether an element with the tag was found in outPresent, unless -// outPresent is nil. It returns true on success and false on error. -func (s *String) ReadOptionalASN1(out *String, outPresent *bool, tag Tag) bool { +// ReadOptionalASN1 attempts to read the contents of a DER-encoded ASN.1 +// element (not including tag and length bytes) tagged with the given tag into +// out. It stores whether an element with the tag was found in outPresent, +// unless outPresent is nil. It reports whether the read was successful. +func (s *String) ReadOptionalASN1(out *String, outPresent *bool, tag asn1.Tag) bool { present := s.PeekASN1Tag(tag) if outPresent != nil { *outPresent = present @@ -467,12 +586,22 @@ func (s *String) ReadOptionalASN1(out *String, outPresent *bool, tag Tag) bool { return true } +// SkipOptionalASN1 advances s over an ASN.1 element with the given tag, or +// else leaves s unchanged. It reports whether the operation was successful. +func (s *String) SkipOptionalASN1(tag asn1.Tag) bool { + if !s.PeekASN1Tag(tag) { + return true + } + var unused String + return s.ReadASN1(&unused, tag) +} + // ReadOptionalASN1Integer attempts to read an optional ASN.1 INTEGER // explicitly tagged with tag into out and advances. If no element with a // matching tag is present, it writes defaultValue into out instead. If out -// does not point to an integer or to a big.Int, it panics. It returns true on -// success and false on error. -func (s *String) ReadOptionalASN1Integer(out interface{}, tag Tag, defaultValue interface{}) bool { +// does not point to an integer or to a big.Int, it panics. It reports +// whether the read was successful. +func (s *String) ReadOptionalASN1Integer(out interface{}, tag asn1.Tag, defaultValue interface{}) bool { if reflect.TypeOf(out).Kind() != reflect.Ptr { panic("out is not a pointer") } @@ -508,9 +637,9 @@ func (s *String) ReadOptionalASN1Integer(out interface{}, tag Tag, defaultValue // ReadOptionalASN1OctetString attempts to read an optional ASN.1 OCTET STRING // explicitly tagged with tag into out and advances. If no element with a -// matching tag is present, it writes defaultValue into out instead. It returns -// true on success and false on error. -func (s *String) ReadOptionalASN1OctetString(out *[]byte, outPresent *bool, tag Tag) bool { +// matching tag is present, it sets "out" to nil instead. It reports +// whether the read was successful. 
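// A short sketch (assumed usage, not part of the patch) of two helpers added
// in this change set: AddASN1BitString/ReadASN1BitStringAsBytes for BIT
// STRINGs that are a whole number of bytes, and AddASN1ObjectIdentifier with
// its ReadASN1ObjectIdentifier counterpart. The OID value is illustrative.
package main

import (
	"bytes"
	encoding_asn1 "encoding/asn1"
	"fmt"

	"golang.org/x/crypto/cryptobyte"
	"golang.org/x/crypto/cryptobyte/asn1"
)

func main() {
	var b cryptobyte.Builder
	b.AddASN1(asn1.SEQUENCE, func(b *cryptobyte.Builder) {
		b.AddASN1ObjectIdentifier(encoding_asn1.ObjectIdentifier{1, 2, 840, 113549})
		b.AddASN1BitString([]byte{0xde, 0xad, 0xbe, 0xef})
	})
	der := b.BytesOrPanic()

	var (
		inner cryptobyte.String
		oid   encoding_asn1.ObjectIdentifier
		bits  []byte
	)
	input := cryptobyte.String(der)
	if !input.ReadASN1(&inner, asn1.SEQUENCE) ||
		!inner.ReadASN1ObjectIdentifier(&oid) ||
		!inner.ReadASN1BitStringAsBytes(&bits) ||
		!inner.Empty() {
		panic("bad encoding")
	}
	fmt.Println(oid, bytes.Equal(bits, []byte{0xde, 0xad, 0xbe, 0xef})) // 1.2.840.113549 true
}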
+func (s *String) ReadOptionalASN1OctetString(out *[]byte, outPresent *bool, tag asn1.Tag) bool { var present bool var child String if !s.ReadOptionalASN1(&child, &present, tag) { @@ -521,7 +650,7 @@ func (s *String) ReadOptionalASN1OctetString(out *[]byte, outPresent *bool, tag } if present { var oct String - if !child.ReadASN1(&oct, asn1.TagOctetString) || !child.Empty() { + if !child.ReadASN1(&oct, asn1.OCTET_STRING) || !child.Empty() { return false } *out = oct @@ -531,7 +660,25 @@ func (s *String) ReadOptionalASN1OctetString(out *[]byte, outPresent *bool, tag return true } -func (s *String) readASN1(out *String, outTag *Tag, skipHeader bool) bool { +// ReadOptionalASN1Boolean sets *out to the value of the next ASN.1 BOOLEAN or, +// if the next bytes are not an ASN.1 BOOLEAN, to the value of defaultValue. +// It reports whether the operation was successful. +func (s *String) ReadOptionalASN1Boolean(out *bool, defaultValue bool) bool { + var present bool + var child String + if !s.ReadOptionalASN1(&child, &present, asn1.BOOLEAN) { + return false + } + + if !present { + *out = defaultValue + return true + } + + return s.ReadASN1Boolean(out) +} + +func (s *String) readASN1(out *String, outTag *asn1.Tag, skipHeader bool) bool { if len(*s) < 2 { return false } @@ -547,7 +694,7 @@ func (s *String) readASN1(out *String, outTag *Tag, skipHeader bool) bool { } if outTag != nil { - *outTag = Tag(tag) + *outTag = asn1.Tag(tag) } // ITU-T X.690 section 8.1.3 diff --git a/vendor/golang.org/x/crypto/cryptobyte/asn1/asn1.go b/vendor/golang.org/x/crypto/cryptobyte/asn1/asn1.go new file mode 100644 index 0000000..cda8e3e --- /dev/null +++ b/vendor/golang.org/x/crypto/cryptobyte/asn1/asn1.go @@ -0,0 +1,46 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package asn1 contains supporting types for parsing and building ASN.1 +// messages with the cryptobyte package. +package asn1 // import "golang.org/x/crypto/cryptobyte/asn1" + +// Tag represents an ASN.1 identifier octet, consisting of a tag number +// (indicating a type) and class (such as context-specific or constructed). +// +// Methods in the cryptobyte package only support the low-tag-number form, i.e. +// a single identifier octet with bits 7-8 encoding the class and bits 1-6 +// encoding the tag number. +type Tag uint8 + +const ( + classConstructed = 0x20 + classContextSpecific = 0x80 +) + +// Constructed returns t with the constructed class bit set. +func (t Tag) Constructed() Tag { return t | classConstructed } + +// ContextSpecific returns t with the context-specific class bit set. +func (t Tag) ContextSpecific() Tag { return t | classContextSpecific } + +// The following is a list of standard tag and class combinations. 
+const ( + BOOLEAN = Tag(1) + INTEGER = Tag(2) + BIT_STRING = Tag(3) + OCTET_STRING = Tag(4) + NULL = Tag(5) + OBJECT_IDENTIFIER = Tag(6) + ENUM = Tag(10) + UTF8String = Tag(12) + SEQUENCE = Tag(16 | classConstructed) + SET = Tag(17 | classConstructed) + PrintableString = Tag(19) + T61String = Tag(20) + IA5String = Tag(22) + UTCTime = Tag(23) + GeneralizedTime = Tag(24) + GeneralString = Tag(27) +) diff --git a/vendor/golang.org/x/crypto/cryptobyte/asn1_test.go b/vendor/golang.org/x/crypto/cryptobyte/asn1_test.go index c8c1870..9f6c952 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/asn1_test.go +++ b/vendor/golang.org/x/crypto/cryptobyte/asn1_test.go @@ -6,17 +6,19 @@ package cryptobyte import ( "bytes" - "encoding/asn1" + encoding_asn1 "encoding/asn1" "math/big" "reflect" "testing" "time" + + "golang.org/x/crypto/cryptobyte/asn1" ) type readASN1Test struct { name string in []byte - tag Tag + tag asn1.Tag ok bool out interface{} } @@ -147,6 +149,39 @@ func TestReadASN1IntegerSigned(t *testing.T) { } } }) + + // Repeat with the implicit-tagging functions + t.Run("WithTag", func(t *testing.T) { + for i, test := range testData64 { + tag := asn1.Tag((i * 3) % 32).ContextSpecific() + + testData := make([]byte, len(test.in)) + copy(testData, test.in) + + // Alter the tag of the test case. + testData[0] = uint8(tag) + + in := String(testData) + var out int64 + ok := in.ReadASN1Int64WithTag(&out, tag) + if !ok || out != test.out { + t.Errorf("#%d: in.ReadASN1Int64WithTag() = %v, want true; out = %d, want %d", i, ok, out, test.out) + } + + var b Builder + b.AddASN1Int64WithTag(test.out, tag) + result, err := b.Bytes() + + if err != nil { + t.Errorf("#%d: AddASN1Int64WithTag failed: %s", i, err) + continue + } + + if !bytes.Equal(result, testData) { + t.Errorf("#%d: AddASN1Int64WithTag: got %x, want %x", i, result, testData) + } + } + }) } func TestReadASN1IntegerUnsigned(t *testing.T) { @@ -194,7 +229,7 @@ func TestReadASN1IntegerInvalid(t *testing.T) { } } -func TestReadASN1ObjectIdentifier(t *testing.T) { +func TestASN1ObjectIdentifier(t *testing.T) { testData := []struct { in []byte ok bool @@ -212,10 +247,23 @@ func TestReadASN1ObjectIdentifier(t *testing.T) { for i, test := range testData { in := String(test.in) - var out asn1.ObjectIdentifier + var out encoding_asn1.ObjectIdentifier ok := in.ReadASN1ObjectIdentifier(&out) if ok != test.ok || ok && !out.Equal(test.out) { t.Errorf("#%d: in.ReadASN1ObjectIdentifier() = %v, want %v; out = %v, want %v", i, ok, test.ok, out, test.out) + continue + } + + var b Builder + b.AddASN1ObjectIdentifier(out) + result, err := b.Bytes() + if builderOk := err == nil; test.ok != builderOk { + t.Errorf("#%d: error from Builder.Bytes: %s", i, err) + continue + } + if test.ok && !bytes.Equal(result, test.in) { + t.Errorf("#%d: reserialisation didn't match, got %x, want %x", i, result, test.in) + continue } } } @@ -250,7 +298,7 @@ func TestReadASN1GeneralizedTime(t *testing.T) { {"201001020304-10Z", false, time.Time{}}, } for i, test := range testData { - in := String(append([]byte{asn1.TagGeneralizedTime, byte(len(test.in))}, test.in...)) + in := String(append([]byte{byte(asn1.GeneralizedTime), byte(len(test.in))}, test.in...)) var out time.Time ok := in.ReadASN1GeneralizedTime(&out) if ok != test.ok || ok && !reflect.DeepEqual(out, test.out) { @@ -263,20 +311,20 @@ func TestReadASN1BitString(t *testing.T) { testData := []struct { in []byte ok bool - out asn1.BitString + out encoding_asn1.BitString }{ - {[]byte{}, false, asn1.BitString{}}, - {[]byte{0x00}, 
true, asn1.BitString{}}, - {[]byte{0x07, 0x00}, true, asn1.BitString{Bytes: []byte{0}, BitLength: 1}}, - {[]byte{0x07, 0x01}, false, asn1.BitString{}}, - {[]byte{0x07, 0x40}, false, asn1.BitString{}}, - {[]byte{0x08, 0x00}, false, asn1.BitString{}}, - {[]byte{0xff}, false, asn1.BitString{}}, - {[]byte{0xfe, 0x00}, false, asn1.BitString{}}, + {[]byte{}, false, encoding_asn1.BitString{}}, + {[]byte{0x00}, true, encoding_asn1.BitString{}}, + {[]byte{0x07, 0x00}, true, encoding_asn1.BitString{Bytes: []byte{0}, BitLength: 1}}, + {[]byte{0x07, 0x01}, false, encoding_asn1.BitString{}}, + {[]byte{0x07, 0x40}, false, encoding_asn1.BitString{}}, + {[]byte{0x08, 0x00}, false, encoding_asn1.BitString{}}, + {[]byte{0xff}, false, encoding_asn1.BitString{}}, + {[]byte{0xfe, 0x00}, false, encoding_asn1.BitString{}}, } for i, test := range testData { in := String(append([]byte{3, byte(len(test.in))}, test.in...)) - var out asn1.BitString + var out encoding_asn1.BitString ok := in.ReadASN1BitString(&out) if ok != test.ok || ok && (!bytes.Equal(out.Bytes, test.out.Bytes) || out.BitLength != test.out.BitLength) { t.Errorf("#%d: in.ReadASN1BitString() = %v, want %v; out = %v, want %v", i, ok, test.ok, out, test.out) diff --git a/vendor/golang.org/x/crypto/cryptobyte/builder.go b/vendor/golang.org/x/crypto/cryptobyte/builder.go index 9883fb3..ca7b1db 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/builder.go +++ b/vendor/golang.org/x/crypto/cryptobyte/builder.go @@ -10,15 +10,25 @@ import ( ) // A Builder builds byte strings from fixed-length and length-prefixed values. +// Builders either allocate space as needed, or are ‘fixed’, which means that +// they write into a given buffer and produce an error if it's exhausted. +// // The zero value is a usable Builder that allocates space as needed. +// +// Simple values are marshaled and appended to a Builder using methods on the +// Builder. Length-prefixed values are marshaled by providing a +// BuilderContinuation, which is a function that writes the inner contents of +// the value to a given Builder. See the documentation for BuilderContinuation +// for details. type Builder struct { - err error - result []byte - fixedSize bool - child *Builder - offset int - pendingLenLen int - pendingIsASN1 bool + err error + result []byte + fixedSize bool + child *Builder + offset int + pendingLenLen int + pendingIsASN1 bool + inContinuation *bool } // NewBuilder creates a Builder that appends its output to the given buffer. @@ -40,8 +50,14 @@ func NewFixedBuilder(buffer []byte) *Builder { } } +// SetError sets the value to be returned as the error from Bytes. Writes +// performed after calling SetError are ignored. +func (b *Builder) SetError(err error) { + b.err = err +} + // Bytes returns the bytes written by the builder or an error if one has -// occurred during during building. +// occurred during building. func (b *Builder) Bytes() ([]byte, error) { if b.err != nil { return nil, b.err @@ -84,11 +100,11 @@ func (b *Builder) AddBytes(v []byte) { b.add(v...) } -// BuilderContinuation is continuation-passing interface for building +// BuilderContinuation is a continuation-passing interface for building // length-prefixed byte sequences. Builder methods for length-prefixed -// sequences (AddUint8LengthPrefixed etc.) will invoke the BuilderContinuation +// sequences (AddUint8LengthPrefixed etc) will invoke the BuilderContinuation // supplied to them. The child builder passed to the continuation can be used -// to build the content of the length-prefixed sequence. 
Example: +// to build the content of the length-prefixed sequence. For example: // // parent := cryptobyte.NewBuilder() // parent.AddUint8LengthPrefixed(func (child *Builder) { @@ -102,8 +118,19 @@ func (b *Builder) AddBytes(v []byte) { // length prefix. After the continuation returns, the child must be considered // invalid, i.e. users must not store any copies or references of the child // that outlive the continuation. +// +// If the continuation panics with a value of type BuildError then the inner +// error will be returned as the error from Bytes. If the child panics +// otherwise then Bytes will repanic with the same value. type BuilderContinuation func(child *Builder) +// BuildError wraps an error. If a BuilderContinuation panics with this value, +// the panic will be recovered and the inner error will be returned from +// Builder.Bytes. +type BuildError struct { + Err error +} + // AddUint8LengthPrefixed adds a 8-bit length-prefixed byte sequence. func (b *Builder) AddUint8LengthPrefixed(f BuilderContinuation) { b.addLengthPrefixed(1, false, f) @@ -119,6 +146,34 @@ func (b *Builder) AddUint24LengthPrefixed(f BuilderContinuation) { b.addLengthPrefixed(3, false, f) } +// AddUint32LengthPrefixed adds a big-endian, 32-bit length-prefixed byte sequence. +func (b *Builder) AddUint32LengthPrefixed(f BuilderContinuation) { + b.addLengthPrefixed(4, false, f) +} + +func (b *Builder) callContinuation(f BuilderContinuation, arg *Builder) { + if !*b.inContinuation { + *b.inContinuation = true + + defer func() { + *b.inContinuation = false + + r := recover() + if r == nil { + return + } + + if buildError, ok := r.(BuildError); ok { + b.err = buildError.Err + } else { + panic(r) + } + }() + } + + f(arg) +} + func (b *Builder) addLengthPrefixed(lenLen int, isASN1 bool, f BuilderContinuation) { // Subsequent writes can be ignored if the builder has encountered an error. if b.err != nil { @@ -128,15 +183,20 @@ func (b *Builder) addLengthPrefixed(lenLen int, isASN1 bool, f BuilderContinuati offset := len(b.result) b.add(make([]byte, lenLen)...) + if b.inContinuation == nil { + b.inContinuation = new(bool) + } + b.child = &Builder{ - result: b.result, - fixedSize: b.fixedSize, - offset: offset, - pendingLenLen: lenLen, - pendingIsASN1: isASN1, + result: b.result, + fixedSize: b.fixedSize, + offset: offset, + pendingLenLen: lenLen, + pendingIsASN1: isASN1, + inContinuation: b.inContinuation, } - f(b.child) + b.callContinuation(f, b.child) b.flushChild() if b.child != nil { panic("cryptobyte: internal error") @@ -214,9 +274,11 @@ func (b *Builder) flushChild() { return } - if !b.fixedSize { - b.result = child.result // In case child reallocated result. + if b.fixedSize && &b.result[0] != &child.result[0] { + panic("cryptobyte: BuilderContinuation reallocated a fixed-size buffer") } + + b.result = child.result } func (b *Builder) add(bytes ...byte) { @@ -224,7 +286,7 @@ func (b *Builder) add(bytes ...byte) { return } if b.child != nil { - panic("attempted write while child is pending") + panic("cryptobyte: attempted write while child is pending") } if len(b.result)+len(bytes) < len(bytes) { b.err = errors.New("cryptobyte: length overflow") @@ -236,6 +298,26 @@ func (b *Builder) add(bytes ...byte) { b.result = append(b.result, bytes...) } +// Unwrite rolls back n bytes written directly to the Builder. An attempt by a +// child builder passed to a continuation to unwrite bytes from its parent will +// panic. 
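// A hedged sketch (not part of the patch) combining the new SetError, Unwrite
// and AddUint32LengthPrefixed methods: a helper that writes a payload, rolls
// back a provisional byte, and reports an application-level failure through
// Bytes. All names here are illustrative.
package main

import (
	"errors"
	"fmt"

	"golang.org/x/crypto/cryptobyte"
)

func encodeRecord(payload []byte) ([]byte, error) {
	var b cryptobyte.Builder
	if len(payload) == 0 {
		// Surface validation failures as the error returned from Bytes.
		b.SetError(errors.New("empty payload"))
	}
	b.AddUint32LengthPrefixed(func(b *cryptobyte.Builder) {
		b.AddBytes(payload)
		b.AddUint8(0) // provisional pad byte...
		b.Unwrite(1)  // ...rolled back; a child may only unwrite bytes it wrote itself
	})
	return b.Bytes()
}

func main() {
	rec, err := encodeRecord([]byte("hello"))
	fmt.Printf("%x %v\n", rec, err)

	_, err = encodeRecord(nil)
	fmt.Println(err) // empty payload
}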
+func (b *Builder) Unwrite(n int) { + if b.err != nil { + return + } + if b.child != nil { + panic("cryptobyte: attempted unwrite while child is pending") + } + length := len(b.result) - b.pendingLenLen - b.offset + if length < 0 { + panic("cryptobyte: internal error") + } + if n > length { + panic("cryptobyte: attempted to unwrite more than was written") + } + b.result = b.result[:len(b.result)-n] +} + // A MarshalingValue marshals itself into a Builder. type MarshalingValue interface { // Marshal is called by Builder.AddValue. It receives a pointer to a builder diff --git a/vendor/golang.org/x/crypto/cryptobyte/cryptobyte_test.go b/vendor/golang.org/x/crypto/cryptobyte/cryptobyte_test.go index 49c61dc..fb63709 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/cryptobyte_test.go +++ b/vendor/golang.org/x/crypto/cryptobyte/cryptobyte_test.go @@ -6,6 +6,7 @@ package cryptobyte import ( "bytes" + "errors" "fmt" "testing" ) @@ -18,6 +19,54 @@ func builderBytesEq(b *Builder, want ...byte) error { return nil } +func TestContinuationError(t *testing.T) { + const errorStr = "TestContinuationError" + var b Builder + b.AddUint8LengthPrefixed(func(b *Builder) { + b.AddUint8(1) + panic(BuildError{Err: errors.New(errorStr)}) + }) + + ret, err := b.Bytes() + if ret != nil { + t.Error("expected nil result") + } + if err == nil { + t.Fatal("unexpected nil error") + } + if s := err.Error(); s != errorStr { + t.Errorf("expected error %q, got %v", errorStr, s) + } +} + +func TestContinuationNonError(t *testing.T) { + defer func() { + recover() + }() + + var b Builder + b.AddUint8LengthPrefixed(func(b *Builder) { + b.AddUint8(1) + panic(1) + }) + + t.Error("Builder did not panic") +} + +func TestGeneratedPanic(t *testing.T) { + defer func() { + recover() + }() + + var b Builder + b.AddUint8LengthPrefixed(func(b *Builder) { + var p *byte + *p = 0 + }) + + t.Error("Builder did not panic") +} + func TestBytes(t *testing.T) { var b Builder v := []byte("foobarbaz") @@ -278,12 +327,14 @@ func TestWriteWithPendingChild(t *testing.T) { var b Builder b.AddUint8LengthPrefixed(func(c *Builder) { c.AddUint8LengthPrefixed(func(d *Builder) { - defer func() { - if recover() == nil { - t.Errorf("recover() = nil, want error; c.AddUint8() did not panic") - } + func() { + defer func() { + if recover() == nil { + t.Errorf("recover() = nil, want error; c.AddUint8() did not panic") + } + }() + c.AddUint8(2) // panics }() - c.AddUint8(2) // panics defer func() { if recover() == nil { @@ -302,6 +353,92 @@ func TestWriteWithPendingChild(t *testing.T) { }) } +func TestSetError(t *testing.T) { + const errorStr = "TestSetError" + var b Builder + b.SetError(errors.New(errorStr)) + + ret, err := b.Bytes() + if ret != nil { + t.Error("expected nil result") + } + if err == nil { + t.Fatal("unexpected nil error") + } + if s := err.Error(); s != errorStr { + t.Errorf("expected error %q, got %v", errorStr, s) + } +} + +func TestUnwrite(t *testing.T) { + var b Builder + b.AddBytes([]byte{1, 2, 3, 4, 5}) + b.Unwrite(2) + if err := builderBytesEq(&b, 1, 2, 3); err != nil { + t.Error(err) + } + + func() { + defer func() { + if recover() == nil { + t.Errorf("recover() = nil, want error; b.Unwrite() did not panic") + } + }() + b.Unwrite(4) // panics + }() + + b = Builder{} + b.AddBytes([]byte{1, 2, 3, 4, 5}) + b.AddUint8LengthPrefixed(func(b *Builder) { + b.AddBytes([]byte{1, 2, 3, 4, 5}) + + defer func() { + if recover() == nil { + t.Errorf("recover() = nil, want error; b.Unwrite() did not panic") + } + }() + b.Unwrite(6) // panics + }) + + b = 
Builder{} + b.AddBytes([]byte{1, 2, 3, 4, 5}) + b.AddUint8LengthPrefixed(func(c *Builder) { + defer func() { + if recover() == nil { + t.Errorf("recover() = nil, want error; b.Unwrite() did not panic") + } + }() + b.Unwrite(2) // panics (attempted unwrite while child is pending) + }) +} + +func TestFixedBuilderLengthPrefixed(t *testing.T) { + bufCap := 10 + inner := bytes.Repeat([]byte{0xff}, bufCap-2) + buf := make([]byte, 0, bufCap) + b := NewFixedBuilder(buf) + b.AddUint16LengthPrefixed(func(b *Builder) { + b.AddBytes(inner) + }) + if got := b.BytesOrPanic(); len(got) != bufCap { + t.Errorf("Expected output length to be %d, got %d", bufCap, len(got)) + } +} + +func TestFixedBuilderPanicReallocate(t *testing.T) { + defer func() { + recover() + }() + + b := NewFixedBuilder(make([]byte, 0, 10)) + b1 := NewFixedBuilder(make([]byte, 0, 10)) + b.AddUint16LengthPrefixed(func(b *Builder) { + *b = *b1 + }) + + t.Error("Builder did not panic") +} + // ASN.1 func TestASN1Int64(t *testing.T) { diff --git a/vendor/golang.org/x/crypto/cryptobyte/example_test.go b/vendor/golang.org/x/crypto/cryptobyte/example_test.go index 7d3c06e..86c098a 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/example_test.go +++ b/vendor/golang.org/x/crypto/cryptobyte/example_test.go @@ -5,9 +5,11 @@ package cryptobyte_test import ( - "encoding/asn1" + "errors" "fmt" + "golang.org/x/crypto/cryptobyte" + "golang.org/x/crypto/cryptobyte/asn1" ) func ExampleString_lengthPrefixed() { @@ -37,7 +39,7 @@ func ExampleString_lengthPrefixed() { fmt.Printf("%#v\n", result) } -func ExampleString_asn1() { +func ExampleString_aSN1() { // This is an example of parsing ASN.1 data that looks like: // Foo ::= SEQUENCE { // version [6] INTEGER DEFAULT 0 @@ -51,12 +53,12 @@ func ExampleString_asn1() { data, inner, versionBytes cryptobyte.String haveVersion bool ) - if !input.ReadASN1(&inner, cryptobyte.Tag(asn1.TagSequence).Constructed()) || + if !input.ReadASN1(&inner, asn1.SEQUENCE) || !input.Empty() || - !inner.ReadOptionalASN1(&versionBytes, &haveVersion, cryptobyte.Tag(6).Constructed().ContextSpecific()) || + !inner.ReadOptionalASN1(&versionBytes, &haveVersion, asn1.Tag(6).Constructed().ContextSpecific()) || (haveVersion && !versionBytes.ReadASN1Integer(&version)) || (haveVersion && !versionBytes.Empty()) || - !inner.ReadASN1(&data, asn1.TagOctetString) || + !inner.ReadASN1(&data, asn1.OCTET_STRING) || !inner.Empty() { panic("bad format") } @@ -65,7 +67,7 @@ func ExampleString_asn1() { fmt.Printf("haveVersion: %t, version: %d, data: %s\n", haveVersion, version, string(data)) } -func ExampleBuilder_asn1() { +func ExampleBuilder_aSN1() { // This is an example of building ASN.1 data that looks like: // Foo ::= SEQUENCE { // version [6] INTEGER DEFAULT 0 @@ -77,9 +79,9 @@ func ExampleBuilder_asn1() { const defaultVersion = 0 var b cryptobyte.Builder - b.AddASN1(cryptobyte.Tag(asn1.TagSequence).Constructed(), func(b *cryptobyte.Builder) { + b.AddASN1(asn1.SEQUENCE, func(b *cryptobyte.Builder) { if version != defaultVersion { - b.AddASN1(cryptobyte.Tag(6).Constructed().ContextSpecific(), func(b *cryptobyte.Builder) { + b.AddASN1(asn1.Tag(6).Constructed().ContextSpecific(), func(b *cryptobyte.Builder) { b.AddASN1Int64(version) }) } @@ -118,3 +120,35 @@ func ExampleBuilder_lengthPrefixed() { // Output: 000c0568656c6c6f05776f726c64 fmt.Printf("%x\n", result) } + +func ExampleBuilder_lengthPrefixOverflow() { + // Writing more data that can be expressed by the length prefix results + // in an error from Bytes(). 
+ + tooLarge := make([]byte, 256) + + var b cryptobyte.Builder + b.AddUint8LengthPrefixed(func(b *cryptobyte.Builder) { + b.AddBytes(tooLarge) + }) + + result, err := b.Bytes() + fmt.Printf("len=%d err=%s\n", len(result), err) + + // Output: len=0 err=cryptobyte: pending child length 256 exceeds 1-byte length prefix +} + +func ExampleBuilderContinuation_errorHandling() { + var b cryptobyte.Builder + // Continuations that panic with a BuildError will cause Bytes to + // return the inner error. + b.AddUint16LengthPrefixed(func(b *cryptobyte.Builder) { + b.AddUint32(0) + panic(cryptobyte.BuildError{Err: errors.New("example error")}) + }) + + result, err := b.Bytes() + fmt.Printf("len=%d err=%s\n", len(result), err) + + // Output: len=0 err=example error +} diff --git a/vendor/golang.org/x/crypto/cryptobyte/string.go b/vendor/golang.org/x/crypto/cryptobyte/string.go index 6780336..39bf98a 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/string.go +++ b/vendor/golang.org/x/crypto/cryptobyte/string.go @@ -2,9 +2,19 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package cryptobyte implements building and parsing of byte strings for -// DER-encoded ASN.1 and TLS messages. See the examples for the Builder and -// String types to get started. +// Package cryptobyte contains types that help with parsing and constructing +// length-prefixed, binary messages, including ASN.1 DER. (The asn1 subpackage +// contains useful ASN.1 constants.) +// +// The String type is for parsing. It wraps a []byte slice and provides helper +// functions for consuming structures, value by value. +// +// The Builder type is for constructing messages. It providers helper functions +// for appending values and also for appending length-prefixed submessages – +// without having to worry about calculating the length prefix ahead of time. +// +// See the documentation and examples for the Builder and String types to get +// started. package cryptobyte // import "golang.org/x/crypto/cryptobyte" // String represents a string of bytes. It provides methods for parsing @@ -27,8 +37,8 @@ func (s *String) Skip(n int) bool { return s.read(n) != nil } -// ReadUint8 decodes an 8-bit value into out and advances over it. It -// returns true on success and false on error. +// ReadUint8 decodes an 8-bit value into out and advances over it. +// It reports whether the read was successful. func (s *String) ReadUint8(out *uint8) bool { v := s.read(1) if v == nil { @@ -39,7 +49,7 @@ func (s *String) ReadUint8(out *uint8) bool { } // ReadUint16 decodes a big-endian, 16-bit value into out and advances over it. -// It returns true on success and false on error. +// It reports whether the read was successful. func (s *String) ReadUint16(out *uint16) bool { v := s.read(2) if v == nil { @@ -50,7 +60,7 @@ func (s *String) ReadUint16(out *uint16) bool { } // ReadUint24 decodes a big-endian, 24-bit value into out and advances over it. -// It returns true on success and false on error. +// It reports whether the read was successful. func (s *String) ReadUint24(out *uint32) bool { v := s.read(3) if v == nil { @@ -61,7 +71,7 @@ func (s *String) ReadUint24(out *uint32) bool { } // ReadUint32 decodes a big-endian, 32-bit value into out and advances over it. -// It returns true on success and false on error. +// It reports whether the read was successful. 
func (s *String) ReadUint32(out *uint32) bool { v := s.read(4) if v == nil { @@ -109,28 +119,27 @@ func (s *String) readLengthPrefixed(lenLen int, outChild *String) bool { } // ReadUint8LengthPrefixed reads the content of an 8-bit length-prefixed value -// into out and advances over it. It returns true on success and false on -// error. +// into out and advances over it. It reports whether the read was successful. func (s *String) ReadUint8LengthPrefixed(out *String) bool { return s.readLengthPrefixed(1, out) } // ReadUint16LengthPrefixed reads the content of a big-endian, 16-bit -// length-prefixed value into out and advances over it. It returns true on -// success and false on error. +// length-prefixed value into out and advances over it. It reports whether the +// read was successful. func (s *String) ReadUint16LengthPrefixed(out *String) bool { return s.readLengthPrefixed(2, out) } // ReadUint24LengthPrefixed reads the content of a big-endian, 24-bit -// length-prefixed value into out and advances over it. It returns true on -// success and false on error. +// length-prefixed value into out and advances over it. It reports whether +// the read was successful. func (s *String) ReadUint24LengthPrefixed(out *String) bool { return s.readLengthPrefixed(3, out) } -// ReadBytes reads n bytes into out and advances over them. It returns true on -// success and false and error. +// ReadBytes reads n bytes into out and advances over them. It reports +// whether the read was successful. func (s *String) ReadBytes(out *[]byte, n int) bool { v := s.read(n) if v == nil { @@ -140,8 +149,8 @@ func (s *String) ReadBytes(out *[]byte, n int) bool { return true } -// CopyBytes copies len(out) bytes into out and advances over them. It returns -// true on success and false on error. +// CopyBytes copies len(out) bytes into out and advances over them. It reports +// whether the copy operation was successful func (s *String) CopyBytes(out []byte) bool { n := len(out) v := s.read(n) diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519.go b/vendor/golang.org/x/crypto/curve25519/curve25519.go index 2d14c2a..75f24ba 100644 --- a/vendor/golang.org/x/crypto/curve25519/curve25519.go +++ b/vendor/golang.org/x/crypto/curve25519/curve25519.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// We have a implementation in amd64 assembly so this code is only run on +// We have an implementation in amd64 assembly so this code is only run on // non-amd64 platforms. The amd64 assembly does not support gccgo. 
// +build !amd64 gccgo appengine @@ -86,7 +86,7 @@ func feFromBytes(dst *fieldElement, src *[32]byte) { h6 := load3(src[20:]) << 7 h7 := load3(src[23:]) << 5 h8 := load3(src[26:]) << 4 - h9 := load3(src[29:]) << 2 + h9 := (load3(src[29:]) & 0x7fffff) << 2 var carry [10]int64 carry[9] = (h9 + 1<<24) >> 25 diff --git a/vendor/golang.org/x/crypto/curve25519/curve25519_test.go b/vendor/golang.org/x/crypto/curve25519/curve25519_test.go index 051a830..56ef73b 100644 --- a/vendor/golang.org/x/crypto/curve25519/curve25519_test.go +++ b/vendor/golang.org/x/crypto/curve25519/curve25519_test.go @@ -5,6 +5,8 @@ package curve25519 import ( + "bytes" + "crypto/rand" "fmt" "testing" ) @@ -28,6 +30,44 @@ func TestBaseScalarMult(t *testing.T) { } } +func TestTestVectors(t *testing.T) { + for _, tv := range testVectors { + var got [32]byte + ScalarMult(&got, &tv.In, &tv.Base) + if !bytes.Equal(got[:], tv.Expect[:]) { + t.Logf(" in = %x", tv.In) + t.Logf(" base = %x", tv.Base) + t.Logf(" got = %x", got) + t.Logf("expect = %x", tv.Expect) + t.Fail() + } + } +} + +// TestHighBitIgnored tests the following requirement in RFC 7748: +// +// When receiving such an array, implementations of X25519 (but not X448) MUST +// mask the most significant bit in the final byte. +// +// Regression test for issue #30095. +func TestHighBitIgnored(t *testing.T) { + var s, u [32]byte + rand.Read(s[:]) + rand.Read(u[:]) + + var hi0, hi1 [32]byte + + u[31] &= 0x7f + ScalarMult(&hi0, &s, &u) + + u[31] |= 0x80 + ScalarMult(&hi1, &s, &u) + + if !bytes.Equal(hi0[:], hi1[:]) { + t.Errorf("high bit of group point should not affect result") + } +} + func BenchmarkScalarBaseMult(b *testing.B) { var in, out [32]byte in[0] = 1 diff --git a/vendor/golang.org/x/crypto/curve25519/testvectors_test.go b/vendor/golang.org/x/crypto/curve25519/testvectors_test.go new file mode 100644 index 0000000..79b7a09 --- /dev/null +++ b/vendor/golang.org/x/crypto/curve25519/testvectors_test.go @@ -0,0 +1,93 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package curve25519 + +// testVectors generated with BoringSSL. 
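// A hedged sketch (illustrative only, not part of the patch) of a
// Diffie-Hellman exchange with this package's ScalarBaseMult/ScalarMult API.
// With the masking fix above, the high bit of a received public point is
// ignored as RFC 7748 requires, so both sides derive the same shared secret.
package main

import (
	"bytes"
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/curve25519"
)

func main() {
	var aPriv, bPriv [32]byte
	if _, err := rand.Read(aPriv[:]); err != nil {
		panic(err)
	}
	if _, err := rand.Read(bPriv[:]); err != nil {
		panic(err)
	}

	// Public keys: scalar multiplication of the base point.
	var aPub, bPub [32]byte
	curve25519.ScalarBaseMult(&aPub, &aPriv)
	curve25519.ScalarBaseMult(&bPub, &bPriv)

	// Shared secret: each side multiplies its scalar by the peer's point.
	var aShared, bShared [32]byte
	curve25519.ScalarMult(&aShared, &aPriv, &bPub)
	curve25519.ScalarMult(&bShared, &bPriv, &aPub)

	fmt.Println(bytes.Equal(aShared[:], bShared[:])) // true
}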
+var testVectors = []struct { + In [32]byte + Base [32]byte + Expect [32]byte +}{ + { + In: [32]byte{0x66, 0x8f, 0xb9, 0xf7, 0x6a, 0xd9, 0x71, 0xc8, 0x1a, 0xc9, 0x0, 0x7, 0x1a, 0x15, 0x60, 0xbc, 0xe2, 0xca, 0x0, 0xca, 0xc7, 0xe6, 0x7a, 0xf9, 0x93, 0x48, 0x91, 0x37, 0x61, 0x43, 0x40, 0x14}, + Base: [32]byte{0xdb, 0x5f, 0x32, 0xb7, 0xf8, 0x41, 0xe7, 0xa1, 0xa0, 0x9, 0x68, 0xef, 0xfd, 0xed, 0x12, 0x73, 0x5f, 0xc4, 0x7a, 0x3e, 0xb1, 0x3b, 0x57, 0x9a, 0xac, 0xad, 0xea, 0xe8, 0x9, 0x39, 0xa7, 0xdd}, + Expect: [32]byte{0x9, 0xd, 0x85, 0xe5, 0x99, 0xea, 0x8e, 0x2b, 0xee, 0xb6, 0x13, 0x4, 0xd3, 0x7b, 0xe1, 0xe, 0xc5, 0xc9, 0x5, 0xf9, 0x92, 0x7d, 0x32, 0xf4, 0x2a, 0x9a, 0xa, 0xfb, 0x3e, 0xb, 0x40, 0x74}, + }, + { + In: [32]byte{0x63, 0x66, 0x95, 0xe3, 0x4f, 0x75, 0xb9, 0xa2, 0x79, 0xc8, 0x70, 0x6f, 0xad, 0x12, 0x89, 0xf2, 0xc0, 0xb1, 0xe2, 0x2e, 0x16, 0xf8, 0xb8, 0x86, 0x17, 0x29, 0xc1, 0xa, 0x58, 0x29, 0x58, 0xaf}, + Base: [32]byte{0x9, 0xd, 0x7, 0x1, 0xf8, 0xfd, 0xe2, 0x8f, 0x70, 0x4, 0x3b, 0x83, 0xf2, 0x34, 0x62, 0x25, 0x41, 0x9b, 0x18, 0xa7, 0xf2, 0x7e, 0x9e, 0x3d, 0x2b, 0xfd, 0x4, 0xe1, 0xf, 0x3d, 0x21, 0x3e}, + Expect: [32]byte{0xbf, 0x26, 0xec, 0x7e, 0xc4, 0x13, 0x6, 0x17, 0x33, 0xd4, 0x40, 0x70, 0xea, 0x67, 0xca, 0xb0, 0x2a, 0x85, 0xdc, 0x1b, 0xe8, 0xcf, 0xe1, 0xff, 0x73, 0xd5, 0x41, 0xcc, 0x8, 0x32, 0x55, 0x6}, + }, + { + In: [32]byte{0x73, 0x41, 0x81, 0xcd, 0x1a, 0x94, 0x6, 0x52, 0x2a, 0x56, 0xfe, 0x25, 0xe4, 0x3e, 0xcb, 0xf0, 0x29, 0x5d, 0xb5, 0xdd, 0xd0, 0x60, 0x9b, 0x3c, 0x2b, 0x4e, 0x79, 0xc0, 0x6f, 0x8b, 0xd4, 0x6d}, + Base: [32]byte{0xf8, 0xa8, 0x42, 0x1c, 0x7d, 0x21, 0xa9, 0x2d, 0xb3, 0xed, 0xe9, 0x79, 0xe1, 0xfa, 0x6a, 0xcb, 0x6, 0x2b, 0x56, 0xb1, 0x88, 0x5c, 0x71, 0xc5, 0x11, 0x53, 0xcc, 0xb8, 0x80, 0xac, 0x73, 0x15}, + Expect: [32]byte{0x11, 0x76, 0xd0, 0x16, 0x81, 0xf2, 0xcf, 0x92, 0x9d, 0xa2, 0xc7, 0xa3, 0xdf, 0x66, 0xb5, 0xd7, 0x72, 0x9f, 0xd4, 0x22, 0x22, 0x6f, 0xd6, 0x37, 0x42, 0x16, 0xbf, 0x7e, 0x2, 0xfd, 0xf, 0x62}, + }, + { + In: [32]byte{0x1f, 0x70, 0x39, 0x1f, 0x6b, 0xa8, 0x58, 0x12, 0x94, 0x13, 0xbd, 0x80, 0x1b, 0x12, 0xac, 0xbf, 0x66, 0x23, 0x62, 0x82, 0x5c, 0xa2, 0x50, 0x9c, 0x81, 0x87, 0x59, 0xa, 0x2b, 0xe, 0x61, 0x72}, + Base: [32]byte{0xd3, 0xea, 0xd0, 0x7a, 0x0, 0x8, 0xf4, 0x45, 0x2, 0xd5, 0x80, 0x8b, 0xff, 0xc8, 0x97, 0x9f, 0x25, 0xa8, 0x59, 0xd5, 0xad, 0xf4, 0x31, 0x2e, 0xa4, 0x87, 0x48, 0x9c, 0x30, 0xe0, 0x1b, 0x3b}, + Expect: [32]byte{0xf8, 0x48, 0x2f, 0x2e, 0x9e, 0x58, 0xbb, 0x6, 0x7e, 0x86, 0xb2, 0x87, 0x24, 0xb3, 0xc0, 0xa3, 0xbb, 0xb5, 0x7, 0x3e, 0x4c, 0x6a, 0xcd, 0x93, 0xdf, 0x54, 0x5e, 0xff, 0xdb, 0xba, 0x50, 0x5f}, + }, + { + In: [32]byte{0x3a, 0x7a, 0xe6, 0xcf, 0x8b, 0x88, 0x9d, 0x2b, 0x7a, 0x60, 0xa4, 0x70, 0xad, 0x6a, 0xd9, 0x99, 0x20, 0x6b, 0xf5, 0x7d, 0x90, 0x30, 0xdd, 0xf7, 0xf8, 0x68, 0xc, 0x8b, 0x1a, 0x64, 0x5d, 0xaa}, + Base: [32]byte{0x4d, 0x25, 0x4c, 0x80, 0x83, 0xd8, 0x7f, 0x1a, 0x9b, 0x3e, 0xa7, 0x31, 0xef, 0xcf, 0xf8, 0xa6, 0xf2, 0x31, 0x2d, 0x6f, 0xed, 0x68, 0xe, 0xf8, 0x29, 0x18, 0x51, 0x61, 0xc8, 0xfc, 0x50, 0x60}, + Expect: [32]byte{0x47, 0xb3, 0x56, 0xd5, 0x81, 0x8d, 0xe8, 0xef, 0xac, 0x77, 0x4b, 0x71, 0x4c, 0x42, 0xc4, 0x4b, 0xe6, 0x85, 0x23, 0xdd, 0x57, 0xdb, 0xd7, 0x39, 0x62, 0xd5, 0xa5, 0x26, 0x31, 0x87, 0x62, 0x37}, + }, + { + In: [32]byte{0x20, 0x31, 0x61, 0xc3, 0x15, 0x9a, 0x87, 0x6a, 0x2b, 0xea, 0xec, 0x29, 0xd2, 0x42, 0x7f, 0xb0, 0xc7, 0xc3, 0xd, 0x38, 0x2c, 0xd0, 0x13, 0xd2, 0x7c, 0xc3, 0xd3, 0x93, 0xdb, 0xd, 0xaf, 0x6f}, + Base: [32]byte{0x6a, 0xb9, 0x5d, 0x1a, 0xbe, 0x68, 0xc0, 0x9b, 0x0, 0x5c, 0x3d, 0xb9, 0x4, 0x2c, 
0xc9, 0x1a, 0xc8, 0x49, 0xf7, 0xe9, 0x4a, 0x2a, 0x4a, 0x9b, 0x89, 0x36, 0x78, 0x97, 0xb, 0x7b, 0x95, 0xbf}, + Expect: [32]byte{0x11, 0xed, 0xae, 0xdc, 0x95, 0xff, 0x78, 0xf5, 0x63, 0xa1, 0xc8, 0xf1, 0x55, 0x91, 0xc0, 0x71, 0xde, 0xa0, 0x92, 0xb4, 0xd7, 0xec, 0xaa, 0xc8, 0xe0, 0x38, 0x7b, 0x5a, 0x16, 0xc, 0x4e, 0x5d}, + }, + { + In: [32]byte{0x13, 0xd6, 0x54, 0x91, 0xfe, 0x75, 0xf2, 0x3, 0xa0, 0x8, 0xb4, 0x41, 0x5a, 0xbc, 0x60, 0xd5, 0x32, 0xe6, 0x95, 0xdb, 0xd2, 0xf1, 0xe8, 0x3, 0xac, 0xcb, 0x34, 0xb2, 0xb7, 0x2c, 0x3d, 0x70}, + Base: [32]byte{0x2e, 0x78, 0x4e, 0x4, 0xca, 0x0, 0x73, 0x33, 0x62, 0x56, 0xa8, 0x39, 0x25, 0x5e, 0xd2, 0xf7, 0xd4, 0x79, 0x6a, 0x64, 0xcd, 0xc3, 0x7f, 0x1e, 0xb0, 0xe5, 0xc4, 0xc8, 0xd1, 0xd1, 0xe0, 0xf5}, + Expect: [32]byte{0x56, 0x3e, 0x8c, 0x9a, 0xda, 0xa7, 0xd7, 0x31, 0x1, 0xb0, 0xf2, 0xea, 0xd3, 0xca, 0xe1, 0xea, 0x5d, 0x8f, 0xcd, 0x5c, 0xd3, 0x60, 0x80, 0xbb, 0x8e, 0x6e, 0xc0, 0x3d, 0x61, 0x45, 0x9, 0x17}, + }, + { + In: [32]byte{0x68, 0x6f, 0x7d, 0xa9, 0x3b, 0xf2, 0x68, 0xe5, 0x88, 0x6, 0x98, 0x31, 0xf0, 0x47, 0x16, 0x3f, 0x33, 0x58, 0x99, 0x89, 0xd0, 0x82, 0x6e, 0x98, 0x8, 0xfb, 0x67, 0x8e, 0xd5, 0x7e, 0x67, 0x49}, + Base: [32]byte{0x8b, 0x54, 0x9b, 0x2d, 0xf6, 0x42, 0xd3, 0xb2, 0x5f, 0xe8, 0x38, 0xf, 0x8c, 0xc4, 0x37, 0x5f, 0x99, 0xb7, 0xbb, 0x4d, 0x27, 0x5f, 0x77, 0x9f, 0x3b, 0x7c, 0x81, 0xb8, 0xa2, 0xbb, 0xc1, 0x29}, + Expect: [32]byte{0x1, 0x47, 0x69, 0x65, 0x42, 0x6b, 0x61, 0x71, 0x74, 0x9a, 0x8a, 0xdd, 0x92, 0x35, 0x2, 0x5c, 0xe5, 0xf5, 0x57, 0xfe, 0x40, 0x9, 0xf7, 0x39, 0x30, 0x44, 0xeb, 0xbb, 0x8a, 0xe9, 0x52, 0x79}, + }, + { + In: [32]byte{0x82, 0xd6, 0x1c, 0xce, 0xdc, 0x80, 0x6a, 0x60, 0x60, 0xa3, 0x34, 0x9a, 0x5e, 0x87, 0xcb, 0xc7, 0xac, 0x11, 0x5e, 0x4f, 0x87, 0x77, 0x62, 0x50, 0xae, 0x25, 0x60, 0x98, 0xa7, 0xc4, 0x49, 0x59}, + Base: [32]byte{0x8b, 0x6b, 0x9d, 0x8, 0xf6, 0x1f, 0xc9, 0x1f, 0xe8, 0xb3, 0x29, 0x53, 0xc4, 0x23, 0x40, 0xf0, 0x7, 0xb5, 0x71, 0xdc, 0xb0, 0xa5, 0x6d, 0x10, 0x72, 0x4e, 0xce, 0xf9, 0x95, 0xc, 0xfb, 0x25}, + Expect: [32]byte{0x9c, 0x49, 0x94, 0x1f, 0x9c, 0x4f, 0x18, 0x71, 0xfa, 0x40, 0x91, 0xfe, 0xd7, 0x16, 0xd3, 0x49, 0x99, 0xc9, 0x52, 0x34, 0xed, 0xf2, 0xfd, 0xfb, 0xa6, 0xd1, 0x4a, 0x5a, 0xfe, 0x9e, 0x5, 0x58}, + }, + { + In: [32]byte{0x7d, 0xc7, 0x64, 0x4, 0x83, 0x13, 0x97, 0xd5, 0x88, 0x4f, 0xdf, 0x6f, 0x97, 0xe1, 0x74, 0x4c, 0x9e, 0xb1, 0x18, 0xa3, 0x1a, 0x7b, 0x23, 0xf8, 0xd7, 0x9f, 0x48, 0xce, 0x9c, 0xad, 0x15, 0x4b}, + Base: [32]byte{0x1a, 0xcd, 0x29, 0x27, 0x84, 0xf4, 0x79, 0x19, 0xd4, 0x55, 0xf8, 0x87, 0x44, 0x83, 0x58, 0x61, 0xb, 0xb9, 0x45, 0x96, 0x70, 0xeb, 0x99, 0xde, 0xe4, 0x60, 0x5, 0xf6, 0x89, 0xca, 0x5f, 0xb6}, + Expect: [32]byte{0x0, 0xf4, 0x3c, 0x2, 0x2e, 0x94, 0xea, 0x38, 0x19, 0xb0, 0x36, 0xae, 0x2b, 0x36, 0xb2, 0xa7, 0x61, 0x36, 0xaf, 0x62, 0x8a, 0x75, 0x1f, 0xe5, 0xd0, 0x1e, 0x3, 0xd, 0x44, 0x25, 0x88, 0x59}, + }, + { + In: [32]byte{0xfb, 0xc4, 0x51, 0x1d, 0x23, 0xa6, 0x82, 0xae, 0x4e, 0xfd, 0x8, 0xc8, 0x17, 0x9c, 0x1c, 0x6, 0x7f, 0x9c, 0x8b, 0xe7, 0x9b, 0xbc, 0x4e, 0xff, 0x5c, 0xe2, 0x96, 0xc6, 0xbc, 0x1f, 0xf4, 0x45}, + Base: [32]byte{0x55, 0xca, 0xff, 0x21, 0x81, 0xf2, 0x13, 0x6b, 0xe, 0xd0, 0xe1, 0xe2, 0x99, 0x44, 0x48, 0xe1, 0x6c, 0xc9, 0x70, 0x64, 0x6a, 0x98, 0x3d, 0x14, 0xd, 0xc4, 0xea, 0xb3, 0xd9, 0x4c, 0x28, 0x4e}, + Expect: [32]byte{0xae, 0x39, 0xd8, 0x16, 0x53, 0x23, 0x45, 0x79, 0x4d, 0x26, 0x91, 0xe0, 0x80, 0x1c, 0xaa, 0x52, 0x5f, 0xc3, 0x63, 0x4d, 0x40, 0x2c, 0xe9, 0x58, 0xb, 0x33, 0x38, 0xb4, 0x6f, 0x8b, 0xb9, 0x72}, + }, + { + In: [32]byte{0x4e, 0x6, 0xc, 0xe1, 0xc, 0xeb, 0xf0, 0x95, 
0x9, 0x87, 0x16, 0xc8, 0x66, 0x19, 0xeb, 0x9f, 0x7d, 0xf6, 0x65, 0x24, 0x69, 0x8b, 0xa7, 0x98, 0x8c, 0x3b, 0x90, 0x95, 0xd9, 0xf5, 0x1, 0x34}, + Base: [32]byte{0x57, 0x73, 0x3f, 0x2d, 0x86, 0x96, 0x90, 0xd0, 0xd2, 0xed, 0xae, 0xc9, 0x52, 0x3d, 0xaa, 0x2d, 0xa9, 0x54, 0x45, 0xf4, 0x4f, 0x57, 0x83, 0xc1, 0xfa, 0xec, 0x6c, 0x3a, 0x98, 0x28, 0x18, 0xf3}, + Expect: [32]byte{0xa6, 0x1e, 0x74, 0x55, 0x2c, 0xce, 0x75, 0xf5, 0xe9, 0x72, 0xe4, 0x24, 0xf2, 0xcc, 0xb0, 0x9c, 0x83, 0xbc, 0x1b, 0x67, 0x1, 0x47, 0x48, 0xf0, 0x2c, 0x37, 0x1a, 0x20, 0x9e, 0xf2, 0xfb, 0x2c}, + }, + { + In: [32]byte{0x5c, 0x49, 0x2c, 0xba, 0x2c, 0xc8, 0x92, 0x48, 0x8a, 0x9c, 0xeb, 0x91, 0x86, 0xc2, 0xaa, 0xc2, 0x2f, 0x1, 0x5b, 0xf3, 0xef, 0x8d, 0x3e, 0xcc, 0x9c, 0x41, 0x76, 0x97, 0x62, 0x61, 0xaa, 0xb1}, + Base: [32]byte{0x67, 0x97, 0xc2, 0xe7, 0xdc, 0x92, 0xcc, 0xbe, 0x7c, 0x5, 0x6b, 0xec, 0x35, 0xa, 0xb6, 0xd3, 0xbd, 0x2a, 0x2c, 0x6b, 0xc5, 0xa8, 0x7, 0xbb, 0xca, 0xe1, 0xf6, 0xc2, 0xaf, 0x80, 0x36, 0x44}, + Expect: [32]byte{0xfc, 0xf3, 0x7, 0xdf, 0xbc, 0x19, 0x2, 0xb, 0x28, 0xa6, 0x61, 0x8c, 0x6c, 0x62, 0x2f, 0x31, 0x7e, 0x45, 0x96, 0x7d, 0xac, 0xf4, 0xae, 0x4a, 0xa, 0x69, 0x9a, 0x10, 0x76, 0x9f, 0xde, 0x14}, + }, + { + In: [32]byte{0xea, 0x33, 0x34, 0x92, 0x96, 0x5, 0x5a, 0x4e, 0x8b, 0x19, 0x2e, 0x3c, 0x23, 0xc5, 0xf4, 0xc8, 0x44, 0x28, 0x2a, 0x3b, 0xfc, 0x19, 0xec, 0xc9, 0xdc, 0x64, 0x6a, 0x42, 0xc3, 0x8d, 0xc2, 0x48}, + Base: [32]byte{0x2c, 0x75, 0xd8, 0x51, 0x42, 0xec, 0xad, 0x3e, 0x69, 0x44, 0x70, 0x4, 0x54, 0xc, 0x1c, 0x23, 0x54, 0x8f, 0xc8, 0xf4, 0x86, 0x25, 0x1b, 0x8a, 0x19, 0x46, 0x3f, 0x3d, 0xf6, 0xf8, 0xac, 0x61}, + Expect: [32]byte{0x5d, 0xca, 0xb6, 0x89, 0x73, 0xf9, 0x5b, 0xd3, 0xae, 0x4b, 0x34, 0xfa, 0xb9, 0x49, 0xfb, 0x7f, 0xb1, 0x5a, 0xf1, 0xd8, 0xca, 0xe2, 0x8c, 0xd6, 0x99, 0xf9, 0xc1, 0xaa, 0x33, 0x37, 0x34, 0x2f}, + }, + { + In: [32]byte{0x4f, 0x29, 0x79, 0xb1, 0xec, 0x86, 0x19, 0xe4, 0x5c, 0xa, 0xb, 0x2b, 0x52, 0x9, 0x34, 0x54, 0x1a, 0xb9, 0x44, 0x7, 0xb6, 0x4d, 0x19, 0xa, 0x76, 0xf3, 0x23, 0x14, 0xef, 0xe1, 0x84, 0xe7}, + Base: [32]byte{0xf7, 0xca, 0xe1, 0x8d, 0x8d, 0x36, 0xa7, 0xf5, 0x61, 0x17, 0xb8, 0xb7, 0xe, 0x25, 0x52, 0x27, 0x7f, 0xfc, 0x99, 0xdf, 0x87, 0x56, 0xb5, 0xe1, 0x38, 0xbf, 0x63, 0x68, 0xbc, 0x87, 0xf7, 0x4c}, + Expect: [32]byte{0xe4, 0xe6, 0x34, 0xeb, 0xb4, 0xfb, 0x66, 0x4f, 0xe8, 0xb2, 0xcf, 0xa1, 0x61, 0x5f, 0x0, 0xe6, 0x46, 0x6f, 0xff, 0x73, 0x2c, 0xe1, 0xf8, 0xa0, 0xc8, 0xd2, 0x72, 0x74, 0x31, 0xd1, 0x6f, 0x14}, + }, + { + In: [32]byte{0xf5, 0xd8, 0xa9, 0x27, 0x90, 0x1d, 0x4f, 0xa4, 0x24, 0x90, 0x86, 0xb7, 0xff, 0xec, 0x24, 0xf5, 0x29, 0x7d, 0x80, 0x11, 0x8e, 0x4a, 0xc9, 0xd3, 0xfc, 0x9a, 0x82, 0x37, 0x95, 0x1e, 0x3b, 0x7f}, + Base: [32]byte{0x3c, 0x23, 0x5e, 0xdc, 0x2, 0xf9, 0x11, 0x56, 0x41, 0xdb, 0xf5, 0x16, 0xd5, 0xde, 0x8a, 0x73, 0x5d, 0x6e, 0x53, 0xe2, 0x2a, 0xa2, 0xac, 0x14, 0x36, 0x56, 0x4, 0x5f, 0xf2, 0xe9, 0x52, 0x49}, + Expect: [32]byte{0xab, 0x95, 0x15, 0xab, 0x14, 0xaf, 0x9d, 0x27, 0xe, 0x1d, 0xae, 0xc, 0x56, 0x80, 0xcb, 0xc8, 0x88, 0xb, 0xd8, 0xa8, 0xe7, 0xeb, 0x67, 0xb4, 0xda, 0x42, 0xa6, 0x61, 0x96, 0x1e, 0xfc, 0xb}, + }, +} diff --git a/vendor/golang.org/x/crypto/ed25519/ed25519.go b/vendor/golang.org/x/crypto/ed25519/ed25519.go index 16cd385..d6f683b 100644 --- a/vendor/golang.org/x/crypto/ed25519/ed25519.go +++ b/vendor/golang.org/x/crypto/ed25519/ed25519.go @@ -6,7 +6,10 @@ // https://ed25519.cr.yp.to/. // // These functions are also compatible with the “Ed25519” function defined in -// https://tools.ietf.org/html/draft-irtf-cfrg-eddsa-05. +// RFC 8032. 
However, unlike RFC 8032's formulation, this package's private key +// representation includes a public key suffix to make multiple signing +// operations with the same key more efficient. This package refers to the RFC +// 8032 private key as the “seed”. package ed25519 // This code is a port of the public domain, “ref10” implementation of ed25519 @@ -31,6 +34,8 @@ const ( PrivateKeySize = 64 // SignatureSize is the size, in bytes, of signatures generated and verified by this package. SignatureSize = 64 + // SeedSize is the size, in bytes, of private key seeds. These are the private key representations used by RFC 8032. + SeedSize = 32 ) // PublicKey is the type of Ed25519 public keys. @@ -46,6 +51,15 @@ func (priv PrivateKey) Public() crypto.PublicKey { return PublicKey(publicKey) } +// Seed returns the private key seed corresponding to priv. It is provided for +// interoperability with RFC 8032. RFC 8032's private keys correspond to seeds +// in this package. +func (priv PrivateKey) Seed() []byte { + seed := make([]byte, SeedSize) + copy(seed, priv[:32]) + return seed +} + // Sign signs the given message with priv. // Ed25519 performs two passes over messages to be signed and therefore cannot // handle pre-hashed messages. Thus opts.HashFunc() must return zero to @@ -61,19 +75,33 @@ func (priv PrivateKey) Sign(rand io.Reader, message []byte, opts crypto.SignerOp // GenerateKey generates a public/private key pair using entropy from rand. // If rand is nil, crypto/rand.Reader will be used. -func GenerateKey(rand io.Reader) (publicKey PublicKey, privateKey PrivateKey, err error) { +func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) { if rand == nil { rand = cryptorand.Reader } - privateKey = make([]byte, PrivateKeySize) - publicKey = make([]byte, PublicKeySize) - _, err = io.ReadFull(rand, privateKey[:32]) - if err != nil { + seed := make([]byte, SeedSize) + if _, err := io.ReadFull(rand, seed); err != nil { return nil, nil, err } - digest := sha512.Sum512(privateKey[:32]) + privateKey := NewKeyFromSeed(seed) + publicKey := make([]byte, PublicKeySize) + copy(publicKey, privateKey[32:]) + + return publicKey, privateKey, nil +} + +// NewKeyFromSeed calculates a private key from a seed. It will panic if +// len(seed) is not SeedSize. This function is provided for interoperability +// with RFC 8032. RFC 8032's private keys correspond to seeds in this +// package. +func NewKeyFromSeed(seed []byte) PrivateKey { + if l := len(seed); l != SeedSize { + panic("ed25519: bad seed length: " + strconv.Itoa(l)) + } + + digest := sha512.Sum512(seed) digest[0] &= 248 digest[31] &= 127 digest[31] |= 64 @@ -85,10 +113,11 @@ func GenerateKey(rand io.Reader) (publicKey PublicKey, privateKey PrivateKey, er var publicKeyBytes [32]byte A.ToBytes(&publicKeyBytes) + privateKey := make([]byte, PrivateKeySize) + copy(privateKey, seed) copy(privateKey[32:], publicKeyBytes[:]) - copy(publicKey, publicKeyBytes[:]) - return publicKey, privateKey, nil + return privateKey } // Sign signs the message with privateKey and returns a signature. It will @@ -171,9 +200,16 @@ func Verify(publicKey PublicKey, message, sig []byte) bool { edwards25519.ScReduce(&hReduced, &digest) var R edwards25519.ProjectiveGroupElement - var b [32]byte - copy(b[:], sig[32:]) - edwards25519.GeDoubleScalarMultVartime(&R, &hReduced, &A, &b) + var s [32]byte + copy(s[:], sig[32:]) + + // https://tools.ietf.org/html/rfc8032#section-5.1.7 requires that s be in + // the range [0, order) in order to prevent signature malleability. 
+ if !edwards25519.ScMinimal(&s) { + return false + } + + edwards25519.GeDoubleScalarMultVartime(&R, &hReduced, &A, &s) var checkR [32]byte R.ToBytes(&checkR) diff --git a/vendor/golang.org/x/crypto/ed25519/ed25519_test.go b/vendor/golang.org/x/crypto/ed25519/ed25519_test.go index e272f8a..8094603 100644 --- a/vendor/golang.org/x/crypto/ed25519/ed25519_test.go +++ b/vendor/golang.org/x/crypto/ed25519/ed25519_test.go @@ -139,6 +139,19 @@ func TestGolden(t *testing.T) { if !Verify(pubKey, msg, sig2) { t.Errorf("signature failed to verify on line %d", lineNo) } + + priv2 := NewKeyFromSeed(priv[:32]) + if !bytes.Equal(priv[:], priv2) { + t.Errorf("recreating key pair gave different private key on line %d: %x vs %x", lineNo, priv[:], priv2) + } + + if pubKey2 := priv2.Public().(PublicKey); !bytes.Equal(pubKey, pubKey2) { + t.Errorf("recreating key pair gave different public key on line %d: %x vs %x", lineNo, pubKey, pubKey2) + } + + if seed := priv2.Seed(); !bytes.Equal(priv[:32], seed) { + t.Errorf("recreating key pair gave different seed on line %d: %x vs %x", lineNo, priv[:32], seed) + } } if err := scanner.Err(); err != nil { @@ -146,6 +159,30 @@ func TestGolden(t *testing.T) { } } +func TestMalleability(t *testing.T) { + // https://tools.ietf.org/html/rfc8032#section-5.1.7 adds an additional test + // that s be in [0, order). This prevents someone from adding a multiple of + // order to s and obtaining a second valid signature for the same message. + msg := []byte{0x54, 0x65, 0x73, 0x74} + sig := []byte{ + 0x7c, 0x38, 0xe0, 0x26, 0xf2, 0x9e, 0x14, 0xaa, 0xbd, 0x05, 0x9a, + 0x0f, 0x2d, 0xb8, 0xb0, 0xcd, 0x78, 0x30, 0x40, 0x60, 0x9a, 0x8b, + 0xe6, 0x84, 0xdb, 0x12, 0xf8, 0x2a, 0x27, 0x77, 0x4a, 0xb0, 0x67, + 0x65, 0x4b, 0xce, 0x38, 0x32, 0xc2, 0xd7, 0x6f, 0x8f, 0x6f, 0x5d, + 0xaf, 0xc0, 0x8d, 0x93, 0x39, 0xd4, 0xee, 0xf6, 0x76, 0x57, 0x33, + 0x36, 0xa5, 0xc5, 0x1e, 0xb6, 0xf9, 0x46, 0xb3, 0x1d, + } + publicKey := []byte{ + 0x7d, 0x4d, 0x0e, 0x7f, 0x61, 0x53, 0xa6, 0x9b, 0x62, 0x42, 0xb5, + 0x22, 0xab, 0xbe, 0xe6, 0x85, 0xfd, 0xa4, 0x42, 0x0f, 0x88, 0x34, + 0xb1, 0x08, 0xc3, 0xbd, 0xae, 0x36, 0x9e, 0xf5, 0x49, 0xfa, + } + + if Verify(publicKey, msg, sig) { + t.Fatal("non-canonical signature accepted") + } +} + func BenchmarkKeyGeneration(b *testing.B) { var zero zeroReader for i := 0; i < b.N; i++ { diff --git a/vendor/golang.org/x/crypto/ed25519/internal/edwards25519/edwards25519.go b/vendor/golang.org/x/crypto/ed25519/internal/edwards25519/edwards25519.go index 5f8b994..fd03c25 100644 --- a/vendor/golang.org/x/crypto/ed25519/internal/edwards25519/edwards25519.go +++ b/vendor/golang.org/x/crypto/ed25519/internal/edwards25519/edwards25519.go @@ -4,6 +4,8 @@ package edwards25519 +import "encoding/binary" + // This code is a port of the public domain, “ref10” implementation of ed25519 // from SUPERCOP. @@ -1769,3 +1771,23 @@ func ScReduce(out *[32]byte, s *[64]byte) { out[30] = byte(s11 >> 9) out[31] = byte(s11 >> 17) } + +// order is the order of Curve25519 in little-endian form. +var order = [4]uint64{0x5812631a5cf5d3ed, 0x14def9dea2f79cd6, 0, 0x1000000000000000} + +// ScMinimal returns true if the given scalar is less than the order of the +// curve. 
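// A hedged usage sketch (not part of the patch) for the seed-based ed25519
// API added in this change: PrivateKey.Seed and NewKeyFromSeed make the
// package interoperable with RFC 8032 private keys, which are 32-byte seeds.
package main

import (
	"bytes"
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/ed25519"
)

func main() {
	pub, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}

	// The RFC 8032 private key is the 32-byte seed.
	seed := priv.Seed()

	// Recreating the key from the seed yields the same key pair.
	priv2 := ed25519.NewKeyFromSeed(seed)
	fmt.Println(bytes.Equal(priv, priv2)) // true

	msg := []byte("example message")
	sig := ed25519.Sign(priv2, msg)
	fmt.Println(ed25519.Verify(pub, msg, sig)) // true
}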
+func ScMinimal(scalar *[32]byte) bool { + for i := 3; ; i-- { + v := binary.LittleEndian.Uint64(scalar[i*8:]) + if v > order[i] { + return false + } else if v < order[i] { + break + } else if i == 0 { + return false + } + } + + return true +} diff --git a/vendor/golang.org/x/crypto/go.mod b/vendor/golang.org/x/crypto/go.mod new file mode 100644 index 0000000..3a8a43e --- /dev/null +++ b/vendor/golang.org/x/crypto/go.mod @@ -0,0 +1,3 @@ +module golang.org/x/crypto + +require golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a diff --git a/vendor/golang.org/x/crypto/go.sum b/vendor/golang.org/x/crypto/go.sum new file mode 100644 index 0000000..1936851 --- /dev/null +++ b/vendor/golang.org/x/crypto/go.sum @@ -0,0 +1,2 @@ +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= diff --git a/vendor/golang.org/x/crypto/hkdf/example_test.go b/vendor/golang.org/x/crypto/hkdf/example_test.go index df84395..e89c260 100644 --- a/vendor/golang.org/x/crypto/hkdf/example_test.go +++ b/vendor/golang.org/x/crypto/hkdf/example_test.go @@ -9,49 +9,44 @@ import ( "crypto/rand" "crypto/sha256" "fmt" - "golang.org/x/crypto/hkdf" "io" + + "golang.org/x/crypto/hkdf" ) -// Usage example that expands one master key into three other cryptographically -// secure keys. +// Usage example that expands one master secret into three other +// cryptographically secure keys. func Example_usage() { - // Underlying hash function to use + // Underlying hash function for HMAC. hash := sha256.New - // Cryptographically secure master key. - master := []byte{0x00, 0x01, 0x02, 0x03} // i.e. NOT this. + // Cryptographically secure master secret. + secret := []byte{0x00, 0x01, 0x02, 0x03} // i.e. NOT this. - // Non secret salt, optional (can be nil) - // Recommended: hash-length sized random + // Non-secret salt, optional (can be nil). + // Recommended: hash-length random value. salt := make([]byte, hash().Size()) - n, err := io.ReadFull(rand.Reader, salt) - if n != len(salt) || err != nil { - fmt.Println("error:", err) - return + if _, err := rand.Read(salt); err != nil { + panic(err) } - // Non secret context specific info, optional (can be nil). - // Note, independent from the master key. - info := []byte{0x03, 0x14, 0x15, 0x92, 0x65} - - // Create the key derivation function - hkdf := hkdf.New(hash, master, salt, info) - - // Generate the required keys - keys := make([][]byte, 3) - for i := 0; i < len(keys); i++ { - keys[i] = make([]byte, 24) - n, err := io.ReadFull(hkdf, keys[i]) - if n != len(keys[i]) || err != nil { - fmt.Println("error:", err) - return + // Non-secret context info, optional (can be nil). + info := []byte("hkdf example") + + // Generate three 128-bit derived keys. 
+ hkdf := hkdf.New(hash, secret, salt, info) + + var keys [][]byte + for i := 0; i < 3; i++ { + key := make([]byte, 16) + if _, err := io.ReadFull(hkdf, key); err != nil { + panic(err) } + keys = append(keys, key) } - // Keys should contain 192 bit random keys - for i := 1; i <= len(keys); i++ { - fmt.Printf("Key #%d: %v\n", i, !bytes.Equal(keys[i-1], make([]byte, 24))) + for i := range keys { + fmt.Printf("Key #%d: %v\n", i+1, !bytes.Equal(keys[i], make([]byte, 16))) } // Output: diff --git a/vendor/golang.org/x/crypto/hkdf/hkdf.go b/vendor/golang.org/x/crypto/hkdf/hkdf.go index 5bc2463..dda3f14 100644 --- a/vendor/golang.org/x/crypto/hkdf/hkdf.go +++ b/vendor/golang.org/x/crypto/hkdf/hkdf.go @@ -8,8 +8,6 @@ // HKDF is a cryptographic key derivation function (KDF) with the goal of // expanding limited input keying material into one or more cryptographically // strong secret keys. -// -// RFC 5869: https://tools.ietf.org/html/rfc5869 package hkdf // import "golang.org/x/crypto/hkdf" import ( @@ -19,6 +17,21 @@ import ( "io" ) +// Extract generates a pseudorandom key for use with Expand from an input secret +// and an optional independent salt. +// +// Only use this function if you need to reuse the extracted key with multiple +// Expand invocations and different context values. Most common scenarios, +// including the generation of multiple keys, should use New instead. +func Extract(hash func() hash.Hash, secret, salt []byte) []byte { + if salt == nil { + salt = make([]byte, hash().Size()) + } + extractor := hmac.New(hash, salt) + extractor.Write(secret) + return extractor.Sum(nil) +} + type hkdf struct { expander hash.Hash size int @@ -26,22 +39,22 @@ type hkdf struct { info []byte counter byte - prev []byte - cache []byte + prev []byte + buf []byte } func (f *hkdf) Read(p []byte) (int, error) { // Check whether enough data can be generated need := len(p) - remains := len(f.cache) + int(255-f.counter+1)*f.size + remains := len(f.buf) + int(255-f.counter+1)*f.size if remains < need { return 0, errors.New("hkdf: entropy limit reached") } - // Read from the cache, if enough data is present - n := copy(p, f.cache) + // Read any leftover from the buffer + n := copy(p, f.buf) p = p[n:] - // Fill the buffer + // Fill the rest of the buffer for len(p) > 0 { f.expander.Reset() f.expander.Write(f.prev) @@ -51,25 +64,30 @@ func (f *hkdf) Read(p []byte) (int, error) { f.counter++ // Copy the new batch into p - f.cache = f.prev - n = copy(p, f.cache) + f.buf = f.prev + n = copy(p, f.buf) p = p[n:] } // Save leftovers for next run - f.cache = f.cache[n:] + f.buf = f.buf[n:] return need, nil } -// New returns a new HKDF using the given hash, the secret keying material to expand -// and optional salt and info fields. -func New(hash func() hash.Hash, secret, salt, info []byte) io.Reader { - if salt == nil { - salt = make([]byte, hash().Size()) - } - extractor := hmac.New(hash, salt) - extractor.Write(secret) - prk := extractor.Sum(nil) +// Expand returns a Reader, from which keys can be read, using the given +// pseudorandom key and optional context info, skipping the extraction step. +// +// The pseudorandomKey should have been generated by Extract, or be a uniformly +// random or pseudorandom cryptographically strong key. See RFC 5869, Section +// 3.3. Most common scenarios will want to use New instead. 
+func Expand(hash func() hash.Hash, pseudorandomKey, info []byte) io.Reader { + expander := hmac.New(hash, pseudorandomKey) + return &hkdf{expander, expander.Size(), info, 1, nil, nil} +} - return &hkdf{hmac.New(hash, prk), extractor.Size(), info, 1, nil, nil} +// New returns a Reader, from which keys can be read, using the given hash, +// secret, salt and context info. Salt and info can be nil. +func New(hash func() hash.Hash, secret, salt, info []byte) io.Reader { + prk := Extract(hash, secret, salt) + return Expand(hash, prk, info) } diff --git a/vendor/golang.org/x/crypto/hkdf/hkdf_test.go b/vendor/golang.org/x/crypto/hkdf/hkdf_test.go index cee659b..ea57577 100644 --- a/vendor/golang.org/x/crypto/hkdf/hkdf_test.go +++ b/vendor/golang.org/x/crypto/hkdf/hkdf_test.go @@ -18,6 +18,7 @@ type hkdfTest struct { hash func() hash.Hash master []byte salt []byte + prk []byte info []byte out []byte } @@ -35,6 +36,12 @@ var hkdfTests = []hkdfTest{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, }, + []byte{ + 0x07, 0x77, 0x09, 0x36, 0x2c, 0x2e, 0x32, 0xdf, + 0x0d, 0xdc, 0x3f, 0x0d, 0xc4, 0x7b, 0xba, 0x63, + 0x90, 0xb6, 0xc7, 0x3b, 0xb5, 0x0f, 0x9c, 0x31, + 0x22, 0xec, 0x84, 0x4a, 0xd7, 0xc2, 0xb3, 0xe5, + }, []byte{ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, @@ -74,6 +81,12 @@ var hkdfTests = []hkdfTest{ 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, }, + []byte{ + 0x06, 0xa6, 0xb8, 0x8c, 0x58, 0x53, 0x36, 0x1a, + 0x06, 0x10, 0x4c, 0x9c, 0xeb, 0x35, 0xb4, 0x5c, + 0xef, 0x76, 0x00, 0x14, 0x90, 0x46, 0x71, 0x01, + 0x4a, 0x19, 0x3f, 0x40, 0xc1, 0x5f, 0xc2, 0x44, + }, []byte{ 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, @@ -108,6 +121,12 @@ var hkdfTests = []hkdfTest{ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, }, []byte{}, + []byte{ + 0x19, 0xef, 0x24, 0xa3, 0x2c, 0x71, 0x7b, 0x16, + 0x7f, 0x33, 0xa9, 0x1d, 0x6f, 0x64, 0x8b, 0xdf, + 0x96, 0x59, 0x67, 0x76, 0xaf, 0xdb, 0x63, 0x77, + 0xac, 0x43, 0x4c, 0x1c, 0x29, 0x3c, 0xcb, 0x04, + }, []byte{}, []byte{ 0x8d, 0xa4, 0xe7, 0x75, 0xa5, 0x63, 0xc1, 0x8f, @@ -118,6 +137,30 @@ var hkdfTests = []hkdfTest{ 0x96, 0xc8, }, }, + { + sha256.New, + []byte{ + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + }, + nil, + []byte{ + 0x19, 0xef, 0x24, 0xa3, 0x2c, 0x71, 0x7b, 0x16, + 0x7f, 0x33, 0xa9, 0x1d, 0x6f, 0x64, 0x8b, 0xdf, + 0x96, 0x59, 0x67, 0x76, 0xaf, 0xdb, 0x63, 0x77, + 0xac, 0x43, 0x4c, 0x1c, 0x29, 0x3c, 0xcb, 0x04, + }, + nil, + []byte{ + 0x8d, 0xa4, 0xe7, 0x75, 0xa5, 0x63, 0xc1, 0x8f, + 0x71, 0x5f, 0x80, 0x2a, 0x06, 0x3c, 0x5a, 0x31, + 0xb8, 0xa1, 0x1f, 0x5c, 0x5e, 0xe1, 0x87, 0x9e, + 0xc3, 0x45, 0x4e, 0x5f, 0x3c, 0x73, 0x8d, 0x2d, + 0x9d, 0x20, 0x13, 0x95, 0xfa, 0xa4, 0xb6, 0x1a, + 0x96, 0xc8, + }, + }, { sha1.New, []byte{ @@ -128,6 +171,11 @@ var hkdfTests = []hkdfTest{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, }, + []byte{ + 0x9b, 0x6c, 0x18, 0xc4, 0x32, 0xa7, 0xbf, 0x8f, + 0x0e, 0x71, 0xc8, 0xeb, 0x88, 0xf4, 0xb3, 0x0b, + 0xaa, 0x2b, 0xa2, 0x43, + }, []byte{ 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, @@ -167,6 +215,11 @@ var hkdfTests = []hkdfTest{ 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, }, + []byte{ + 0x8a, 0xda, 0xe0, 0x9a, 0x2a, 0x30, 0x70, 0x59, + 0x47, 0x8d, 0x30, 0x9b, 0x26, 0xc4, 0x11, 0x5a, + 0x22, 0x4c, 0xfa, 
0xf6, + }, []byte{ 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, @@ -201,6 +254,11 @@ var hkdfTests = []hkdfTest{ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, }, []byte{}, + []byte{ + 0xda, 0x8c, 0x8a, 0x73, 0xc7, 0xfa, 0x77, 0x28, + 0x8e, 0xc6, 0xf5, 0xe7, 0xc2, 0x97, 0x78, 0x6a, + 0xa0, 0xd3, 0x2d, 0x01, + }, []byte{}, []byte{ 0x0a, 0xc1, 0xaf, 0x70, 0x02, 0xb3, 0xd7, 0x61, @@ -219,7 +277,12 @@ var hkdfTests = []hkdfTest{ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, }, nil, - []byte{}, + []byte{ + 0x2a, 0xdc, 0xca, 0xda, 0x18, 0x77, 0x9e, 0x7c, + 0x20, 0x77, 0xad, 0x2e, 0xb1, 0x9d, 0x3f, 0x3e, + 0x73, 0x13, 0x85, 0xdd, + }, + nil, []byte{ 0x2c, 0x91, 0x11, 0x72, 0x04, 0xd7, 0x45, 0xf3, 0x50, 0x0d, 0x63, 0x6a, 0x62, 0xf6, 0x4f, 0x0a, @@ -233,6 +296,11 @@ var hkdfTests = []hkdfTest{ func TestHKDF(t *testing.T) { for i, tt := range hkdfTests { + prk := Extract(tt.hash, tt.master, tt.salt) + if !bytes.Equal(prk, tt.prk) { + t.Errorf("test %d: incorrect PRK: have %v, need %v.", i, prk, tt.prk) + } + hkdf := New(tt.hash, tt.master, tt.salt, tt.info) out := make([]byte, len(tt.out)) @@ -244,6 +312,17 @@ func TestHKDF(t *testing.T) { if !bytes.Equal(out, tt.out) { t.Errorf("test %d: incorrect output: have %v, need %v.", i, out, tt.out) } + + hkdf = Expand(tt.hash, prk, tt.info) + + n, err = io.ReadFull(hkdf, out) + if n != len(tt.out) || err != nil { + t.Errorf("test %d: not enough output bytes from Expand: %d.", i, n) + } + + if !bytes.Equal(out, tt.out) { + t.Errorf("test %d: incorrect output from Expand: have %v, need %v.", i, out, tt.out) + } } } diff --git a/vendor/golang.org/x/crypto/internal/chacha20/asm_arm64.s b/vendor/golang.org/x/crypto/internal/chacha20/asm_arm64.s new file mode 100644 index 0000000..b3a16ef --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/asm_arm64.s @@ -0,0 +1,308 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build go1.11 +// +build !gccgo,!appengine + +#include "textflag.h" + +#define NUM_ROUNDS 10 + +// func xorKeyStreamVX(dst, src []byte, key *[8]uint32, nonce *[3]uint32, counter *uint32) +TEXT ·xorKeyStreamVX(SB), NOSPLIT, $0 + MOVD dst+0(FP), R1 + MOVD src+24(FP), R2 + MOVD src_len+32(FP), R3 + MOVD key+48(FP), R4 + MOVD nonce+56(FP), R6 + MOVD counter+64(FP), R7 + + MOVD $·constants(SB), R10 + MOVD $·incRotMatrix(SB), R11 + + MOVW (R7), R20 + + AND $~255, R3, R13 + ADD R2, R13, R12 // R12 for block end + AND $255, R3, R13 +loop: + MOVD $NUM_ROUNDS, R21 + VLD1 (R11), [V30.S4, V31.S4] + + // load contants + // VLD4R (R10), [V0.S4, V1.S4, V2.S4, V3.S4] + WORD $0x4D60E940 + + // load keys + // VLD4R 16(R4), [V4.S4, V5.S4, V6.S4, V7.S4] + WORD $0x4DFFE884 + // VLD4R 16(R4), [V8.S4, V9.S4, V10.S4, V11.S4] + WORD $0x4DFFE888 + SUB $32, R4 + + // load counter + nonce + // VLD1R (R7), [V12.S4] + WORD $0x4D40C8EC + + // VLD3R (R6), [V13.S4, V14.S4, V15.S4] + WORD $0x4D40E8CD + + // update counter + VADD V30.S4, V12.S4, V12.S4 + +chacha: + // V0..V3 += V4..V7 + // V12..V15 <<<= ((V12..V15 XOR V0..V3), 16) + VADD V0.S4, V4.S4, V0.S4 + VADD V1.S4, V5.S4, V1.S4 + VADD V2.S4, V6.S4, V2.S4 + VADD V3.S4, V7.S4, V3.S4 + VEOR V12.B16, V0.B16, V12.B16 + VEOR V13.B16, V1.B16, V13.B16 + VEOR V14.B16, V2.B16, V14.B16 + VEOR V15.B16, V3.B16, V15.B16 + VREV32 V12.H8, V12.H8 + VREV32 V13.H8, V13.H8 + VREV32 V14.H8, V14.H8 + VREV32 V15.H8, V15.H8 + // V8..V11 += V12..V15 + // V4..V7 <<<= ((V4..V7 XOR V8..V11), 12) + VADD V8.S4, V12.S4, V8.S4 + VADD V9.S4, V13.S4, V9.S4 + VADD V10.S4, V14.S4, V10.S4 + VADD V11.S4, V15.S4, V11.S4 + VEOR V8.B16, V4.B16, V16.B16 + VEOR V9.B16, V5.B16, V17.B16 + VEOR V10.B16, V6.B16, V18.B16 + VEOR V11.B16, V7.B16, V19.B16 + VSHL $12, V16.S4, V4.S4 + VSHL $12, V17.S4, V5.S4 + VSHL $12, V18.S4, V6.S4 + VSHL $12, V19.S4, V7.S4 + VSRI $20, V16.S4, V4.S4 + VSRI $20, V17.S4, V5.S4 + VSRI $20, V18.S4, V6.S4 + VSRI $20, V19.S4, V7.S4 + + // V0..V3 += V4..V7 + // V12..V15 <<<= ((V12..V15 XOR V0..V3), 8) + VADD V0.S4, V4.S4, V0.S4 + VADD V1.S4, V5.S4, V1.S4 + VADD V2.S4, V6.S4, V2.S4 + VADD V3.S4, V7.S4, V3.S4 + VEOR V12.B16, V0.B16, V12.B16 + VEOR V13.B16, V1.B16, V13.B16 + VEOR V14.B16, V2.B16, V14.B16 + VEOR V15.B16, V3.B16, V15.B16 + VTBL V31.B16, [V12.B16], V12.B16 + VTBL V31.B16, [V13.B16], V13.B16 + VTBL V31.B16, [V14.B16], V14.B16 + VTBL V31.B16, [V15.B16], V15.B16 + + // V8..V11 += V12..V15 + // V4..V7 <<<= ((V4..V7 XOR V8..V11), 7) + VADD V12.S4, V8.S4, V8.S4 + VADD V13.S4, V9.S4, V9.S4 + VADD V14.S4, V10.S4, V10.S4 + VADD V15.S4, V11.S4, V11.S4 + VEOR V8.B16, V4.B16, V16.B16 + VEOR V9.B16, V5.B16, V17.B16 + VEOR V10.B16, V6.B16, V18.B16 + VEOR V11.B16, V7.B16, V19.B16 + VSHL $7, V16.S4, V4.S4 + VSHL $7, V17.S4, V5.S4 + VSHL $7, V18.S4, V6.S4 + VSHL $7, V19.S4, V7.S4 + VSRI $25, V16.S4, V4.S4 + VSRI $25, V17.S4, V5.S4 + VSRI $25, V18.S4, V6.S4 + VSRI $25, V19.S4, V7.S4 + + // V0..V3 += V5..V7, V4 + // V15,V12-V14 <<<= ((V15,V12-V14 XOR V0..V3), 16) + VADD V0.S4, V5.S4, V0.S4 + VADD V1.S4, V6.S4, V1.S4 + VADD V2.S4, V7.S4, V2.S4 + VADD V3.S4, V4.S4, V3.S4 + VEOR V15.B16, V0.B16, V15.B16 + VEOR V12.B16, V1.B16, V12.B16 + VEOR V13.B16, V2.B16, V13.B16 + VEOR V14.B16, V3.B16, V14.B16 + VREV32 V12.H8, V12.H8 + VREV32 V13.H8, V13.H8 + VREV32 V14.H8, V14.H8 + VREV32 V15.H8, V15.H8 + + // V10 += V15; V5 <<<= ((V10 XOR V5), 12) + // ... 
+ VADD V15.S4, V10.S4, V10.S4 + VADD V12.S4, V11.S4, V11.S4 + VADD V13.S4, V8.S4, V8.S4 + VADD V14.S4, V9.S4, V9.S4 + VEOR V10.B16, V5.B16, V16.B16 + VEOR V11.B16, V6.B16, V17.B16 + VEOR V8.B16, V7.B16, V18.B16 + VEOR V9.B16, V4.B16, V19.B16 + VSHL $12, V16.S4, V5.S4 + VSHL $12, V17.S4, V6.S4 + VSHL $12, V18.S4, V7.S4 + VSHL $12, V19.S4, V4.S4 + VSRI $20, V16.S4, V5.S4 + VSRI $20, V17.S4, V6.S4 + VSRI $20, V18.S4, V7.S4 + VSRI $20, V19.S4, V4.S4 + + // V0 += V5; V15 <<<= ((V0 XOR V15), 8) + // ... + VADD V5.S4, V0.S4, V0.S4 + VADD V6.S4, V1.S4, V1.S4 + VADD V7.S4, V2.S4, V2.S4 + VADD V4.S4, V3.S4, V3.S4 + VEOR V0.B16, V15.B16, V15.B16 + VEOR V1.B16, V12.B16, V12.B16 + VEOR V2.B16, V13.B16, V13.B16 + VEOR V3.B16, V14.B16, V14.B16 + VTBL V31.B16, [V12.B16], V12.B16 + VTBL V31.B16, [V13.B16], V13.B16 + VTBL V31.B16, [V14.B16], V14.B16 + VTBL V31.B16, [V15.B16], V15.B16 + + // V10 += V15; V5 <<<= ((V10 XOR V5), 7) + // ... + VADD V15.S4, V10.S4, V10.S4 + VADD V12.S4, V11.S4, V11.S4 + VADD V13.S4, V8.S4, V8.S4 + VADD V14.S4, V9.S4, V9.S4 + VEOR V10.B16, V5.B16, V16.B16 + VEOR V11.B16, V6.B16, V17.B16 + VEOR V8.B16, V7.B16, V18.B16 + VEOR V9.B16, V4.B16, V19.B16 + VSHL $7, V16.S4, V5.S4 + VSHL $7, V17.S4, V6.S4 + VSHL $7, V18.S4, V7.S4 + VSHL $7, V19.S4, V4.S4 + VSRI $25, V16.S4, V5.S4 + VSRI $25, V17.S4, V6.S4 + VSRI $25, V18.S4, V7.S4 + VSRI $25, V19.S4, V4.S4 + + SUB $1, R21 + CBNZ R21, chacha + + // VLD4R (R10), [V16.S4, V17.S4, V18.S4, V19.S4] + WORD $0x4D60E950 + + // VLD4R 16(R4), [V20.S4, V21.S4, V22.S4, V23.S4] + WORD $0x4DFFE894 + VADD V30.S4, V12.S4, V12.S4 + VADD V16.S4, V0.S4, V0.S4 + VADD V17.S4, V1.S4, V1.S4 + VADD V18.S4, V2.S4, V2.S4 + VADD V19.S4, V3.S4, V3.S4 + // VLD4R 16(R4), [V24.S4, V25.S4, V26.S4, V27.S4] + WORD $0x4DFFE898 + // restore R4 + SUB $32, R4 + + // load counter + nonce + // VLD1R (R7), [V28.S4] + WORD $0x4D40C8FC + // VLD3R (R6), [V29.S4, V30.S4, V31.S4] + WORD $0x4D40E8DD + + VADD V20.S4, V4.S4, V4.S4 + VADD V21.S4, V5.S4, V5.S4 + VADD V22.S4, V6.S4, V6.S4 + VADD V23.S4, V7.S4, V7.S4 + VADD V24.S4, V8.S4, V8.S4 + VADD V25.S4, V9.S4, V9.S4 + VADD V26.S4, V10.S4, V10.S4 + VADD V27.S4, V11.S4, V11.S4 + VADD V28.S4, V12.S4, V12.S4 + VADD V29.S4, V13.S4, V13.S4 + VADD V30.S4, V14.S4, V14.S4 + VADD V31.S4, V15.S4, V15.S4 + + VZIP1 V1.S4, V0.S4, V16.S4 + VZIP2 V1.S4, V0.S4, V17.S4 + VZIP1 V3.S4, V2.S4, V18.S4 + VZIP2 V3.S4, V2.S4, V19.S4 + VZIP1 V5.S4, V4.S4, V20.S4 + VZIP2 V5.S4, V4.S4, V21.S4 + VZIP1 V7.S4, V6.S4, V22.S4 + VZIP2 V7.S4, V6.S4, V23.S4 + VZIP1 V9.S4, V8.S4, V24.S4 + VZIP2 V9.S4, V8.S4, V25.S4 + VZIP1 V11.S4, V10.S4, V26.S4 + VZIP2 V11.S4, V10.S4, V27.S4 + VZIP1 V13.S4, V12.S4, V28.S4 + VZIP2 V13.S4, V12.S4, V29.S4 + VZIP1 V15.S4, V14.S4, V30.S4 + VZIP2 V15.S4, V14.S4, V31.S4 + VZIP1 V18.D2, V16.D2, V0.D2 + VZIP2 V18.D2, V16.D2, V4.D2 + VZIP1 V19.D2, V17.D2, V8.D2 + VZIP2 V19.D2, V17.D2, V12.D2 + VLD1.P 64(R2), [V16.B16, V17.B16, V18.B16, V19.B16] + + VZIP1 V22.D2, V20.D2, V1.D2 + VZIP2 V22.D2, V20.D2, V5.D2 + VZIP1 V23.D2, V21.D2, V9.D2 + VZIP2 V23.D2, V21.D2, V13.D2 + VLD1.P 64(R2), [V20.B16, V21.B16, V22.B16, V23.B16] + VZIP1 V26.D2, V24.D2, V2.D2 + VZIP2 V26.D2, V24.D2, V6.D2 + VZIP1 V27.D2, V25.D2, V10.D2 + VZIP2 V27.D2, V25.D2, V14.D2 + VLD1.P 64(R2), [V24.B16, V25.B16, V26.B16, V27.B16] + VZIP1 V30.D2, V28.D2, V3.D2 + VZIP2 V30.D2, V28.D2, V7.D2 + VZIP1 V31.D2, V29.D2, V11.D2 + VZIP2 V31.D2, V29.D2, V15.D2 + VLD1.P 64(R2), [V28.B16, V29.B16, V30.B16, V31.B16] + VEOR V0.B16, V16.B16, V16.B16 + VEOR V1.B16, V17.B16, V17.B16 + VEOR V2.B16, 
V18.B16, V18.B16 + VEOR V3.B16, V19.B16, V19.B16 + VST1.P [V16.B16, V17.B16, V18.B16, V19.B16], 64(R1) + VEOR V4.B16, V20.B16, V20.B16 + VEOR V5.B16, V21.B16, V21.B16 + VEOR V6.B16, V22.B16, V22.B16 + VEOR V7.B16, V23.B16, V23.B16 + VST1.P [V20.B16, V21.B16, V22.B16, V23.B16], 64(R1) + VEOR V8.B16, V24.B16, V24.B16 + VEOR V9.B16, V25.B16, V25.B16 + VEOR V10.B16, V26.B16, V26.B16 + VEOR V11.B16, V27.B16, V27.B16 + VST1.P [V24.B16, V25.B16, V26.B16, V27.B16], 64(R1) + VEOR V12.B16, V28.B16, V28.B16 + VEOR V13.B16, V29.B16, V29.B16 + VEOR V14.B16, V30.B16, V30.B16 + VEOR V15.B16, V31.B16, V31.B16 + VST1.P [V28.B16, V29.B16, V30.B16, V31.B16], 64(R1) + + ADD $4, R20 + MOVW R20, (R7) // update counter + + CMP R2, R12 + BGT loop + + RET + + +DATA ·constants+0x00(SB)/4, $0x61707865 +DATA ·constants+0x04(SB)/4, $0x3320646e +DATA ·constants+0x08(SB)/4, $0x79622d32 +DATA ·constants+0x0c(SB)/4, $0x6b206574 +GLOBL ·constants(SB), NOPTR|RODATA, $32 + +DATA ·incRotMatrix+0x00(SB)/4, $0x00000000 +DATA ·incRotMatrix+0x04(SB)/4, $0x00000001 +DATA ·incRotMatrix+0x08(SB)/4, $0x00000002 +DATA ·incRotMatrix+0x0c(SB)/4, $0x00000003 +DATA ·incRotMatrix+0x10(SB)/4, $0x02010003 +DATA ·incRotMatrix+0x14(SB)/4, $0x06050407 +DATA ·incRotMatrix+0x18(SB)/4, $0x0A09080B +DATA ·incRotMatrix+0x1c(SB)/4, $0x0E0D0C0F +GLOBL ·incRotMatrix(SB), NOPTR|RODATA, $32 diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_arm64.go b/vendor/golang.org/x/crypto/internal/chacha20/chacha_arm64.go new file mode 100644 index 0000000..ad74e23 --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_arm64.go @@ -0,0 +1,31 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 +// +build !gccgo + +package chacha20 + +const ( + haveAsm = true + bufSize = 256 +) + +//go:noescape +func xorKeyStreamVX(dst, src []byte, key *[8]uint32, nonce *[3]uint32, counter *uint32) + +func (c *Cipher) xorKeyStreamAsm(dst, src []byte) { + + if len(src) >= bufSize { + xorKeyStreamVX(dst, src, &c.key, &c.nonce, &c.counter) + } + + if len(src)%bufSize != 0 { + i := len(src) - len(src)%bufSize + c.buf = [bufSize]byte{} + copy(c.buf[:], src[i:]) + xorKeyStreamVX(c.buf[:], c.buf[:], &c.key, &c.nonce, &c.counter) + c.len = bufSize - copy(dst[i:], c.buf[:len(src)%bufSize]) + } +} diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_generic.go b/vendor/golang.org/x/crypto/internal/chacha20/chacha_generic.go new file mode 100644 index 0000000..6570847 --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_generic.go @@ -0,0 +1,264 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ChaCha20 implements the core ChaCha20 function as specified +// in https://tools.ietf.org/html/rfc7539#section-2.3. +package chacha20 + +import ( + "crypto/cipher" + "encoding/binary" + + "golang.org/x/crypto/internal/subtle" +) + +// assert that *Cipher implements cipher.Stream +var _ cipher.Stream = (*Cipher)(nil) + +// Cipher is a stateful instance of ChaCha20 using a particular key +// and nonce. A *Cipher implements the cipher.Stream interface. 
+type Cipher struct { + key [8]uint32 + counter uint32 // incremented after each block + nonce [3]uint32 + buf [bufSize]byte // buffer for unused keystream bytes + len int // number of unused keystream bytes at end of buf +} + +// New creates a new ChaCha20 stream cipher with the given key and nonce. +// The initial counter value is set to 0. +func New(key [8]uint32, nonce [3]uint32) *Cipher { + return &Cipher{key: key, nonce: nonce} +} + +// ChaCha20 constants spelling "expand 32-byte k" +const ( + j0 uint32 = 0x61707865 + j1 uint32 = 0x3320646e + j2 uint32 = 0x79622d32 + j3 uint32 = 0x6b206574 +) + +func quarterRound(a, b, c, d uint32) (uint32, uint32, uint32, uint32) { + a += b + d ^= a + d = (d << 16) | (d >> 16) + c += d + b ^= c + b = (b << 12) | (b >> 20) + a += b + d ^= a + d = (d << 8) | (d >> 24) + c += d + b ^= c + b = (b << 7) | (b >> 25) + return a, b, c, d +} + +// XORKeyStream XORs each byte in the given slice with a byte from the +// cipher's key stream. Dst and src must overlap entirely or not at all. +// +// If len(dst) < len(src), XORKeyStream will panic. It is acceptable +// to pass a dst bigger than src, and in that case, XORKeyStream will +// only update dst[:len(src)] and will not touch the rest of dst. +// +// Multiple calls to XORKeyStream behave as if the concatenation of +// the src buffers was passed in a single run. That is, Cipher +// maintains state and does not reset at each XORKeyStream call. +func (s *Cipher) XORKeyStream(dst, src []byte) { + if len(dst) < len(src) { + panic("chacha20: output smaller than input") + } + if subtle.InexactOverlap(dst[:len(src)], src) { + panic("chacha20: invalid buffer overlap") + } + + // xor src with buffered keystream first + if s.len != 0 { + buf := s.buf[len(s.buf)-s.len:] + if len(src) < len(buf) { + buf = buf[:len(src)] + } + td, ts := dst[:len(buf)], src[:len(buf)] // BCE hint + for i, b := range buf { + td[i] = ts[i] ^ b + } + s.len -= len(buf) + if s.len != 0 { + return + } + s.buf = [len(s.buf)]byte{} // zero the empty buffer + src = src[len(buf):] + dst = dst[len(buf):] + } + + if len(src) == 0 { + return + } + if haveAsm { + if uint64(len(src))+uint64(s.counter)*64 > (1<<38)-64 { + panic("chacha20: counter overflow") + } + s.xorKeyStreamAsm(dst, src) + return + } + + // set up a 64-byte buffer to pad out the final block if needed + // (hoisted out of the main loop to avoid spills) + rem := len(src) % 64 // length of final block + fin := len(src) - rem // index of final block + if rem > 0 { + copy(s.buf[len(s.buf)-64:], src[fin:]) + } + + // pre-calculate most of the first round + s1, s5, s9, s13 := quarterRound(j1, s.key[1], s.key[5], s.nonce[0]) + s2, s6, s10, s14 := quarterRound(j2, s.key[2], s.key[6], s.nonce[1]) + s3, s7, s11, s15 := quarterRound(j3, s.key[3], s.key[7], s.nonce[2]) + + n := len(src) + src, dst = src[:n:n], dst[:n:n] // BCE hint + for i := 0; i < n; i += 64 { + // calculate the remainder of the first round + s0, s4, s8, s12 := quarterRound(j0, s.key[0], s.key[4], s.counter) + + // execute the second round + x0, x5, x10, x15 := quarterRound(s0, s5, s10, s15) + x1, x6, x11, x12 := quarterRound(s1, s6, s11, s12) + x2, x7, x8, x13 := quarterRound(s2, s7, s8, s13) + x3, x4, x9, x14 := quarterRound(s3, s4, s9, s14) + + // execute the remaining 18 rounds + for i := 0; i < 9; i++ { + x0, x4, x8, x12 = quarterRound(x0, x4, x8, x12) + x1, x5, x9, x13 = quarterRound(x1, x5, x9, x13) + x2, x6, x10, x14 = quarterRound(x2, x6, x10, x14) + x3, x7, x11, x15 = quarterRound(x3, x7, x11, x15) + + x0, x5, x10, 
x15 = quarterRound(x0, x5, x10, x15) + x1, x6, x11, x12 = quarterRound(x1, x6, x11, x12) + x2, x7, x8, x13 = quarterRound(x2, x7, x8, x13) + x3, x4, x9, x14 = quarterRound(x3, x4, x9, x14) + } + + x0 += j0 + x1 += j1 + x2 += j2 + x3 += j3 + + x4 += s.key[0] + x5 += s.key[1] + x6 += s.key[2] + x7 += s.key[3] + x8 += s.key[4] + x9 += s.key[5] + x10 += s.key[6] + x11 += s.key[7] + + x12 += s.counter + x13 += s.nonce[0] + x14 += s.nonce[1] + x15 += s.nonce[2] + + // increment the counter + s.counter += 1 + if s.counter == 0 { + panic("chacha20: counter overflow") + } + + // pad to 64 bytes if needed + in, out := src[i:], dst[i:] + if i == fin { + // src[fin:] has already been copied into s.buf before + // the main loop + in, out = s.buf[len(s.buf)-64:], s.buf[len(s.buf)-64:] + } + in, out = in[:64], out[:64] // BCE hint + + // XOR the key stream with the source and write out the result + xor(out[0:], in[0:], x0) + xor(out[4:], in[4:], x1) + xor(out[8:], in[8:], x2) + xor(out[12:], in[12:], x3) + xor(out[16:], in[16:], x4) + xor(out[20:], in[20:], x5) + xor(out[24:], in[24:], x6) + xor(out[28:], in[28:], x7) + xor(out[32:], in[32:], x8) + xor(out[36:], in[36:], x9) + xor(out[40:], in[40:], x10) + xor(out[44:], in[44:], x11) + xor(out[48:], in[48:], x12) + xor(out[52:], in[52:], x13) + xor(out[56:], in[56:], x14) + xor(out[60:], in[60:], x15) + } + // copy any trailing bytes out of the buffer and into dst + if rem != 0 { + s.len = 64 - rem + copy(dst[fin:], s.buf[len(s.buf)-64:]) + } +} + +// Advance discards bytes in the key stream until the next 64 byte block +// boundary is reached and updates the counter accordingly. If the key +// stream is already at a block boundary no bytes will be discarded and +// the counter will be unchanged. +func (s *Cipher) Advance() { + s.len -= s.len % 64 + if s.len == 0 { + s.buf = [len(s.buf)]byte{} + } +} + +// XORKeyStream crypts bytes from in to out using the given key and counters. +// In and out must overlap entirely or not at all. Counter contains the raw +// ChaCha20 counter bytes (i.e. block counter followed by nonce). +func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { + s := Cipher{ + key: [8]uint32{ + binary.LittleEndian.Uint32(key[0:4]), + binary.LittleEndian.Uint32(key[4:8]), + binary.LittleEndian.Uint32(key[8:12]), + binary.LittleEndian.Uint32(key[12:16]), + binary.LittleEndian.Uint32(key[16:20]), + binary.LittleEndian.Uint32(key[20:24]), + binary.LittleEndian.Uint32(key[24:28]), + binary.LittleEndian.Uint32(key[28:32]), + }, + nonce: [3]uint32{ + binary.LittleEndian.Uint32(counter[4:8]), + binary.LittleEndian.Uint32(counter[8:12]), + binary.LittleEndian.Uint32(counter[12:16]), + }, + counter: binary.LittleEndian.Uint32(counter[0:4]), + } + s.XORKeyStream(out, in) +} + +// HChaCha20 uses the ChaCha20 core to generate a derived key from a key and a +// nonce. It should only be used as part of the XChaCha20 construction. 
+func HChaCha20(key *[8]uint32, nonce *[4]uint32) [8]uint32 { + x0, x1, x2, x3 := j0, j1, j2, j3 + x4, x5, x6, x7 := key[0], key[1], key[2], key[3] + x8, x9, x10, x11 := key[4], key[5], key[6], key[7] + x12, x13, x14, x15 := nonce[0], nonce[1], nonce[2], nonce[3] + + for i := 0; i < 10; i++ { + x0, x4, x8, x12 = quarterRound(x0, x4, x8, x12) + x1, x5, x9, x13 = quarterRound(x1, x5, x9, x13) + x2, x6, x10, x14 = quarterRound(x2, x6, x10, x14) + x3, x7, x11, x15 = quarterRound(x3, x7, x11, x15) + + x0, x5, x10, x15 = quarterRound(x0, x5, x10, x15) + x1, x6, x11, x12 = quarterRound(x1, x6, x11, x12) + x2, x7, x8, x13 = quarterRound(x2, x7, x8, x13) + x3, x4, x9, x14 = quarterRound(x3, x4, x9, x14) + } + + var out [8]uint32 + out[0], out[1], out[2], out[3] = x0, x1, x2, x3 + out[4], out[5], out[6], out[7] = x12, x13, x14, x15 + return out +} diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_noasm.go b/vendor/golang.org/x/crypto/internal/chacha20/chacha_noasm.go new file mode 100644 index 0000000..47eac03 --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_noasm.go @@ -0,0 +1,16 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !arm64,!s390x arm64,!go1.11 gccgo appengine + +package chacha20 + +const ( + bufSize = 64 + haveAsm = false +) + +func (*Cipher) xorKeyStreamAsm(dst, src []byte) { + panic("not implemented") +} diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.go b/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.go new file mode 100644 index 0000000..aad645b --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,!gccgo,!appengine + +package chacha20 + +import ( + "golang.org/x/sys/cpu" +) + +var haveAsm = cpu.S390X.HasVX + +const bufSize = 256 + +// xorKeyStreamVX is an assembly implementation of XORKeyStream. It must only +// be called when the vector facility is available. +// Implementation in asm_s390x.s. +//go:noescape +func xorKeyStreamVX(dst, src []byte, key *[8]uint32, nonce *[3]uint32, counter *uint32, buf *[256]byte, len *int) + +func (c *Cipher) xorKeyStreamAsm(dst, src []byte) { + xorKeyStreamVX(dst, src, &c.key, &c.nonce, &c.counter, &c.buf, &c.len) +} + +// EXRL targets, DO NOT CALL! +func mvcSrcToBuf() +func mvcBufToDst() diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.s b/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.s new file mode 100644 index 0000000..57df404 --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_s390x.s @@ -0,0 +1,260 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,!gccgo,!appengine + +#include "go_asm.h" +#include "textflag.h" + +// This is an implementation of the ChaCha20 encryption algorithm as +// specified in RFC 7539. It uses vector instructions to compute +// 4 keystream blocks in parallel (256 bytes) which are then XORed +// with the bytes in the input slice. 
+ +GLOBL ·constants<>(SB), RODATA|NOPTR, $32 +// BSWAP: swap bytes in each 4-byte element +DATA ·constants<>+0x00(SB)/4, $0x03020100 +DATA ·constants<>+0x04(SB)/4, $0x07060504 +DATA ·constants<>+0x08(SB)/4, $0x0b0a0908 +DATA ·constants<>+0x0c(SB)/4, $0x0f0e0d0c +// J0: [j0, j1, j2, j3] +DATA ·constants<>+0x10(SB)/4, $0x61707865 +DATA ·constants<>+0x14(SB)/4, $0x3320646e +DATA ·constants<>+0x18(SB)/4, $0x79622d32 +DATA ·constants<>+0x1c(SB)/4, $0x6b206574 + +// EXRL targets: +TEXT ·mvcSrcToBuf(SB), NOFRAME|NOSPLIT, $0 + MVC $1, (R1), (R8) + RET + +TEXT ·mvcBufToDst(SB), NOFRAME|NOSPLIT, $0 + MVC $1, (R8), (R9) + RET + +#define BSWAP V5 +#define J0 V6 +#define KEY0 V7 +#define KEY1 V8 +#define NONCE V9 +#define CTR V10 +#define M0 V11 +#define M1 V12 +#define M2 V13 +#define M3 V14 +#define INC V15 +#define X0 V16 +#define X1 V17 +#define X2 V18 +#define X3 V19 +#define X4 V20 +#define X5 V21 +#define X6 V22 +#define X7 V23 +#define X8 V24 +#define X9 V25 +#define X10 V26 +#define X11 V27 +#define X12 V28 +#define X13 V29 +#define X14 V30 +#define X15 V31 + +#define NUM_ROUNDS 20 + +#define ROUND4(a0, a1, a2, a3, b0, b1, b2, b3, c0, c1, c2, c3, d0, d1, d2, d3) \ + VAF a1, a0, a0 \ + VAF b1, b0, b0 \ + VAF c1, c0, c0 \ + VAF d1, d0, d0 \ + VX a0, a2, a2 \ + VX b0, b2, b2 \ + VX c0, c2, c2 \ + VX d0, d2, d2 \ + VERLLF $16, a2, a2 \ + VERLLF $16, b2, b2 \ + VERLLF $16, c2, c2 \ + VERLLF $16, d2, d2 \ + VAF a2, a3, a3 \ + VAF b2, b3, b3 \ + VAF c2, c3, c3 \ + VAF d2, d3, d3 \ + VX a3, a1, a1 \ + VX b3, b1, b1 \ + VX c3, c1, c1 \ + VX d3, d1, d1 \ + VERLLF $12, a1, a1 \ + VERLLF $12, b1, b1 \ + VERLLF $12, c1, c1 \ + VERLLF $12, d1, d1 \ + VAF a1, a0, a0 \ + VAF b1, b0, b0 \ + VAF c1, c0, c0 \ + VAF d1, d0, d0 \ + VX a0, a2, a2 \ + VX b0, b2, b2 \ + VX c0, c2, c2 \ + VX d0, d2, d2 \ + VERLLF $8, a2, a2 \ + VERLLF $8, b2, b2 \ + VERLLF $8, c2, c2 \ + VERLLF $8, d2, d2 \ + VAF a2, a3, a3 \ + VAF b2, b3, b3 \ + VAF c2, c3, c3 \ + VAF d2, d3, d3 \ + VX a3, a1, a1 \ + VX b3, b1, b1 \ + VX c3, c1, c1 \ + VX d3, d1, d1 \ + VERLLF $7, a1, a1 \ + VERLLF $7, b1, b1 \ + VERLLF $7, c1, c1 \ + VERLLF $7, d1, d1 + +#define PERMUTE(mask, v0, v1, v2, v3) \ + VPERM v0, v0, mask, v0 \ + VPERM v1, v1, mask, v1 \ + VPERM v2, v2, mask, v2 \ + VPERM v3, v3, mask, v3 + +#define ADDV(x, v0, v1, v2, v3) \ + VAF x, v0, v0 \ + VAF x, v1, v1 \ + VAF x, v2, v2 \ + VAF x, v3, v3 + +#define XORV(off, dst, src, v0, v1, v2, v3) \ + VLM off(src), M0, M3 \ + PERMUTE(BSWAP, v0, v1, v2, v3) \ + VX v0, M0, M0 \ + VX v1, M1, M1 \ + VX v2, M2, M2 \ + VX v3, M3, M3 \ + VSTM M0, M3, off(dst) + +#define SHUFFLE(a, b, c, d, t, u, v, w) \ + VMRHF a, c, t \ // t = {a[0], c[0], a[1], c[1]} + VMRHF b, d, u \ // u = {b[0], d[0], b[1], d[1]} + VMRLF a, c, v \ // v = {a[2], c[2], a[3], c[3]} + VMRLF b, d, w \ // w = {b[2], d[2], b[3], d[3]} + VMRHF t, u, a \ // a = {a[0], b[0], c[0], d[0]} + VMRLF t, u, b \ // b = {a[1], b[1], c[1], d[1]} + VMRHF v, w, c \ // c = {a[2], b[2], c[2], d[2]} + VMRLF v, w, d // d = {a[3], b[3], c[3], d[3]} + +// func xorKeyStreamVX(dst, src []byte, key *[8]uint32, nonce *[3]uint32, counter *uint32, buf *[256]byte, len *int) +TEXT ·xorKeyStreamVX(SB), NOSPLIT, $0 + MOVD $·constants<>(SB), R1 + MOVD dst+0(FP), R2 // R2=&dst[0] + LMG src+24(FP), R3, R4 // R3=&src[0] R4=len(src) + MOVD key+48(FP), R5 // R5=key + MOVD nonce+56(FP), R6 // R6=nonce + MOVD counter+64(FP), R7 // R7=counter + MOVD buf+72(FP), R8 // R8=buf + MOVD len+80(FP), R9 // R9=len + + // load BSWAP and J0 + VLM (R1), BSWAP, J0 + + // set up tail buffer + 
ADD $-1, R4, R12 + MOVBZ R12, R12 + CMPUBEQ R12, $255, aligned + MOVD R4, R1 + AND $~255, R1 + MOVD $(R3)(R1*1), R1 + EXRL $·mvcSrcToBuf(SB), R12 + MOVD $255, R0 + SUB R12, R0 + MOVD R0, (R9) // update len + +aligned: + // setup + MOVD $95, R0 + VLM (R5), KEY0, KEY1 + VLL R0, (R6), NONCE + VZERO M0 + VLEIB $7, $32, M0 + VSRLB M0, NONCE, NONCE + + // initialize counter values + VLREPF (R7), CTR + VZERO INC + VLEIF $1, $1, INC + VLEIF $2, $2, INC + VLEIF $3, $3, INC + VAF INC, CTR, CTR + VREPIF $4, INC + +chacha: + VREPF $0, J0, X0 + VREPF $1, J0, X1 + VREPF $2, J0, X2 + VREPF $3, J0, X3 + VREPF $0, KEY0, X4 + VREPF $1, KEY0, X5 + VREPF $2, KEY0, X6 + VREPF $3, KEY0, X7 + VREPF $0, KEY1, X8 + VREPF $1, KEY1, X9 + VREPF $2, KEY1, X10 + VREPF $3, KEY1, X11 + VLR CTR, X12 + VREPF $1, NONCE, X13 + VREPF $2, NONCE, X14 + VREPF $3, NONCE, X15 + + MOVD $(NUM_ROUNDS/2), R1 + +loop: + ROUND4(X0, X4, X12, X8, X1, X5, X13, X9, X2, X6, X14, X10, X3, X7, X15, X11) + ROUND4(X0, X5, X15, X10, X1, X6, X12, X11, X2, X7, X13, X8, X3, X4, X14, X9) + + ADD $-1, R1 + BNE loop + + // decrement length + ADD $-256, R4 + BLT tail + +continue: + // rearrange vectors + SHUFFLE(X0, X1, X2, X3, M0, M1, M2, M3) + ADDV(J0, X0, X1, X2, X3) + SHUFFLE(X4, X5, X6, X7, M0, M1, M2, M3) + ADDV(KEY0, X4, X5, X6, X7) + SHUFFLE(X8, X9, X10, X11, M0, M1, M2, M3) + ADDV(KEY1, X8, X9, X10, X11) + VAF CTR, X12, X12 + SHUFFLE(X12, X13, X14, X15, M0, M1, M2, M3) + ADDV(NONCE, X12, X13, X14, X15) + + // increment counters + VAF INC, CTR, CTR + + // xor keystream with plaintext + XORV(0*64, R2, R3, X0, X4, X8, X12) + XORV(1*64, R2, R3, X1, X5, X9, X13) + XORV(2*64, R2, R3, X2, X6, X10, X14) + XORV(3*64, R2, R3, X3, X7, X11, X15) + + // increment pointers + MOVD $256(R2), R2 + MOVD $256(R3), R3 + + CMPBNE R4, $0, chacha + CMPUBEQ R12, $255, return + EXRL $·mvcBufToDst(SB), R12 // len was updated during setup + +return: + VSTEF $0, CTR, (R7) + RET + +tail: + MOVD R2, R9 + MOVD R8, R2 + MOVD R8, R3 + MOVD $0, R4 + JMP continue diff --git a/vendor/golang.org/x/crypto/internal/chacha20/chacha_test.go b/vendor/golang.org/x/crypto/internal/chacha20/chacha_test.go new file mode 100644 index 0000000..9a7a099 --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/chacha_test.go @@ -0,0 +1,225 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package chacha20 + +import ( + "encoding/binary" + "encoding/hex" + "fmt" + "math/rand" + "testing" +) + +func TestCore(t *testing.T) { + // This is just a smoke test that checks the example from + // https://tools.ietf.org/html/rfc7539#section-2.3.2. The + // chacha20poly1305 package contains much more extensive tests of this + // code. + var key [32]byte + for i := range key { + key[i] = byte(i) + } + + var input [16]byte + input[0] = 1 + input[7] = 9 + input[11] = 0x4a + + var out [64]byte + XORKeyStream(out[:], out[:], &input, &key) + const expected = "10f1e7e4d13b5915500fdd1fa32071c4c7d1f4c733c068030422aa9ac3d46c4ed2826446079faa0914c2d705d98b02a2b5129cd1de164eb9cbd083e8a2503c4e" + if result := hex.EncodeToString(out[:]); result != expected { + t.Errorf("wanted %x but got %x", expected, result) + } +} + +// Run the test cases with the input and output in different buffers. 
+func TestNoOverlap(t *testing.T) { + for _, c := range testVectors { + s := New(c.key, c.nonce) + input, err := hex.DecodeString(c.input) + if err != nil { + t.Fatalf("cannot decode input %#v: %v", c.input, err) + } + output := make([]byte, c.length) + s.XORKeyStream(output, input) + got := hex.EncodeToString(output) + if got != c.output { + t.Errorf("length=%v: got %#v, want %#v", c.length, got, c.output) + } + } +} + +// Run the test cases with the input and output overlapping entirely. +func TestOverlap(t *testing.T) { + for _, c := range testVectors { + s := New(c.key, c.nonce) + data, err := hex.DecodeString(c.input) + if err != nil { + t.Fatalf("cannot decode input %#v: %v", c.input, err) + } + s.XORKeyStream(data, data) + got := hex.EncodeToString(data) + if got != c.output { + t.Errorf("length=%v: got %#v, want %#v", c.length, got, c.output) + } + } +} + +// Run the test cases with various source and destination offsets. +func TestUnaligned(t *testing.T) { + const max = 8 // max offset (+1) to test + for _, c := range testVectors { + input := make([]byte, c.length+max) + output := make([]byte, c.length+max) + for i := 0; i < max; i++ { // input offsets + for j := 0; j < max; j++ { // output offsets + s := New(c.key, c.nonce) + + input := input[i : i+c.length] + output := output[j : j+c.length] + + data, err := hex.DecodeString(c.input) + if err != nil { + t.Fatalf("cannot decode input %#v: %v", c.input, err) + } + copy(input, data) + s.XORKeyStream(output, input) + got := hex.EncodeToString(output) + if got != c.output { + t.Errorf("length=%v: got %#v, want %#v", c.length, got, c.output) + } + } + } + } +} + +// Run the test cases by calling XORKeyStream multiple times. +func TestStep(t *testing.T) { + // wide range of step sizes to try and hit edge cases + steps := [...]int{1, 3, 4, 7, 8, 17, 24, 30, 64, 256} + rnd := rand.New(rand.NewSource(123)) + for _, c := range testVectors { + s := New(c.key, c.nonce) + input, err := hex.DecodeString(c.input) + if err != nil { + t.Fatalf("cannot decode input %#v: %v", c.input, err) + } + output := make([]byte, c.length) + + // step through the buffers + i, step := 0, steps[rnd.Intn(len(steps))] + for i+step < c.length { + s.XORKeyStream(output[i:i+step], input[i:i+step]) + if i+step < c.length && output[i+step] != 0 { + t.Errorf("length=%v, i=%v, step=%v: output overwritten", c.length, i, step) + } + i += step + step = steps[rnd.Intn(len(steps))] + } + // finish the encryption + s.XORKeyStream(output[i:], input[i:]) + + got := hex.EncodeToString(output) + if got != c.output { + t.Errorf("length=%v: got %#v, want %#v", c.length, got, c.output) + } + } +} + +// Test that Advance() discards bytes until a block boundary is hit. 
+func TestAdvance(t *testing.T) { + for _, c := range testVectors { + for i := 0; i < 63; i++ { + s := New(c.key, c.nonce) + z := New(c.key, c.nonce) + input, err := hex.DecodeString(c.input) + if err != nil { + t.Fatalf("cannot decode input %#v: %v", c.input, err) + } + zeros, discard := make([]byte, 64), make([]byte, 64) + so, zo := make([]byte, c.length), make([]byte, c.length) + for j := 0; j < c.length; j += 64 { + lim := j + i + if lim > c.length { + lim = c.length + } + s.XORKeyStream(so[j:lim], input[j:lim]) + // calling s.Advance() multiple times should have no effect + for k := 0; k < i%3+1; k++ { + s.Advance() + } + z.XORKeyStream(zo[j:lim], input[j:lim]) + if lim < c.length { + end := 64 - i + if c.length-lim < end { + end = c.length - lim + } + z.XORKeyStream(discard[:], zeros[:end]) + } + } + + got := hex.EncodeToString(so) + want := hex.EncodeToString(zo) + if got != want { + t.Errorf("length=%v: got %#v, want %#v", c.length, got, want) + } + } + } +} + +func BenchmarkChaCha20(b *testing.B) { + sizes := []int{32, 63, 64, 256, 1024, 1350, 65536} + for _, size := range sizes { + s := size + b.Run(fmt.Sprint(s), func(b *testing.B) { + k := [32]byte{} + c := [16]byte{} + src := make([]byte, s) + dst := make([]byte, s) + b.SetBytes(int64(s)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + XORKeyStream(dst, src, &c, &k) + } + }) + } +} + +func TestHChaCha20(t *testing.T) { + // See draft-paragon-paseto-rfc-00 §7.2.1. + key := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f} + nonce := []byte{0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a, + 0x00, 0x00, 0x00, 0x00, 0x31, 0x41, 0x59, 0x27} + expected := []byte{0x82, 0x41, 0x3b, 0x42, 0x27, 0xb2, 0x7b, 0xfe, + 0xd3, 0x0e, 0x42, 0x50, 0x8a, 0x87, 0x7d, 0x73, + 0xa0, 0xf9, 0xe4, 0xd5, 0x8a, 0x74, 0xa8, 0x53, + 0xc1, 0x2e, 0xc4, 0x13, 0x26, 0xd3, 0xec, 0xdc, + } + result := HChaCha20(&[8]uint32{ + binary.LittleEndian.Uint32(key[0:4]), + binary.LittleEndian.Uint32(key[4:8]), + binary.LittleEndian.Uint32(key[8:12]), + binary.LittleEndian.Uint32(key[12:16]), + binary.LittleEndian.Uint32(key[16:20]), + binary.LittleEndian.Uint32(key[20:24]), + binary.LittleEndian.Uint32(key[24:28]), + binary.LittleEndian.Uint32(key[28:32]), + }, &[4]uint32{ + binary.LittleEndian.Uint32(nonce[0:4]), + binary.LittleEndian.Uint32(nonce[4:8]), + binary.LittleEndian.Uint32(nonce[8:12]), + binary.LittleEndian.Uint32(nonce[12:16]), + }) + for i := 0; i < 8; i++ { + want := binary.LittleEndian.Uint32(expected[i*4 : (i+1)*4]) + if got := result[i]; got != want { + t.Errorf("word %d incorrect: want 0x%x, got 0x%x", i, want, got) + } + } +} diff --git a/vendor/golang.org/x/crypto/internal/chacha20/vectors_test.go b/vendor/golang.org/x/crypto/internal/chacha20/vectors_test.go new file mode 100644 index 0000000..b441fbd --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/vectors_test.go @@ -0,0 +1,578 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package chacha20 + +// Test vectors for ChaCha20 implementations. 
+ +type testCase struct { + length int + nonce [3]uint32 + key [8]uint32 + input string + output string +} + +var testVectors = [...]testCase{ + { + length: 0, + nonce: [3]uint32{0x94d13317, 0x6b6a2b3, 0x3ffe0036}, + key: [8]uint32{0x9da8a3b6, 0x3abf4ae6, 0xa2f19cae, 0x1068c707, 0x72e4801e, 0xce165d92, 0x61e7028f, 0x82ac3d57}, + input: "", + output: "", + }, + { + length: 5, + nonce: [3]uint32{0x469fadd, 0xee3fcc1e, 0x45cf77b0}, + key: [8]uint32{0x3477e02b, 0x45bf809f, 0x27f4a1fa, 0xdb901de8, 0xd8a190dc, 0x1d2c21d4, 0x87bdf2ac, 0xdfbf0000}, + input: "23dbad0780", + output: "415a3e498d", + }, + { + length: 9, + nonce: [3]uint32{0x512a6b49, 0x8df9af6d, 0x5336a2a5}, + key: [8]uint32{0xe9124c25, 0x4fd1a373, 0x7945f7bb, 0xeed5f064, 0x29c4185d, 0x3c9acf13, 0x4c94a367, 0x7c2c2c53}, + input: "f518831fab69c054a6", + output: "cfe40f63f81391484b", + }, + { + length: 12, + nonce: [3]uint32{0xca697a9e, 0x6b2f6717, 0xb7859220}, + key: [8]uint32{0xfc825020, 0x5ca4410b, 0x7d5285d0, 0x160a1c9d, 0x15470b41, 0x3634742a, 0xe64aa7fa, 0xca0be67a}, + input: "805fad1d62951537aeed9859", + output: "47bd303f93c3ce04bce44710", + }, + { + length: 14, + nonce: [3]uint32{0xcded3db3, 0x35770a7f, 0x6aede9b}, + key: [8]uint32{0x44632def, 0xa5e420a7, 0xfc12a8f, 0x63b79a15, 0x337de314, 0xb82fbf16, 0x3104bc57, 0x677c9227}, + input: "f4e8a7577affb841cf48392cf5df", + output: "f445c0fb7e3d5bfdab47090ddee6", + }, + { + length: 15, + nonce: [3]uint32{0x348a50b1, 0x4acc9280, 0x8d6014ce}, + key: [8]uint32{0x34bd31a8, 0x2808f47e, 0x9d8b19f9, 0x4df59683, 0x31584348, 0x34a74a45, 0xde174a2, 0x29d4c7dc}, + input: "1179b71ec4dc34bd812f742b5a0b27", + output: "cc7f80f333c647d6e592e4f7ecc834", + }, + { + length: 20, + nonce: [3]uint32{0xc8754703, 0x9188c521, 0xac8ce8a6}, + key: [8]uint32{0xe93c79ed, 0xce89162b, 0x116a8366, 0xecdc657f, 0x5bc81d98, 0xff5d2f52, 0x171f3ebb, 0x50773f2f}, + input: "7bd94943d55392d0311c413ac755ce0347872ba3", + output: "c43665de15136af232675d9d5dbbeca77f3c542a", + }, + { + length: 21, + nonce: [3]uint32{0x9a8655cb, 0x6e9d6ea5, 0x5dad705e}, + key: [8]uint32{0x3542d5b3, 0x1f7bfd8f, 0x1038abf8, 0x7214e8ec, 0xedd05693, 0x60e663bd, 0xe8e5d506, 0xeea923a2}, + input: "1505f669acc5ad9aaa0e993ba8c24e744d13655e1f", + output: "26cad1ccf4cf4c49b267ab7be10bc2ffa3ba66bc86", + }, + { + length: 25, + nonce: [3]uint32{0x3f202ca4, 0x63fc86, 0x7260a10e}, + key: [8]uint32{0xe28ab1d6, 0xe83b3d47, 0x671271ca, 0xb977bcff, 0xa2f64476, 0x311d79b4, 0x180d91d0, 0xec1a6e0c}, + input: "20070523ddb4ebf0d5f20fd95aacf47fb269ebadda6879638a", + output: "5ce972624cb2b7e7c28f5b865ba08c887911b4f5e361830a4b", + }, + { + length: 31, + nonce: [3]uint32{0xcf8671ea, 0x8d72df2f, 0x8b5a538a}, + key: [8]uint32{0xe46ca2bb, 0xd06ab5ef, 0xb0e2966b, 0x54dd0c2d, 0x8815d89a, 0x426c30a9, 0x15b0f1e, 0x254bae75}, + input: "d10f8050c1186f92e26f351db36490d82ea677498562d8d4f487a0a4058adf", + output: "f30c11bc553b2baf6870760d735680897c9fee168f976b2a33ef395fdbd4fc", + }, + { + length: 34, + nonce: [3]uint32{0xd1be983a, 0xf5aa389, 0xfa26c7e1}, + key: [8]uint32{0x795c6da7, 0x8cb1aadc, 0xa042359a, 0x95ea2e27, 0x128253c4, 0xaabc592f, 0x391e810, 0xf641d971}, + input: "e88dc380b7d45a4a762c34f310199587867516fac4a2634022b96a9f862e17714d17", + output: "aac98ba3821399e55a5eab5862f7f1bfc63637d700125878c2b17151f306c9aec80e", + }, + { + length: 34, + nonce: [3]uint32{0x98f5f4b8, 0x3f181d73, 0x5bf4572e}, + key: [8]uint32{0xa86f8cf7, 0x8db41a2b, 0xe0e03156, 0x3dad8a59, 0xb3e4d1ba, 0x75f6fb38, 0xdb94709d, 0xc3db34f3}, + input: 
"b0fcf0a731e2902787309697db2384e1cda07b60002c95355a4e261fb601f034b2b3", + output: "b6c8c40ddda029a70a21c25f724cc90c43f6edc407055683572a9f5e9690a1d571bb", + }, + { + length: 40, + nonce: [3]uint32{0x7289ae18, 0x7ebe7e50, 0x7d819176}, + key: [8]uint32{0x336c07a0, 0x4a2ea22b, 0xa8872f46, 0xa47b5e28, 0xbe645e3f, 0x371c6591, 0xd2dc237a, 0x92c59580}, + input: "cf9ec6fa3f0a67488adb5598a48ed916729a1e416d206f9675dfa9fd6585793f274f363bbca348b3", + output: "bb7ed8a199aa329dcd18736ce705804ffae8c3e2ba341ae907f94f4672d57175df25d28e16962fd6", + }, + { + length: 47, + nonce: [3]uint32{0xfd3181de, 0x8b193e26, 0xbebc799}, + key: [8]uint32{0x781a4c2e, 0x27ab55e2, 0x814aaf43, 0xa0bab01, 0x9de62ce0, 0x472b03d2, 0xdfee18e8, 0x8b855b93}, + input: "be9a8211d68642310724eda3dd02f63fcc03a101d9564b0ecee6f4ecececcb0099bb26aabee46b1a2c0416b4ac269e", + output: "3152f317cf3626e26d02cff9392619ea02e22115b6d43d6dd2e1177c6bb3cb71c4a90c3d13b63c43e03605ec98d9a1", + }, + { + length: 51, + nonce: [3]uint32{0x27b02ff6, 0xa510613e, 0x218b22d8}, + key: [8]uint32{0x62fc7732, 0xcef06cf4, 0xa4f45ed5, 0x2f96654f, 0x9f2b956e, 0x42b572f4, 0x5bb59c86, 0x35e4784f}, + input: "495343a257250f8970f791f493b89d10edba89806b88aaaeb3b5aefd078ba7b765746164bce653f5e6c096dd8499fb76d97d77", + output: "62c01f426581551b5b16e8b1a3a23c86bcdd189ab695dbea4bf811a14741e6ebbb0261ef8ae47778a6be7e0ef11697b891412c", + }, + { + length: 52, + nonce: [3]uint32{0x9db97a63, 0xff50248, 0xf2b6df56}, + key: [8]uint32{0x2b657a8f, 0xfe67575d, 0xaa56d261, 0x30179a97, 0xaefcfff1, 0x9b8eb698, 0x1efe3756, 0xb4ea450c}, + input: "e37fbbd3fe37ce5a99d18e5dcb0dafe7adf8b596528708f7d310569ab44c251377f7363a390c653965e0cb8dd217464b3d8f79c1", + output: "b07d4c56fb83a49e8d9fc992e1230bb5086fecbd828cdbc7353f61b1a3cec0baf9c5bf67c9da06b49469a999ba3b37916ec125be", + }, + { + length: 56, + nonce: [3]uint32{0xc1dfec38, 0x7d7503d3, 0x9a3e3c66}, + key: [8]uint32{0x8614d8e7, 0xde9b0413, 0x2a48b4fa, 0xcbbde744, 0xad5ddc5e, 0x9144d83e, 0x74d9d617, 0x230bdb45}, + input: "9efab614388a7d99102bcc901e3623d31fd9dd9d3c3338d086f69c13e7aa8653f9ce76e722e5a6a8cbbbee067a6cb9c59aa9b4b4c518bbed", + output: "829d9fe74b7a4b3aeb04580b41d38a156ffbebba5d49ad55d1b0370f25abcd41221304941ad8e0d5095e15fbd839295bf1e7a509a807c005", + }, + { + length: 63, + nonce: [3]uint32{0xc7e2521c, 0x795499b4, 0xc7946cd7}, + key: [8]uint32{0x53fce774, 0x9a4b53bf, 0x5f614134, 0xa3c39414, 0xa8a07c72, 0x93242311, 0x43aeec99, 0x216deb5a}, + input: "03b5d7ab4bd8c9a4f47ec122cbeb595bd1a0d58de3bb3dcc66c4e288f29622d6863e846fdfb27a90740feb03a4761c6017250bc0f129cc65d19680ab9d6970", + output: "83db55d9eb441a909268311da67d432c732ad6bda0a0dae710d1bce040b91269deb558a68ced4aa5760ca0b9c5efc84e725f297bdbdadbc368bea4e20261c5", + }, + { + length: 66, + nonce: [3]uint32{0x1d41f0a1, 0x7c3b7778, 0x6991eea5}, + key: [8]uint32{0x1f213e39, 0x56261d14, 0x15fc7c2c, 0x21feccc5, 0xa95684c5, 0x26600506, 0xdadcc06b, 0xf2c810b0}, + input: "2f4da518578a2a82c8c855155645838ca431cdf35d9f8562f256746150580ca1c74f79b3e9ae78224573da8b47a4b3cc63fbed8d4e831a6b4d796c124d87c78a66e5", + output: "6fc086ded3d1d5566577ccd9971e713c1126ec52d3894f09ab701116c7b5abda959cbb207f4468eb7b6a6b7e1b6d2bc6047f337499d63522f256ee751b91f84f70b6", + }, + { + length: 72, + nonce: [3]uint32{0x749f022c, 0xa021dab0, 0x648c2252}, + key: [8]uint32{0xa1ace7b0, 0x567a0ea1, 0x52af13b9, 0xcba30c08, 0xe07a6d74, 0x5c3bca39, 0x85b2ac07, 0x3b5afc0}, + input: "55739a1738b4a4028021b21549e2661b050e3d830ad9a56f57bfcaca3e0f72051b9ca92411840061083e5e45124d8425061ab26c632ac7852118411ac80026da946357c630f27225", + output: 
"8051bf98f8f2617e159ba205a9342ab700973dd045e09321805eed89e419f37f3211c5aa82666b9a097270babc26d3bfe0c990fe245ae982a31f23cfbf6156b5c8cfb77f340e2bf5", + }, + { + length: 74, + nonce: [3]uint32{0x23c16ba8, 0x9fd1cd4e, 0xcb224ecb}, + key: [8]uint32{0xb694404a, 0x86b5f198, 0x10fd1bff, 0x13a84e54, 0xab21e509, 0x7443d764, 0x931b3f1, 0x686e87f2}, + input: "7ffd8d5970fdee613eeae531d1c673fd379d64b0b6bfedd010433b080b561038f7f266fa7e15d7d8e10d23f21b9d7724bb200b0f58b9250483e784f4a6555d09c234e8d1c549ebb76a8e", + output: "c173617e36ea20ce04c490803b2098bd4f1ff4b31fdca1c51c6475ade83892c5f12731652d5774631d55ae2938617a5e9462bb6083328a23a4fba52de50ca9075586f2efc22aae56e3a8", + }, + { + length: 81, + nonce: [3]uint32{0xd65f6f29, 0xf3f76219, 0x9a033c9e}, + key: [8]uint32{0xeba017c4, 0x69e0421a, 0x449e2317, 0x29858a11, 0xd0c8523a, 0xa8b0c9a2, 0xab2ca84, 0xaf011a45}, + input: "7a5766097562361cfaeac5b8a6175e1ceeeda30aec5e354df4302e7700ea48c505da9fdc57874da879480ecfea9c6c8904f330cbac5e27f296b33b667fea483348f031bef761d0b8e318a8132caa7a5943", + output: "5e9fbf427c4f0fcf44db3180ea47d923f52bee933a985543622eff70e2b3f5c673be8e05cd7acbcadd8593da454c60d5f19131e61730a73b9c0f87e3921ee5a591a086446b2a0fadd8a4bc7b49a8e83764", + }, + { + length: 88, + nonce: [3]uint32{0xc70ee56e, 0xe58ec41, 0xafd96f61}, + key: [8]uint32{0x172af2bb, 0x9085d27c, 0x8ca2c44d, 0x8aa148da, 0x290c88b0, 0x88187439, 0x18d54781, 0x633f2cce}, + input: "0777c02a2900052d9b79f38387d2c234108a2ad066cbf7df6ea6acc5a3f86b3d6156abb5b18ad4ecf79e171383a1897e64a95ecdbba6aa3f1c7c12fe31283629ff547cb113a826cb348a7c10507cc645fa2eb97b5f22e44d", + output: "368c90db3464ba488340b1960e9f75d2c3b5b392bdd5622ff70e85e6d00b1e6a996ba3978ce64f8f2b5a9a90576c8f32b908233e15d2f443cccc98af87745c93c8056603407a3fb37ce0c1f8ab6384cc37c69c98bfecf337", + }, + { + length: 92, + nonce: [3]uint32{0x3006da79, 0x2748051d, 0x72c17cdc}, + key: [8]uint32{0x60cdb7e8, 0xcecbe928, 0xe19b7ab9, 0x30d61537, 0xa0fbc199, 0x897738bf, 0xdd7705a9, 0x3e5c1763}, + input: "cf2dccbcfd781c030376f9019d841ca701cb54a1791f50f50bee0c2bf178182603a4712b5916eebd5001595c3f48283f1ba097ce2e7bf94f2b7fa957ce776e14a7a570093be2de386ececbd6525e72c5970c3e7d35974b8f0b831fbc", + output: "7c92b8c75e6eb8675229660cedcb10334965a7737cde7336512d9eff846c670d1fa8f8a427ea4f43e66be609466711fd241ccff7d3f049bda3a2394e5aa2108abc80e859611dbd3c7ba2d044a3ececa4980dd65e823dd110fea7a548", + }, + { + length: 96, + nonce: [3]uint32{0xfc0fb1ee, 0x414cc60a, 0x4144bd67}, + key: [8]uint32{0x103291c6, 0x822b03b6, 0xd29ab548, 0xc88f3efe, 0x6936056a, 0x28aaa61f, 0xa0df7858, 0xdaa23519}, + input: "e08a8949a1bfd6a8c1186b431b6ad59b106ae5552821db69b66dc03fbc4a2b970dcf9c7da4f5082572bc978f8ee27c554c8884b5a450b36d70453348cd6cac9b80c9900cf98a4088803f564bb1281d24507b2f61ba737c8145c71b50eb0f6dfc", + output: "73d043acf9dcd758c7299bd1fd1f4100d61ff77d339e279bfbe6f9233b0d9afa24992a9c1c7a19545d469fdfb369c201322f6fe8c633fcdcffef31032bfb41b9fb55506e301d049fd447d61f974a713debeaed886f486a98efd3d6c3f25fbb30", + }, + { + length: 103, + nonce: [3]uint32{0xc2030c57, 0x1e3b59e1, 0x607ede1a}, + key: [8]uint32{0xd1bac2b5, 0x56a94583, 0x628b479b, 0x3056a51e, 0x69bf8f8f, 0x2df1e03d, 0x4b9d48d2, 0x7df5c379}, + input: "a0c302120111f00c99cff7d839cdf43207a7e2f73d5dd888daa00d84254db0e621a72493480420c9c61ce1cfc54188ff525bb7a0e6c1cd298f598973a1de9fd2d79a21401588775b0adbe261ba4e4f79a894d1bd5835b5924d09ba32ef03cb4bc0bd6eb4ee4274", + output: 
"bc714bd7d8399beedc238f7ddeb0b99d94ad6bf8bf54548a3e4b90a76aa5673c91db6482591e8ff9126e1412bce56d52a4c2d89f22c29858e24482f177abacef428d0ae1779f0ae0778c44f9f02fe474da93c35c615b5fad29eca697978891f426714441317f2b", + }, + { + length: 109, + nonce: [3]uint32{0xf44dc81f, 0xcf6e03e7, 0xf4966796}, + key: [8]uint32{0xd7b12f4, 0x683f4789, 0xc7828fb4, 0x820fc6a0, 0xc51231eb, 0xe46716d7, 0x4036ef93, 0x26afb96c}, + input: "ebce290c03c7cb65d053918ba2da0256dc700b337b8c124c43d5da4746888ca78387feea1a3a72c5e249d3d93a1907977dd4009699a15be5da2ca89c60e971c8df5d4553b61b710d92d3453dea595a0e45ae1e093f02ea70608b7b32f9c6aadc661a052f9b14c03ea0117a3192", + output: "cbb8c4ec827a1123c1141327c594d4a8b0b4a74b0008115bb9ec4275db3a8e5529a4f145551af29c473764cbaa0794b2d1eb1066f32a07fd39f5f3fe51498c46fba5310ae7c3664571d6a851e673ded3badc25e426f9c6038724779aa6d2d8ec3f54865f7df612e25575635ab5", + }, + { + length: 115, + nonce: [3]uint32{0x8d3e461b, 0x7e05c360, 0x3bbbafdd}, + key: [8]uint32{0xf9b917c9, 0x9af89bf7, 0x7decbbc9, 0xe7e5ea7b, 0x9b4aab55, 0x90eff6be, 0xa19b6d90, 0xb9f69b1a}, + input: "275c97de985aa265332065ccce437770b110737a77dea62137a5d6cb62e9cb8b504d34334a58a71aba153d9b86f21377467b2fafaf54829331bf2ce0009acb37842b7a4b5f152aab650a393153f1ed479abc21f7a6fe205b9852ff2f7f3a0e3bfe76ca9770efada4e29e06db0569a99d08648e", + output: "b225aa01d5c438d572deaea51ac12c0c694e0f9dc0ed2884a98e5e2943d52bb4692d7d8f12486de12d0559087e8c09e4f2d5b74e350838aa2bd36023032ccbcae56be75c6a17c59583d81a1fd60e305af5053ac89f753c9347f3040e48405232dc8428c49dcb3d9b899145f5b3bc955f34dbbe", + }, + { + length: 119, + nonce: [3]uint32{0x871f33f5, 0xe4fee3ba, 0xcb8c1e93}, + key: [8]uint32{0x33124903, 0x7e0287e5, 0xe9d6988f, 0x1962405f, 0x5f21c1b5, 0x2ac695e6, 0x46b200c9, 0x9fda98ba}, + input: "ceda15cfffd53ccebe31b5886facd863f6166e02ec65f46f54148860a5c2702e34fd204d881af6055952690cd1ffa8ba4d0e297cc165d981b371932adb935398c987baff335108c5e77f2e5dd5e1ca9a017bc376cbdbe3c0f45e079c212e8986b438444e79cd37927c1479f45c9e75b0076cc9f8679011", + output: "a3f1c3f885583b999c85cd118e2ababfa5a2de0c8eb28aacc161b1efee89d8de36ddeb584174c0e92011b8d667cb64009049976082072e6262933dbf7b14839805e1face375b7cbb54f9828ba1ed8aa55634ec5d72b6351feff4d77a3a22b34203b02e096f5e5f9ae9ad6a9dd16c57ce6d94dcc8873d18", + }, + { + length: 120, + nonce: [3]uint32{0xef553ce8, 0xdfe120ea, 0x9a047e3a}, + key: [8]uint32{0xbef479c1, 0x59554f8b, 0xbf97f089, 0x52316f1e, 0x141e428, 0xff26dc04, 0xe10c8f57, 0xa7568a59}, + input: "799bb2d634406753416b3a2b67513293a0b3496ef5b2d019758dedaaac2edd72502fc4a375b3f0d4237bc16b0e3d47e7ddc315c6aef3a23fcae2eb3a6083bc7ac4fd1b5bf0025cc1cb266b40234b77db762c747d3a7b27956cf3a4cf72320fb60c0d0713fa60b37a6cb5b21a599e79d0f06a5b7201aeb5d2", + output: "e84dfb3dbaac364085497aeabd197db852d3140c0c07f5f10e5c144c1fe26a50a9877649e88c6fe04283f4b7590a8d0d042ef577693f76f706e31c4979437590fe0ab03d89afb089d1be50ae173ea5458810372838eceac53bf4bac792735d8149e548efb432e236da92bf3168bbcf36f644c23efb478a4e", + }, + { + length: 123, + nonce: [3]uint32{0xd98124a0, 0x78cd80aa, 0x3dc55cfc}, + key: [8]uint32{0x2286e41, 0xf13e38e3, 0xf735476b, 0x33c44bfc, 0xd7978797, 0x4a9c4595, 0x6080413, 0x1299fdd8}, + input: "b2d060bd173955f44ee01b8bfcf0a6fad017c3517e4e8c8da728379f6d54471c955615e2b1effe4ce3d0139df225223c361be1cac416ade10a749c5da324563696dae8272577e44e8588cd5306bff0bfbdb32af3ac7cbc78be24b51baf4d5e47cf8f1d6b0a63ed9359da45c3e7297b2314028848f5816feab885e2", + output: 
"ffa4aa66dd5d39694ae64696bfa96f771accef68f195456ad815751e25c47ed4f27b436f1b3e3fcaa3e0d04133b53559c100cd633ced3d4321fc56225c85d2443727bce40434455aa4c1f3e6768c0fe58ad88b3a928313d41a7629f1ce874d2c8bcf822ebdaebfd9d95a31bb62daab5385eb8eefe026e8cbf1ff7a", + }, + { + length: 127, + nonce: [3]uint32{0x53106b0f, 0xdf11fd81, 0x69d1b6f3}, + key: [8]uint32{0x736b138, 0x55cde194, 0xf8273c1, 0xf7c268e6, 0x61362bd5, 0xbb3cb455, 0x44d3c9fc, 0x7d56d3fd}, + input: "4f0171d7309493a349530940feece3c6200693f9cff38924114d53f723d090fffa3c80731b5ca989d3e924d1fa14266632cb9ab879e1a36df22dc9f8d1dadea229db72fded0c42187c38b9fa263c20e5fb5b4aa80eb90e8616e36d9b8c613b371c402343823184ecad3532058a46cf9e7ea5a9ecad043ac3028cbcc3f36d32", + output: "88c773ff34b23e691e14018ba1b2bd48a4a6979b377eb0d68336ce6192dcd5177e6b4f1c4bea2df90af56b35fe2a1d6279d253c0194dcbca9bf136f92d69165b216e4c9d1ce6b3fbe40c71e32c3f4088de352732d0e2bad9c16fd0bb9bde3d6c30257ce063432d09f19da79d49aa7641124a6c9e3f09449e911edbae11a053", + }, + { + length: 130, + nonce: [3]uint32{0x5e90ffbd, 0xa898f173, 0x269f9a88}, + key: [8]uint32{0x5244e05f, 0xf9adbe9b, 0x9e9f54ac, 0x23460046, 0x6782cdea, 0xba982c96, 0xc721506b, 0xed10f7e3}, + input: "8f8d9e18d3212bd20b96d75c06d1a63622fd83d13f79d542e45996135368772ea81511302a0d87e246dd346314cfe019bae8a5c97f567f12d82aca98dfea397c6a47dd0c419f1c609d9c52dcfcbe7eee68b2635954206ed592b7081442ce9ce3187d10ccd41cc856fb924b011f817c676c9419f52a2938c7af5f76755a75eb065411", + output: "4e130c5df384b9c3c84aa38a744260735e93783da0337ade99f777e692c5ea276ac4cc65880b4ae9c3b96888760fdddb74bc2e2694bedf1ee6f14619c8015f951ba81b274b466e318d09defe80bdbed57bc213ac4631d2eb14c8e348181d60f6295ceee1e9231ae047830ef4778ff66146621b76974773b5d11c8e17a476450f46ef", + }, + { + length: 130, + nonce: [3]uint32{0x308e39e8, 0x9aa4f14f, 0xf511db96}, + key: [8]uint32{0x833b5219, 0x4b82e588, 0x4b2d652c, 0x7c8f6ed7, 0xfe4be863, 0x9d3a50e5, 0xb888099b, 0x9f8d1968}, + input: "30d2379dd3ceae612182576f9acf6de505ab5a9445fe1a86ae75c5c29429e11c50fd9ec657b29b173a3763b1e171b5a7da1803ba5d64fccb2d32cb7788be194dbca00c3c91774c4c4c8ede48c1027d7cc8b387101a4fe5e44a1d9693b2f627626025072806083aadbced91c9711a0171f52ffb8ed5596cf34130022398c8a1da99c7", + output: "b1e8da34ad0189038ee24673979b405ef73fdbdd6f376f800031d64005a4ebed51a37f2180571223848decbea6dd22b198ab9560d7edc047c5d69183dc69b5fca346911d25cb2a1a9f830dc6382ad0024e8c3eef3aa2d155abcfe43bff01956a5e20a862fbed5c5e8df8eed0601a120caac634b068314e221f175baa11ae29002bb9", + }, + { + length: 135, + nonce: [3]uint32{0xa5feca5a, 0x753ac1b4, 0xc5a46609}, + key: [8]uint32{0xabbf4859, 0x828d9bf6, 0xf7f7aa6d, 0x25208ca2, 0xd7a4c0ad, 0x2fdd3282, 0x2bfcb8c2, 0x8389d84b}, + input: "d9404ccdcc8ef128a1b1ace4f9f1669d274ec82aa914cac34b83ac00b236478fd6167e96ec658850c6c139eb0f6fc0dd7191ba9a39828032008f7f37eb9a8df9d6cdd54240e600efe7fc49a674000c5030d825b2c5c96d0f19b8ecdbf4eeb86d3e569c5e3131abc7d6359dd4255284ccacf150d42e7a899536d51ee6db329654a4581c5ac6e419", + output: "c5534b5fb40b4834300e9577a9d87440c5272263d06e6aee84aa92cdf5d1b033145d336f26e5fe55c09a7e75753af93d0786dfc1cb435e86c67bd3ec8e766d0801b99e68691e2c3c3ffec539cf62e68285ea9027daa2716cd6f97e8eb7b9e266357a25eb2d4839a829508a6b7228f2832b3cd998f77597ae530430e6e4ecb53eb9efe456863a04", + }, + { + length: 135, + nonce: [3]uint32{0x12aa5846, 0x88604f6c, 0xc10d9585}, + key: [8]uint32{0x1491ccd6, 0x602f559d, 0xd4080c06, 0x202fabd, 0xffd3f4f8, 0xbf144c17, 0x88bf3f3c, 0x8083375}, + input: 
"231765f832927461f338aceb0f4cf51fd8469348c69c549c1dec7333d4aa4968c1ed58b65ab3fe3d0562600a2b076d56fd9ef91f589752e0455dd1d2e614cacfc0d757a11a4a2264bd38f23d3cca108632201b4f6c3b06477467726dde0c2f3aee01d66d788247663f1d0e66b044da9393ede27b9905b44115b067914961bdade85a2eca2844e1", + output: "1dd35f3f774f66d88cb7c2b23820ee078a093d0d85f86c4f103d869f93e2dbdd8a7cb8f101084fe1d7281a71754ec9aac5eb4fca8c365b24ed80e695caace1a8781a5a225938b50b8be96d0499752fdabd4f50d0b6ce396c6e2ca45308d1f2cc5a2a2361a8ca7a334e6ee62d466d74a1b0bf5b352f4ef6d8f8c589b733748bd3d7cda593243fab", + }, + { + length: 140, + nonce: [3]uint32{0x1c9d70f0, 0xa088a367, 0x4ec24d2b}, + key: [8]uint32{0x494e9775, 0xd07a852, 0xaf8af24a, 0xc65b825c, 0xc5e06780, 0x17fbbace, 0x651d71b5, 0xf548d8ef}, + input: "e46841f12d98aeb7710b9162d342895a971b0e3a499886bbb6aa74dc744a28d89a54542b628acdc2f693cb7c03f73fc3b74069bc3f2d000a145fb8a806cdc7d6fa971da09a33b92851cc3d1f6f5646d7fa2b1d564876feefeb63b6e66dba1c0b86ca345235bb822e0f93132346840d2a3d6eb1b541178ea51affc7b31f8da02732cc4e5bcb5d8683ae0a91c9", + output: "1dcbfd0bb2b905656c52bd7b1bcdad9b4d434ae9ac221a0d3a316115cdd4a463fa9b3444d2612a4e277d0dcd881fa6e80e59e5a54e35e1a14747aed31edf4ac24214f9d9c329ebe2157620b64efaded9976549bc4aa100d5c15be3f85f700f8a21dfe77590dfee2de9a23cc1ed1e44f32ebf68ca289b097bc13b42802dc7c75309c4afc25b5741839f7db3d5", + }, + { + length: 144, + nonce: [3]uint32{0x23067b8b, 0x5b276c6d, 0xaeca6c60}, + key: [8]uint32{0x29d64488, 0x893a2973, 0x32e3b4ef, 0x2af3d5d4, 0x95ec01b, 0xc805b64c, 0x884e8b7d, 0x798d7062}, + input: "e98e4a9550bdd29e4106f0cc8669dcc646a69438408e9a72c7cdb9b9d437b5f7a13fcb197629541c55bca1f8972a80cd1c1f591a0e24f977cdeb84763eab2648e42286e6473ea95e3a6a43b07a32b6a6cd80fe007ba0cf7f5ac7e651431f5e72690ec52a7134f9757daf0d8eff6b831a229db4ab8288f6bbf81e16fedebe621fd1737c8792cfd15fb3040f4f6a4cbc1e", + output: "5c69cf522c058790a3bc38979e172b60e71f7896d362d754edc1668d4f388b3fc0acdf40786d2f34886e107a142b1e724b9b9b171cb0e38fd78b35f8ac5269d74296c39c9f8628d848f57af9d8525a33f19021db2b9c64ba113171ebb3882075019ec7e77b51ce80b063ed41d48dad481d9536c030002a75d15c1c10ce0ec3ff17bc483f8416055a99b53035f4b6ea60", + }, + { + length: 148, + nonce: [3]uint32{0x2b079658, 0xbdf5da85, 0x8a75450d}, + key: [8]uint32{0x49c9eaa3, 0x62048819, 0x9baacfa5, 0x3870addc, 0x5c682e1, 0xf4f9fff3, 0xa3848e4b, 0xac1ebc1}, + input: "ce0f0d900dd0d31749d08631ec59f216a1391f66a73bae81d3b0e2919a461bc9a14d6a01b827e3bcb55bbccf27c1ed574157e6becd5cf47181a73c9d3e865ab48a20551027e560e965876b0e1a256bfa5cb5179bf54bd8ec65e5570e374b853b37bf4b3ef1ec612d288ebc19275fa88da9419e012f957f9b6a7e375b3377db0eb3619c731aebfeb0930772b4020d3a3e90723e72", + output: "b06981b57fe184091ef9f8ccf522a5bcdb59bf9a68a3ddb817fdd999a6ecf81053a602141cf1b17017bae592b6b6e64756631a2b29a9e1b4f877c8b2ae30f71bc921e4f34b6f9cd8e587c57a30245f80e95005d0f18f5114400785140e6743da352d921fb4a74632a9c40115ad7706263ac9b41a11609fa0c42fc00f8d60931976162598df63ebad9496dd8943d25a03fa47475c", + }, + { + length: 148, + nonce: [3]uint32{0x98e8ab8, 0x84d8e77b, 0xbb305841}, + key: [8]uint32{0x46b5f93c, 0xc8b2778d, 0x2cc5278f, 0xd2a3904c, 0x6ce5d4f, 0xc4459e8, 0x4a35c30, 0x2feadc02}, + input: "eccfd66bdc691478f354b8423d6a3f20932a1f591d8e6cefa734975fb8ee6881b6dc92c0d1d5ed54fd1999efd7f11ac697a1f130587dd895eb498c9a8fc7d1714c385ec156ecae3bdea2a3462834245e724531d0fedda2b77693a53ed7354b758e875b23cfc83219a091fb2076e7a88cd77f779ed96f8d81ffa3fe5059303ac706086494b9f2982f4f88a0c6fadc3748625004db", + output: 
"925529047d4177b72bf50905ba77e47608815522c1829b24046e439d5451901257903a5409fb910373167e8b7f4fdfa543a477608ddfc11bbd1efc138366961463b9915b302a346b795dd593f6fcf4fa73529b6fe83079552aabbe99474a72806f59688d826675fa7f6649b9f5307e5028853c9821b8c4a1a0fc4bfdc7c8c78b25aeaba2b5821d17b36317381a3bd578917d2504", + }, + { + length: 152, + nonce: [3]uint32{0x2e2a6e4a, 0x9a6d488a, 0xf9966cb6}, + key: [8]uint32{0x58903bff, 0xc2be173f, 0xe26128b5, 0xb6b6af53, 0x92f8eeb, 0x38cf3336, 0x7fdf90fb, 0x7ae24b37}, + input: "f0c7139c69413869bca980d7f192b2bc3f57e34ca4f26164e1a54a234e84e1aa285cc02cfbaef3dfba2dbb52a555ec1f6ef0e89d0b2f0bd1846e65b74444b5f003a7308965e67bed558689be2668ca10ca368fac072e0e4535a031af23b3c37c561e185872b86c9bceddb5c1199e43fb5f735384766d33710460b541b52d3f5b6c108c08e76724bcac7ad2d866a8bbeeea92a3d867660d2e", + output: "d2c16c7a242b493038203daec65960de384c030eb698ef6a53c36eabb7556cbfa4770eaa8bc0a2b385ad97495eeb1c03ff4e6efcb804aefa81c177dc62700a9eefe6e8dd10cff5d43a2f47463cab5eb1ee260c3826cac9bfa070f1e0435541a89ebd224d13cc43f8fff12f38091c2b3f2102d5c20d8b1c3ae4f129364bbe9f9ce2147dcf0639668ddb90dffe6a50f939f53fa7ba358e913f", + }, + { + length: 155, + nonce: [3]uint32{0x243e0198, 0x884448c, 0x9a31e760}, + key: [8]uint32{0x37e017bc, 0x9b1e2e90, 0x15679daa, 0xf94a23ee, 0xda86dfe, 0xc3eea84c, 0xdd199799, 0x6eeffb92}, + input: "7024974ebf3f66e25631c0699bcc057be0af06bc60d81a7131acaa620a998e15f385c4eaf51ff1e0a81ae5c6a7442d28a3cdc8aeb9701055e75d39ecac35f1e0ac9f9affb6f9197c0066bf39338a2286316e9d1bb7464398e411da1507c470d64f88d11d86d09e6958fa856583ace697f4ee4edc82618662cb3c5380cb4ce7f01c770aab3467d6367c409a83e447c36768a92fc78f9cbe5698c11e", + output: "ff56a3a6e3867588c753260b320c301ce80de8c406545fdd69025abc21ce7430cba6b4f4a08ad3d95dc09be50e67beeff20d1983a98b9cb544b91165f9a0a5b803a66c4e21bd3a10b463b7c1f565e66064f7019362290c77238d72b0ea1e264c0939d76799843439b9f09e220982eb1dc075d449412f838709428a6b8975db25163c58f40bf320514abf7a685150d37a98bac8b34ccb5245edb551", + }, + { + length: 160, + nonce: [3]uint32{0xd24e866d, 0xc59d25d8, 0xfcf623f1}, + key: [8]uint32{0x5f32cca0, 0x4167cac5, 0xc04943ee, 0x507fa1ec, 0xad8fdfc0, 0x6266fa2d, 0x22f05341, 0x8074143e}, + input: "8d79329cf647e966fde65a57fc959223c745801c55312046b791671773cca0af4cd48ead1f316eba0da44aa5d18025eced0c9ed97abaabb24570d89b5b00c179dca15dbae89c0b12bb9e67028e3ae4d6065041b76e508706bec36517a135554d8e6ef7cf3b613cbf894bec65d4dc4e8cb5ca8734ad397238e1e5f528fa11181a57dc71cc3d8c29f3aba45f746b1e8c7faace119c9ba23a05fffd9022c6c85260", + output: "60aea840869f7be6fcc5584b87f43d7ba91ed2d246a8f0a58e82c5153772a9561bdf08e31a0a974f8a057b04a238feb014403cd5ffe9cf231db292199198271f9793c9202387f0835a1e1dc24f85dd86cb34608923783fd38226244a2dd745071b27d49cbffebea80d9dacad1578c09852406aa15250de58d6d09cf50c3fcfff3313fac92c8dad5cb0a61ccc02c91cecee3f628e30c666698edecf81831e55ec", + }, + { + length: 167, + nonce: [3]uint32{0x30b61047, 0x810cf901, 0x4d681524}, + key: [8]uint32{0xe51476d0, 0xdf98008d, 0x59dfe69e, 0xdb39166, 0x6c1e4a4a, 0xfb76165e, 0x5180f185, 0x7359fb35}, + input: "85484293a843d2d80b72924b7972dfa97cbe5b8c6bcc096f4d5b38956eb3f13f47b02b0f759ea37014ecdecfb55f2707ef6d7e81fd4973c92b0043eac160aaf90a4f32b83067b708a08b48db7c5900d87e4f2f62b932cf0981de72b4feea50a5eb00e39429c374698cbe5b86cf3e1fc313a6156a1559f73c5bac146ceaaaf3ccf81917c3fdd0b639d57cf19ab5bc98295fff3c779242f8be486ba348bd757ba920ca6579be2156", + output: 
"bb1650260ef2e86d96d39170f355411b6561082dcc763df0e018fdea8f10e9dc48489fb7a075f7f84260aecc10abcfadbc6e1cd26924b25dedb1cc887ada49bb4e3e02006bdd39098ef404c1c320fb3b294ded3e82b3920c8798727badfb0d63853138c29cf1ebf1759423a1457b3d2c252acf0d1cde8165f01c0b2266297e688ff03756d1b06cb79a2cc3ba649d161b8d9ef1f8fb792bd823c4eabb7fb799393f4106ab324d98", + }, + { + length: 172, + nonce: [3]uint32{0x42020cbe, 0xad62af90, 0x29e53cd}, + key: [8]uint32{0xabad2095, 0x601ec477, 0x3bc923a1, 0x1edede1a, 0x33612355, 0x285b4858, 0xd3fd6714, 0xe0f4bcc3}, + input: "a2fc6e1b5281a4e0330eecd1ab4c41670570423173255979953142b78733b2910fa5540e8294208df6ae4f18672d5ac65acf851bcd394e1932db13c81b21e6f165e5538aff862e46126c650bbe055e54b31c78f2f0221d2631d66ef6d3f4c5ae25eada043b74d8770e2c29799c0954d8ccbd17766b79e6e94e88f478db3566a20cb890846917591a07738328d5c05f7ed4695a82607660f1239661faa9af0368aeb89726f13c2aaecf0deaf7", + output: "d8fe402a641c388522842385de98be60f87d922c318215947d4b7562d4ca1e2dbc7ee86494e65fb0bfddfdebdb2ae6469312f95b32c722b2720d64bb8d7cc3dd82f9055b1d89f05b77984f91f94ba4ac79c5129cd7c91cc751b0defc3f2799518e372d27aa683f1e7bbd4f55414c48fe8a3a37ac1f179a1a329cda775aec0d31d75a5a38addb1de67c06bddbedf4c8d87abc18c9f9dd072d457ea29ad4dfb109ce7e99a4a82fbe330b0afbb5", + }, + { + length: 176, + nonce: [3]uint32{0xa8021c8f, 0x667a02c4, 0x7a68b693}, + key: [8]uint32{0xece401c8, 0xfa805a47, 0x6d572fca, 0x9c1c780c, 0x647545e5, 0xd7ef4c11, 0x91dc1e46, 0xba2a694e}, + input: "480387bc6d2bbc9e4ced2448d9ec39a4f27abe8cfb46752d773552ad7808a794058962b49e005fef4e403e6a391d1d3f59025eeb5fb8fbbe920f5361862c205d430eac613cd66108f2f2f0bd4d95a8f6ca7bd1f917eaeb388be87d8b7084a2eb98c575034578edf1b3dafff051a59313873a7be78908599e7e1c442d883d3fd3d26787eb7467eed3a3fb2d40046a4460d5d14215565606bcf8b6270af8500e3504d6d27dacf45bace32214472d525fdc", + output: "ab81a9c28358dfe12e35a21e96f5f4190afb59214f3cf310c092ab273c63cd73a783d080c7d4db2faccd70d1180b954cd700c0a56b086691e2c2cd735c88e765e2266cd9ebe1830d63df4b34e2611a8abeeca9c8c4fac71135dafb1cb3569540ed1362ddeb744ed62f6fd21de87b836ec2980f165c02506e0c316ae3cf3d18a862954d9781f726ecc1723af4a730ccc6d6de82553450a52499acb58fb2008969401c45b2f20e12b58f308db1d199b4ff", + }, + { + length: 176, + nonce: [3]uint32{0x414e687c, 0xc6fc69c2, 0xd3ca12d3}, + key: [8]uint32{0x1b51cca, 0xbc8455af, 0x3f904842, 0x6042b452, 0xcd4dd164, 0xda83f3f0, 0xff04b972, 0xf972dd0e}, + input: "b274e61059f3215173ae226e30a92ee4b4f8a3da95f2e768e3fac2e54ddac92c200c525f190403a6ef9d13c0661c6a7e52ed14c73b821c9680f1f29711f28a6f3163cf762742ed9474dbea51ff94503a5a404adbbdfbf4c6041e57cb14ea90945dc6cb095a52a1c57c69c5f62ac1a91cd8784b925666335bbfee331820b5f7470bc566f8bbb303366aafe75d77c4df5de2649ed55b2e5e514c3cb9f632b567594a0cf02ec6089a950dbe00554ee4dfb9", + output: "a0969730d48ee881792a3927b2f5d279aba9f2ed01e6b31b92d0e1fb8ba7f35a236d838e0ce5f8654957167de864f324c870864b4e7450a6050cd4950aa35e5a1a34a595e88dd6f6396300aff285de369691b6e0e894106dc5b31525e4539c1e56df3ceedbbab1e85da8c0914e816270a4bae3af294b04a3ea6e9ef7e2aab4da5f5370df2706b5e3f000d88179ac756deaa652a1cc85e80ad9622f1bf91a2776262eb7289846d44f7f8192e763cb37aa", + }, + { + length: 183, + nonce: [3]uint32{0xdd315c1d, 0x2335da98, 0xe0a0da0f}, + key: [8]uint32{0x6419c7d6, 0xd340f42, 0x7af2f4b8, 0x3536cf42, 0x2f68c6fb, 0xac9d855f, 0x7c4d490, 0x9711b1b1}, + input: 
"ee849039c6cd972dc943d2a4468844d130c0150276f4e0889047e2300c3ecc6792c4527bfe9437dad877eb986e6b1aa9b867d1798c9d314243f0a87ec9ee5b601c2554876c87cbf50df3334a077c4152f8b8fef4a2d301ddbfa90c887ece757c3eb6c4fc1e0212d6b5a8bb038acaec28cba064c9b34f5364cb7f0fc2ac4ef2c7ddde0f5ba17014459eaa78f08a46a01882ebf7c6e409dadda250bb899dc8b3b70e160bbcb4412a9963b174d0fc6bc16383a46ffaacb6e0", + output: "3e272ded9c0a5cebe7cf17ac03f69eb20f62996e047501b6cc3c8691ddb2780ea72c21a81888bfea96e4373a412c55ca95648390de740102d661143043baec3976230e024477d134b8504a223c36a215b34164c9e9e1fa99a49fdc56f2f04ea525a6b82997d9bbc95c4b5baeab4dec50061efb7c1a757887acb8b47b142e0a2e61885a2c14c4642d83d718a0546b90699adc545a48129603862a1c89d8e665cde54b3ba487754db6d6f5acf6a4b95693cc569577a2dc48", + }, + { + length: 185, + nonce: [3]uint32{0xebb44f7c, 0xaf14c7dd, 0x4543cd7a}, + key: [8]uint32{0xce71977, 0x99790e86, 0x6510d6dc, 0x37968ae7, 0x2917fb9a, 0x19ef25f, 0xd282d085, 0x6128d043}, + input: "0992396a6f29b861dd0bc256e1d1b7dce88435733506a6aa20c62e43afa542d1c46e28b2e6d8e2eacb7c08db05e356fe404684b0e3a9849596db82eb788aa09258c28eb19e9838f757425b4edef12deeca56e30cf030272e325d4246d6e083219b2f965124963ca91f066d47bf5a8282a011a78b0155aa70038259a4a59135f241fd2f88c908b9f4eef7b7df0f3a1c16a52c009b522f89dabd52601bbf6e3ce68732e1a6d444469480f06da218786cf6c9666362e7a7f7be12", + output: "545c05a84b5a4fffd1dd623c8f2b11443818560bdb0c26dadd3b694d4790d294b99059f4127b7cca122c4000954d745af96094ff4623f60db33e994bb6903263d775f48d7047427b3a498c2ecde65bd37bcb8ee7e240a1e08c884c0079cab518f4e1c38ba5ea547f4da83b7c6036e4259bee91c42e8fae895df07781cc166f1d50e1550a88ee0244bb2950070714dd80a891aa8a9f0580a67a35cb44609b82a5cc7235f16deea2c4f3667f2c2b33e8eeef944e1abdc25e48fa", + }, + { + length: 187, + nonce: [3]uint32{0x35cb7190, 0x212e9a86, 0xbc423ce4}, + key: [8]uint32{0xfa19cede, 0x576ae8f2, 0x58055dab, 0x91b3355d, 0x69d2501a, 0x736323c2, 0x266c1385, 0x134f4557}, + input: "3b9efcbbb607fad5e9f1263dad014cc5c2617d439fcd980408f4f9a93acb1a33d1c3a22f38c037e4603dfbbfb5571bc08c4a1958cbbf510e3e4dd19007fe15fad7808369149a9c4db7ca0496f7a600a6f2454ee1cffd5a68d45c270e4b53ac9b77f33a1ffbb1804244f57d2b05b8036fe2cda9efead3d4eff074ea5c07128e0b354a4a11ffa179163933bc6bd10d200804cc93b64575746e94e975f990bddcc8a4335e99e2459fbe9bc0e004ffcd6cac52f48ef55cc0637a75c1dc", + output: "631ba7301e33236da2477506ea98d3b732447389e849b68e1f09bd5fd814f40dc3247a1012fa654f08e3dda0c104ee2dff12ecf5cb018644de50d70dfb6c8cc1f5f552e5f1e50466bbb538ad6b98fd37f33fe615c326efc9cc97899b829b007f91569fa9b28ce0076c53daedf9cc0f838e22cf1125b86a6a2c2eb4a45dadea45ad00fb4f054e7d6b09c13ab1dd5328debfbf4f1b70af2b8a5b1d02df8a87d7661473e0c180ba4c815f14db87c5bdc15f11a29d8e0ce3d747d5dcd4", + }, + { + length: 191, + nonce: [3]uint32{0xccc941ac, 0xdba45b02, 0xab0d7ad6}, + key: [8]uint32{0x9b750752, 0xa627090a, 0x967c95f0, 0xf8ff2c3f, 0x69beb97e, 0xa30b99c1, 0xadddc83, 0x443f9baf}, + input: "f28a71efd95e963e5e0bc0fcf04d8768ce93cb55dc73c32e6496022e214596314b7f843f5c7b136a371c2776a0bfbdd534dccbe7f55e9d3d3b5e938f2d7e74393e4caf6c38fa4b05c948e31dc6a9126817fa3d7892c478f75ab9f6ab85c0e12091bd06e89c7d3ca8d9dcdd4c21fead3d769a253919c2c72dd068474ea322b7e71cafa31684e05a63e179e6432fb70661792cc626a5060cec9e506b35d9286f15dc53cc220b1826314eec337dd8e7af688e5950b2316c30516620569ea65aab", + output: 
"1bcea54b1bf4e6e17f87e0d16388abe49b988b9c785b31f67f49f2ca4011ecd2ad5283d52ef707dd3b803e73a17663b5bfa9027710e045a0da4237f77a725cf92792b178575456de731b2971718937dd0e9ea12558c3fa06e80bbf769e9799f7470db5b91476d6175f1a6d8e974fd505854c1230b252bb892a318e6d0c24dcc9ecb4861769cd746abab58805bc41c6086a6d22b951fba57b00c5b78f6dcb2831715b9d4d788b11c06086f1d6e6279cd130bc752218d7836abc77d255a9e7a1", + }, + { + length: 198, + nonce: [3]uint32{0x987e7c58, 0xcc839a94, 0x30952e60}, + key: [8]uint32{0xe34a286f, 0x4adcd996, 0x97168712, 0xa82dde8, 0x14249e5, 0x5e82810b, 0xb4a445e8, 0x9579adb0}, + input: "c1d1ede73bd89b7c3d4ea43b7d49c065a99f789c57452670d1f92f04f2e26f4f5325c825f545016c854f2db2b3448f3dc00afe37c547d0740223515de57fd7a0861b00acfb39931dc9b1681035d69702183e4b9c6559fb8196acbf80b45e8cc5348b638c6d12cea11f6ef3cc370073c5467d0e077d2fb75e6bf89cea9e93e5cf9612862219ca743ef1696783140d833cd2147d8821a33310e3a49360cb26e393b3fee6dba08fcda38d1b7e2310ec1f715e3d8fa0c6b5e291eea07c25afd5c82759a834a89cc5", + output: "11a8493cdc495c179f0d29c2b4672997205a9080f596ee3c80d79b55162b1c875ac18eb94bf2a9e05b08024f524a1e9665912394a330c593d23260e6bdf87620c10a48f678693196fb744c49054182fba667c601e7b7ebf0f068e8d69ba004b804fda616a4a0d5350e1a3bd424b8266462be282308219c578569aefc1ccd09ecdf5da283356c9e524e14e69d25b0e19643dab26f54373a7272b43755c3f1ddaee6c5fb9e8e093110c41697e95f73a68c75454e050239197c9fbd8cec76698bd11894ebf6e2b2", + }, + { + length: 204, + nonce: [3]uint32{0x851f025a, 0xe6f3c800, 0x85ae7530}, + key: [8]uint32{0x2d0dbe47, 0xda05e465, 0x42f6b3b2, 0x7026e79e, 0x9e446680, 0x691df976, 0xf7b23da2, 0xbb3421fa}, + input: "37b2dc4b6a5203d3a753d2aeffcdaed5a7c1741ed04d755dd6325902128f63b6981f93c8cc540f678987f0ddb13aae6965abb975a565f0769528e2bc8c6c19d66b8934f2a39f1234f5a5e16f8f0e47789cd3042ca24d7e1d4ddb9f69d6a96e4fd648673a3a7e988a0730229512382caaded327b6bbbbd00a35df681aca21b186bc7ac3356d50889bbf891839a22bb85db4c00bfa43717b26699c485892eb5e16d1034b08d3afa61f3b5f798af502bba33d7281f2f1942b18fb733ca983244e57963615a43b64184f00a5e220", + output: "b68c7a2a1c8d8c8a03fc33495199c432726b9a1500bc5b0f8034ce32c3e3a78c42c1078e087665bd93c72a41df6bfa4e5beb63e3d3226aeeba686128229a584fab0c8c074a65cef417ad06ab1565675a41cf06bb0fb38f51204eccccb75edd724cdd16b1d65a272f939c01508f0385ca55ac68a0e145806317cc12e6848b1124943a6b2d99a8c92083fc5f31ab2e7354db3f8f2d783dbf1cfec9c54f8bfcb93d6f28ef66f18f19b0fab8836458e9b09bee742ba936cb2b747dd9dcf97ca7f6c82bf0af6f1b433592d65143fe", + }, + { + length: 210, + nonce: [3]uint32{0xaebfd97f, 0xf583442d, 0x15ab2f1f}, + key: [8]uint32{0xd3d1cf9b, 0xe43187e6, 0x5071a757, 0x412a83b4, 0x3f27716f, 0x17fdc488, 0x271f77ed, 0x6c4bb056}, + input: "68c2c5612912b5f994172720130dff092ee85a2c1395111efa64d5a281ca864d3db9600e685854d81c6de7e8747b92fb7c4c2efa829d3d4c0c9fc9d689e2e5c84c9eae8ba4ab536fb6c7523124b9e9f2997f0b36e05fb16163d6952eee066dd22fb7585925ffded0204cc76818bcead0d1f8095ca2cf9cd1ddcd0361b9c9451940e14332dac4e870e8b2af57f8c55996447e2a8c9d548255fe3ed6c08aedaf05bb599743ecb0df8655152bbb162a52e3f21bea51cb8bf29f6df8525eb1aa9f2dd73cd3d99f4cca31f90c05316a146aab2b5b", + output: "d0ae327fa3c4d6270a2750b1125145bdeef8ab5d0a11662c25372e56f368c82c6f5fc99115a06a5968f22ffe1e4c3034c231614dd6304e6853090c5940b4d1f7905ef4588356d16d903199186167fec57e3d5ce72c900fe1330a389200ed61eec0bdc3672554f1588ec342961bf4be874139b95df66431178d1d10b178e11fcbd26963ff589d5d5faf301b7774a56bbfa836112a6ea9c3026ebdd051085f9131132c2700674bef6e6c2c5b96aace94eb2ba6c0e0aef0eefa88995e742ca51ac50af83683b801b7c2c5af4880e2b344cc5564", + }, + { + length: 216, + nonce: [3]uint32{0xf9e973b8, 
0x2485a6a7, 0x2ea7dee6}, + key: [8]uint32{0x96edef11, 0x8cf57f26, 0xb6e3a83c, 0x9ef434c6, 0x4607ea48, 0xace87e4d, 0xa0d87475, 0x3a9c9458}, + input: "fed3d1efa309c8b50cb9da02b95167f3b77c76e0f213490a404f049270a9c105158160357b7922e6be78bc014053360534add61c2052265d9d1985022af6c2327cf2d565e9cef25a13202577948c01edc22337dc4c45defe6adbfb36385b2766e4fa7e9059b23754b1bad52e42fce76c87782918c5911f57a9394a565620d4b2d46716aa6b2ba73e9c4001298c77bfdca6e9f7df8c20807fa71278bd11d6c318ed323584978ad345c9d383b9186db3bd9cec6d128f43ff89998f315dd07fa56e2230c89d803c1c000a1b749107a3159a54398dac37487d9a", + output: "6a95fba06be8147a269599bccda0ce8f5c693398a83738512e972808ec2f25bc72402d4bcd1bc808cc7772b6e863b0e49d1d70c58fcf4fcaa442215eeb3a4648ade085177b4e7a0b0e2198f0acf5465c97bd63f93781db3f0b9a0a184c3e06a76c4793a13923f83b2242b62511c2edff00b5304584cbe317c538de23785d2504fae8faabee81c5315298186ce3dcbf63370d1ccd9efec718cbc90b3d2e0b0b6aefb3a9b31e4311f8f518be22fdc2b0f00e79a283701c53f6936dd63734ecb24480d5365d1a81392498faf9a1ddee577007acc5f8c87895be", + }, + { + length: 217, + nonce: [3]uint32{0xe3bd4c44, 0xa3b75a31, 0xfe92010f}, + key: [8]uint32{0xdd05ab8b, 0x5ac7cd1, 0xb8113720, 0x53524706, 0x8e0ceea1, 0x52eb23e7, 0x1c85730b, 0xb33914d5}, + input: "d776bee5625d29a2ebf6fec4df94d2b9ac62e8e7c56704fd38a87ee932b787cbc555621535e76ea30183cb0ee30604f485b541f45feb8c01b9750d37fded5cdffbbc34fb90fdc9c7c7ddf949a1d50b796f1ea5db437238c7fb83c4b22c9e491f75b33d84746f1cd10bfda56851b8514ff0ded0adfd5092a66a85202d06bd967485d06a2c56011110da74bf40b6e59f61b0273164744da02ce2b285d5c3f03aee79eea4d4503e517177692ed3bb035071d77fc1b95c97a4d6cc0d41462ae4a357edf478d457c4805fa586515614697e647e19271091d5734d90", + output: "60e9b2dd15da511770162345251edfb15cea929fb79285a42f6c616dfde6befc77f252e653b2d7902a403032fc4ce4934620931a2ec952a8d0f14bf1c0b65cc287b23c2300999ed993446eb416749bf0c9c7dfe60181903e5d78a92d85e7a46b5e1f824c6004d851810b0875ec7b4083e7d861aabdd251b255b3f1fd1ee64619a17d97fde45c5704ab1ef28242d607d9501709a3ac28ee7d91a3aac00cd7f27eb9e7feaf7279962b9d3468bb4367e8e725ecf168a2e1af0b0dc5ca3f5a205b8a7a2aae6534edd224efa2cf1a9cd113b372577decaaf83c1afd", + }, + { + length: 218, + nonce: [3]uint32{0xcdabfd50, 0xd10d5b99, 0x9e160a85}, + key: [8]uint32{0x8231a4e9, 0x89f33c8b, 0xf96b11b, 0x853cae9d, 0xf6624a33, 0xee9523ee, 0x28bb7853, 0x688ac6f8}, + input: "4f57848ff5398e61bcafd4d4609bcd616ef109c0f5aa826c84f0e5055d475c6a3a90f978a38d0bd773df153179465ab6402b2c03a4bf43de1f7516eb8626d057ae1ab455316dd87f7636b15762a9e46a332645648b707b139e609b377165207bb501b8bccfa05f1bf0084631c648279afdf51c26798899777812de520f6a6f0d3c7f3ef866982f5d57f9c8d81c9a4eabb036651e8055a43c23a7f558b893dd66d8534bf8d179d8aa7d9e8987cfdaaa7b5a9381ba9c79d5c1161b1bdbd30defdd304ee07f19b7ba829a0d5b40a04b42edd6407b68399caac69069", + output: "e096cc68956ed16d2dea1154a259e01647913eeea488be0b54bd1816c781a35e161772ae1f7a26b82e864ade297a51cc9be518641b2e5f195b557ec6fc183e4e5c1fc01d84fe6ca75e5b073af8339427569b1b8ee7fcff0ffa5e7e6237987c40deec0abf091c06a3b28469c8f955fc72e4f3727557f78e8606123e0639dff782e954d55e236448f4223ff6301accda9f8fa6cd43a8d6381e5dde61851a5aec0f23aeca7262659bc793ce71fa7992f80e44611ae080b7d36066e5c75c30851306f0af514591d4d5034ecdf0d6c704bfdf85473f86141c9eb59377", + }, + { + length: 219, + nonce: [3]uint32{0x67de323f, 0xa0442ac9, 0x9d77b1d9}, + key: [8]uint32{0xca8d33d4, 0x834349d9, 0x5e68d581, 0x99a7c30e, 0xdc7f6038, 0x697e8b63, 0x284c2ece, 0xee3e3bfa}, + input: 
"046a61c0f09dcbf3e3af52fab8bbcded365092fad817b66ed8ca6603b649780ed812af0150adbc8b988c43a6ada564a70df677661aff7b9f380d62977d8180d2506c63637c0585dcef6fe3f7a2cf3bbb7b3d0df7769f04bf0f2e3af9439ab7615c304b32055aea0fc060890beb34fa9f90084814b6ed7363b400dfc52ee87925c5b4a14a98e3b50c7f65adc48c89ddd6414626c5e0bdefabab85c4a0e012243e682d4931be413af62fd7123ab7e7774fcae7e423bf1d3a31d036195437e9ea8f38aa40182daa9aacf3c9f3d90cc0050977c6065c9a46bcca6ba745", + output: "cd5a6a263e3ee50dda0e34c614b94c3ec1b14b99a2f4095a6b5715fdfc3449fcdf8a09d1ae02d4c52e5e638f1ee87a4a629f99f15a23dd06718792f24285f5a415e40f698752c697ee81f2f9248da1506ce04a7f489f8e2b02e6834671a2da79acc1cdfb78ea01822d09a1c4a87ffa44e56c4f85f97507044cf946ccb6a2e06e2917bac013f608d75ee78fa422a5efc9c569226bf7068d4705fde3a9fad2030256db0acf9a1d12666e0acf9f5346ad62e5af4c01a008d67ab1224b3e98278d073116ff966cdc779fb3aff985ec9411a3eefa042d71dd4ae5b15d5e", + }, + { + length: 221, + nonce: [3]uint32{0xa36a3d5a, 0x1747a05f, 0x5440eb4}, + key: [8]uint32{0x2d701ee6, 0x143d5a1a, 0xbb67b9ab, 0xabc88ccc, 0x20baad8f, 0x6507e48b, 0xdb1e1b39, 0x9e521d80}, + input: "af516216f74a6344cbe458cbba820f7e25c0b10aa84b790da2ee6317e059171076d7246c2878be83fc00c200d546c007f849e4c163d52c7b0da31beff4abff481be3266b92e668cf4dd1c84d9d7b3e5191dcd6ddb51d17d337621046e83e9ac035fccfb239648bc3c6fd340fbb50707e5a33b3ef439d292192d0e4bc727690c61450e5a28789e5ea50e746bc66d039493e080fb70e9ae06d89004cb71de8178941c422f1e9862492fc9149a4864ff52b1277b9f5a63c2f16e9adb5263cf65a034a62ebb0f1a385d2681c87a35f1c45670b4edef1c68fe9544fcf411d95", + output: "b22ffd8f0e549bd3e0206d7f01ff222f92d39b41cf995a331d5ef0cf5c24bcc3ddb36e64d351b5755400246fe4989b5f912e18daa46cdd33e52dafbd2872f16e94220b56315d72c1dbb1525fd34831d7202970c11711ff36de3fc479407c34fef0aea86e172f9beb0f393194355b9dd59625639f4a6bf72ba571c229f2fb053c1114e82793deb2dfe8232f1a327949689d2fb2820662dcd2a39a2546c7df12b3ff7e87e58c74badf568cddebd3c558f0f7874c834c4b8aa988653f138ec79620f5e3ed737690928a30f981dca9f2920ac7307607063b40f87c204de47c", + }, + { + length: 223, + nonce: [3]uint32{0xb92be022, 0x1e1257c7, 0xad7c01e}, + key: [8]uint32{0xca1dbb9c, 0xaadb9504, 0x77b8a95c, 0xc50deb5e, 0x2dbc0fb8, 0x9e654bc2, 0x94d8925a, 0xfe9cfb66}, + input: "a3d70bdb509f10bb28a8caab96db61652467cf4d8e608ee365699d6148d4e84d5d93bdabe29aa4f0bc8ee155f0b1fb73293c5293929eaacdd070e770c7cccfb2de120b0c3811abeeddaf77b7214a375ca67d618a5d169bb274a477421d71a651cfb9370bcf7e0d38f913754c11002089cf6cd6a8de1c8a937fb216591d57b37efdf3797f280773950f7eddeb9c3385c8315ff5ff581c64610a86ada7ff6a1657e262df94892dff9fdfb6e958d101f4c26296470c138dc4e1ca4bb565b3ff877a7f78b3d11d64b7c24e27ba6f6b06f6e368f5ac218cd5d11b815ab0987678eb", + output: "646314264896a6e25601e536f6e783d465b2ead1e0be4422bc9cc8eacabae4a749ad533eb28091be8397328dcfb34c92006bbda930ab070ed7b806095bb1c8f476350e7b08ffbd4d7d6055c8defaa8deff9d54f5215c2d7db27ce09e08f5d87a859145ea3126e2a01882921c3fddef3985bd451bca44063258390aec8ec725b07d064314fe43a9c83e9287b47616dfefbf539b82da209aa08a6d3176b7e3b4be4a17d44e581280a684e4a64414649bfcea82b541729f8178b580e8b972a89f5b8c4f9b68205e9396d8ae5e81873b61da074080fd44c52d50fb0880ee9c35da", + }, + { + length: 224, + nonce: [3]uint32{0x5091927, 0x661c75ba, 0xc23dad}, + key: [8]uint32{0x2e00499d, 0xafdc63db, 0xc3c62efb, 0xb4157a19, 0x84ce8b13, 0x85326279, 0x2ee71e9d, 0x318721e4}, + input: 
"f48b5ae62f9968baa9ba0754276cd8e9dcfa8a88e4571856d483ee857b1e7bc98b4732e81f1b4421a3bf05ab9020d56c573474b2a2ac4a2daf0a7e0c3a692a097e746d12507ba6c47bec1d91d4c7cfc8993c6700c65a0e5f11b1ccd07a04eac41f59b15b085c1e2a38b7d3be9eb7d08984782753ae23acdafbd01ae0065ab9c6d2a2d157c1fc9c49c2444f2e5f9b0f0bbfb055cc04e29b2658b85d414b448a5b62d32af9a1e115d3d396387d4bb97ba656a9202f868b32353cc05f15ae46cbe983d47b78ba73d2578a94d149e2c64a48d0c1a04fc68baf34c24b641ea0b7a800", + output: "b9af1016275eaff9905356292944168c3fe5fdffd9e4494eb33d539b34546680936c664420769204e91ead32c2bb33a8b4868b563174d1a46108b9dfe6d9ac6cc1e975f9662c8473b14950cbc9bc2c08de19d5d0653bb460bea37b4c20a9ab118a9550bfeb1b4892a3ff774e8efe3656adcdf48239f96e844d242525ee9f9559f6a469e920dcb3eaa283a0f31f5dfac3c4fac7befa586ac31bd17f8406f5c4379ba8c3e03a6992a1915afa526d5ed8cc7d5a2605423ece9f4a44f0c41d6dc35a5d2085916ca8cabd85ac257421eb78d73451f69aaedeb4ec57840231436654ce", + }, + { + length: 227, + nonce: [3]uint32{0x5d6d997c, 0x9d623987, 0x5742de36}, + key: [8]uint32{0x57b2a5ea, 0xc5bdd68b, 0x99c7b0c6, 0x26aea960, 0xba5c75f1, 0xa904cf6b, 0x685031de, 0xa0f0e99}, + input: "b39101601efa2ecdf41878b0fd920a3005ce709e4ec2970abb76e32c232ea21069f81b246eda75aace7555ce8ae203455d3723e684bd671389300e353eec0d2f499d10654fafda2e7a69bfca7198eb172249167ca8864b5d5f58d28723090ec86e251a1bac0346d52fd81f06e0c05429e0b2b895588290b7d00878a4da3378eb6c7e61487de2b318fedf68fa7ad7c88ee746827c1f60d98c7716f3f9695c5ffd4670f71a0fa78a1fb554ba482c5de83feaed7c65fc71acc9f541342eb8f7622b12bb2cfa222fa2ddd8b3ed210ce442275afa3132c8a0e17dd504ecbc92525c118952be", + output: "50eb5b21c179a03b9a822f0075906a3ce4acc32486139f92635c7d834f69071d5a6dc0e15ed06a5cee37147071d59641d140a82ad5815b954e7f28e080c3dbbeaf13943d7b7c66d49d51ba1132eeadd4cb7a7e7d726d08d95f1578d55519f267f753f3e16ff39504a87b2286d8bfba0fe6bc28887b466bf276453a82cdd0abbbbf08db0e1c26c317d50ad9b8dc09cd621bc566d362024e8404739df6468869d2125c58b25d70e392f5e75924c4341be81c263915bb514ad436fb24c2c67450e84f6d1b72d1a02a3310c07a7814d930264fdbbf5ddca7067e18e8a44faa87169b7f2e35", + }, + { + length: 233, + nonce: [3]uint32{0x75bca707, 0x89f6d1f4, 0x2a6f657a}, + key: [8]uint32{0x949f42cc, 0x2b5d3c48, 0xfe0be473, 0x17ac92aa, 0xbdc9d9dd, 0x74f9df26, 0x26487508, 0x7c7b41a2}, + input: "0a42f63b975ad0e12a1e32615813dfd6f79e53ce011e2a2f0534dd054689f8df73a8326fecfd517ff7fe530d78081af66c3a8c7c189eb9d9efed1e5577b5512d42ef1fe273f670ce380c64bc62e217a7e410a8ed89998344e29301e4e053a3a3cf7e71587fd056a6bd976f16e157476a06997dfaaff32172dd84190570621f2221420c0a0ea607ea756e9792c8c0e7157c95b89c9490e20b750ee85e4c27c9b8f409e848ec90afcad33342010bb9808358afbcb3d9b094127c38c243a204e76899677079758e7cbada9a5c18363449eebc07bab516a16372722403a046df85c7dd2ffc804c54d38aab", + output: "87a47bcaa1c1eb8e55151011c4f39af4b9e108a55a7124cdcf66d0dee727306e6971f783b038bd6b215f530cdbb53e17975742ec304fdb3792a88b674504396978c6a5e4a9c87a7c3ca430d61165c1a3f6162eeaf38c93e18b6ccb6a595ad428cdc98efef8f84463eed757a72ffd827b71c0579fcc1f4baa11812be2bc5a2a95df8e41d04b33343df09ce628c367d1f88488f7a2787f013c8e76f0b9257cee777ec4adc6df8c5790e41ea02da85142b777a0d4e7c7157a48118046935f8888b5352d1750bf00b92843027a349cf5685e8a2a2efde16dcf5e1c1ed8c779bb38cabfb42ec4dd87d58273", + }, + { + length: 234, + nonce: [3]uint32{0x5003a4f7, 0x40bd8cde, 0xfe35fb25}, + key: [8]uint32{0x576e49d9, 0xe84e9df, 0x9f227a3, 0x437c9de0, 0xc46ac8de, 0x1a6a2d2b, 0x42ab7684, 0x4253fbb6}, + input: 
"abeff48fa082dfe78cac33636c421991b0d94c3bc9e5bd6d22763601a55201fa47b09ce60cb959ba107020213c28ae31d54923d1e74ab1d9ddc2762b2d23d8c6961d81068230884a39682fa4b30676ffec19319362c075df0b879a0f083a67b23597bf95c4bb997fae4736479cb8a9c00520ba2f6e5962d54c313c576180d17779ff239ad60f1f1373627770d50a1c49718b2b2e536846299e052f8c1a5d3079e91cb1b8eac4661daac32d73b3b99e2051f8f694a61d1e9d3935f802921a4d979b6ade453cf30d73a4a498a6a2c5395c60fcf271d50b4967ac12b0d7bf818c2679d552e9b3b963f9f789", + output: "a0d11e732984ad575570ed51031b8ac2d7b4c536f7e85f6fce9ef5d2b946cefe2ee009227d6747c7d133ba69609f4a1e2253d0eb59d1f930611e0c26a7c0cf2d2ce7ccea6e079eadf2eb1acf0463d90fb4b3269faae3febfc88cb9fb0873d8b74894506199394c8e44a96e6b479bd3e045749cce1c3f57243abdb37e67084eb573cd820c6cee424227019592a027e9da8f7b8997bfb292627a986f83c8fb8d156a91a12a8b52659cf9272924631745ed3a2453a4c2d87a167faa9104e799c715ed597bcb66949ab15dae29a86ba147507e8d8af66e96c09c53caa053ad3b79d9ed3c0c6c00169eaec3a3", + }, + { + length: 237, + nonce: [3]uint32{0xc6ae48ce, 0x26f0906f, 0xfd8ab8bf}, + key: [8]uint32{0x42b82c50, 0x7f519e0d, 0xcbb95098, 0x6f75e532, 0xe2c9f61b, 0x5a4af942, 0x2679777b, 0x6a8e1c9c}, + input: "a77b7a5870335b9145fd2e08ec898ba2f158fda16e8a2661a7a416857b6ba6937b4843ecaa79d3635d28383af80290842de9ca0bb621ee22b7fd6bf379922741e812b1739c33dd6923d0607826fc84d46bbdbd1fe9d1255f56a167779a560a6eed1b9c9579b8f771147df467e67a070d9e9ce8ad92dc0543d1c28216c1dec82614ac5e853ed49b6abac7eb3426ef0c749febce2ca4e589d06ccfc8f9f622ede388282d69ceb2fd5122ba024b7a194da9dffc7acb481eabfcd127e9b854be1da727483452a83d1ca14238a496db89958afd7140dd057773ea9a1eee412875b552d464ba0fab31239c752d7dd3d9", + output: "b330c33a511d9809436ab0c4b84253eeda63b095d5e8dc74803de5f070444a0256d21d6c1cf82054a231b43648c3547aa37919b32cfd9893e265b55545be6d7cd11d3f238ef66c3c278fcccb7dd0dc59f57750562cb28da05d86ee30265ff6a3991a466ba7e6208c56fc8862e19ac332e5fb3cbcc84e83a6205dee61a71acd363a3c9de96d54070a69860c152d4ceb9c4b4cc3b878547b6116699885654b11f888dc3c23483a4b24fbe27c52545c06dd80ab7223d4578ab89bff5f9cbf5d55b19611a5251031df5da5060a1f198226c638ab5e8ec5db459e9cd8210f64b2521a2329d79228cc484c5065ef8a1d", + }, + { + length: 244, + nonce: [3]uint32{0xea38678b, 0xc41eada, 0x3381147b}, + key: [8]uint32{0x268fc2ac, 0x21297e86, 0xdf9ef8cf, 0xd4b45234, 0x2a95c4f2, 0xcec36ce3, 0xd5fa38c9, 0x7dc43790}, + input: "322d634bc180458123e10d0509870b54e0f0a3a72a2bd9e9cf44324c7a1ca37dd6adf9db1fcc8dadabd881f91d47d93b58382802b42ee936802fac8612ea4dd9eca5f215935ea9ba6233b9c8bddba3385861de669d95c888c8977851cb305db577a4eb2360f362fa459d61ffc8fcaa1502905b073bd8e9567ac7cff8e5fb1002c55641a3af5fc47ac0131fae372f073e19721ffcce9821e0241d7fa67bfc499c8f100e050d39bd4d7cae4557d208629603ec4a007852762ec1905d0e81b873510fd334dedcd9c288eb8415db505913af06bea94d197ab627d58f6a9944f6c56247595fc54ae3f8604aa37c3466f74561131e11dc", + output: "edbfb1090987762f75eba2439d746cdbefe8605b8ebad59e075d28b54edfe48813ccae891f6ed655c5ab5211ba896fff0c8e09bd1554aad987dc53f355d0822e9b0f524a99a79c68a9f3b4e30506cd725b07be135e4540078be88dac64fc545c433837b96a924452f6b844291c4c3fb5f8cc94f06d9f19dad7fc945f093020e82ed19f9eb3ddff68b813629991d1a460e5455e1cb41cf23bb3d96fdb6b96581c3bf9ef72814406329bbbba5b835e7724c728cebe88efcd996dea71d0fd5c53e081c21ce8b3764738d693e390fbf8e0137a716760fc9cd2014cd9bf3fd706bc3464d1f15803606976e96b1077cda0a62921ff7c32", + }, + { + length: 250, + nonce: [3]uint32{0x883ac584, 0x8fb8e7d5, 0xdf07de66}, + key: [8]uint32{0xc7747e47, 0x853d88c6, 0xbf9aa631, 0x78f16480, 0x7c248080, 0x15ff973b, 0x31528a40, 0x629686e5}, + input: 
"e6b8a9012cdfd2041ab2b65b4e4f1442794fdf1c3685e6622ce70f80b9c2252ba6d9e6384d474a7622053d35df946a3b19408b3e1712da00525070279ce381359b542a9ad7c07750e393e0834593777352c1f7dbc84cc1a2b1eba787377d2cb1d08a7d20e1393d44022107acac5d765be37f9075af02e4bbf8e60ceb262aa34e2b870cc7adcf54329a667249cb4958393bff4f4333338cae45cbca419d59e605aa0cecb1241080339198b9b283e4201afc07360b8ae2a57b0b9b97167c315f03fd7a87a00ae73f91ca560a1505f3cdf04576b9aee5ea775f719916f1e1942ad5311c7f87153f8e62855ace3f34afb08d4d7c7f4fd2bf83e42f76", + output: "fc2673c80812d101bca7a2e0e105fa449550e695a016596f5c3cde11fb7dc518b94fdb74058e634546a726c37896110e1d1f9cdeccba1c89958041061ded8e8bc2751ec6dad76a305e70c57f9c81a5a65b5116390af4f7bf7053a03ec13f5d60a58cc5ba61f8c46ef6d2d291de490082dcfdf294aeb3a9414d64e4bd6497d4625acfa591627bfd98f0aec7e7def71515c09942db6911d73b96b4bd2d6df03bb729e945d71549d40e4bc401e1f73baf263a74280537692240638619f92645a5ade1eb8151191c7ff8bd715b3c1cd667e69745b806e16d46d9aa680a7367b8fb45a1598631cf3d44c1f5cfcd95bc8dafdb65a2083905a6937fcf21", + }, + { + length: 256, + nonce: [3]uint32{0x79cd7a62, 0xae619be, 0x7d96d236}, + key: [8]uint32{0x7dec8e64, 0x9f12b14, 0x6c70df2a, 0xeae0aa0d, 0x27b1ac14, 0x7a00d833, 0xe63c0aca, 0x189438e2}, + input: "0cfd93b195e37dd15dfae83132c24ed5bfce7fe6fad4064b213b2c31a39e39ddad2f977e904c9c5b055ed03db46fcdd845bbb6ff0ab5a8c92e89295b6801f36ae63eba61fba24a3858aeb36f2da226b23b24d7b2c7d2670f23a9a1b60db85c0ecee584bef1b00e42d10ca17432a74bbb220d88356d82c850da4c09dd5baf413caf8f9479e02a330065fb865489c0f59605d56146ec8434182345de2d15e2a1dceeeee2fe94871d41913f6788738947ed9849ca0ae985e3e19a97bee82b96feeddceb196c9b6012264661945981c279f43db9599a4ef01116f592478619690daa64387290484d21e8d2444751194e1f361fb37f04014a3c7e4b409e5c828d8990", + output: "0502848571d1472ff10bec06c1299fad23a2cb824d88bf91b5447c5139500bd837a2fddc629e4a964e84907c1e6740263f1fef4f5ed41062982c150d9e77a1047b7d86c0e191945e8db00ca3845a39560857fc9e0e4a394eea4ba80a689cb5714c4bab7124ffdbfa8bbb91c3eb3caa1621f49dba1eea3ebf1d547ee337f9085638a12317b86c11aa1525813445107038942fc519eebdc1b98d313ad822bf0b94a054259aa8cf1be4b3a68f974269729941747f9a23fa5d83453071b431dac62274c24f6a32248b0785ff90aad5840fadc89af0aef7553d9352cfb00d3999ffbe28cd9fde7854e95710f4532b8bf5011e518c93361e58d22a2302182e00e8bccd", + }, + { + length: 268, + nonce: [3]uint32{0xb7581e00, 0x9a1bba92, 0x64356674}, + key: [8]uint32{0xdc2c9fcd, 0x5e50de1a, 0x8546466b, 0xc1b49b21, 0x36a670cd, 0x2887f367, 0x2fbf4300, 0xf90a0374}, + input: "0d8d864010ce8df1c0179cf0236dce1c100f9c115eaa5294c24a2e1afa27f9d57ebc18f00482be0218d44262bd4db73002ff53c6388f5e333470aced2a42a73b376686c8d02e05ece27cdd8b1e3f675c715981f8b656d68d0e16227b529cf881d2433e4371fbcd933eaa72346e77e688ac80ee95324512c66a4c16338cf38c941b72c21c3d01e005a07c0eb436014fb1ee61806de7e96842ca3217ab8c7607d609dd2f637f9fda8a85cb0549f262c9e4a955c384319a6ad2b696e2593d7d174f5ddb98e2a8d5d12558c18ab67571e9a0202e91ce26d720cbe41a3a6a4f309296ca4d9d9a59a9043dd2e5a707ed7d5034023d5ea06ab14b39b7852e5c984848d5670c6f2f0b189c2a8a4a4bca", + output: "d2a5693c9d503a8821751d085a0837579233e65b691366e4a7464481d22800e786939349f721a815f28b4e47c8889f0814fb95d592d1185e45d6dbcac14ffa4f1d6c79194f2f7eb7323439d9607edf80f01e3a968b483eb93c01d9cb9d3625d21d66927e7aeedc1d9bd589560ed2b61cbed5ad0e0310c8ebe140c64c67d4909c010902d5386efa359ab60a9573493d3e5d8761cfd4023eba23de48372032d4673b5f6ad66cd0dfab02a73aa81f269ae88fcabb3ae9cb09f6bf60fd3575a3046bc6843f444e1e9fb9ff9b991620344fb99da68df09496b40f8b9dfc34e830a87f65710940603ebab554d36e8b4c9228bc9c26c07b828f34cdfdd40b161717236ba325e8c20bd018b324345e09", 
+ }, + { + length: 305, + nonce: [3]uint32{0x2c641fcb, 0x5170c7e2, 0x62a23688}, + key: [8]uint32{0x5aed5915, 0xc5c4cc18, 0xf0e51574, 0x75d894c6, 0x1b7082d1, 0x5d2ea1db, 0x709fd24, 0xf5f69898}, + input: "07c50a69e168e388caf6f91471cf436886a3de58ef2c44795d94fba6538add8d414d84f3ef0ac9377fd5bed6aa6805a695f3a711025550bb6f014893c664e09bd05f4d3b850771991fc02f41c7353cd062156243b67fce9c1f0c21eb73087a5de0db0578923eb49bf87a583351e8441c7b121645bcb64ef5960fdca85af863dca7ebb56662e9707d541513bc91bf9b301431423b552e2c148e66ecfd48045ecb3a940dd65694d7fc8bf511e691b9cfd7547fe7bca6465b72ff9f1748723c4eb14f8bc1efb2fbc6726115c597a3881e0d5019335daf2e5ea8796c2a8b893ca798c4ef2639465505c4bd492bf7e934bb35be9b66c9f35730736c65fa4c1a2485378b9d71912cb924634a8e0db2802b75728818dc00fc28effdf1d8a05e4de4608bb6a78bb19c377d5ec77dca1b5ad38fded7", + output: "3dff5fde2ca24bf419e13cb7d12368e70449d41f2aa22e4b567f5cbdbcf3257975e44097deb180f2621ec36acf375dad3b7a19234b9856dc6c7842a7f86be00304b41a8c1662a02e8390346cbd0ff6be7bc1ceb821dbd805ab5c93c9c6ea5093249b5dc52081cbbbe1b326e831ef3c6c42fb791790086d1586f7daf031e70a71b54e9134f942e9ce229fc77980eb80c985ee0c5965eaba375d156f9b423b0615f4ca6fd77de28e28f35aba327e4f1b75725730155b7b4d6c5c264bf3d9dc9a16e7ededcc261add8c666278bac5cf0b3275d6d6678060eae30bbf2ce5f63e6a53a450b65aa0adbd1c90cf045f5ddd9700c2a99c80586c5244cf4c08035b6ff630c82cec3a4fcc83860e987898b42fe746939f8b37c814f8dab65de276e9784fb90f0751d3ba0826889e1e7e4fdbf8a90942", + }, + { + length: 430, + nonce: [3]uint32{0x99b172cc, 0x91056d0, 0x48057533}, + key: [8]uint32{0xe6cf398e, 0xc3c56066, 0xc5ff194c, 0xf6d2d8c4, 0x6d1d8908, 0x63e62065, 0xcca485cb, 0x1eb03dd6}, + input: "3ddcd3c00014747903c95e49f64258615455a0b26c5070a9532382a9bbd18eeb19c9fe1a902f5c6baf544c5938fc256d310a9332223dc3c54a6eb79a4b4091c3b01c798d2800418863f2865c1cd8add760e445588576d4a6c945e1d6d50dc913674daa4737ac94d84eb0ff57cda95df915989c75adc97c4e3c1c837c798a432ba4803a246bb274b032db77e5c1bb554a5342ef2e5d3ff7f102adb5d4e282ad800ccae83f68c4bfd3b6046786a8cfaa2b63c62d64c938189b1039ae1a81ce5c91530772cca0f4a3470ba68e4e0548a221eb4addf91554e603155a4592dc5c338aa0f75a8cc2822b318fbfba4a8f73fa08512132705dae792eed6b809c251d35cca60c476406d964187b63cd59333771e37367671d0ccb393f5b8bde77bebc133485ec5c66bdd631d98cdbee78a3cf435d2f824fa2f9e91e89af28b2e155df4fb04bbe4ce0b6162dcd8e81ee8d5922ebf9c957b26c343a0396d91f6287a4af9e11b7fbb5a5a5c1fcdb186365a20617d4ff5037b0bfa97b6213a6ebcf0b78b81c65737378787b255cba03d715fed4addc2c70c1fb4d3ab16f2bff287186c26a164dae2fe9dbe3c4a2e1617f01cae79f", + output: "ecea5fc18dc4aed23359cacb8f79a457512e0a27d9816f353e315519d2b2faf74d14ae8ae5e227b203823998a47a050c363a807f45f610942fed4518b8091b88dff8b2af8fb6552eb654c85d2b6a918bcf56fb898392941d983b1afd867ef840e12313059ed3e4d217498dd511563a939c3c536fbbf8e019deed29262f0a655fc680b15939475e0cee0ce2e8bab5834f7354b93e2e0958a5bc608fab369b6aee3c9d73a6898e402484eac7300150517bbd137bf55762897696a3dc4be74b0c141755ac8f2f6e59f707b1690c451a774c46bbe195d826a6784f8d807b78f8ebc343ecacf37cb9b1b2fdbff6a1237b5098853d783e77515c419894c2628f8b5117042294ee2ed58a33746f9e79b13fdfaa25a75fc95340a89076e786e0ecad7de437a9a3fb3092146d255005b22895310b1252a3e34572cf74665b97f4adc30dd0f34e3216c7757953a4b618a775bbe68f9e0922d75afc80a1379aaf1745f2263afb6f0b37553d9c984f1ef781ea75b1980c559c77565c83f3e0bd7a3cd7cdb594658beb7e5eb940633dbc6ae2f50383beea676cb6c814b17b1d73dd133f544da88ab371415889ead21803c1ffe3f2", + }, + { + length: 449, + nonce: [3]uint32{0x2adb4a6d, 0x33d00c1c, 0x10a0193c}, + key: [8]uint32{0x8bd707df, 0x70212019, 0xdb685581, 0x9cdbd1a3, 0x7db9ff1a, 0x1af119ee, 
0xb1d8c0ff, 0x3c4a22cb}, + input: "93ce72a518ae892e00c271a08ead720cc4a32b676016612b5bf2b45d9ae9a27da52e664dbbdf709d9a69ba0506e2c988bb5a587400bca8ae4773bf1f315a8f383826741bfd36afeae5219796f5ce34b229cac71c066988dbcae2cbcfcdbb49efcf335380519669aaf3058e9df7f364bfd66c84703d3faaf8747442bdd35ac98acdc719011d27beba39f62eab8656060df02fab7039223f2a96caac8649bc34da45f6f224f928d69c18b281a9b3065f376858c9fd10f26658ae21f5166a50fe9a0d20739402eec84f5240ee05e61268f34408089e264e7006a59bb63eeaa629ba72603e65718d48e94e244e7b39d21d85848d5f6f417631f3876f51b76b6c264356d7d7b1b27bbac78316c5167b689eff236078cf9e2e4626a4ae8bedeecbcaf6883e2e6e9304969b4fc7a4280dcdc5196267e9bb980e225fcbf7a9b2f7098f7f5c9edd06f50c8791edaf387ff3e85ff7bee1f61e4660fddd4eaf5ab0320508e3ccaa9823ae5a71faa86bd76e16d862d83ed57bf6a13de046a3095a74a10c4da952b3c9b8fbde36048537f76eef631a83d55d3a13096e48f02b96a5a8da74c287a9164ce03ddf2f868e9ca3119ec41f0233792e64086c903eb9247dbae80e923eae", + output: "bcf49d62dcd1cff9dc37d7096df0c39031e64ccaeea3830fa485edb71b7fcf2ec709a4b327ef9c7d4ea2b35f113a8485d4c236e06b3baccee30e79c6c08739fe5fbed59db30479b56dfbe584a5d79b169b200430ed27072137e940a34170606b31f22095f2151b4d9b901f6337f991a23e4c8997a1ebf5105361fdade1c889b8dc9565e3b33e0bd608c39d725becbb60da8a797186fe0986736112da3d09906442364d6e253e5b27fd5ad72e877c120ea7a11d42b19948f0df5ddabf9cf661c5ce14b81adc2a95b6b0009ece48922b6a2b6efffdf961be8f8ec1b51ad7cfc5c1bca371f42cdac2389cbddcdc5373b6507cdf3ffc7bfb7e81487a778fcf380b934f7326b131cb568bbaa14c8f427920aa78cc0b323d6ea65260022113e2febfb93dcfce791ab6a18489e9b38de281169f1cd3b35eee0a57ed30533d7411a7e50641a78d2e80db1f872398e4ae49938b8d5aa930c0c0da2182bd176e3df56ab90af3e46cdb862cfc12070bc3bd62d6b0387e4eee66d90c50972427b34acaf2baff9d8a76002a20f43c22ac93686defc68b98b7b707d78d0e7265aabadde32507a67f425cbd16c22a426d56b9892bac3a73dd2d2c03efdb22ecc6483f8d1ca67fc7d5", + }, + { + length: 487, + nonce: [3]uint32{0xecf15215, 0x45e31add, 0x56499d31}, + key: [8]uint32{0xf5988496, 0x49bcc2df, 0x7b4ba3c3, 0x5d5138be, 0xd6cb466b, 0xe98c82f8, 0x147d3f27, 0xc82389f0}, + input: "f72bec13b0f0b6f2317118f14c2a0d8e963b1bd49ae7584e710dbde75bb1e30c79281847cb822a5f3ae4fa56825e511212f17f0d293cfe80f872e6992d304e9283d08ce65ceeacb003b36a862c91282a22536e0b9c19953512a1bf9e20d3e7a8f1a2dff45dec0b9b04c592e88a7814540cf636a024d10008463d0b3aafbc4c9359889149433ef173124866aa6f53526ef3b3f2c630860ecdd08ffd9fc050e95da512cc87f812f9391085cdec5cc87258b8560806a52336d612da7ab05e0f60566b950904aa27c975a48c7d78455728c87f9b53aa4978374ab9592e12c22d9a760e26eb527133534ac5bbf969596b71cde8b4ef3587fa7ffa7116834348c275ad4dce68ab3397521ddc8e54380129cc81b981f9b32db20dddb0ecaa0f1ff7b06495a42b4a800a207b8e9ca38794e2fa9f40546e0e3aef7b5236d7fdadd72b1158714a5ad8d6264df1e75120088e449b9e911eddac59f1f19a795205ab7532783a93159876133b3fe3a518475a545fbe8dd2ac143f33c35d98e3ee13b63606b1e671917ac3ff9412773a3ac47b8c6627b8ba9dde6820f4f16c2ed9cb7d7086cfbb0cf2d7533eff253d14f634ab2aad3fb4289b9a0bb667a6fdd0acd5949185d53f1dd2b96ff060bb44f872a67259100669e6eaf1a7e2b11dd5fc35792db0c44a1127765934a068bf", + output: 
"bb618ae6b7739a4dedde1dbacf864b0892b93dea3007237d2f6f23be0718bdd29321e6b0fcb6a44dacf0f5c53d91e16165997e2302ae7ebc2dbd02c0fd8e8606a4ad13e409a4e807f331cf4174171c5fff23ca232192906b4eefdf2ffb4c65af78be01b0ba7d15b4341dd5a2edd49b17db2812358c8af0a4a9724e0169f50d1d331936bc2400012a60849876c3ead52cc9fe60173c9992f83f3e41ebd24fe3961835109612994c7620280539d483f91ef9a64c16032a35612a119589efe6357fa35b19531274576e304be75bc7e91d58015792095bb00ce4de251a52b946554366ea7ed9ce9317020ec155ae0071e022af36ad10eda5d671e5090c136e381cecdb8bc179474fabc7dab2d8a134772976cf0791b6cebe2333d34b4b8e2b6b2eab2b5dc7c6a08a583d091df64328cbcde36bc1b81095d82c741a1503c55d833d551a855e098166c5efffb8e4146e32e54abcaa85076ca6660abdfca9e82824217b5d3f23f7ff3455872bc76751480c1a8e3e725365c82fc135cd3713cc0f1ea733754142f8c37716a2a4fa8a6b898215c287565325774c2510df6b49e78cb986853ac5ca532c9a7e2bceb7c0157f60433f29fe29009343d6035d7b5892c77f821b644590615dc505604501dd218dcab789e6f0525387919cf25c7c6d62a8979e39d346decbed2657", + }, + { + length: 511, + nonce: [3]uint32{0xba68c47, 0xbc020097, 0xbf7d14a7}, + key: [8]uint32{0x3bbeedde, 0x6e8f4d6c, 0x6e27cd72, 0x140ff360, 0xc891efa0, 0x4aaa227f, 0x733cfef2, 0x2b51f1f3}, + input: "96eb94e1adbcc0646440c8824a2fc0f2c4b17d9cbddbb8ba8d9dbd6482fbf7201c74eb923153e0138b2f6f182f9c3d5656ee40bb7c26a01740b5c7d125261d4e4197614800aa152b402ba581bfbf4288e73c9ef7e7e37491212b921420eaaff880eeb458e3d0aa108b01b53492c97e328e9d10e3220b924351d583c00e76aee9325d6b89b1f162ffa30b386b37b5eaf4dfc25d22987dde4496158818c4d8f19ea300fe140be921d3f1abdaf9ab8946833a57cda5f41f995ff80e98b0f10f7afd736dd33438dfd395547f11563056078ff8f7c202aac262955f0ca5dae2365472de40f069028104ac552ea5a45ff2773335e5d3242f1e62e0e98003333dc51a3c8abbaf368f284536672e55d005b24b7aeba8e4cef23289adc12db2213aa037c797e7e753ae985568199cfe14cf1704fbca443e6036bdd05859e3583897cbefe7a0cf268b75d554b2da6e503ee04b126fbf74eaac0ebca37e84ab9c726973af780fe2bc9869fe67b7d9e4a04062ee535b2c1740d1347224e211b5cd37ee14c3325f40abee930eb6a1634986e756b3a1f86a3d7ee7184d95ea948506d8ab8b23f92ecf3eb0586f7a8b1bc227e08a0e32ca75ca4eeffc5c0a2a623547788bca66f3dc2c48671e462544d52a87d34307a7f111aeacb7da50262deab33d9f29dd6b47c3bb555be598d619cc66be8c4b74b01772725268a43d467f39bc565e5efcd0", + output: "590965d18ebdf1a89689662cfae1b8c8a73db8b26941313006b9b9bd6afa6a57149d09a27390b8883069e4fc2dfcf75035def1f8b865e24c21b1a1ed3e9f220d7b48046577b661bc92d9888a912984ad415ea2fc92c9e37da0bef5c7dab11495c612c27b5babe6eee28fd26482272fce69ca7f11bac95251735ad808365ac587830ec04105304f8e440a4da47d30e788718da4282941c9c76f18de4f954b8be750b54cb1145489edf273625a0df9a694a23fe7bfea12579b53c3b2a3de85705568cd7e603f3b8beba9a14cad9979ea283a8a291d3e1105b7f890e2a569804d9b7dd4c7e50bd0dcd11223fd7247af77f04212ece1b98c238d2fa0386a994bc502f83dcdd2e5a0d45b185155e1a395d91726d383c2c198fff1590e983c65ee041638510787c8c59c2e96f31678226a033e027bb40c416b73c3dbef31affc93a659c8ec7ffeca313fd5283a80533b2d63941c8f245d22b160c5fe57c5fa4b759c407b9acd6d9c4f80f244360b9acd11e2b43d4af757e16a6ef9d6756df39ca3a8a235e74351f50b2ebf54df633c8c400fd80b41b07117676d486377095660f2f20f62c034563b4560b473a8f4d6a740306d2a822fd8bd98012a840ba9b1709df9a0d61ecc305f7180fd764e334045d9a8ca23cb8036c05616a8b21fc488429ba4168c59dfa231f0ffa668a3be7b16583df1a55bb9c15d51660ddeca730d66f7a9", + }, + { + length: 607, + nonce: [3]uint32{0x9419df54, 0x4593f2a, 0x71c06dd6}, + key: [8]uint32{0x7b517740, 0x41e86353, 0xed629408, 0x5fe32cea, 0xb06bc5df, 0xaec9b350, 0xc00c2a6f, 0xb3aaf44f}, + input: 
"be3f309c6e7b89e1ec4a855cf161156d09f8a04d5630534ee19e9e071e3f4603f23f0c59a7b7f8a32c4c203ec8c129a268faba09abde7b61135c6c37fd091e2d695f0e242488098ebed30c7d321f4dcef0bdd23fa85a53569868cf2008bf4d2ee7a12a6673298c7e797321b9f4559748223b590e6fcf17aa72251586b01181cefcd32c6a1a20a0fc27143426f6572b1aab0e7301e390cb857f912d78d5153906c698ee140b36cdc72693cc019cb7add747ca3a07b2b82a2332bfa76c962b186ad94209fcf590ed0f6a73b08a771a58eb9649f2f1da4f7c385da83d50c939231f745514d14b0920deedd9c4dc6d2e547f83643d13541870875e52c610372b14b602e7a47f0b3721cfca60ec68e2eee91f40ceba2d0fdb4ebe19cb1d1ab170726c9e600030454ef355f9a40033672be520e528937f38e7a862a5ae50cd94f667cd015a72ee3f91b1a09031bf4c207e0c516b2e7a4baedf373f1ee71843e560741ed3a3094d2b513e2248caf27ce135716f6887d9f1fe5b11e02c12c989d29054ab183a3f55d9b40d78e12ff56edf936ab966c7c3130bea472b71fd69e70165a76afbf720e2c1587a77943b35acfd81b2ab6f39476623edf3663024fb84da8057ed3a361e9533caf9fc58a5e4897e4bf84f58ed063b5c353bdca3792952eec0a1404149ebeb5b17cd6350ab3e27e44e40fbcb00780d001a48d0365d534ff830553409919608881e665f83bb5cf0736d728c41cc4e985c377f89ee1186303d0d76bc634875ab3ebd87059969f24b0464ae11967bcc47f300a34e3b917b1affceea716c5ad9abf1aa3a1106e2f4d006514dc62cfd2a52426968f2f3991c9f9d8fcd", + output: "e4032c01bcece73fde73961ed216820dcb44ce20134678c98afb674bb03afec2f4aacbade7f87a32fff57ae9213eaf0509e9d9db1313b06fd1df53561f85896ba627cccd2d0e2ae4f24f5579bf02f6599f5e63412ba084cf53a5bc9a8061b5c029b755329fcd73f629fadd3bcf6cb4c572fea86466cb5159d19eaaf0f44c3471d0323bc7206bb514ed8117a61c6d98d44faff6a83716657531d965ba3efbcf067c452e0d2807db3423958d9a4421886fe132d7c47e82086db9507616b67f0051dffc1a49ecce3ca8e4d5f5af15684cd8837a471430ddd333ea0b6ee603b7d9e702692f857fab060ccf26f2a8e61dfd3b12923acca78b83a6004e4ff09113becf6bdd0bec3a449a195559dfeafd4e2a79ead5ae3c993a15ad9b1a2ce818e18edb010b7fece9aa437d85ba9841d89026d6aac1a3a6ab6dad932a26d7db6f3664b06d51584cf4d22a75c06e2840db7292798306e4d39379af85a6bc8dcaebb5246e07fadd5e336f122de0ecb99ca24a971701a1f43bd69933beef6e52d299b132e7510caf27b99739e32bd272afc36755ea80cc7ed3957d91325584b338d15b19fe554ee70bee903babe21d0cbecd49235c70a3a4f516ce16761d1cfcd70bb4b9c7c73c359f3fdd0753d6c1ac1a1463142f18266b6a9c84675f247d56563646fb2c8c3b6b81944c2ba2b76b685ba5ea40cf539bcf3850a8af3e0a69c0b38164de520a3bea82b91f67d36bbd87877b5be7f06c2d26b2dc747a26a51f51fe293197db0e91e6ac617c71ddc6edfeb7db8f067ac2012268deb7e5f00a640c1bbec5c4c71f10f921071308cadededad5c90e72d744d0bf790b043fd35729570889ebe5", + }, + { + length: 682, + nonce: [3]uint32{0x17cebe90, 0xeffe259b, 0xbdf9d4ca}, + key: [8]uint32{0x172d51e8, 0x5b80f5c6, 0xb9c9e438, 0xa56119c0, 0x62212323, 0xf5386589, 0xde7079a3, 0x669e643}, + input: 
"0aa4fbce7e1774f0607e7ea01fc0e6d210bb283964ae75e180a9f6ff3d2c4d50914bfc32bca6d243eb33551521d54d66f377fdc1d31974ece79b157905ff7e7a9b064f349727ce37c83c15ae13df635c3e6b4baf994d9aa0bb90b06c6cda51deefda72c97a2993448e654b746b216d2b949bff1af5238558205cfc3162f1d7a020a919db4d4eb44bcf7b269d4df57e24133d1e540694b9148444cee16e64035ef006a6079dff449949c1b342991f2a27f21c8bd74ccf4bc944284a46e9fd9f9bfd4b95f80c05553950fabbf5e5aed6babb8427832266aa4d175114de9127ff6ee848534d6dd5aa6d2dc361319863cdf32cfb1b074faed17d368964393352df01fe8d86af0e994bc9dac315f7d9efa7bef47a16676cdf17a535ae71d399c4c11a3a3ba0491e8d41f419685258a4ec7d1ae588b3ca341719c0827ce5f5a653959a8671844f2d0293c09bc7d35497ed18c160fc7b6d073a311b621a7a37f7ded1df3d73dcba1821278c9e17a191997fa4dab0802e1ee1b468e91e4272c4569a17dc0b2805b980bde798640aa328a3605abea1865083d7446e960c27f69d32882a2a2295efc9c440dc203872373411925f8839715e9441d31dd9cc14bab09a3e03b4a63e14db3039d58725796326ea6327f189beecd63955f1409467c81f4691ecfe9f0ac5234f23dfb84e3199e415ee7b4f67189e8857ff6cb3f64c2ac1b554bfbd679a6ea8491cfd69d96d08ee2744d9103e0b044212560ff707974b1a9043e1f2c3592828fde8ab5e993652c00e2b3fdb19082611b67866ece6c4a2635f87e04d2136d679f632416b03ece4d7e9406f3437163f4fe0c8cc7b87d487f6de3b3022665bcafa847c2b9199e1ba9af7deb0e29b66ad41688d03a8369416dfbee6d03526adb3ebc4b4f8531d73589499a3010b5309e9d9d2f5a9cf347983a92722dbf6c4f0bae8aba57b37d322", + output: "a31f9a532f35f20ba604a9ab9989260e5a4ed04e6ecfa1cb9e0e1d16943906acbbb4e761a2bebc86cad0ce8b3f26d98b455e4b0835eb8b43791cea29fe8fa6e5187b60198142059bbce98917aa2957ae2555bee70e6e9e21ff6197a51ac2ca2952c413efec4d9903a2f6883e88aebe7ca8316831f6a8f2cd0e486319b58dc8db862779adff98b7f35c33faa53d56acd7a81e0feffc286b728f3a11afab7cace4c30b1a45780276b1f0ab89242410d07cb1191c7b9da5d09db7c9a729d91ac3ed82f4350f2871a12d125ba672861d1b0af7219c360a0e023a8b7c23fb9d72631c72e032c097118d90e5db0576586d8224165a8376fe8d04de93516848e7c2653cb4f7d24a971ccf4f16c527ea5b4153fad5fd5bf473b15806671854507bf1a6d9e5fe4a6f6ec977197d21d69a041dd955e199031f895adefd850c8b0ae327ba0c18ca1783560e1ff0feb2f659137e34a91e9e9ff04fe3375b7db6e4326986e6265e5fef00297f6ae627c7563846e531762748fe8d0b6baff17acf1e6c5cfefa35a95ef634ff96f83f16342a6c62311fc653d314f8a6de109356ab7801316e69a48834cb6325816b1f66d5c67d6e9c9cbc8e1a0521fd6e4bf77a7d2609f99c9579e143f530677b99d198a97620d087f058edf35eb7271701ecebb8bfde5671641ed21aeee9e7db06b932e0def91be93cf2955159e9666c770cdffa03886eb6e98dfca8f91ff5cef1927c0f82b9226d65c68d011416cbef802c264e34244ead7a6ebbe28a510a37e1276f4f3cf27a3944a08aaa23bd321092761627dae20dc269b6150545c75e995cfee0a9bcedb1ad8b364beb8839fd5c9f7984fa0a08a1a354aebe18f62acf6d6664978fcfda2ce6fc16eaa2cda5b835339001b3b98d3a407a3e18e0ec2da6ee3d1448c1ece2ed67c3f51f01e76ed59f0e61102b103a3c65aea94275e8d1f0d331538efe", + }, + { + length: 768, + nonce: [3]uint32{0xb1c9bd09, 0xdbe6497d, 0x16c73b95}, + key: [8]uint32{0xbf9d9e5, 0x2eede668, 0x631dca95, 0x4233e36d, 0xd83fe644, 0x99b11f89, 0xef055717, 0x1ae9695f}, + input: 
"e097b1e8dea40f63714e63ab3ad9bdd518ac3e188926d1086a9850a5580affb592f6e421abc617c103479ba39a3924eea1c0bbbb051614c4b5003bbd5fcbb8093864fc1c130748194d6b560e203b889b98b574a98ec3e0e07cb2d9f271ba7794e5419123b4f2ebc7e0d65cd404104868905ff2c38d30c967fe9d77ebdd4b8fa836c3b0ad15e3e70e9a28236d5593e761e694b047f63bc62c7b0d493c3e2528c8af78f56725172ac9416ec2bdc54de92b92a63f9ccb61e686f9249c7cc337d99b2160400bb5535eb8f8eb1e3cafcbceaa821c1088edbacb3b01b5bed977e702de747ad00268ffe72e3d877dd75816db65b5459607cd1b963fe43bf2405ec223ddc0de514d59cde74f7522dc72285caa3eeb7eae527a7723b33d21ce91c91c8d26bf36eeb1dcdfc1e9e475c1565ed9c7e64ef601874a4f277280a5ceec26717e9385aee8b159379e3feed7952b87240c942970d63351259aa7a286ddb4a2620fa67565c92f592902e49422f1eecea2f44d1c0bbbf54a9e5612b86a9549aa3e6639a924c7bbe2d3c1b5669da73c0e2c6f6f6084f54a912ad2635d0141c2f5ac925414dce0da09ab8f86eae2a7b7e48741253189e5fd554d5c04d9807ac6ffd8a4f8229a3e8ab75ca5c778bd7ec5a5c02085faba9792cbc47f9e9311f3444e6544359769e1b3eb4d42ac8923ec94536e1a44497766b5da523f5763749dbc2738dfa8e13c191dfeac56c7614a96bd3ae23e4e6e5ac00be851ac9831108989b491eaade62113c531385ef3e964ce817c8ed0857adca946467682c2f4387fab2f31ce71b58370853171720268459588d5d216faca58d0bebbd7cd83a78445d9b49e83ec2cdb59b5d760880bf60532178d60372752b47d52562b316c7de5c74af9cd588643002d66bc6260595a540d2f82cf2c07fa64e0cdd1f79877b6a25b0608c735a7d35ca10852da441fcfb31061fd7e482a0989866f9eea8b0b39c3d519715c1c2766c3ad99f041143cdb36557ed647403458155dccbb80c3a365f0a85b1135695648ab67ac76b3d219c7b77e49d735c72ac947b1d7eeb279beb9d2602aba7b36ca", + output: "7b6e07e6415660affba56047b988f4548b308e7a642c76791f5c3742cc4cb744cde48fc30e50d458084e06c6dd29a52cb4c306a69a493a17c0838d14b107d07b81c983a2dbad09b80f087ba48465a8beaae5b16e8093e17cfb9e84ea3bdb9af00889268a5c01ddf25af434de56f65882322432aa275fac8519e317ef4d89478f29182143f97350983050f5d37c4b518611da6fa2aed7bb73e614231a194fe17c9073e377fc6ea0aa491e15ca54808e0536c8c3f1bf657283f807ebfc89b55049ac8fb86f89f17974fcf0afc1a2c690c0442842d0f4af9ee29dd960e499d1077bfdad4c0c9189a6e83799bb585acdb853c1e99da7ce9c7eeb9bf431f8d364d0ea80b0a95a7807f196c6ee69fe90e6d1f5d23e5cb256e37e65826d7a111f2272884d6319f968580b3164b2697ea6556816cea3ca316651fe2fd68dfa905d080c28622606f7d24da216289fa2c54c6f42dc244ecb047512ace62f0801f2dfad8f0218f45e2b3bbac97c2176c842398b16dfa1fdfc9a68b7b5a1e785d2a0cc592bc491f5a69c81127b758ee02c66b81674d3135c5882d1dc89dadcffa06f4b0644df5c7fd65c72611d79be7ad637edd6fc38b39946aa2a2c6d08ca9d3ff9a8ffe2e7989546489539b1a623fa937c468e59e0978602526b4367de277526895aa222fbaeae2084f418c5745d8ee844da0baa47f592970c14cf710f49539c12104a62baddb3382f5773dd18c83ecb238ae2e749a51584a38e394ebadd175bf5c3cec787907abb1d94af70ae63d3b7d8d5ff254da90b78ec8fe2ea95dfbc6e3e69ecad856c9e54906df8fe39859f2014b74dc3ca0ee2a957001939d37a6c0b489bd3f1658b835a57b24aa282c23e875c9e67e6eb8b32fe44e7d7d8e285d85da0ce1b53990f9fdb5e2e74728e433ed2c1044df9e89cb9bb316c39fc6fc8bcc74a382093926a288170e857d6b7f47858a4c2d05c74263dc9e8199332d0179687f4a4cdfc80ee6737300cefba75905b22d21e897f887b67aa3051877fff11d98bf96ca5091bb225bddd5eae697f3dfb0efcdb788ebf6694b5b39dbb0d4bf9427382a3a58f0b", + }, + { + length: 828, + nonce: [3]uint32{0xc7e503e, 0xf8110ddf, 0x83316c8c}, + key: [8]uint32{0xfa2d1cd, 0x4fe7f905, 0x2b9e4c1b, 0x115bc881, 0x2922bcc5, 0x3f60aa25, 0x13c26d31, 0x2096af63}, + input: 
"0a1064714f20d9e47fe53250ecfec759f4137e60afaf65755f4709a483504c3855833b6dcaf7aa0180fd735fa9a73d46697f6c45004adf12452ea4c04a720fd7c20b9783b74b8b3ea0c8b1563d5a85f44af8afd7d91ca6298ca22642a684f66e365edd6f6bdb2dd32dfa13c62dc497fb341b86f65d40655931171416e23e3b2623c0b4a67d448877b6e3d4e0fe284034a10162b2b5e21639047036874f4bcde22b145b5f18aa8ff32dec81e6a5ac68b3c30c24bd8fd3b8e098a1cf202e2ab2a3bb66a9393222b9f7384653cda7707f00bc3c81e9591fd040a07d3629410c2db78781a4c9db3df5f9d648162f1b087974f56a89db07aa21ba827e3864a1618945b2fba06853a13c35da2909f5013feb313bae09870b8eab904024adab0d6ac46c1a1499791b47413139dee59db676949b9e9ab8d3d6abaa954ec2a9fc83953c91b483c3b6bd6700b96484850734e72e3710a1b379c0d0698aeaf68f13a0d317bfd689471e3299288e7a383a58522f0daaff210cc4917fa05f0b8ceefc2afc46148a05a100d30787accfb4da094e61ea6b58f132692aedcabae928e53c2594b01507b8fc2d0a85a1d111d1f4de0b95258281ae01873a72606753b6f878ecd8c4f613fb3477710d260f0bca0d4c06f675ab7113eded395f88755a98a0ad22b4a002cfe9447c4e39eda13738f4eccb9c13367ebc2878257c4647d31b67e5e32b6a77f23e9593658d19c0a40e8a7228767afba1cf23072b013b2d76ee66e42b57bec2797ce3619c695a661004c8129cb5c5d6a2836be22483f3b7e40bf8ac5535bf6cd065c4821a87829948c88163cfe3c0f60cea4e7ff59df4cdbf80064b2d664b39487413039999b5e86f1d467b12682d0cd355e9f7cd980e87d584ddbda89f68632d3b8fd6bc3b80205d7feb97a46842b093f74aa14bb21accda7474247b5e39ac76ef75e9b5b52b6a829a7e2297ab88fb0eb690d54ab1af2d7437149a6202035ce15f1e6c6267458d62677c263d83d3f8119af191b7d766582620e0f08b411c996c25ba6a32c2d73f592e789ed662e94103329bfa5e6573f1116ec04438997f3e4ad91b4123b570743455020d914bde2d8417fb24671e6db261732fb89dda1a36614b095529e4f97374c9bc0e55aa577bfffa663c816ca9fae3472e0a", + output: "b00a7caf5359c5bcebe590e6bab9aa03370050c55cbd45a257f4869937e922a15f2d38121b1493d6b5dd4a8a47d7b4e5cb049d396ad84ed421df774b0408b6939f18ebf5cf83f48c540affcc2a885967bf4bd222c42904b8a73c4185bde3f97e874fad25b46714235e60c9ff53ed2975c9c85ebad0752249e4b627ffa41555eb9074f63a5f7d61d207d2ce11b2a9fa23a13a0832eccb91efa2afd8d9acfee94ac78a733fa156bfea5006da1d0127c32aadbb75c015b68c627903e1c85bf3a1a9f99c6cfbdbb5c871f7f9661b78cf5e16d819f53e9930e201d4f58e69bcdce77ec5b9b1d2cf206a71f744342273c26b9abc71303c20df3d51f52222893d803fc8e0e0afcd99ee1c7f95b48680403566f7f9e296d7ccc0ec348b6ad515af58d11fd82c628ea29ee6a5d67aaeabd8823addc01a078b04313af73105d4ce4abef8e6ee8ce649640a19678292d4f1017d121549fd2c19ba6cdc0b613e512bc9551d759c6d38aea7e35c0847a142e273a16bb1495e652f9668b97801ba3f6d9931c0a1efaa4452e15732dca1ca9cb45ed289e0fd08d1cee1cdcc9dfba8d0b2562b0b1a180f4ee69d63573222c8d4789bf0d63d2a201a70c7b27c84e620e33e8a863cf49b784269a51ead3d4ad26f044d5859988d5485a11533ea805f5a8f6313caa6b421071a34f57170fdd8e4663e9a4cdcdcc1ddaa9f6e651fb365cf827667b018ae7d028c7f96295b2b4f9eeb4b361b48af86463a79f50b107ab0935e3cec3f4f203cea801ff95fb870d2c2f0e315dc8a6a547dd3c390a1f5403917315164bd2d40362489b389a54e8dc0ddb83e6a43a26c65923e6f76ee0ee0e3a33b0a9066620a01f0319e20b9f1beb3910ad962a3000e6aacb0ae57f3f6c5e0315be5de93edcf0e45e0e47332f9daf7f33e6e8bf1929910b78b8f88ca12bf5519a3217b7554c8c8350cc314561d580bf67a3878e3979430d070121a5e070a3458742e8549bda972f603222e2b30eb8a49a955805307e6e02f8c60a08188f69340e116422458d4a8841f46a78c833b1a822e3f6c9c97422c918f17c36175ca4b3d1c081ee4b175b4b07bf101c3836eb5b9e3cbd08a89b4a1c50edcb41ea8ea6ceb1532f5b842715d50dc21e2499e08c373d3dedb96bb477c8802ab7aa957e0b5810f38", + }, + { + length: 859, + nonce: [3]uint32{0xeb02dac9, 0xa7cba06c, 0xc24764c}, + key: [8]uint32{0xe9414a57, 0xd5e29546, 0x1a5e2f4c, 0x806e4c46, 0x48098d1f, 0x4351ca1a, 0x53ed97c, 0xa6a495ca}, + input: 
"00fa3b13b5cfa9b5d65a41cc2d3c420518802c22c4582873f1ad52a22032d2cef7c975078b199787e852fb1f914529f60d1cc854e5d6d547216dce043e0fc94866bb2193343c3a07fde60e668266d1cee3067c6f2ce0f9f63456ad08094b6c7f515f7ca90caa96494e2a6835ba1f3f166012ad1ff6af6b5f8455d5c26e72402966af9066ca70ad027eed23b0eb02c751195064a62283975efeb29bc5993f83360d012a2f5275ac758a9e8fe458fc7cc0673e6b9e338678f0faff60a67fff3784c3054dcbd95d1b00ed4c6156b3831cc42a2ccdeee55541f228b88e6c318e2d797c6fc035ae12868c4a4e3843b5b25a530b1477dec3f5ac27644476b5766e0ee132d833f9a63200eb0980bf72c3666150e567e01e3e1f469cf36beea65946fce714a3f354983e54ca4315b57ea35c5f48bd5eada05f49db1004cbb39888ebab3afad62f6509abad77ca8c4ff28731c7ae545e6876c8f4a80b6cc26928ee05001a9764694b52edd605e182d5a3a5fd192bff58aba90f57e4debe612d02cf6f08af33a78ebf8823bb3eb46d4da25b7dfa15ad436c380633d3db3d0dc4dfec6c2324d105e7090e65342b554854e777b40b5dab8125a58e8b212364ff88459a8466ff5ae661034abc8286a78ad5aa582e2dabbcd7a0b0cedcb9fd5f0bb8c3bef9117f2ca6520a72b94e528c1a4a464398e654995d5f4c77cbabf2b204b96a058cf1b38284b34e41ac37b05a003ed51be9602050f21c6b9326714bc425c1e22833da95a6e77571691d4dcab4ef9056c4c7f85d5b445b902eb375b5164c6bdf629ccfd4127a6c024bb6c4da0b6b08350432e58f8229e04e2e76f704be17d36e0c04fcc7a98f721d4572aa7f66ae8e9664300a189bc3862da47b60c8b33424f6d577cc10f4755f36c2a6decc30ba81bf48f96616ccfcfb74965d6bdcab82728bb224c560d1cfd7a175413ad1c14c734746be3b062b4e7514e9075c688103515e32e3335dbd272a315024d56f4ecd354264da9bc712080657b2b51b06dc7c4c441d9858935a4c3e6b207bde38ea83bba4c6854b2bcf914d758e0a174c0528e0e385c7cff355c38db1c22440369141e91266824c59f1ed23e7d4b99d31b0baa7bed4526e24259dbef5c9ae275e97267b756645f804c274d65ac7ab0f7683435bc2e4f24075cd1b790aa2b53fbf044e8f2092bdf0dbe88a582ff8f8de291e8220", + output: "bea32587095caac661c3ac49e65654b282192b2addf5b9a403aea6c8bd0096291a0a66ca4062acf1da91fb5749952096ec63ab652ecf94c29807f0aaac939b6896edcd6f0cd8dd8d208b906ef4d7a8766831fecd6ce98f4ea0c34fa9a5114dbeb23c2cd6d3aa962e39b18cb343c24e65d49fad0a0fb50736f8d2b24b011108932484399f4c4510ac9a5e6bc78ff0b450e67f87b49f253b99d95d6294e15a9934fc8b89a5913c08f75d3516766fb0f60f82e2b2647b4619991c78adbcf548c07c0dda30c629349d84f298313c3e629e03760b1cf860264205a950d6fd86732a6513827f72c0dff5aff96f7203464f60849c1065beb70f282cca1334f6f6c767dfff94f063361f592e85597de5d313eaed17bd533db24818d9ba9aea2afa797721fbd19eea7b8d46bbc4b9dc0164636d2e754f5e9e8c04e2a381096331731c645ea1f613a37bfa9a6fb2c6307e9bacacbeab7f5672163ff9742a8115049bce0269d7d5f6f35787be031dbee1535b0516ec0b46d12f5833cde5f2cc569edcdd20993e9776aacf48ace7bfadf79065f2803fba6b2b27aa622abb7ae023ff2b27b727f509f313f92026392485a5ed4fd53b2e22b2d2dc1538ce158d34921214638be30ae054a0f5f1d4f9c590a2d215ac2a5b23ed33871ab26c8bb6db7fe9d6f51e527c9547248a4e9734c64658b22893f4f6867a35f18e2bbfd7d62142025955cb51af8e40b6fcb91c7e959cea2c92022c87c29dae107a306f41b00e73c7bceef8cb070e8f9e830caeee463170e919cba6eee63092a5a7ee33b74db09cdd022fdafbcd5d524253a29a103ba6f4d668d31d18f867557871c0e0258221c3050d57c18bdae4cc4ff8da0daddb5c08619be127ee76a317b59a9d8e67808603a1bfce6b4e0d070082b283bf9c0e6ef8256208e482f3e2d1a40d30807f60a868e2279dfbc3586d44ee25fdca3505cd39fd469c2cd03bc2f921d22a8346750f346c919e7247301c1c8a4a3ddb8eabc6e80d85cd2459afe1cbb4851ea2c86b8075e0fef3177cb074894410ecf681242fac62b5fa4ed3a10ddaa595427851d376cf69e350207b667f7aa26d003f1ec739a8792532ebd93f3cafb1fea40d227bcadda2fb6da794cea3371240f257f80b1b8a857ea453b46938397c1f4b303a46257750003a60666a11d03bf2afb5c71e059933d617288891733b63784bd9c662234f", + }, + { + length: 985, + nonce: [3]uint32{0x3c2b47a4, 0xf614c813, 0xa26f7014}, + key: 
[8]uint32{0x39bd3d18, 0xc9aacd67, 0xcb5485b5, 0x20536a22, 0xbb22ac87, 0x1c9da580, 0x7d996b2e, 0x456fe461}, + input: "01847d8a97d56e55e12f89adb13c8c0f9dea5555e8dc61171fbb8e181f6cf846a4dd68b2c75335c0896fa215bf7f9eb7e398e4520aaaf33461ecfb61051f43d43569fb75fabd79d319bf39469f951e4da7932a74624c46d8d26a9499c701c00d3dea57a6f65b4c0f33b568d13989340294d17cd005b26d89cf6fa1c88e7b6ef4d074291fa8c117ae05d7c785459ef4561c45af63a811e9aa1c31b69a5bdac2356d955a0f579791247a757a691b3de447a53619878397cd82a74053f06da3574045bc856500ec01fd2afbc64d8dd283ac876a50e9396f78c424ab157f481316fd9c90cd899f5aca46dad32c68f1d64ea7f1c4bdb994ad847072609bd89adc2fa8382a5d573b680533640b8321b6adf27926274660b2cbaf04fbc9a4fb17ce8957c38c7bab1aafd5bf7263171e47d2e1ae5cf0494815642209d303dba561754479c24ea01a573c9083b68acc49907b1748924d3c6a82feb9417ca932578c123f9db35521c0d992565f7396f0c23e436289c1720e4e7c6e285c04a8159f93e06801334e523b18fe188355cc6a155febe64ba053e6b5d1cc87787fd5ae68fa86d8c51868b9f6a9664cf0d56aa6cb8463362bb671e6b8423bcbefe2a1a0acba3f135496736b5cec5e329494af46aba322bf5d1cc108c98298459558773a316e09b0bb960a26f4b0bfbaa493b5f98a0e522b6203c471b10e662abe9b9e60de2a1517843933add02017fadd62608383ad53796159f3d21b2c8ed7295802ca79ea65d550114ca2bcc7f7c3b4c6709fffc3c2de00da06e83d8f0cf04b8c8edd21c0fc11a0b2aa7f6adad255fef25e5c0a9d59546e97446e1fbf6a51a8ea6cad54cabfdd19cd10d7d33ff0549b710557e3931821dd8809ab0a9d3aaa761a01ae0f2e378906672924d6a1b12fb1cca7bed41f31974b9917a05de60c32796f502e7035a2c01cb49bc8e1734b9fa138b81b4dfe19d37f5942dd1b42f03e1e5a6a046ecd457174150e17dd148e4bfea44b72da35ef42a7251244700e59e702033677d42611168fd246e1b18b9a464b6c20fc7fcf6360cd00466ece059a69d7d54a4f5565d08799f85dd3c849a08ba43415077c1c0e5dbdba52bb3167ee99a11db551f0260493be1dde58d2072e8c02251f4f574b6e115cbb6136dc2c3fbce75fdcefe812d9c07a91a89088985a52cb1fb9f6cef80fa30364706414175e42c75e8e37f6e7cd028c99f59caa88c49db5b46e8d6301bc39034013718a9eeef5506415016fb21d70e46a03b4c5ba72f91dd9321ff5e210e5e5f7b0723a3bc4bb02b5a74c1f4a63aa5a993a31f79a768fe8033c9abfeb4deb536af1054be02d8d1c4a6a0fa75f3eb787d57a03b7ae994fb1b54b2c43b230ce32e6245d944b3cea4fa6", + output: 
"785dbea5d1e50af4743ed5fd2209e441fc7c50bc7f6fd9cc7f24654c619e2606178dcbbd81a1f94c1b3176837024098bd31326145be326b32fd9277a55a6fb38780c8dc8b471a3184379d90da4aa87d80b889b1f4d8d0755c1704a526b99ac829b8ad157ca54b2b05ff8b2917e27b0c147ab54add9a89fdcad7b93ba1fe2d5be9de88b68a5324f1b42943e45ee31c4ef783ec9e2337b3f2834b10cf452b313fafdf0c03719140f64060da0a565e185cb8e544e1c185ca230ff2321739a285abe8be4be0ce76678a7b0902a77a645194de49fef8ff64cc464ea25e1f1d72c775e450f08ddd7680d27a4142879787b198583d93b84cd87fd5b4063d92d13d9c9cb580c01fac0174686a18f64e6fa0b3589624cfae04aad74950559bdf92b2b199c60cb04013aa0ef56d1f9ec5b7e968f6a83756ecc9cee7dd8b433f64649f948df5474a64549e71e46fd8bb16568d21f5fb67f5ed555f2b8aec4709383e8cbc45b9fe47c0434178ad4c6d0d42606d6eef0e21d0370898d1d5d646830a88d5f024094fe9c7a2003ca13d20ab7cd748dc11a22f578ddab416f3500eff3d89fc177b46436108e2e2c7973910cb8454a01c9e9b98f966848325444b2ac205b1ed6919fa76aaf63717574761b7f62b10649357df49f85a845a30b6acd57fa202fe58673930ec59399f537e9682b1f5f6f409988789a8e0c1f803478dded14b40d3b6eb3109758efeb6a7fe21f41c4dcc8027258da27ad74010839dbfdf8fe55050511f85c321e653f76e55f22248f46da529a380c6b1a16a19ce73af9715545c2cae098dc42dd61248dbcf7b295f4dc6b8930b41baeef677156c534869be65e723e1aa0336e8be8a3b138f840c9cd63bab6d9d61f239a47d8cf56258544e6ef65edca27069f7a57f087a7cc021fa1294b75c0c0f1093c025e426e4f041ed5187f358402676d5da5fb6ceba76a178f65c8c3046f258531c165b8808bdd221c59ff56e3e06247576e144aac01ea96a07f1be15d7a2b0b3b6c259a9133f8a50b56154ecf9f61022f470027247e6e70e6eaf7ece5e324ec8f95667ffed10337652b119e7cb8d197e306e81ea251340b9fb2c33aa230c0a16e1ca783f9344b3acbf413acd96616e6d477dba90e39326089934bc5ca6620855cdc442e25bf8b8debf335e16e7e25cceb68659cc81b13a507fbd9f30b347126beeb57016bd348fe3df592d4778011664a218227e70d7360d139480500b7f6f84153e61ca4dea105875e19ce3d11a3dfd0ad0074035ff6a9fac0ece91afd8be74c168da20c8baafcc14632eb0e774db758a3d90709cddf0266c27963788c35a842beea8ba2d916234431efde4bb32fd7e1cef51dcf580f4697206bbc3f991f4046360aea6e88ec", + }, +} diff --git a/vendor/golang.org/x/crypto/internal/chacha20/xor.go b/vendor/golang.org/x/crypto/internal/chacha20/xor.go new file mode 100644 index 0000000..9c5ba0b --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/chacha20/xor.go @@ -0,0 +1,43 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found src the LICENSE file. + +package chacha20 + +import ( + "runtime" +) + +// Platforms that have fast unaligned 32-bit little endian accesses. +const unaligned = runtime.GOARCH == "386" || + runtime.GOARCH == "amd64" || + runtime.GOARCH == "arm64" || + runtime.GOARCH == "ppc64le" || + runtime.GOARCH == "s390x" + +// xor reads a little endian uint32 from src, XORs it with u and +// places the result in little endian byte order in dst. +func xor(dst, src []byte, u uint32) { + _, _ = src[3], dst[3] // eliminate bounds checks + if unaligned { + // The compiler should optimize this code into + // 32-bit unaligned little endian loads and stores. + // TODO: delete once the compiler does a reliably + // good job with the generic code below. + // See issue #25111 for more details. 
+ v := uint32(src[0]) + v |= uint32(src[1]) << 8 + v |= uint32(src[2]) << 16 + v |= uint32(src[3]) << 24 + v ^= u + dst[0] = byte(v) + dst[1] = byte(v >> 8) + dst[2] = byte(v >> 16) + dst[3] = byte(v >> 24) + } else { + dst[0] = src[0] ^ byte(u) + dst[1] = src[1] ^ byte(u>>8) + dst[2] = src[2] ^ byte(u>>16) + dst[3] = src[3] ^ byte(u>>24) + } +} diff --git a/vendor/golang.org/x/crypto/internal/subtle/aliasing.go b/vendor/golang.org/x/crypto/internal/subtle/aliasing.go new file mode 100644 index 0000000..f38797b --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/subtle/aliasing.go @@ -0,0 +1,32 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !appengine + +// Package subtle implements functions that are often useful in cryptographic +// code but require careful thought to use correctly. +package subtle // import "golang.org/x/crypto/internal/subtle" + +import "unsafe" + +// AnyOverlap reports whether x and y share memory at any (not necessarily +// corresponding) index. The memory beyond the slice length is ignored. +func AnyOverlap(x, y []byte) bool { + return len(x) > 0 && len(y) > 0 && + uintptr(unsafe.Pointer(&x[0])) <= uintptr(unsafe.Pointer(&y[len(y)-1])) && + uintptr(unsafe.Pointer(&y[0])) <= uintptr(unsafe.Pointer(&x[len(x)-1])) +} + +// InexactOverlap reports whether x and y share memory at any non-corresponding +// index. The memory beyond the slice length is ignored. Note that x and y can +// have different lengths and still not have any inexact overlap. +// +// InexactOverlap can be used to implement the requirements of the crypto/cipher +// AEAD, Block, BlockMode and Stream interfaces. +func InexactOverlap(x, y []byte) bool { + if len(x) == 0 || len(y) == 0 || &x[0] == &y[0] { + return false + } + return AnyOverlap(x, y) +} diff --git a/vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go b/vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go new file mode 100644 index 0000000..0cc4a8a --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/subtle/aliasing_appengine.go @@ -0,0 +1,35 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine + +// Package subtle implements functions that are often useful in cryptographic +// code but require careful thought to use correctly. +package subtle // import "golang.org/x/crypto/internal/subtle" + +// This is the Google App Engine standard variant based on reflect +// because the unsafe package and cgo are disallowed. + +import "reflect" + +// AnyOverlap reports whether x and y share memory at any (not necessarily +// corresponding) index. The memory beyond the slice length is ignored. +func AnyOverlap(x, y []byte) bool { + return len(x) > 0 && len(y) > 0 && + reflect.ValueOf(&x[0]).Pointer() <= reflect.ValueOf(&y[len(y)-1]).Pointer() && + reflect.ValueOf(&y[0]).Pointer() <= reflect.ValueOf(&x[len(x)-1]).Pointer() +} + +// InexactOverlap reports whether x and y share memory at any non-corresponding +// index. The memory beyond the slice length is ignored. Note that x and y can +// have different lengths and still not have any inexact overlap. +// +// InexactOverlap can be used to implement the requirements of the crypto/cipher +// AEAD, Block, BlockMode and Stream interfaces. 
+func InexactOverlap(x, y []byte) bool { + if len(x) == 0 || len(y) == 0 || &x[0] == &y[0] { + return false + } + return AnyOverlap(x, y) +} diff --git a/vendor/golang.org/x/crypto/internal/subtle/aliasing_test.go b/vendor/golang.org/x/crypto/internal/subtle/aliasing_test.go new file mode 100644 index 0000000..a5b62ff --- /dev/null +++ b/vendor/golang.org/x/crypto/internal/subtle/aliasing_test.go @@ -0,0 +1,50 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package subtle_test + +import ( + "testing" + + "golang.org/x/crypto/internal/subtle" +) + +var a, b [100]byte + +var aliasingTests = []struct { + x, y []byte + anyOverlap, inexactOverlap bool +}{ + {a[:], b[:], false, false}, + {a[:], b[:0], false, false}, + {a[:], b[:50], false, false}, + {a[40:50], a[50:60], false, false}, + {a[40:50], a[60:70], false, false}, + {a[:51], a[50:], true, true}, + {a[:], a[:], true, false}, + {a[:50], a[:60], true, false}, + {a[:], nil, false, false}, + {nil, nil, false, false}, + {a[:], a[:0], false, false}, + {a[:10], a[:10:20], true, false}, + {a[:10], a[5:10:20], true, true}, +} + +func testAliasing(t *testing.T, i int, x, y []byte, anyOverlap, inexactOverlap bool) { + any := subtle.AnyOverlap(x, y) + if any != anyOverlap { + t.Errorf("%d: wrong AnyOverlap result, expected %v, got %v", i, anyOverlap, any) + } + inexact := subtle.InexactOverlap(x, y) + if inexact != inexactOverlap { + t.Errorf("%d: wrong InexactOverlap result, expected %v, got %v", i, inexactOverlap, inexact) + } +} + +func TestAliasing(t *testing.T) { + for i, tt := range aliasingTests { + testAliasing(t, i, tt.x, tt.y, tt.anyOverlap, tt.inexactOverlap) + testAliasing(t, i, tt.y, tt.x, tt.anyOverlap, tt.inexactOverlap) + } +} diff --git a/vendor/golang.org/x/crypto/md4/md4.go b/vendor/golang.org/x/crypto/md4/md4.go index 6d9ba9e..59d3480 100644 --- a/vendor/golang.org/x/crypto/md4/md4.go +++ b/vendor/golang.org/x/crypto/md4/md4.go @@ -3,6 +3,10 @@ // license that can be found in the LICENSE file. // Package md4 implements the MD4 hash algorithm as defined in RFC 1320. +// +// Deprecated: MD4 is cryptographically broken and should only be used +// where compatibility with legacy systems, not security, is the goal. Instead, +// use a secure hash like SHA-256 (from crypto/sha256). package md4 // import "golang.org/x/crypto/md4" import ( diff --git a/vendor/golang.org/x/crypto/nacl/auth/auth.go b/vendor/golang.org/x/crypto/nacl/auth/auth.go index 0835d3b..ec1d6eb 100644 --- a/vendor/golang.org/x/crypto/nacl/auth/auth.go +++ b/vendor/golang.org/x/crypto/nacl/auth/auth.go @@ -47,7 +47,7 @@ func Sum(m []byte, key *[KeySize]byte) *[Size]byte { // Verify checks that digest is a valid authenticator of message m under the // given secret key. Verify does not leak timing information. -func Verify(digest []byte, m []byte, key *[32]byte) bool { +func Verify(digest []byte, m []byte, key *[KeySize]byte) bool { if len(digest) != Size { return false } diff --git a/vendor/golang.org/x/crypto/nacl/box/box.go b/vendor/golang.org/x/crypto/nacl/box/box.go index 7ed1864..31b697b 100644 --- a/vendor/golang.org/x/crypto/nacl/box/box.go +++ b/vendor/golang.org/x/crypto/nacl/box/box.go @@ -3,7 +3,7 @@ // license that can be found in the LICENSE file. /* -Package box authenticates and encrypts messages using public-key cryptography. +Package box authenticates and encrypts small messages using public-key cryptography. 
Box uses Curve25519, XSalsa20 and Poly1305 to encrypt and authenticate messages. The length of messages is not hidden. @@ -13,6 +13,23 @@ example, by using nonce 1 for the first message, nonce 2 for the second message, etc. Nonces are long enough that randomly generated nonces have negligible risk of collision. +Messages should be small because: + +1. The whole message needs to be held in memory to be processed. + +2. Using large messages pressures implementations on small machines to decrypt +and process plaintext before authenticating it. This is very dangerous, and +this API does not allow it, but a protocol that uses excessive message sizes +might present some implementations with no other choice. + +3. Fixed overheads will be sufficiently amortised by messages as small as 8KB. + +4. Performance may be improved by working with messages that fit into data caches. + +Thus large amounts of data should be chunked so that each message is small. +(Each message still needs a unique nonce.) If in doubt, 16KB is a reasonable +chunk size. + This package is interoperable with NaCl: https://nacl.cr.yp.to/box.html. */ package box // import "golang.org/x/crypto/nacl/box" @@ -56,7 +73,7 @@ func Precompute(sharedKey, peersPublicKey, privateKey *[32]byte) { } // Seal appends an encrypted and authenticated copy of message to out, which -// will be Overhead bytes longer than the original and must not overlap. The +// will be Overhead bytes longer than the original and must not overlap it. The // nonce must be unique for each distinct message for a given pair of keys. func Seal(out, message []byte, nonce *[24]byte, peersPublicKey, privateKey *[32]byte) []byte { var sharedKey [32]byte diff --git a/vendor/golang.org/x/crypto/nacl/secretbox/secretbox.go b/vendor/golang.org/x/crypto/nacl/secretbox/secretbox.go index 1e1dff5..a98d1bd 100644 --- a/vendor/golang.org/x/crypto/nacl/secretbox/secretbox.go +++ b/vendor/golang.org/x/crypto/nacl/secretbox/secretbox.go @@ -13,11 +13,29 @@ example, by using nonce 1 for the first message, nonce 2 for the second message, etc. Nonces are long enough that randomly generated nonces have negligible risk of collision. +Messages should be small because: + +1. The whole message needs to be held in memory to be processed. + +2. Using large messages pressures implementations on small machines to decrypt +and process plaintext before authenticating it. This is very dangerous, and +this API does not allow it, but a protocol that uses excessive message sizes +might present some implementations with no other choice. + +3. Fixed overheads will be sufficiently amortised by messages as small as 8KB. + +4. Performance may be improved by working with messages that fit into data caches. + +Thus large amounts of data should be chunked so that each message is small. +(Each message still needs a unique nonce.) If in doubt, 16KB is a reasonable +chunk size. + This package is interoperable with NaCl: https://nacl.cr.yp.to/secretbox.html. */ package secretbox // import "golang.org/x/crypto/nacl/secretbox" import ( + "golang.org/x/crypto/internal/subtle" "golang.org/x/crypto/poly1305" "golang.org/x/crypto/salsa20/salsa" ) @@ -70,6 +88,9 @@ func Seal(out, message []byte, nonce *[24]byte, key *[32]byte) []byte { copy(poly1305Key[:], firstBlock[:]) ret, out := sliceForAppend(out, len(message)+poly1305.TagSize) + if subtle.AnyOverlap(out, message) { + panic("nacl: invalid buffer overlap") + } // We XOR up to 32 bytes of message with the keystream generated from // the first block. 
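The package documentation added above recommends chunking large payloads into small messages, each sealed under its own nonce. A minimal sketch of that pattern with nacl/secretbox follows; the 16KB chunk size and the nonce-prefix framing are illustrative assumptions, not something this patch prescribes (the same advice applies to nacl/box).

package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/nacl/secretbox"
)

func main() {
	var key [32]byte
	if _, err := rand.Read(key[:]); err != nil {
		panic(err)
	}

	plaintext := make([]byte, 100*1024) // stand-in for a large payload
	const chunkSize = 16 * 1024         // "a reasonable chunk size" per the docs

	var sealed [][]byte
	for len(plaintext) > 0 {
		n := len(plaintext)
		if n > chunkSize {
			n = chunkSize
		}
		var nonce [24]byte // each chunk needs its own unique nonce
		if _, err := rand.Read(nonce[:]); err != nil {
			panic(err)
		}
		// Prefix the nonce so a receiver can hand it back to secretbox.Open.
		sealed = append(sealed, secretbox.Seal(nonce[:], plaintext[:n], &nonce, &key))
		plaintext = plaintext[n:]
	}
	fmt.Printf("sealed %d chunks\n", len(sealed))
}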
@@ -101,7 +122,7 @@ func Seal(out, message []byte, nonce *[24]byte, key *[32]byte) []byte { // Open authenticates and decrypts a box produced by Seal and appends the // message to out, which must not overlap box. The output will be Overhead // bytes smaller than box. -func Open(out []byte, box []byte, nonce *[24]byte, key *[32]byte) ([]byte, bool) { +func Open(out, box []byte, nonce *[24]byte, key *[32]byte) ([]byte, bool) { if len(box) < Overhead { return nil, false } @@ -126,6 +147,9 @@ func Open(out []byte, box []byte, nonce *[24]byte, key *[32]byte) ([]byte, bool) } ret, out := sliceForAppend(out, len(box)-Overhead) + if subtle.AnyOverlap(out, box) { + panic("nacl: invalid buffer overlap") + } // We XOR up to 32 bytes of box with the keystream generated from // the first block. diff --git a/vendor/golang.org/x/crypto/nacl/sign/sign.go b/vendor/golang.org/x/crypto/nacl/sign/sign.go new file mode 100644 index 0000000..d076270 --- /dev/null +++ b/vendor/golang.org/x/crypto/nacl/sign/sign.go @@ -0,0 +1,90 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sign signs small messages using public-key cryptography. +// +// Sign uses Ed25519 to sign messages. The length of messages is not hidden. +// Messages should be small because: +// 1. The whole message needs to be held in memory to be processed. +// 2. Using large messages pressures implementations on small machines to process +// plaintext without verifying the signature. This is very dangerous, and this API +// discourages it, but a protocol that uses excessive message sizes might present +// some implementations with no other choice. +// 3. Performance may be improved by working with messages that fit into data caches. +// Thus large amounts of data should be chunked so that each message is small. +// +// This package is not interoperable with the current release of NaCl +// (https://nacl.cr.yp.to/sign.html), which does not support Ed25519 yet. However, +// it is compatible with the NaCl fork libsodium (https://www.libsodium.org), as well +// as TweetNaCl (https://tweetnacl.cr.yp.to/). +package sign + +import ( + "io" + + "golang.org/x/crypto/ed25519" + "golang.org/x/crypto/internal/subtle" +) + +// Overhead is the number of bytes of overhead when signing a message. +const Overhead = 64 + +// GenerateKey generates a new public/private key pair suitable for use with +// Sign and Open. +func GenerateKey(rand io.Reader) (publicKey *[32]byte, privateKey *[64]byte, err error) { + pub, priv, err := ed25519.GenerateKey(rand) + if err != nil { + return nil, nil, err + } + publicKey, privateKey = new([32]byte), new([64]byte) + copy((*publicKey)[:], pub) + copy((*privateKey)[:], priv) + return publicKey, privateKey, nil +} + +// Sign appends a signed copy of message to out, which will be Overhead bytes +// longer than the original and must not overlap it. +func Sign(out, message []byte, privateKey *[64]byte) []byte { + sig := ed25519.Sign(ed25519.PrivateKey((*privateKey)[:]), message) + ret, out := sliceForAppend(out, Overhead+len(message)) + if subtle.AnyOverlap(out, message) { + panic("nacl: invalid buffer overlap") + } + copy(out, sig) + copy(out[Overhead:], message) + return ret +} + +// Open verifies a signed message produced by Sign and appends the message to +// out, which must not overlap the signed message. The output will be Overhead +// bytes smaller than the signed message. 
+func Open(out, signedMessage []byte, publicKey *[32]byte) ([]byte, bool) { + if len(signedMessage) < Overhead { + return nil, false + } + if !ed25519.Verify(ed25519.PublicKey((*publicKey)[:]), signedMessage[Overhead:], signedMessage[:Overhead]) { + return nil, false + } + ret, out := sliceForAppend(out, len(signedMessage)-Overhead) + if subtle.AnyOverlap(out, signedMessage) { + panic("nacl: invalid buffer overlap") + } + copy(out, signedMessage[Overhead:]) + return ret, true +} + +// sliceForAppend takes a slice and a requested number of bytes. It returns a +// slice with the contents of the given slice followed by that many bytes and a +// second slice that aliases into it and contains only the extra bytes. If the +// original slice has sufficient capacity then no allocation is performed. +func sliceForAppend(in []byte, n int) (head, tail []byte) { + if total := len(in) + n; cap(in) >= total { + head = in[:total] + } else { + head = make([]byte, total) + copy(head, in) + } + tail = head[len(in):] + return +} diff --git a/vendor/golang.org/x/crypto/nacl/sign/sign_test.go b/vendor/golang.org/x/crypto/nacl/sign/sign_test.go new file mode 100644 index 0000000..db26901 --- /dev/null +++ b/vendor/golang.org/x/crypto/nacl/sign/sign_test.go @@ -0,0 +1,74 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package sign + +import ( + "bytes" + "crypto/rand" + "encoding/hex" + "testing" +) + +var testSignedMessage, _ = hex.DecodeString("26a0a47f733d02ddb74589b6cbd6f64a7dab1947db79395a1a9e00e4c902c0f185b119897b89b248d16bab4ea781b5a3798d25c2984aec833dddab57e0891e0d68656c6c6f20776f726c64") +var testMessage = testSignedMessage[Overhead:] +var testPublicKey [32]byte +var testPrivateKey = [64]byte{ + 0x98, 0x3c, 0x6a, 0xa6, 0x21, 0xcc, 0xbb, 0xb2, 0xa7, 0xe8, 0x97, 0x94, 0xde, 0x5f, 0xf8, 0x11, + 0x8a, 0xf3, 0x33, 0x1a, 0x03, 0x5c, 0x43, 0x99, 0x03, 0x13, 0x2d, 0xd7, 0xb4, 0xc4, 0x8b, 0xb0, + 0xf6, 0x33, 0x20, 0xa3, 0x34, 0x8b, 0x7b, 0xe2, 0xfe, 0xb4, 0xe7, 0x3a, 0x54, 0x08, 0x2d, 0xd7, + 0x0c, 0xb7, 0xc0, 0xe3, 0xbf, 0x62, 0x6c, 0x55, 0xf0, 0x33, 0x28, 0x52, 0xf8, 0x48, 0x7d, 0xfd, +} + +func init() { + copy(testPublicKey[:], testPrivateKey[32:]) +} + +func TestSign(t *testing.T) { + signedMessage := Sign(nil, testMessage, &testPrivateKey) + if !bytes.Equal(signedMessage, testSignedMessage) { + t.Fatalf("signed message did not match, got\n%x\n, expected\n%x", signedMessage, testSignedMessage) + } +} + +func TestOpen(t *testing.T) { + message, ok := Open(nil, testSignedMessage, &testPublicKey) + if !ok { + t.Fatalf("valid signed message not successfully verified") + } + if !bytes.Equal(message, testMessage) { + t.Fatalf("message did not match, got\n%x\n, expected\n%x", message, testMessage) + } + _, ok = Open(nil, testSignedMessage[1:], &testPublicKey) + if ok { + t.Fatalf("invalid signed message successfully verified") + } + + badMessage := make([]byte, len(testSignedMessage)) + copy(badMessage, testSignedMessage) + badMessage[5] ^= 1 + if _, ok := Open(nil, badMessage, &testPublicKey); ok { + t.Fatalf("Open succeeded with a corrupt message") + } + + var badPublicKey [32]byte + copy(badPublicKey[:], testPublicKey[:]) + badPublicKey[5] ^= 1 + if _, ok := Open(nil, testSignedMessage, &badPublicKey); ok { + t.Fatalf("Open succeeded with a corrupt public key") + } +} + +func TestGenerateSignOpen(t *testing.T) { + publicKey, privateKey, _ := GenerateKey(rand.Reader) + signedMessage := Sign(nil, 
testMessage, privateKey) + message, ok := Open(nil, signedMessage, publicKey) + if !ok { + t.Fatalf("failed to verify signed message") + } + + if !bytes.Equal(message, testMessage) { + t.Fatalf("verified message does not match signed messge, got\n%x\n, expected\n%x", message, testMessage) + } +} diff --git a/vendor/golang.org/x/crypto/ocsp/ocsp.go b/vendor/golang.org/x/crypto/ocsp/ocsp.go index ae8d63e..d297ac9 100644 --- a/vendor/golang.org/x/crypto/ocsp/ocsp.go +++ b/vendor/golang.org/x/crypto/ocsp/ocsp.go @@ -63,7 +63,7 @@ func (r ResponseStatus) String() string { } // ResponseError is an error that may be returned by ParseResponse to indicate -// that the response itself is an error, not just that its indicating that a +// that the response itself is an error, not just that it's indicating that a // certificate is revoked, unknown, etc. type ResponseError struct { Status ResponseStatus @@ -295,17 +295,17 @@ const ( // The enumerated reasons for revoking a certificate. See RFC 5280. const ( - Unspecified = iota - KeyCompromise = iota - CACompromise = iota - AffiliationChanged = iota - Superseded = iota - CessationOfOperation = iota - CertificateHold = iota - _ = iota - RemoveFromCRL = iota - PrivilegeWithdrawn = iota - AACompromise = iota + Unspecified = 0 + KeyCompromise = 1 + CACompromise = 2 + AffiliationChanged = 3 + Superseded = 4 + CessationOfOperation = 5 + CertificateHold = 6 + + RemoveFromCRL = 8 + PrivilegeWithdrawn = 9 + AACompromise = 10 ) // Request represents an OCSP request. See RFC 6960. @@ -487,9 +487,8 @@ func ParseResponseForCert(bytes []byte, cert, issuer *x509.Certificate) (*Respon if err != nil { return nil, err } - - if len(basicResp.Certificates) > 1 { - return nil, ParseError("OCSP response contains bad number of certificates") + if len(rest) > 0 { + return nil, ParseError("trailing data in OCSP response") } if n := len(basicResp.TBSResponseData.Responses); n == 0 || cert == nil && n > 1 { @@ -544,6 +543,13 @@ func ParseResponseForCert(bytes []byte, cert, issuer *x509.Certificate) (*Respon } if len(basicResp.Certificates) > 0 { + // Responders should only send a single certificate (if they + // send any) that connects the responder's certificate to the + // original issuer. We accept responses with multiple + // certificates due to a number responders sending them[1], but + // ignore all but the first. + // + // [1] https://github.com/golang/go/issues/21527 ret.Certificate, err = x509.ParseCertificate(basicResp.Certificates[0].FullBytes) if err != nil { return nil, err @@ -659,7 +665,7 @@ func CreateRequest(cert, issuer *x509.Certificate, opts *RequestOptions) ([]byte // // The issuer cert is used to puplate the IssuerNameHash and IssuerKeyHash fields. // -// The template is used to populate the SerialNumber, RevocationStatus, RevokedAt, +// The template is used to populate the SerialNumber, Status, RevokedAt, // RevocationReason, ThisUpdate, and NextUpdate fields. // // If template.IssuerHash is not set, SHA1 will be used. 
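For context, here is a minimal sketch of how a caller might consume ParseResponseForCert together with the RFC 5280 reason codes made explicit above. The helper name, package name, and arguments are hypothetical, not part of the vendored package.

package ocspexample

import (
	"crypto/x509"
	"fmt"

	"golang.org/x/crypto/ocsp"
)

// checkRevocation is a hypothetical helper: der is a DER-encoded OCSP
// response, cert the certificate being checked, issuer its issuer.
func checkRevocation(der []byte, cert, issuer *x509.Certificate) error {
	resp, err := ocsp.ParseResponseForCert(der, cert, issuer)
	if err != nil {
		return err // includes the new "trailing data in OCSP response" case
	}
	switch resp.Status {
	case ocsp.Good:
		return nil
	case ocsp.Revoked:
		// RevocationReason carries one of the RFC 5280 constants above,
		// e.g. ocsp.KeyCompromise.
		return fmt.Errorf("certificate revoked at %v (reason %d)", resp.RevokedAt, resp.RevocationReason)
	default:
		return fmt.Errorf("OCSP status for this certificate is not good")
	}
}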
@@ -760,7 +766,7 @@ func CreateResponse(issuer, responderCert *x509.Certificate, template Response, } if template.Certificate != nil { response.Certificates = []asn1.RawValue{ - asn1.RawValue{FullBytes: template.Certificate.Raw}, + {FullBytes: template.Certificate.Raw}, } } responseDER, err := asn1.Marshal(response) diff --git a/vendor/golang.org/x/crypto/ocsp/ocsp_test.go b/vendor/golang.org/x/crypto/ocsp/ocsp_test.go index df674b3..70b1976 100644 --- a/vendor/golang.org/x/crypto/ocsp/ocsp_test.go +++ b/vendor/golang.org/x/crypto/ocsp/ocsp_test.go @@ -43,11 +43,11 @@ func TestOCSPDecode(t *testing.T) { } if !reflect.DeepEqual(resp.ThisUpdate, expected.ThisUpdate) { - t.Errorf("resp.ThisUpdate: got %d, want %d", resp.ThisUpdate, expected.ThisUpdate) + t.Errorf("resp.ThisUpdate: got %v, want %v", resp.ThisUpdate, expected.ThisUpdate) } if !reflect.DeepEqual(resp.NextUpdate, expected.NextUpdate) { - t.Errorf("resp.NextUpdate: got %d, want %d", resp.NextUpdate, expected.NextUpdate) + t.Errorf("resp.NextUpdate: got %v, want %v", resp.NextUpdate, expected.NextUpdate) } if resp.Status != expected.Status { @@ -218,7 +218,7 @@ func TestOCSPResponse(t *testing.T) { extensionBytes, _ := hex.DecodeString(ocspExtensionValueHex) extensions := []pkix.Extension{ - pkix.Extension{ + { Id: ocspExtensionOID, Critical: false, Value: extensionBytes, @@ -268,15 +268,15 @@ func TestOCSPResponse(t *testing.T) { } if !reflect.DeepEqual(resp.ThisUpdate, template.ThisUpdate) { - t.Errorf("resp.ThisUpdate: got %d, want %d", resp.ThisUpdate, template.ThisUpdate) + t.Errorf("resp.ThisUpdate: got %v, want %v", resp.ThisUpdate, template.ThisUpdate) } if !reflect.DeepEqual(resp.NextUpdate, template.NextUpdate) { - t.Errorf("resp.NextUpdate: got %d, want %d", resp.NextUpdate, template.NextUpdate) + t.Errorf("resp.NextUpdate: got %v, want %v", resp.NextUpdate, template.NextUpdate) } if !reflect.DeepEqual(resp.RevokedAt, template.RevokedAt) { - t.Errorf("resp.RevokedAt: got %d, want %d", resp.RevokedAt, template.RevokedAt) + t.Errorf("resp.RevokedAt: got %v, want %v", resp.RevokedAt, template.RevokedAt) } if !reflect.DeepEqual(resp.Extensions, template.ExtraExtensions) { diff --git a/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign.go b/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign.go index def4cab..a9437dc 100644 --- a/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign.go +++ b/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign.go @@ -13,6 +13,7 @@ import ( "bufio" "bytes" "crypto" + "fmt" "hash" "io" "net/textproto" @@ -177,8 +178,9 @@ func Decode(data []byte) (b *Block, rest []byte) { // message. type dashEscaper struct { buffered *bufio.Writer - h hash.Hash + hashers []hash.Hash // one per key in privateKeys hashType crypto.Hash + toHash io.Writer // writes to all the hashes in hashers atBeginningOfLine bool isFirstLine bool @@ -186,8 +188,8 @@ type dashEscaper struct { whitespace []byte byteBuf []byte // a one byte buffer to save allocations - privateKey *packet.PrivateKey - config *packet.Config + privateKeys []*packet.PrivateKey + config *packet.Config } func (d *dashEscaper) Write(data []byte) (n int, err error) { @@ -198,7 +200,7 @@ func (d *dashEscaper) Write(data []byte) (n int, err error) { // The final CRLF isn't included in the hash so we have to wait // until this point (the start of the next line) before writing it. 
if !d.isFirstLine { - d.h.Write(crlf) + d.toHash.Write(crlf) } d.isFirstLine = false } @@ -219,12 +221,12 @@ func (d *dashEscaper) Write(data []byte) (n int, err error) { if _, err = d.buffered.Write(dashEscape); err != nil { return } - d.h.Write(d.byteBuf) + d.toHash.Write(d.byteBuf) d.atBeginningOfLine = false } else if b == '\n' { // Nothing to do because we delay writing CRLF to the hash. } else { - d.h.Write(d.byteBuf) + d.toHash.Write(d.byteBuf) d.atBeginningOfLine = false } if err = d.buffered.WriteByte(b); err != nil { @@ -245,13 +247,13 @@ func (d *dashEscaper) Write(data []byte) (n int, err error) { // Any buffered whitespace wasn't at the end of the line so // we need to write it out. if len(d.whitespace) > 0 { - d.h.Write(d.whitespace) + d.toHash.Write(d.whitespace) if _, err = d.buffered.Write(d.whitespace); err != nil { return } d.whitespace = d.whitespace[:0] } - d.h.Write(d.byteBuf) + d.toHash.Write(d.byteBuf) if err = d.buffered.WriteByte(b); err != nil { return } @@ -269,25 +271,29 @@ func (d *dashEscaper) Close() (err error) { return } } - sig := new(packet.Signature) - sig.SigType = packet.SigTypeText - sig.PubKeyAlgo = d.privateKey.PubKeyAlgo - sig.Hash = d.hashType - sig.CreationTime = d.config.Now() - sig.IssuerKeyId = &d.privateKey.KeyId - - if err = sig.Sign(d.h, d.privateKey, d.config); err != nil { - return - } out, err := armor.Encode(d.buffered, "PGP SIGNATURE", nil) if err != nil { return } - if err = sig.Serialize(out); err != nil { - return + t := d.config.Now() + for i, k := range d.privateKeys { + sig := new(packet.Signature) + sig.SigType = packet.SigTypeText + sig.PubKeyAlgo = k.PubKeyAlgo + sig.Hash = d.hashType + sig.CreationTime = t + sig.IssuerKeyId = &k.KeyId + + if err = sig.Sign(d.hashers[i], k, d.config); err != nil { + return + } + if err = sig.Serialize(out); err != nil { + return + } } + if err = out.Close(); err != nil { return } @@ -300,8 +306,17 @@ func (d *dashEscaper) Close() (err error) { // Encode returns a WriteCloser which will clear-sign a message with privateKey // and write it to w. If config is nil, sensible defaults are used. func Encode(w io.Writer, privateKey *packet.PrivateKey, config *packet.Config) (plaintext io.WriteCloser, err error) { - if privateKey.Encrypted { - return nil, errors.InvalidArgumentError("signing key is encrypted") + return EncodeMulti(w, []*packet.PrivateKey{privateKey}, config) +} + +// EncodeMulti returns a WriteCloser which will clear-sign a message with all the +// private keys indicated and write it to w. If config is nil, sensible defaults +// are used. +func EncodeMulti(w io.Writer, privateKeys []*packet.PrivateKey, config *packet.Config) (plaintext io.WriteCloser, err error) { + for _, k := range privateKeys { + if k.Encrypted { + return nil, errors.InvalidArgumentError(fmt.Sprintf("signing key %s is encrypted", k.KeyIdString())) + } } hashType := config.Hash() @@ -313,7 +328,14 @@ func Encode(w io.Writer, privateKey *packet.PrivateKey, config *packet.Config) ( if !hashType.Available() { return nil, errors.UnsupportedError("unsupported hash type: " + strconv.Itoa(int(hashType))) } - h := hashType.New() + var hashers []hash.Hash + var ws []io.Writer + for range privateKeys { + h := hashType.New() + hashers = append(hashers, h) + ws = append(ws, h) + } + toHash := io.MultiWriter(ws...) buffered := bufio.NewWriter(w) // start has a \n at the beginning that we don't want here. 
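The EncodeMulti entry point added above accepts any number of decrypted private keys and, on Close, emits one armored signature per key. A minimal usage sketch, assuming an existing keyring whose private keys are already decrypted (clearSignAll and signers are hypothetical names):

package clearsignexample

import (
	"bytes"

	"golang.org/x/crypto/openpgp"
	"golang.org/x/crypto/openpgp/clearsign"
	"golang.org/x/crypto/openpgp/packet"
)

func clearSignAll(signers openpgp.EntityList, msg []byte) ([]byte, error) {
	var keys []*packet.PrivateKey
	for _, e := range signers {
		keys = append(keys, e.PrivateKey) // EncodeMulti rejects encrypted keys
	}
	var buf bytes.Buffer
	w, err := clearsign.EncodeMulti(&buf, keys, nil)
	if err != nil {
		return nil, err
	}
	if _, err := w.Write(msg); err != nil {
		return nil, err
	}
	// Close hashes the final line and appends one signature per key.
	if err := w.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}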
@@ -338,16 +360,17 @@ func Encode(w io.Writer, privateKey *packet.PrivateKey, config *packet.Config) ( plaintext = &dashEscaper{ buffered: buffered, - h: h, + hashers: hashers, hashType: hashType, + toHash: toHash, atBeginningOfLine: true, isFirstLine: true, byteBuf: make([]byte, 1), - privateKey: privateKey, - config: config, + privateKeys: privateKeys, + config: config, } return diff --git a/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go b/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go index 2c09480..96f5d78 100644 --- a/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go +++ b/vendor/golang.org/x/crypto/openpgp/clearsign/clearsign_test.go @@ -6,8 +6,11 @@ package clearsign import ( "bytes" - "golang.org/x/crypto/openpgp" + "fmt" "testing" + + "golang.org/x/crypto/openpgp" + "golang.org/x/crypto/openpgp/packet" ) func testParse(t *testing.T, input []byte, expected, expectedPlaintext string) { @@ -125,6 +128,71 @@ func TestSigning(t *testing.T) { } } +// We use this to make test keys, so that they aren't all the same. +type quickRand byte + +func (qr *quickRand) Read(p []byte) (int, error) { + for i := range p { + p[i] = byte(*qr) + } + *qr++ + return len(p), nil +} + +func TestMultiSign(t *testing.T) { + zero := quickRand(0) + config := packet.Config{Rand: &zero} + + for nKeys := 0; nKeys < 4; nKeys++ { + nextTest: + for nExtra := 0; nExtra < 4; nExtra++ { + var signKeys []*packet.PrivateKey + var verifyKeys openpgp.EntityList + + desc := fmt.Sprintf("%d keys; %d of which will be used to verify", nKeys+nExtra, nKeys) + for i := 0; i < nKeys+nExtra; i++ { + e, err := openpgp.NewEntity("name", "comment", "email", &config) + if err != nil { + t.Errorf("cannot create key: %v", err) + continue nextTest + } + if i < nKeys { + verifyKeys = append(verifyKeys, e) + } + signKeys = append(signKeys, e.PrivateKey) + } + + input := []byte("this is random text\r\n4 17") + var output bytes.Buffer + w, err := EncodeMulti(&output, signKeys, nil) + if err != nil { + t.Errorf("EncodeMulti (%s) failed: %v", desc, err) + } + if _, err := w.Write(input); err != nil { + t.Errorf("Write(%q) to signer (%s) failed: %v", string(input), desc, err) + } + if err := w.Close(); err != nil { + t.Errorf("Close() of signer (%s) failed: %v", desc, err) + } + + block, _ := Decode(output.Bytes()) + if string(block.Bytes) != string(input) { + t.Errorf("Inline data didn't match original; got %q want %q", string(block.Bytes), string(input)) + } + _, err = openpgp.CheckDetachedSignature(verifyKeys, bytes.NewReader(block.Bytes), block.ArmoredSignature.Body) + if nKeys == 0 { + if err == nil { + t.Errorf("verifying inline (%s) succeeded; want failure", desc) + } + } else { + if err != nil { + t.Errorf("verifying inline (%s) failed (%v); want success", desc, err) + } + } + } + } +} + var clearsignInput = []byte(` ;lasjlkfdsa diff --git a/vendor/golang.org/x/crypto/openpgp/keys.go b/vendor/golang.org/x/crypto/openpgp/keys.go index 68b14c6..3e25186 100644 --- a/vendor/golang.org/x/crypto/openpgp/keys.go +++ b/vendor/golang.org/x/crypto/openpgp/keys.go @@ -325,16 +325,14 @@ func ReadEntity(packets *packet.Reader) (*Entity, error) { if e.PrivateKey, ok = p.(*packet.PrivateKey); !ok { packets.Unread(p) return nil, errors.StructuralError("first packet was not a public/private key") - } else { - e.PrimaryKey = &e.PrivateKey.PublicKey } + e.PrimaryKey = &e.PrivateKey.PublicKey } if !e.PrimaryKey.PubKeyAlgo.CanSign() { return nil, errors.StructuralError("primary key cannot be used for signatures") } 
- var current *Identity var revocations []*packet.Signature EachPacket: for { @@ -347,32 +345,8 @@ EachPacket: switch pkt := p.(type) { case *packet.UserId: - current = new(Identity) - current.Name = pkt.Id - current.UserId = pkt - e.Identities[pkt.Id] = current - - for { - p, err = packets.Next() - if err == io.EOF { - return nil, io.ErrUnexpectedEOF - } else if err != nil { - return nil, err - } - - sig, ok := p.(*packet.Signature) - if !ok { - return nil, errors.StructuralError("user ID packet not followed by self-signature") - } - - if (sig.SigType == packet.SigTypePositiveCert || sig.SigType == packet.SigTypeGenericCert) && sig.IssuerKeyId != nil && *sig.IssuerKeyId == e.PrimaryKey.KeyId { - if err = e.PrimaryKey.VerifyUserIdSignature(pkt.Id, e.PrimaryKey, sig); err != nil { - return nil, errors.StructuralError("user ID self-signature invalid: " + err.Error()) - } - current.SelfSignature = sig - break - } - current.Signatures = append(current.Signatures, sig) + if err := addUserID(e, packets, pkt); err != nil { + return nil, err } case *packet.Signature: if pkt.SigType == packet.SigTypeKeyRevocation { @@ -381,11 +355,9 @@ EachPacket: // TODO: RFC4880 5.2.1 permits signatures // directly on keys (eg. to bind additional // revocation keys). - } else if current == nil { - return nil, errors.StructuralError("signature packet found before user id packet") - } else { - current.Signatures = append(current.Signatures, pkt) } + // Else, ignoring the signature as it does not follow anything + // we would know to attach it to. case *packet.PrivateKey: if pkt.IsSubkey == false { packets.Unread(p) @@ -426,33 +398,105 @@ EachPacket: return e, nil } +func addUserID(e *Entity, packets *packet.Reader, pkt *packet.UserId) error { + // Make a new Identity object, that we might wind up throwing away. + // We'll only add it if we get a valid self-signature over this + // userID. 
+ identity := new(Identity) + identity.Name = pkt.Id + identity.UserId = pkt + + for { + p, err := packets.Next() + if err == io.EOF { + break + } else if err != nil { + return err + } + + sig, ok := p.(*packet.Signature) + if !ok { + packets.Unread(p) + break + } + + if (sig.SigType == packet.SigTypePositiveCert || sig.SigType == packet.SigTypeGenericCert) && sig.IssuerKeyId != nil && *sig.IssuerKeyId == e.PrimaryKey.KeyId { + if err = e.PrimaryKey.VerifyUserIdSignature(pkt.Id, e.PrimaryKey, sig); err != nil { + return errors.StructuralError("user ID self-signature invalid: " + err.Error()) + } + identity.SelfSignature = sig + e.Identities[pkt.Id] = identity + } else { + identity.Signatures = append(identity.Signatures, sig) + } + } + + return nil +} + func addSubkey(e *Entity, packets *packet.Reader, pub *packet.PublicKey, priv *packet.PrivateKey) error { var subKey Subkey subKey.PublicKey = pub subKey.PrivateKey = priv - p, err := packets.Next() - if err == io.EOF { - return io.ErrUnexpectedEOF - } - if err != nil { - return errors.StructuralError("subkey signature invalid: " + err.Error()) + + for { + p, err := packets.Next() + if err == io.EOF { + break + } else if err != nil { + return errors.StructuralError("subkey signature invalid: " + err.Error()) + } + + sig, ok := p.(*packet.Signature) + if !ok { + packets.Unread(p) + break + } + + if sig.SigType != packet.SigTypeSubkeyBinding && sig.SigType != packet.SigTypeSubkeyRevocation { + return errors.StructuralError("subkey signature with wrong type") + } + + if err := e.PrimaryKey.VerifyKeySignature(subKey.PublicKey, sig); err != nil { + return errors.StructuralError("subkey signature invalid: " + err.Error()) + } + + switch sig.SigType { + case packet.SigTypeSubkeyRevocation: + subKey.Sig = sig + case packet.SigTypeSubkeyBinding: + + if shouldReplaceSubkeySig(subKey.Sig, sig) { + subKey.Sig = sig + } + } } - var ok bool - subKey.Sig, ok = p.(*packet.Signature) - if !ok { + + if subKey.Sig == nil { return errors.StructuralError("subkey packet not followed by signature") } - if subKey.Sig.SigType != packet.SigTypeSubkeyBinding && subKey.Sig.SigType != packet.SigTypeSubkeyRevocation { - return errors.StructuralError("subkey signature with wrong type") - } - err = e.PrimaryKey.VerifyKeySignature(subKey.PublicKey, subKey.Sig) - if err != nil { - return errors.StructuralError("subkey signature invalid: " + err.Error()) - } + e.Subkeys = append(e.Subkeys, subKey) + return nil } +func shouldReplaceSubkeySig(existingSig, potentialNewSig *packet.Signature) bool { + if potentialNewSig == nil { + return false + } + + if existingSig == nil { + return true + } + + if existingSig.SigType == packet.SigTypeSubkeyRevocation { + return false // never override a revocation signature + } + + return potentialNewSig.CreationTime.After(existingSig.CreationTime) +} + const defaultRSAKeyBits = 2048 // NewEntity returns an Entity that contains a fresh RSA/RSA keypair with a @@ -487,7 +531,7 @@ func NewEntity(name, comment, email string, config *packet.Config) (*Entity, err } isPrimaryId := true e.Identities[uid.Id] = &Identity{ - Name: uid.Name, + Name: uid.Id, UserId: uid, SelfSignature: &packet.Signature{ CreationTime: currentTime, @@ -501,6 +545,10 @@ func NewEntity(name, comment, email string, config *packet.Config) (*Entity, err IssuerKeyId: &e.PrimaryKey.KeyId, }, } + err = e.Identities[uid.Id].SelfSignature.SignUserId(uid.Id, e.PrimaryKey, e.PrivateKey, config) + if err != nil { + return nil, err + } // If the user passes in a DefaultHash via 
packet.Config, // set the PreferredHash for the SelfSignature. @@ -508,6 +556,11 @@ func NewEntity(name, comment, email string, config *packet.Config) (*Entity, err e.Identities[uid.Id].SelfSignature.PreferredHash = []uint8{hashToHashId(config.DefaultHash)} } + // Likewise for DefaultCipher. + if config != nil && config.DefaultCipher != 0 { + e.Identities[uid.Id].SelfSignature.PreferredSymmetric = []uint8{uint8(config.DefaultCipher)} + } + e.Subkeys = make([]Subkey, 1) e.Subkeys[0] = Subkey{ PublicKey: packet.NewRSAPublicKey(currentTime, &encryptingPriv.PublicKey), @@ -525,13 +578,16 @@ func NewEntity(name, comment, email string, config *packet.Config) (*Entity, err } e.Subkeys[0].PublicKey.IsSubkey = true e.Subkeys[0].PrivateKey.IsSubkey = true - + err = e.Subkeys[0].Sig.SignKey(e.Subkeys[0].PublicKey, e.PrivateKey, config) + if err != nil { + return nil, err + } return e, nil } -// SerializePrivate serializes an Entity, including private key material, to -// the given Writer. For now, it must only be used on an Entity returned from -// NewEntity. +// SerializePrivate serializes an Entity, including private key material, but +// excluding signatures from other entities, to the given Writer. +// Identities and subkeys are re-signed in case they changed since NewEntry. // If config is nil, sensible defaults will be used. func (e *Entity) SerializePrivate(w io.Writer, config *packet.Config) (err error) { err = e.PrivateKey.Serialize(w) @@ -569,8 +625,8 @@ func (e *Entity) SerializePrivate(w io.Writer, config *packet.Config) (err error return nil } -// Serialize writes the public part of the given Entity to w. (No private -// key material will be output). +// Serialize writes the public part of the given Entity to w, including +// signatures from other entities. No private key material will be output. 
func (e *Entity) Serialize(w io.Writer) error { err := e.PrimaryKey.Serialize(w) if err != nil { diff --git a/vendor/golang.org/x/crypto/openpgp/keys_data_test.go b/vendor/golang.org/x/crypto/openpgp/keys_data_test.go new file mode 100644 index 0000000..7779bd9 --- /dev/null +++ b/vendor/golang.org/x/crypto/openpgp/keys_data_test.go @@ -0,0 +1,200 @@ +package openpgp + +const expiringKeyHex = "988d0451d1ec5d010400ba3385721f2dc3f4ab096b2ee867ab77213f0a27a8538441c35d2fa225b08798a1439a66a5150e6bdc3f40f5d28d588c712394c632b6299f77db8c0d48d37903fb72ebd794d61be6aa774688839e5fdecfe06b2684cc115d240c98c66cb1ef22ae84e3aa0c2b0c28665c1e7d4d044e7f270706193f5223c8d44e0d70b7b8da830011010001b40f4578706972792074657374206b657988be041301020028050251d1ec5d021b03050900278d00060b090807030206150802090a0b0416020301021e01021780000a091072589ad75e237d8c033503fd10506d72837834eb7f994117740723adc39227104b0d326a1161871c0b415d25b4aedef946ca77ea4c05af9c22b32cf98be86ab890111fced1ee3f75e87b7cc3c00dc63bbc85dfab91c0dc2ad9de2c4d13a34659333a85c6acc1a669c5e1d6cecb0cf1e56c10e72d855ae177ddc9e766f9b2dda57ccbb75f57156438bbdb4e42b88d0451d1ec5d0104009c64906559866c5cb61578f5846a94fcee142a489c9b41e67b12bb54cfe86eb9bc8566460f9a720cb00d6526fbccfd4f552071a8e3f7744b1882d01036d811ee5a3fb91a1c568055758f43ba5d2c6a9676b012f3a1a89e47bbf624f1ad571b208f3cc6224eb378f1645dd3d47584463f9eadeacfd1ce6f813064fbfdcc4b5a53001101000188a504180102000f021b0c050251d1f06b050900093e89000a091072589ad75e237d8c20e00400ab8310a41461425b37889c4da28129b5fae6084fafbc0a47dd1adc74a264c6e9c9cc125f40462ee1433072a58384daef88c961c390ed06426a81b464a53194c4e291ddd7e2e2ba3efced01537d713bd111f48437bde2363446200995e8e0d4e528dda377fd1e8f8ede9c8e2198b393bd86852ce7457a7e3daf74d510461a5b77b88d0451d1ece8010400b3a519f83ab0010307e83bca895170acce8964a044190a2b368892f7a244758d9fc193482648acb1fb9780d28cc22d171931f38bb40279389fc9bf2110876d4f3db4fcfb13f22f7083877fe56592b3b65251312c36f83ffcb6d313c6a17f197dd471f0712aad15a8537b435a92471ba2e5b0c72a6c72536c3b567c558d7b6051001101000188a504180102000f021b0c050251d1f07b050900279091000a091072589ad75e237d8ce69e03fe286026afacf7c97ee20673864d4459a2240b5655219950643c7dba0ac384b1d4359c67805b21d98211f7b09c2a0ccf6410c8c04d4ff4a51293725d8d6570d9d8bb0e10c07d22357caeb49626df99c180be02d77d1fe8ed25e7a54481237646083a9f89a11566cd20b9e995b1487c5f9e02aeb434f3a1897cd416dd0a87861838da3e9e" +const subkeyUsageHex = 
"988d04533a52bc010400d26af43085558f65b9e7dbc90cb9238015259aed5e954637adcfa2181548b2d0b60c65f1f42ec5081cbf1bc0a8aa4900acfb77070837c58f26012fbce297d70afe96e759ad63531f0037538e70dbf8e384569b9720d99d8eb39d8d0a2947233ed242436cb6ac7dfe74123354b3d0119b5c235d3dd9c9d6c004f8ffaf67ad8583001101000188b7041f010200210502533b8552170c8001ce094aa433f7040bb2ddf0be3893cb843d0fe70c020700000a0910a42704b92866382aa98404009d63d916a27543da4221c60087c33f1c44bec9998c5438018ed370cca4962876c748e94b73eb39c58eb698063f3fd6346d58dd2a11c0247934c4a9d71f24754f7468f96fb24c3e791dd2392b62f626148ad724189498cbf993db2df7c0cdc2d677c35da0f16cb16c9ce7c33b4de65a4a91b1d21a130ae9cc26067718910ef8e2b417556d627261203c756d627261407379642e65642e61753e88b80413010200220502533a52bc021b03060b090807030206150802090a0b0416020301021e01021780000a0910a42704b92866382a47840400c0c2bd04f5fca586de408b395b3c280a278259c93eaaa8b79a53b97003f8ed502a8a00446dd9947fb462677e4fcac0dac2f0701847d15130aadb6cd9e0705ea0cf5f92f129136c7be21a718d46c8e641eb7f044f2adae573e11ae423a0a9ca51324f03a8a2f34b91fa40c3cc764bee4dccadedb54c768ba0469b683ea53f1c29b88d04533a52bc01040099c92a5d6f8b744224da27bc2369127c35269b58bec179de6bbc038f749344222f85a31933224f26b70243c4e4b2d242f0c4777eaef7b5502f9dad6d8bf3aaeb471210674b74de2d7078af497d55f5cdad97c7bedfbc1b41e8065a97c9c3d344b21fc81d27723af8e374bc595da26ea242dccb6ae497be26eea57e563ed517e90011010001889f0418010200090502533a52bc021b0c000a0910a42704b92866382afa1403ff70284c2de8a043ff51d8d29772602fa98009b7861c540535f874f2c230af8caf5638151a636b21f8255003997ccd29747fdd06777bb24f9593bd7d98a3e887689bf902f999915fcc94625ae487e5d13e6616f89090ebc4fdc7eb5cad8943e4056995bb61c6af37f8043016876a958ec7ebf39c43d20d53b7f546cfa83e8d2604b88d04533b8283010400c0b529316dbdf58b4c54461e7e669dc11c09eb7f73819f178ccd4177b9182b91d138605fcf1e463262fabefa73f94a52b5e15d1904635541c7ea540f07050ce0fb51b73e6f88644cec86e91107c957a114f69554548a85295d2b70bd0b203992f76eb5d493d86d9eabcaa7ef3fc7db7e458438db3fcdb0ca1cc97c638439a9170011010001889f0418010200090502533b8283021b0c000a0910a42704b92866382adc6d0400cfff6258485a21675adb7a811c3e19ebca18851533f75a7ba317950b9997fda8d1a4c8c76505c08c04b6c2cc31dc704d33da36a21273f2b388a1a706f7c3378b66d887197a525936ed9a69acb57fe7f718133da85ec742001c5d1864e9c6c8ea1b94f1c3759cebfd93b18606066c063a63be86085b7e37bdbc65f9a915bf084bb901a204533b85cd110400aed3d2c52af2b38b5b67904b0ef73d6dd7aef86adb770e2b153cd22489654dcc91730892087bb9856ae2d9f7ed1eb48f214243fe86bfe87b349ebd7c30e630e49c07b21fdabf78b7a95c8b7f969e97e3d33f2e074c63552ba64a2ded7badc05ce0ea2be6d53485f6900c7860c7aa76560376ce963d7271b9b54638a4028b573f00a0d8854bfcdb04986141568046202192263b9b67350400aaa1049dbc7943141ef590a70dcb028d730371d92ea4863de715f7f0f16d168bd3dc266c2450457d46dcbbf0b071547e5fbee7700a820c3750b236335d8d5848adb3c0da010e998908dfd93d961480084f3aea20b247034f8988eccb5546efaa35a92d0451df3aaf1aee5aa36a4c4d462c760ecd9cebcabfbe1412b1f21450f203fd126687cd486496e971a87fd9e1a8a765fe654baa219a6871ab97768596ab05c26c1aeea8f1a2c72395a58dbc12ef9640d2b95784e974a4d2d5a9b17c25fedacfe551bda52602de8f6d2e48443f5dd1a2a2a8e6a5e70ecdb88cd6e766ad9745c7ee91d78cc55c3d06536b49c3fee6c3d0b6ff0fb2bf13a314f57c953b8f4d93bf88e70418010200090502533b85cd021b0200520910a42704b92866382a47200419110200060502533b85cd000a091042ce2c64bc0ba99214b2009e26b26852c8b13b10c35768e40e78fbbb48bd084100a0c79d9ea0844fa5853dd3c85ff3ecae6f2c9dd6c557aa04008bbbc964cd65b9b8299d4ebf31f41cc7264b8cf33a00e82c5af022331fac79efc9563a822497ba012953cefe2629f1242fcdcb911dbb2315985bab060bfd58261ace3c654bdbbe2e8ed27a46e836490145c86dc7bae15c011f7e1ffc33730109b9338cd9f483e7ce
f3d2f396aab5bd80efb6646d7e778270ee99d934d187dd98" +const revokedKeyHex = "988d045331ce82010400c4fdf7b40a5477f206e6ee278eaef888ca73bf9128a9eef9f2f1ddb8b7b71a4c07cfa241f028a04edb405e4d916c61d6beabc333813dc7b484d2b3c52ee233c6a79b1eea4e9cc51596ba9cd5ac5aeb9df62d86ea051055b79d03f8a4fa9f38386f5bd17529138f3325d46801514ea9047977e0829ed728e68636802796801be10011010001889f04200102000905025331d0e3021d03000a0910a401d9f09a34f7c042aa040086631196405b7e6af71026b88e98012eab44aa9849f6ef3fa930c7c9f23deaedba9db1538830f8652fb7648ec3fcade8dbcbf9eaf428e83c6cbcc272201bfe2fbb90d41963397a7c0637a1a9d9448ce695d9790db2dc95433ad7be19eb3de72dacf1d6db82c3644c13eae2a3d072b99bb341debba012c5ce4006a7d34a1f4b94b444526567205265766f6b657220283c52656727732022424d204261726973746122204b657920262530305c303e5c29203c72656740626d626172697374612e636f2e61753e88b704130102002205025331ce82021b03060b090807030206150802090a0b0416020301021e01021780000a0910a401d9f09a34f7c0019c03f75edfbeb6a73e7225ad3cc52724e2872e04260d7daf0d693c170d8c4b243b8767bc7785763533febc62ec2600c30603c433c095453ede59ff2fcabeb84ce32e0ed9d5cf15ffcbc816202b64370d4d77c1e9077d74e94a16fb4fa2e5bec23a56d7a73cf275f91691ae1801a976fcde09e981a2f6327ac27ea1fecf3185df0d56889c04100102000605025331cfb5000a0910fe9645554e8266b64b4303fc084075396674fb6f778d302ac07cef6bc0b5d07b66b2004c44aef711cbac79617ef06d836b4957522d8772dd94bf41a2f4ac8b1ee6d70c57503f837445a74765a076d07b829b8111fc2a918423ddb817ead7ca2a613ef0bfb9c6b3562aec6c3cf3c75ef3031d81d95f6563e4cdcc9960bcb386c5d757b104fcca5fe11fc709df884604101102000605025331cfe7000a09107b15a67f0b3ddc0317f6009e360beea58f29c1d963a22b962b80788c3fa6c84e009d148cfde6b351469b8eae91187eff07ad9d08fcaab88d045331ce820104009f25e20a42b904f3fa555530fe5c46737cf7bd076c35a2a0d22b11f7e0b61a69320b768f4a80fe13980ce380d1cfc4a0cd8fbe2d2e2ef85416668b77208baa65bf973fe8e500e78cc310d7c8705cdb34328bf80e24f0385fce5845c33bc7943cf6b11b02348a23da0bf6428e57c05135f2dc6bd7c1ce325d666d5a5fd2fd5e410011010001889f04180102000905025331ce82021b0c000a0910a401d9f09a34f7c0418003fe34feafcbeaef348a800a0d908a7a6809cc7304017d820f70f0474d5e23cb17e38b67dc6dca282c6ca00961f4ec9edf2738d0f087b1d81e4871ef08e1798010863afb4eac4c44a376cb343be929c5be66a78cfd4456ae9ec6a99d97f4e1c3ff3583351db2147a65c0acef5c003fb544ab3a2e2dc4d43646f58b811a6c3a369d1f" +const revokedSubkeyHex = 
"988d04533121f6010400aefc803a3e4bb1a61c86e8a86d2726c6a43e0079e9f2713f1fa017e9854c83877f4aced8e331d675c67ea83ddab80aacbfa0b9040bb12d96f5a3d6be09455e2a76546cbd21677537db941cab710216b6d24ec277ee0bd65b910f416737ed120f6b93a9d3b306245c8cfd8394606fdb462e5cf43c551438d2864506c63367fc890011010001b41d416c696365203c616c69636540626d626172697374612e636f2e61753e88bb041301020025021b03060b090807030206150802090a0b0416020301021e01021780050253312798021901000a09104ef7e4beccde97f015a803ff5448437780f63263b0df8442a995e7f76c221351a51edd06f2063d8166cf3157aada4923dfc44aa0f2a6a4da5cf83b7fe722ba8ab416c976e77c6b5682e7f1069026673bd0de56ba06fd5d7a9f177607f277d9b55ff940a638c3e68525c67517e2b3d976899b93ca267f705b3e5efad7d61220e96b618a4497eab8d04403d23f8846041011020006050253312910000a09107b15a67f0b3ddc03d96e009f50b6365d86c4be5d5e9d0ea42d5e56f5794c617700a0ab274e19c2827780016d23417ce89e0a2c0d987d889c04100102000605025331cf7a000a0910a401d9f09a34f7c0ee970400aca292f213041c9f3b3fc49148cbda9d84afee6183c8dd6c5ff2600b29482db5fecd4303797be1ee6d544a20a858080fec43412061c9a71fae4039fd58013b4ae341273e6c66ad4c7cdd9e68245bedb260562e7b166f2461a1032f2b38c0e0e5715fb3d1656979e052b55ca827a76f872b78a9fdae64bc298170bfcebedc1271b41a416c696365203c616c696365407379646973702e6f722e61753e88b804130102002205025331278b021b03060b090807030206150802090a0b0416020301021e01021780000a09104ef7e4beccde97f06a7003fa03c3af68d272ebc1fa08aa72a03b02189c26496a2833d90450801c4e42c5b5f51ad96ce2d2c9cef4b7c02a6a2fcf1412d6a2d486098eb762f5010a201819c17fd2888aec8eda20c65a3b75744de7ee5cc8ac7bfc470cbe3cb982720405a27a3c6a8c229cfe36905f881b02ed5680f6a8f05866efb9d6c5844897e631deb949ca8846041011020006050253312910000a09107b15a67f0b3ddc0347bc009f7fa35db59147469eb6f2c5aaf6428accb138b22800a0caa2f5f0874bacc5909c652a57a31beda65eddd5889c04100102000605025331cf7a000a0910a401d9f09a34f7c0316403ff46f2a5c101256627f16384d34a38fb47a6c88ba60506843e532d91614339fccae5f884a5741e7582ffaf292ba38ee10a270a05f139bde3814b6a077e8cd2db0f105ebea2a83af70d385f13b507fac2ad93ff79d84950328bb86f3074745a8b7f9b64990fb142e2a12976e27e8d09a28dc5621f957ac49091116da410ac3cbde1b88d04533121f6010400cbd785b56905e4192e2fb62a720727d43c4fa487821203cf72138b884b78b701093243e1d8c92a0248a6c0203a5a88693da34af357499abacaf4b3309c640797d03093870a323b4b6f37865f6eaa2838148a67df4735d43a90ca87942554cdf1c4a751b1e75f9fd4ce4e97e278d6c1c7ed59d33441df7d084f3f02beb68896c70011010001889f0418010200090502533121f6021b0c000a09104ef7e4beccde97f0b98b03fc0a5ccf6a372995835a2f5da33b282a7d612c0ab2a97f59cf9fff73e9110981aac2858c41399afa29624a7fd8a0add11654e3d882c0fd199e161bdad65e5e2548f7b68a437ea64293db1246e3011cbb94dc1bcdeaf0f2539bd88ff16d95547144d97cead6a8c5927660a91e6db0d16eb36b7b49a3525b54d1644e65599b032b7eb901a204533127a0110400bd3edaa09eff9809c4edc2c2a0ebe52e53c50a19c1e49ab78e6167bf61473bb08f2050d78a5cbbc6ed66aff7b42cd503f16b4a0b99fa1609681fca9b7ce2bbb1a5b3864d6cdda4d7ef7849d156d534dea30fb0efb9e4cf8959a2b2ce623905882d5430b995a15c3b9fe92906086788b891002924f94abe139b42cbbfaaabe42f00a0b65dc1a1ad27d798adbcb5b5ad02d2688c89477b03ff4eebb6f7b15a73b96a96bed201c0e5e4ea27e4c6e2dd1005b94d4b90137a5b1cf5e01c6226c070c4cc999938101578877ee76d296b9aab8246d57049caacf489e80a3f40589cade790a020b1ac146d6f7a6241184b8c7fcde680eae3188f5dcbe846d7f7bdad34f6fcfca08413e19c1d5df83fc7c7c627d493492e009c2f52a80400a2fe82de87136fd2e8845888c4431b032ba29d9a29a804277e31002a8201fb8591a3e55c7a0d0881496caf8b9fb07544a5a4879291d0dc026a0ea9e5bd88eb4aa4947bbd694b25012e208a250d65ddc6f1eea59d3aed3b4ec15fcab85e2afaa23a40ab1ef9ce3e11e1bc1c34a0e758e7aa64deb8739276df0af7d4121f834a9b88e70418010200090502533127a002
1b02005209104ef7e4beccde97f047200419110200060502533127a0000a0910dbce4ee19529437fe045009c0b32f5ead48ee8a7e98fac0dea3d3e6c0e2c552500a0ad71fadc5007cfaf842d9b7db3335a8cdad15d3d1a6404009b08e2c68fe8f3b45c1bb72a4b3278cdf3012aa0f229883ad74aa1f6000bb90b18301b2f85372ca5d6b9bf478d235b733b1b197d19ccca48e9daf8e890cb64546b4ce1b178faccfff07003c172a2d4f5ebaba9f57153955f3f61a9b80a4f5cb959908f8b211b03b7026a8a82fc612bfedd3794969bcf458c4ce92be215a1176ab88d045331d144010400a5063000c5aaf34953c1aa3bfc95045b3aab9882b9a8027fecfe2142dc6b47ba8aca667399990244d513dd0504716908c17d92c65e74219e004f7b83fc125e575dd58efec3ab6dd22e3580106998523dea42ec75bf9aa111734c82df54630bebdff20fe981cfc36c76f865eb1c2fb62c9e85bc3a6e5015a361a2eb1c8431578d0011010001889f04280102000905025331d433021d03000a09104ef7e4beccde97f02e5503ff5e0630d1b65291f4882b6d40a29da4616bb5088717d469fbcc3648b8276de04a04988b1f1b9f3e18f52265c1f8b6c85861691c1a6b8a3a25a1809a0b32ad330aec5667cb4262f4450649184e8113849b05e5ad06a316ea80c001e8e71838190339a6e48bbde30647bcf245134b9a97fa875c1d83a9862cae87ffd7e2c4ce3a1b89013d04180102000905025331d144021b0200a809104ef7e4beccde97f09d2004190102000605025331d144000a0910677815e371c2fd23522203fe22ab62b8e7a151383cea3edd3a12995693911426f8ccf125e1f6426388c0010f88d9ca7da2224aee8d1c12135998640c5e1813d55a93df472faae75bef858457248db41b4505827590aeccf6f9eb646da7f980655dd3050c6897feddddaca90676dee856d66db8923477d251712bb9b3186b4d0114daf7d6b59272b53218dd1da94a03ff64006fcbe71211e5daecd9961fba66cdb6de3f914882c58ba5beddeba7dcb950c1156d7fba18c19ea880dccc800eae335deec34e3b84ac75ffa24864f782f87815cda1c0f634b3dd2fa67cea30811d21723d21d9551fa12ccbcfa62b6d3a15d01307b99925707992556d50065505b090aadb8579083a20fe65bd2a270da9b011" + +const missingCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- +Charset: UTF-8 + +mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY +ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG +zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54 +QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ +QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo +9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu +Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/ +dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R +JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL +ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew +RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW +/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu +yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAJcXQeP+NmuciE99YcJoffxv +2gVLU4ZXBNHEaP0mgaJ1+tmMD089vUQAcyGRvw8jfsNsVZQIOAuRxY94aHQhIRHR +bUzBN28ofo/AJJtfx62C15xt6fDKRV6HXYqAiygrHIpEoRLyiN69iScUsjIJeyFL +C8wa72e8pSL6dkHoaV1N9ZH/xmrJ+k0vsgkQaAh9CzYufncDxcwkoP+aOlGtX1gP +WwWoIbz0JwLEMPHBWvDDXQcQPQTYQyj+LGC9U6f9VZHN25E94subM1MjuT9OhN9Y +MLfWaaIc5WyhLFyQKW2Upofn9wSFi8ubyBnv640Dfd0rVmaWv7LNTZpoZ/GbJAMA +EQEAAYkBHwQYAQIACQUCU5ygeQIbAgAKCRDt1A0FCB6SP0zCB/sEzaVR38vpx+OQ +MMynCBJrakiqDmUZv9xtplY7zsHSQjpd6xGflbU2n+iX99Q+nav0ETQZifNUEd4N +1ljDGQejcTyKD6Pkg6wBL3x9/RJye7Zszazm4+toJXZ8xJ3800+BtaPoI39akYJm ++ijzbskvN0v/j5GOFJwQO0pPRAFtdHqRs9Kf4YanxhedB4dIUblzlIJuKsxFit6N +lgGRblagG3Vv2eBszbxzPbJjHCgVLR3RmrVezKOsZjr/2i7X+xLWIR0uD3IN1qOW +CXQxLBizEEmSNVNxsp7KPGTLnqO3bPtqFirxS9PJLIMPTPLNBY7ZYuPNTMqVIUWF +4artDmrG +=7FfJ +-----END PGP PUBLIC KEY BLOCK-----` + +const invalidCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- + 
+mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY +ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG +zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54 +QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ +QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo +9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu +Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/ +dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R +JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL +ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew +RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW +/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu +yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAIINDqlj7X6jYKc6DjwrOkjQ +UIRWbQQar0LwmNilehmt70g5DCL1SYm9q4LcgJJ2Nhxj0/5qqsYib50OSWMcKeEe +iRXpXzv1ObpcQtI5ithp0gR53YPXBib80t3bUzomQ5UyZqAAHzMp3BKC54/vUrSK +FeRaxDzNLrCeyI00+LHNUtwghAqHvdNcsIf8VRumK8oTm3RmDh0TyjASWYbrt9c8 +R1Um3zuoACOVy+mEIgIzsfHq0u7dwYwJB5+KeM7ZLx+HGIYdUYzHuUE1sLwVoELh ++SHIGHI1HDicOjzqgajShuIjj5hZTyQySVprrsLKiXS6NEwHAP20+XjayJ/R3tEA +EQEAAYkCPgQYAQIBKAUCU5ygeQIbAsBdIAQZAQIABgUCU5ygeQAKCRCpVlnFZmhO +52RJB/9uD1MSa0wjY6tHOIgquZcP3bHBvHmrHNMw9HR2wRCMO91ZkhrpdS3ZHtgb +u3/55etj0FdvDo1tb8P8FGSVtO5Vcwf5APM8sbbqoi8L951Q3i7qt847lfhu6sMl +w0LWFvPTOLHrliZHItPRjOltS1WAWfr2jUYhsU9ytaDAJmvf9DujxEOsN5G1YJep +54JCKVCkM/y585Zcnn+yxk/XwqoNQ0/iJUT9qRrZWvoeasxhl1PQcwihCwss44A+ +YXaAt3hbk+6LEQuZoYS73yR3WHj+42tfm7YxRGeubXfgCEz/brETEWXMh4pe0vCL +bfWrmfSPq2rDegYcAybxRQz0lF8PAAoJEO3UDQUIHpI/exkH/0vQfdHA8g/N4T6E +i6b1CUVBAkvtdJpCATZjWPhXmShOw62gkDw306vHPilL4SCvEEi4KzG72zkp6VsB +DSRcpxCwT4mHue+duiy53/aRMtSJ+vDfiV1Vhq+3sWAck/yUtfDU9/u4eFaiNok1 +8/Gd7reyuZt5CiJnpdPpjCwelK21l2w7sHAnJF55ITXdOxI8oG3BRKufz0z5lyDY +s2tXYmhhQIggdgelN8LbcMhWs/PBbtUr6uZlNJG2lW1yscD4aI529VjwJlCeo745 +U7pO4eF05VViUJ2mmfoivL3tkhoTUWhx8xs8xCUcCg8DoEoSIhxtOmoTPR22Z9BL +6LCg2mg= +=Dhm4 +-----END PGP PUBLIC KEY BLOCK-----` + +const goodCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1 + +mI0EVUqeVwEEAMufHRrMPWK3gyvi0O0tABCs/oON9zV9KDZlr1a1M91ShCSFwCPo +7r80PxdWVWcj0V5h50/CJYtpN3eE/mUIgW2z1uDYQF1OzrQ8ubrksfsJvpAhENom +lTQEppv9mV8qhcM278teb7TX0pgrUHLYF5CfPdp1L957JLLXoQR/lwLVABEBAAG0 +E2dvb2Qtc2lnbmluZy1zdWJrZXmIuAQTAQIAIgUCVUqeVwIbAwYLCQgHAwIGFQgC +CQoLBBYCAwECHgECF4AACgkQNRjL95IRWP69XQQAlH6+eyXJN4DZTLX78KGjHrsw +6FCvxxClEPtPUjcJy/1KCRQmtLAt9PbbA78dvgzjDeZMZqRAwdjyJhjyg/fkU2OH +7wq4ktjUu+dLcOBb+BFMEY+YjKZhf6EJuVfxoTVr5f82XNPbYHfTho9/OABKH6kv +X70PaKZhbwnwij8Nts65AaIEVUqftREEAJ3WxZfqAX0bTDbQPf2CMT2IVMGDfhK7 +GyubOZgDFFjwUJQvHNvsrbeGLZ0xOBumLINyPO1amIfTgJNm1iiWFWfmnHReGcDl +y5mpYG60Mb79Whdcer7CMm3AqYh/dW4g6IB02NwZMKoUHo3PXmFLxMKXnWyJ0clw +R0LI/Qn509yXAKDh1SO20rqrBM+EAP2c5bfI98kyNwQAi3buu94qo3RR1ZbvfxgW +CKXDVm6N99jdZGNK7FbRifXqzJJDLcXZKLnstnC4Sd3uyfyf1uFhmDLIQRryn5m+ +LBYHfDBPN3kdm7bsZDDq9GbTHiFZUfm/tChVKXWxkhpAmHhU/tH6GGzNSMXuIWSO +aOz3Rqq0ED4NXyNKjdF9MiwD/i83S0ZBc0LmJYt4Z10jtH2B6tYdqnAK29uQaadx +yZCX2scE09UIm32/w7pV77CKr1Cp/4OzAXS1tmFzQ+bX7DR+Gl8t4wxr57VeEMvl +BGw4Vjh3X8//m3xynxycQU18Q1zJ6PkiMyPw2owZ/nss3hpSRKFJsxMLhW3fKmKr +Ey2KiOcEGAECAAkFAlVKn7UCGwIAUgkQNRjL95IRWP5HIAQZEQIABgUCVUqftQAK +CRD98VjDN10SqkWrAKDTpEY8D8HC02E/KVC5YUI01B30wgCgurpILm20kXEDCeHp +C5pygfXw1DJrhAP+NyPJ4um/bU1I+rXaHHJYroYJs8YSweiNcwiHDQn0Engh/mVZ +SqLHvbKh2dL/RXymC3+rjPvQf5cup9bPxNMa6WagdYBNAfzWGtkVISeaQW+cTEp/ +MtgVijRGXR/lGLGETPg2X3Afwn9N9bLMBkBprKgbBqU7lpaoPupxT61bL70= +=vtbN +-----END PGP PUBLIC KEY BLOCK-----` + 
+const revokedUserIDKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBFsgO5EBCADhREPmcjsPkXe1z7ctvyWL0S7oa9JaoGZ9oPDHFDlQxd0qlX2e +DZJZDg0qYvVixmaULIulApq1puEsaJCn3lHUbHlb4PYKwLEywYXM28JN91KtLsz/ +uaEX2KC5WqeP40utmzkNLq+oRX/xnRMgwbO7yUNVG2UlEa6eI+xOXO3YtLdmJMBW +ClQ066ZnOIzEo1JxnIwha1CDBMWLLfOLrg6l8InUqaXbtEBbnaIYO6fXVXELUjkx +nmk7t/QOk0tXCy8muH9UDqJkwDUESY2l79XwBAcx9riX8vY7vwC34pm22fAUVLCJ +x1SJx0J8bkeNp38jKM2Zd9SUQqSbfBopQ4pPABEBAAG0I0dvbGFuZyBHb3BoZXIg +PG5vLXJlcGx5QGdvbGFuZy5jb20+iQFUBBMBCgA+FiEE5Ik5JLcNx6l6rZfw1oFy +9I6cUoMFAlsgO5ECGwMFCQPCZwAFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ +1oFy9I6cUoMIkwf8DNPeD23i4jRwd/pylbvxwZintZl1fSwTJW1xcOa1emXaEtX2 +depuqhP04fjlRQGfsYAQh7X9jOJxAHjTmhqFBi5sD7QvKU00cPFYbJ/JTx0B41bl +aXnSbGhRPh63QtEZL7ACAs+shwvvojJqysx7kyVRu0EW2wqjXdHwR/SJO6nhNBa2 +DXzSiOU/SUA42mmG+5kjF8Aabq9wPwT9wjraHShEweNerNMmOqJExBOy3yFeyDpa +XwEZFzBfOKoxFNkIaVf5GSdIUGhFECkGvBMB935khftmgR8APxdU4BE7XrXexFJU +8RCuPXonm4WQOwTWR0vQg64pb2WKAzZ8HhwTGbQiR29sYW5nIEdvcGhlciA8cmV2 +b2tlZEBnb2xhbmcuY29tPokBNgQwAQoAIBYhBOSJOSS3Dcepeq2X8NaBcvSOnFKD +BQJbIDv3Ah0AAAoJENaBcvSOnFKDfWMIAKhI/Tvu3h8fSUxp/gSAcduT6bC1JttG +0lYQ5ilKB/58lBUA5CO3ZrKDKlzW3M8VEcvohVaqeTMKeoQd5rCZq8KxHn/KvN6N +s85REfXfniCKfAbnGgVXX3kDmZ1g63pkxrFu0fDZjVDXC6vy+I0sGyI/Inro0Pzb +tvn0QCsxjapKK15BtmSrpgHgzVqVg0cUp8vqZeKFxarYbYB2idtGRci4b9tObOK0 +BSTVFy26+I/mrFGaPrySYiy2Kz5NMEcRhjmTxJ8jSwEr2O2sUR0yjbgUAXbTxDVE +/jg5fQZ1ACvBRQnB7LvMHcInbzjyeTM3FazkkSYQD6b97+dkWwb1iWG5AQ0EWyA7 +kQEIALkg04REDZo1JgdYV4x8HJKFS4xAYWbIva1ZPqvDNmZRUbQZR2+gpJGEwn7z +VofGvnOYiGW56AS5j31SFf5kro1+1bZQ5iOONBng08OOo58/l1hRseIIVGB5TGSa +PCdChKKHreJI6hS3mShxH6hdfFtiZuB45rwoaArMMsYcjaezLwKeLc396cpUwwcZ +snLUNd1Xu5EWEF2OdFkZ2a1qYdxBvAYdQf4+1Nr+NRIx1u1NS9c8jp3PuMOkrQEi +bNtc1v6v0Jy52mKLG4y7mC/erIkvkQBYJdxPaP7LZVaPYc3/xskcyijrJ/5ufoD8 +K71/ShtsZUXSQn9jlRaYR0EbojMAEQEAAYkBPAQYAQoAJhYhBOSJOSS3Dcepeq2X +8NaBcvSOnFKDBQJbIDuRAhsMBQkDwmcAAAoJENaBcvSOnFKDkFMIAIt64bVZ8x7+ +TitH1bR4pgcNkaKmgKoZz6FXu80+SnbuEt2NnDyf1cLOSimSTILpwLIuv9Uft5Pb +OraQbYt3xi9yrqdKqGLv80bxqK0NuryNkvh9yyx5WoG1iKqMj9/FjGghuPrRaT4l +QinNAghGVkEy1+aXGFrG2DsOC1FFI51CC2WVTzZ5RwR2GpiNRfESsU1rZAUqf/2V +yJl9bD5R4SUNy8oQmhOxi+gbhD4Ao34e4W0ilibslI/uawvCiOwlu5NGd8zv5n+U +heiQvzkApQup5c+BhH5zFDFdKJ2CBByxw9+7QjMFI/wgLixKuE0Ob2kAokXf7RlB +7qTZOahrETw= +=IKnw +-----END PGP PUBLIC KEY BLOCK-----` + +const keyWithSubKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- + +mI0EWyKwKQEEALwXhKBnyaaNFeK3ljfc/qn9X/QFw+28EUfgZPHjRmHubuXLE2uR +s3ZoSXY2z7Dkv+NyHYMt8p+X8q5fR7JvUjK2XbPyKoiJVnHINll83yl67DaWfKNL +EjNoO0kIfbXfCkZ7EG6DL+iKtuxniGTcnGT47e+HJSqb/STpLMnWwXjBABEBAAG0 +I0dvbGFuZyBHb3BoZXIgPG5vLXJlcGx5QGdvbGFuZy5jb20+iM4EEwEKADgWIQQ/ +lRafP/p9PytHbwxMvYJsOQdOOAUCWyKwKQIbAwULCQgHAwUVCgkICwUWAgMBAAIe +AQIXgAAKCRBMvYJsOQdOOOsFBAC62mXww8XuqvYLcVOvHkWLT6mhxrQOJXnlfpn7 +2uBV9CMhoG/Ycd43NONsJrB95Apr9TDIqWnVszNbqPCuBhZQSGLdbiDKjxnCWBk0 +69qv4RNtkpOhYB7jK4s8F5oQZqId6JasT/PmJTH92mhBYhhTQr0GYFuPX2UJdkw9 +Sn9C67iNBFsisDUBBAC3A+Yo9lgCnxi/pfskyLrweYif6kIXWLAtLTsM6g/6jt7b +wTrknuCPyTv0QKGXsAEe/cK/Xq3HvX9WfXPGIHc/X56ZIsHQ+RLowbZV/Lhok1IW +FAuQm8axr/by80cRwFnzhfPc/ukkAq2Qyj4hLsGblu6mxeAhzcp8aqmWOO2H9QAR +AQABiLYEKAEKACAWIQQ/lRafP/p9PytHbwxMvYJsOQdOOAUCWyK16gIdAAAKCRBM +vYJsOQdOOB1vA/4u4uLONsE+2GVOyBsHyy7uTdkuxaR9b54A/cz6jT/tzUbeIzgx +22neWhgvIEghnUZd0vEyK9k1wy5vbDlEo6nKzHso32N1QExGr5upRERAxweDxGOj +7luDwNypI7QcifE64lS/JmlnunwRCdRWMKc0Fp+7jtRc5mpwyHN/Suf5RokBagQY +AQoAIBYhBD+VFp8/+n0/K0dvDEy9gmw5B044BQJbIrA1AhsCAL8JEEy9gmw5B044 +tCAEGQEKAB0WIQSNdnkaWY6t62iX336UXbGvYdhXJwUCWyKwNQAKCRCUXbGvYdhX +JxJSA/9fCPHP6sUtGF1o3G1a3yvOUDGr1JWcct9U+QpbCt1mZoNopCNDDQAJvDWl 
+mvDgHfuogmgNJRjOMznvahbF+wpTXmB7LS0SK412gJzl1fFIpK4bgnhu0TwxNsO1 +8UkCZWqxRMgcNUn9z6XWONK8dgt5JNvHSHrwF4CxxwjL23AAtK+FA/UUoi3U4kbC +0XnSr1Sl+mrzQi1+H7xyMe7zjqe+gGANtskqexHzwWPUJCPZ5qpIa2l8ghiUim6b +4ymJ+N8/T8Yva1FaPEqfMzzqJr8McYFm0URioXJPvOAlRxdHPteZ0qUopt/Jawxl +Xt6B9h1YpeLoJwjwsvbi98UTRs0jXwoY +=3fWu +-----END PGP PUBLIC KEY BLOCK-----` + +const keyWithSubKeyAndBadSelfSigOrder = `-----BEGIN PGP PUBLIC KEY BLOCK----- + +mI0EWyLLDQEEAOqIOpJ/ha1OYAGduu9tS3rBz5vyjbNgJO4sFveEM0mgsHQ0X9/L +plonW+d0gRoO1dhJ8QICjDAc6+cna1DE3tEb5m6JtQ30teLZuqrR398Cf6w7NNVz +r3lrlmnH9JaKRuXl7tZciwyovneBfZVCdtsRZjaLI1uMQCz/BToiYe3DABEBAAG0 +I0dvbGFuZyBHb3BoZXIgPG5vLXJlcGx5QGdvbGFuZy5jb20+iM4EEwEKADgWIQRZ +sixZOfQcZdW0wUqmgmdsv1O9xgUCWyLLDQIbAwULCQgHAwUVCgkICwUWAgMBAAIe +AQIXgAAKCRCmgmdsv1O9xql2A/4pix98NxjhdsXtazA9agpAKeADf9tG4Za27Gj+ +3DCww/E4iP2X35jZimSm/30QRB6j08uGCqd9vXkkJxtOt63y/IpVOtWX6vMWSTUm +k8xKkaYMP0/IzKNJ1qC/qYEUYpwERBKg9Z+k99E2Ql4kRHdxXUHq6OzY79H18Y+s +GdeM/riNBFsiyxsBBAC54Pxg/8ZWaZX1phGdwfe5mek27SOYpC0AxIDCSOdMeQ6G +HPk38pywl1d+S+KmF/F4Tdi+kWro62O4eG2uc/T8JQuRDUhSjX0Qa51gPzJrUOVT +CFyUkiZ/3ZDhtXkgfuso8ua2ChBgR9Ngr4v43tSqa9y6AK7v0qjxD1x+xMrjXQAR +AQABiQFxBBgBCgAmAhsCFiEEWbIsWTn0HGXVtMFKpoJnbL9TvcYFAlsizTIFCQAN +MRcAv7QgBBkBCgAdFiEEJcoVUVJIk5RWj1c/o62jUpRPICQFAlsiyxsACgkQo62j +UpRPICQq5gQApoWIigZxXFoM0uw4uJBS5JFZtirTANvirZV5RhndwHeMN6JttaBS +YnjyA4+n1D+zB2VqliD2QrsX12KJN6rGOehCtEIClQ1Hodo9nC6kMzzAwW1O8bZs +nRJmXV+bsvD4sidLZLjdwOVa3Cxh6pvq4Uur6a7/UYx121hEY0Qx0s8JEKaCZ2y/ +U73GGi0D/i20VW8AWYAPACm2zMlzExKTOAV01YTQH/3vW0WLrOse53WcIVZga6es +HuO4So0SOEAvxKMe5HpRIu2dJxTvd99Bo9xk9xJU0AoFrO0vNCRnL+5y68xMlODK +lEw5/kl0jeaTBp6xX0HDQOEVOpPGUwWV4Ij2EnvfNDXaE1vK1kffiQFrBBgBCgAg +AhsCFiEEWbIsWTn0HGXVtMFKpoJnbL9TvcYFAlsi0AYAv7QgBBkBCgAdFiEEJcoV +UVJIk5RWj1c/o62jUpRPICQFAlsiyxsACgkQo62jUpRPICQq5gQApoWIigZxXFoM +0uw4uJBS5JFZtirTANvirZV5RhndwHeMN6JttaBSYnjyA4+n1D+zB2VqliD2QrsX +12KJN6rGOehCtEIClQ1Hodo9nC6kMzzAwW1O8bZsnRJmXV+bsvD4sidLZLjdwOVa +3Cxh6pvq4Uur6a7/UYx121hEY0Qx0s8JEKaCZ2y/U73GRl0EAJokkXmy4zKDHWWi +wvK9gi2gQgRkVnu2AiONxJb5vjeLhM/07BRmH6K1o+w3fOeEQp4FjXj1eQ5fPSM6 +Hhwx2CTl9SDnPSBMiKXsEFRkmwQ2AAsQZLmQZvKBkLZYeBiwf+IY621eYDhZfo+G +1dh1WoUCyREZsJQg2YoIpWIcvw+a +=bNRo +-----END PGP PUBLIC KEY BLOCK----- +` diff --git a/vendor/golang.org/x/crypto/openpgp/keys_test.go b/vendor/golang.org/x/crypto/openpgp/keys_test.go index f768e68..0eb1a9e 100644 --- a/vendor/golang.org/x/crypto/openpgp/keys_test.go +++ b/vendor/golang.org/x/crypto/openpgp/keys_test.go @@ -12,7 +12,10 @@ import ( ) func TestKeyExpiry(t *testing.T) { - kring, _ := ReadKeyRing(readerFromHex(expiringKeyHex)) + kring, err := ReadKeyRing(readerFromHex(expiringKeyHex)) + if err != nil { + t.Fatal(err) + } entity := kring[0] const timeFormat = "2006-01-02" @@ -26,16 +29,16 @@ func TestKeyExpiry(t *testing.T) { // // So this should select the newest, non-expired encryption key. key, _ := entity.encryptionKey(time1) - if id := key.PublicKey.KeyIdShortString(); id != "96A672F5" { - t.Errorf("Expected key 1ABB25A0 at time %s, but got key %s", time1.Format(timeFormat), id) + if id, expected := key.PublicKey.KeyIdShortString(), "96A672F5"; id != expected { + t.Errorf("Expected key %s at time %s, but got key %s", expected, time1.Format(timeFormat), id) } // Once the first encryption subkey has expired, the second should be // selected. 
 time2, _ := time.Parse(timeFormat, "2013-07-09")
 key, _ = entity.encryptionKey(time2)
- if id := key.PublicKey.KeyIdShortString(); id != "96A672F5" {
- t.Errorf("Expected key 96A672F5 at time %s, but got key %s", time2.Format(timeFormat), id)
+ if id, expected := key.PublicKey.KeyIdShortString(), "96A672F5"; id != expected {
+ t.Errorf("Expected key %s at time %s, but got key %s", expected, time2.Format(timeFormat), id)
 }
 // Once all the keys have expired, nothing should be returned.
@@ -102,9 +105,39 @@ func TestGoodCrossSignature(t *testing.T) {
 }
 }
+func TestRevokedUserID(t *testing.T) {
+ // This key contains 2 UIDs, one of which is revoked:
+ // [ultimate] (1) Golang Gopher <no-reply@golang.com>
+ // [ revoked] (2) Golang Gopher <revoked@golang.com>
+ keys, err := ReadArmoredKeyRing(bytes.NewBufferString(revokedUserIDKey))
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(keys) != 1 {
+ t.Fatal("Failed to read key with a revoked user id")
+ }
+
+ var identities []*Identity
+ for _, identity := range keys[0].Identities {
+ identities = append(identities, identity)
+ }
+
+ if numIdentities, numExpected := len(identities), 1; numIdentities != numExpected {
+ t.Errorf("obtained %d identities, expected %d", numIdentities, numExpected)
+ }
+
+ if identityName, expectedName := identities[0].Name, "Golang Gopher <no-reply@golang.com>"; identityName != expectedName {
+ t.Errorf("obtained identity %s expected %s", identityName, expectedName)
+ }
+}
+
 // TestExternallyRevokableKey attempts to load and parse a key with a third party revocation permission.
 func TestExternallyRevocableKey(t *testing.T) {
- kring, _ := ReadKeyRing(readerFromHex(subkeyUsageHex))
+ kring, err := ReadKeyRing(readerFromHex(subkeyUsageHex))
+ if err != nil {
+ t.Fatal(err)
+ }
 // The 0xA42704B92866382A key can be revoked by 0xBE3893CB843D0FE70C
 // according to this signature that appears within the key:
@@ -125,7 +158,10 @@ func TestExternallyRevocableKey(t *testing.T) {
 }
 func TestKeyRevocation(t *testing.T) {
- kring, _ := ReadKeyRing(readerFromHex(revokedKeyHex))
+ kring, err := ReadKeyRing(readerFromHex(revokedKeyHex))
+ if err != nil {
+ t.Fatal(err)
+ }
 // revokedKeyHex contains these keys:
 // pub 1024R/9A34F7C0 2014-03-25 [revoked: 2014-03-25]
@@ -144,8 +180,49 @@ func TestKeyRevocation(t *testing.T) {
 }
 }
+func TestKeyWithRevokedSubKey(t *testing.T) {
+ // This key contains a revoked sub key:
+ // pub rsa1024/0x4CBD826C39074E38 2018-06-14 [SC]
+ // Key fingerprint = 3F95 169F 3FFA 7D3F 2B47 6F0C 4CBD 826C 3907 4E38
+ // uid Golang Gopher <no-reply@golang.com>
+ // sub rsa1024/0x945DB1AF61D85727 2018-06-14 [S] [revoked: 2018-06-14]
+
+ keys, err := ReadArmoredKeyRing(bytes.NewBufferString(keyWithSubKey))
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(keys) != 1 {
+ t.Fatal("Failed to read key with a sub key")
+ }
+
+ identity := keys[0].Identities["Golang Gopher <no-reply@golang.com>"]
+
+ // Test for an issue where Subkey Binding Signatures (RFC 4880 5.2.1) were added to the identity
+ // preceding the Subkey Packet if the Subkey Packet was followed by more than one signature.
+ // For example, the current key has the following layout:
+ // PUBKEY UID SELFSIG SUBKEY REV SELFSIG
+ // The last SELFSIG would be added to the UID's signatures. This is wrong.
+ if numIdentitySigs, numExpected := len(identity.Signatures), 0; numIdentitySigs != numExpected { + t.Fatalf("got %d identity signatures, expected %d", numIdentitySigs, numExpected) + } + + if numSubKeys, numExpected := len(keys[0].Subkeys), 1; numSubKeys != numExpected { + t.Fatalf("got %d subkeys, expected %d", numSubKeys, numExpected) + } + + subKey := keys[0].Subkeys[0] + if subKey.Sig == nil { + t.Fatalf("subkey signature is nil") + } + +} + func TestSubkeyRevocation(t *testing.T) { - kring, _ := ReadKeyRing(readerFromHex(revokedSubkeyHex)) + kring, err := ReadKeyRing(readerFromHex(revokedSubkeyHex)) + if err != nil { + t.Fatal(err) + } // revokedSubkeyHex contains these keys: // pub 1024R/4EF7E4BECCDE97F0 2014-03-25 @@ -177,8 +254,56 @@ func TestSubkeyRevocation(t *testing.T) { } } +func TestKeyWithSubKeyAndBadSelfSigOrder(t *testing.T) { + // This key was altered so that the self signatures following the + // subkey are in a sub-optimal order. + // + // Note: Should someone have to create a similar key again, look into + // gpgsplit, gpg --dearmor, and gpg --enarmor. + // + // The packet ordering is the following: + // PUBKEY UID UIDSELFSIG SUBKEY SELFSIG1 SELFSIG2 + // + // Where: + // SELFSIG1 expires on 2018-06-14 and was created first + // SELFSIG2 does not expire and was created after SELFSIG1 + // + // Test for RFC 4880 5.2.3.3: + // > An implementation that encounters multiple self-signatures on the + // > same object may resolve the ambiguity in any way it sees fit, but it + // > is RECOMMENDED that priority be given to the most recent self- + // > signature. + // + // This means that we should keep SELFSIG2. + + keys, err := ReadArmoredKeyRing(bytes.NewBufferString(keyWithSubKeyAndBadSelfSigOrder)) + if err != nil { + t.Fatal(err) + } + + if len(keys) != 1 { + t.Fatal("Failed to read key with a sub key and a bad selfsig packet order") + } + + key := keys[0] + + if numKeys, expected := len(key.Subkeys), 1; numKeys != expected { + t.Fatalf("Read %d subkeys, expected %d", numKeys, expected) + } + + subKey := key.Subkeys[0] + + if lifetime := subKey.Sig.KeyLifetimeSecs; lifetime != nil { + t.Errorf("The signature has a key lifetime (%d), but it should be nil", *lifetime) + } + +} + func TestKeyUsage(t *testing.T) { - kring, _ := ReadKeyRing(readerFromHex(subkeyUsageHex)) + kring, err := ReadKeyRing(readerFromHex(subkeyUsageHex)) + if err != nil { + t.Fatal(err) + } // subkeyUsageHex contains these keys: // pub 1024R/2866382A created: 2014-04-01 expires: never usage: SC @@ -305,100 +430,66 @@ func TestNewEntityWithoutPreferredHash(t *testing.T) { } } -const expiringKeyHex = 
"988d0451d1ec5d010400ba3385721f2dc3f4ab096b2ee867ab77213f0a27a8538441c35d2fa225b08798a1439a66a5150e6bdc3f40f5d28d588c712394c632b6299f77db8c0d48d37903fb72ebd794d61be6aa774688839e5fdecfe06b2684cc115d240c98c66cb1ef22ae84e3aa0c2b0c28665c1e7d4d044e7f270706193f5223c8d44e0d70b7b8da830011010001b40f4578706972792074657374206b657988be041301020028050251d1ec5d021b03050900278d00060b090807030206150802090a0b0416020301021e01021780000a091072589ad75e237d8c033503fd10506d72837834eb7f994117740723adc39227104b0d326a1161871c0b415d25b4aedef946ca77ea4c05af9c22b32cf98be86ab890111fced1ee3f75e87b7cc3c00dc63bbc85dfab91c0dc2ad9de2c4d13a34659333a85c6acc1a669c5e1d6cecb0cf1e56c10e72d855ae177ddc9e766f9b2dda57ccbb75f57156438bbdb4e42b88d0451d1ec5d0104009c64906559866c5cb61578f5846a94fcee142a489c9b41e67b12bb54cfe86eb9bc8566460f9a720cb00d6526fbccfd4f552071a8e3f7744b1882d01036d811ee5a3fb91a1c568055758f43ba5d2c6a9676b012f3a1a89e47bbf624f1ad571b208f3cc6224eb378f1645dd3d47584463f9eadeacfd1ce6f813064fbfdcc4b5a53001101000188a504180102000f021b0c050251d1f06b050900093e89000a091072589ad75e237d8c20e00400ab8310a41461425b37889c4da28129b5fae6084fafbc0a47dd1adc74a264c6e9c9cc125f40462ee1433072a58384daef88c961c390ed06426a81b464a53194c4e291ddd7e2e2ba3efced01537d713bd111f48437bde2363446200995e8e0d4e528dda377fd1e8f8ede9c8e2198b393bd86852ce7457a7e3daf74d510461a5b77b88d0451d1ece8010400b3a519f83ab0010307e83bca895170acce8964a044190a2b368892f7a244758d9fc193482648acb1fb9780d28cc22d171931f38bb40279389fc9bf2110876d4f3db4fcfb13f22f7083877fe56592b3b65251312c36f83ffcb6d313c6a17f197dd471f0712aad15a8537b435a92471ba2e5b0c72a6c72536c3b567c558d7b6051001101000188a504180102000f021b0c050251d1f07b050900279091000a091072589ad75e237d8ce69e03fe286026afacf7c97ee20673864d4459a2240b5655219950643c7dba0ac384b1d4359c67805b21d98211f7b09c2a0ccf6410c8c04d4ff4a51293725d8d6570d9d8bb0e10c07d22357caeb49626df99c180be02d77d1fe8ed25e7a54481237646083a9f89a11566cd20b9e995b1487c5f9e02aeb434f3a1897cd416dd0a87861838da3e9e" -const subkeyUsageHex = 
"988d04533a52bc010400d26af43085558f65b9e7dbc90cb9238015259aed5e954637adcfa2181548b2d0b60c65f1f42ec5081cbf1bc0a8aa4900acfb77070837c58f26012fbce297d70afe96e759ad63531f0037538e70dbf8e384569b9720d99d8eb39d8d0a2947233ed242436cb6ac7dfe74123354b3d0119b5c235d3dd9c9d6c004f8ffaf67ad8583001101000188b7041f010200210502533b8552170c8001ce094aa433f7040bb2ddf0be3893cb843d0fe70c020700000a0910a42704b92866382aa98404009d63d916a27543da4221c60087c33f1c44bec9998c5438018ed370cca4962876c748e94b73eb39c58eb698063f3fd6346d58dd2a11c0247934c4a9d71f24754f7468f96fb24c3e791dd2392b62f626148ad724189498cbf993db2df7c0cdc2d677c35da0f16cb16c9ce7c33b4de65a4a91b1d21a130ae9cc26067718910ef8e2b417556d627261203c756d627261407379642e65642e61753e88b80413010200220502533a52bc021b03060b090807030206150802090a0b0416020301021e01021780000a0910a42704b92866382a47840400c0c2bd04f5fca586de408b395b3c280a278259c93eaaa8b79a53b97003f8ed502a8a00446dd9947fb462677e4fcac0dac2f0701847d15130aadb6cd9e0705ea0cf5f92f129136c7be21a718d46c8e641eb7f044f2adae573e11ae423a0a9ca51324f03a8a2f34b91fa40c3cc764bee4dccadedb54c768ba0469b683ea53f1c29b88d04533a52bc01040099c92a5d6f8b744224da27bc2369127c35269b58bec179de6bbc038f749344222f85a31933224f26b70243c4e4b2d242f0c4777eaef7b5502f9dad6d8bf3aaeb471210674b74de2d7078af497d55f5cdad97c7bedfbc1b41e8065a97c9c3d344b21fc81d27723af8e374bc595da26ea242dccb6ae497be26eea57e563ed517e90011010001889f0418010200090502533a52bc021b0c000a0910a42704b92866382afa1403ff70284c2de8a043ff51d8d29772602fa98009b7861c540535f874f2c230af8caf5638151a636b21f8255003997ccd29747fdd06777bb24f9593bd7d98a3e887689bf902f999915fcc94625ae487e5d13e6616f89090ebc4fdc7eb5cad8943e4056995bb61c6af37f8043016876a958ec7ebf39c43d20d53b7f546cfa83e8d2604b88d04533b8283010400c0b529316dbdf58b4c54461e7e669dc11c09eb7f73819f178ccd4177b9182b91d138605fcf1e463262fabefa73f94a52b5e15d1904635541c7ea540f07050ce0fb51b73e6f88644cec86e91107c957a114f69554548a85295d2b70bd0b203992f76eb5d493d86d9eabcaa7ef3fc7db7e458438db3fcdb0ca1cc97c638439a9170011010001889f0418010200090502533b8283021b0c000a0910a42704b92866382adc6d0400cfff6258485a21675adb7a811c3e19ebca18851533f75a7ba317950b9997fda8d1a4c8c76505c08c04b6c2cc31dc704d33da36a21273f2b388a1a706f7c3378b66d887197a525936ed9a69acb57fe7f718133da85ec742001c5d1864e9c6c8ea1b94f1c3759cebfd93b18606066c063a63be86085b7e37bdbc65f9a915bf084bb901a204533b85cd110400aed3d2c52af2b38b5b67904b0ef73d6dd7aef86adb770e2b153cd22489654dcc91730892087bb9856ae2d9f7ed1eb48f214243fe86bfe87b349ebd7c30e630e49c07b21fdabf78b7a95c8b7f969e97e3d33f2e074c63552ba64a2ded7badc05ce0ea2be6d53485f6900c7860c7aa76560376ce963d7271b9b54638a4028b573f00a0d8854bfcdb04986141568046202192263b9b67350400aaa1049dbc7943141ef590a70dcb028d730371d92ea4863de715f7f0f16d168bd3dc266c2450457d46dcbbf0b071547e5fbee7700a820c3750b236335d8d5848adb3c0da010e998908dfd93d961480084f3aea20b247034f8988eccb5546efaa35a92d0451df3aaf1aee5aa36a4c4d462c760ecd9cebcabfbe1412b1f21450f203fd126687cd486496e971a87fd9e1a8a765fe654baa219a6871ab97768596ab05c26c1aeea8f1a2c72395a58dbc12ef9640d2b95784e974a4d2d5a9b17c25fedacfe551bda52602de8f6d2e48443f5dd1a2a2a8e6a5e70ecdb88cd6e766ad9745c7ee91d78cc55c3d06536b49c3fee6c3d0b6ff0fb2bf13a314f57c953b8f4d93bf88e70418010200090502533b85cd021b0200520910a42704b92866382a47200419110200060502533b85cd000a091042ce2c64bc0ba99214b2009e26b26852c8b13b10c35768e40e78fbbb48bd084100a0c79d9ea0844fa5853dd3c85ff3ecae6f2c9dd6c557aa04008bbbc964cd65b9b8299d4ebf31f41cc7264b8cf33a00e82c5af022331fac79efc9563a822497ba012953cefe2629f1242fcdcb911dbb2315985bab060bfd58261ace3c654bdbbe2e8ed27a46e836490145c86dc7bae15c011f7e1ffc33730109b9338cd9f483e7ce
f3d2f396aab5bd80efb6646d7e778270ee99d934d187dd98" -const revokedKeyHex = "988d045331ce82010400c4fdf7b40a5477f206e6ee278eaef888ca73bf9128a9eef9f2f1ddb8b7b71a4c07cfa241f028a04edb405e4d916c61d6beabc333813dc7b484d2b3c52ee233c6a79b1eea4e9cc51596ba9cd5ac5aeb9df62d86ea051055b79d03f8a4fa9f38386f5bd17529138f3325d46801514ea9047977e0829ed728e68636802796801be10011010001889f04200102000905025331d0e3021d03000a0910a401d9f09a34f7c042aa040086631196405b7e6af71026b88e98012eab44aa9849f6ef3fa930c7c9f23deaedba9db1538830f8652fb7648ec3fcade8dbcbf9eaf428e83c6cbcc272201bfe2fbb90d41963397a7c0637a1a9d9448ce695d9790db2dc95433ad7be19eb3de72dacf1d6db82c3644c13eae2a3d072b99bb341debba012c5ce4006a7d34a1f4b94b444526567205265766f6b657220283c52656727732022424d204261726973746122204b657920262530305c303e5c29203c72656740626d626172697374612e636f2e61753e88b704130102002205025331ce82021b03060b090807030206150802090a0b0416020301021e01021780000a0910a401d9f09a34f7c0019c03f75edfbeb6a73e7225ad3cc52724e2872e04260d7daf0d693c170d8c4b243b8767bc7785763533febc62ec2600c30603c433c095453ede59ff2fcabeb84ce32e0ed9d5cf15ffcbc816202b64370d4d77c1e9077d74e94a16fb4fa2e5bec23a56d7a73cf275f91691ae1801a976fcde09e981a2f6327ac27ea1fecf3185df0d56889c04100102000605025331cfb5000a0910fe9645554e8266b64b4303fc084075396674fb6f778d302ac07cef6bc0b5d07b66b2004c44aef711cbac79617ef06d836b4957522d8772dd94bf41a2f4ac8b1ee6d70c57503f837445a74765a076d07b829b8111fc2a918423ddb817ead7ca2a613ef0bfb9c6b3562aec6c3cf3c75ef3031d81d95f6563e4cdcc9960bcb386c5d757b104fcca5fe11fc709df884604101102000605025331cfe7000a09107b15a67f0b3ddc0317f6009e360beea58f29c1d963a22b962b80788c3fa6c84e009d148cfde6b351469b8eae91187eff07ad9d08fcaab88d045331ce820104009f25e20a42b904f3fa555530fe5c46737cf7bd076c35a2a0d22b11f7e0b61a69320b768f4a80fe13980ce380d1cfc4a0cd8fbe2d2e2ef85416668b77208baa65bf973fe8e500e78cc310d7c8705cdb34328bf80e24f0385fce5845c33bc7943cf6b11b02348a23da0bf6428e57c05135f2dc6bd7c1ce325d666d5a5fd2fd5e410011010001889f04180102000905025331ce82021b0c000a0910a401d9f09a34f7c0418003fe34feafcbeaef348a800a0d908a7a6809cc7304017d820f70f0474d5e23cb17e38b67dc6dca282c6ca00961f4ec9edf2738d0f087b1d81e4871ef08e1798010863afb4eac4c44a376cb343be929c5be66a78cfd4456ae9ec6a99d97f4e1c3ff3583351db2147a65c0acef5c003fb544ab3a2e2dc4d43646f58b811a6c3a369d1f" -const revokedSubkeyHex = 
"988d04533121f6010400aefc803a3e4bb1a61c86e8a86d2726c6a43e0079e9f2713f1fa017e9854c83877f4aced8e331d675c67ea83ddab80aacbfa0b9040bb12d96f5a3d6be09455e2a76546cbd21677537db941cab710216b6d24ec277ee0bd65b910f416737ed120f6b93a9d3b306245c8cfd8394606fdb462e5cf43c551438d2864506c63367fc890011010001b41d416c696365203c616c69636540626d626172697374612e636f2e61753e88bb041301020025021b03060b090807030206150802090a0b0416020301021e01021780050253312798021901000a09104ef7e4beccde97f015a803ff5448437780f63263b0df8442a995e7f76c221351a51edd06f2063d8166cf3157aada4923dfc44aa0f2a6a4da5cf83b7fe722ba8ab416c976e77c6b5682e7f1069026673bd0de56ba06fd5d7a9f177607f277d9b55ff940a638c3e68525c67517e2b3d976899b93ca267f705b3e5efad7d61220e96b618a4497eab8d04403d23f8846041011020006050253312910000a09107b15a67f0b3ddc03d96e009f50b6365d86c4be5d5e9d0ea42d5e56f5794c617700a0ab274e19c2827780016d23417ce89e0a2c0d987d889c04100102000605025331cf7a000a0910a401d9f09a34f7c0ee970400aca292f213041c9f3b3fc49148cbda9d84afee6183c8dd6c5ff2600b29482db5fecd4303797be1ee6d544a20a858080fec43412061c9a71fae4039fd58013b4ae341273e6c66ad4c7cdd9e68245bedb260562e7b166f2461a1032f2b38c0e0e5715fb3d1656979e052b55ca827a76f872b78a9fdae64bc298170bfcebedc1271b41a416c696365203c616c696365407379646973702e6f722e61753e88b804130102002205025331278b021b03060b090807030206150802090a0b0416020301021e01021780000a09104ef7e4beccde97f06a7003fa03c3af68d272ebc1fa08aa72a03b02189c26496a2833d90450801c4e42c5b5f51ad96ce2d2c9cef4b7c02a6a2fcf1412d6a2d486098eb762f5010a201819c17fd2888aec8eda20c65a3b75744de7ee5cc8ac7bfc470cbe3cb982720405a27a3c6a8c229cfe36905f881b02ed5680f6a8f05866efb9d6c5844897e631deb949ca8846041011020006050253312910000a09107b15a67f0b3ddc0347bc009f7fa35db59147469eb6f2c5aaf6428accb138b22800a0caa2f5f0874bacc5909c652a57a31beda65eddd5889c04100102000605025331cf7a000a0910a401d9f09a34f7c0316403ff46f2a5c101256627f16384d34a38fb47a6c88ba60506843e532d91614339fccae5f884a5741e7582ffaf292ba38ee10a270a05f139bde3814b6a077e8cd2db0f105ebea2a83af70d385f13b507fac2ad93ff79d84950328bb86f3074745a8b7f9b64990fb142e2a12976e27e8d09a28dc5621f957ac49091116da410ac3cbde1b88d04533121f6010400cbd785b56905e4192e2fb62a720727d43c4fa487821203cf72138b884b78b701093243e1d8c92a0248a6c0203a5a88693da34af357499abacaf4b3309c640797d03093870a323b4b6f37865f6eaa2838148a67df4735d43a90ca87942554cdf1c4a751b1e75f9fd4ce4e97e278d6c1c7ed59d33441df7d084f3f02beb68896c70011010001889f0418010200090502533121f6021b0c000a09104ef7e4beccde97f0b98b03fc0a5ccf6a372995835a2f5da33b282a7d612c0ab2a97f59cf9fff73e9110981aac2858c41399afa29624a7fd8a0add11654e3d882c0fd199e161bdad65e5e2548f7b68a437ea64293db1246e3011cbb94dc1bcdeaf0f2539bd88ff16d95547144d97cead6a8c5927660a91e6db0d16eb36b7b49a3525b54d1644e65599b032b7eb901a204533127a0110400bd3edaa09eff9809c4edc2c2a0ebe52e53c50a19c1e49ab78e6167bf61473bb08f2050d78a5cbbc6ed66aff7b42cd503f16b4a0b99fa1609681fca9b7ce2bbb1a5b3864d6cdda4d7ef7849d156d534dea30fb0efb9e4cf8959a2b2ce623905882d5430b995a15c3b9fe92906086788b891002924f94abe139b42cbbfaaabe42f00a0b65dc1a1ad27d798adbcb5b5ad02d2688c89477b03ff4eebb6f7b15a73b96a96bed201c0e5e4ea27e4c6e2dd1005b94d4b90137a5b1cf5e01c6226c070c4cc999938101578877ee76d296b9aab8246d57049caacf489e80a3f40589cade790a020b1ac146d6f7a6241184b8c7fcde680eae3188f5dcbe846d7f7bdad34f6fcfca08413e19c1d5df83fc7c7c627d493492e009c2f52a80400a2fe82de87136fd2e8845888c4431b032ba29d9a29a804277e31002a8201fb8591a3e55c7a0d0881496caf8b9fb07544a5a4879291d0dc026a0ea9e5bd88eb4aa4947bbd694b25012e208a250d65ddc6f1eea59d3aed3b4ec15fcab85e2afaa23a40ab1ef9ce3e11e1bc1c34a0e758e7aa64deb8739276df0af7d4121f834a9b88e70418010200090502533127a002
1b02005209104ef7e4beccde97f047200419110200060502533127a0000a0910dbce4ee19529437fe045009c0b32f5ead48ee8a7e98fac0dea3d3e6c0e2c552500a0ad71fadc5007cfaf842d9b7db3335a8cdad15d3d1a6404009b08e2c68fe8f3b45c1bb72a4b3278cdf3012aa0f229883ad74aa1f6000bb90b18301b2f85372ca5d6b9bf478d235b733b1b197d19ccca48e9daf8e890cb64546b4ce1b178faccfff07003c172a2d4f5ebaba9f57153955f3f61a9b80a4f5cb959908f8b211b03b7026a8a82fc612bfedd3794969bcf458c4ce92be215a1176ab88d045331d144010400a5063000c5aaf34953c1aa3bfc95045b3aab9882b9a8027fecfe2142dc6b47ba8aca667399990244d513dd0504716908c17d92c65e74219e004f7b83fc125e575dd58efec3ab6dd22e3580106998523dea42ec75bf9aa111734c82df54630bebdff20fe981cfc36c76f865eb1c2fb62c9e85bc3a6e5015a361a2eb1c8431578d0011010001889f04280102000905025331d433021d03000a09104ef7e4beccde97f02e5503ff5e0630d1b65291f4882b6d40a29da4616bb5088717d469fbcc3648b8276de04a04988b1f1b9f3e18f52265c1f8b6c85861691c1a6b8a3a25a1809a0b32ad330aec5667cb4262f4450649184e8113849b05e5ad06a316ea80c001e8e71838190339a6e48bbde30647bcf245134b9a97fa875c1d83a9862cae87ffd7e2c4ce3a1b89013d04180102000905025331d144021b0200a809104ef7e4beccde97f09d2004190102000605025331d144000a0910677815e371c2fd23522203fe22ab62b8e7a151383cea3edd3a12995693911426f8ccf125e1f6426388c0010f88d9ca7da2224aee8d1c12135998640c5e1813d55a93df472faae75bef858457248db41b4505827590aeccf6f9eb646da7f980655dd3050c6897feddddaca90676dee856d66db8923477d251712bb9b3186b4d0114daf7d6b59272b53218dd1da94a03ff64006fcbe71211e5daecd9961fba66cdb6de3f914882c58ba5beddeba7dcb950c1156d7fba18c19ea880dccc800eae335deec34e3b84ac75ffa24864f782f87815cda1c0f634b3dd2fa67cea30811d21723d21d9551fa12ccbcfa62b6d3a15d01307b99925707992556d50065505b090aadb8579083a20fe65bd2a270da9b011" -const missingCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- -Charset: UTF-8 - -mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY -ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG -zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54 -QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ -QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo -9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu -Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/ -dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R -JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL -ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew -RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW -/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu -yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAJcXQeP+NmuciE99YcJoffxv -2gVLU4ZXBNHEaP0mgaJ1+tmMD089vUQAcyGRvw8jfsNsVZQIOAuRxY94aHQhIRHR -bUzBN28ofo/AJJtfx62C15xt6fDKRV6HXYqAiygrHIpEoRLyiN69iScUsjIJeyFL -C8wa72e8pSL6dkHoaV1N9ZH/xmrJ+k0vsgkQaAh9CzYufncDxcwkoP+aOlGtX1gP -WwWoIbz0JwLEMPHBWvDDXQcQPQTYQyj+LGC9U6f9VZHN25E94subM1MjuT9OhN9Y -MLfWaaIc5WyhLFyQKW2Upofn9wSFi8ubyBnv640Dfd0rVmaWv7LNTZpoZ/GbJAMA -EQEAAYkBHwQYAQIACQUCU5ygeQIbAgAKCRDt1A0FCB6SP0zCB/sEzaVR38vpx+OQ -MMynCBJrakiqDmUZv9xtplY7zsHSQjpd6xGflbU2n+iX99Q+nav0ETQZifNUEd4N -1ljDGQejcTyKD6Pkg6wBL3x9/RJye7Zszazm4+toJXZ8xJ3800+BtaPoI39akYJm -+ijzbskvN0v/j5GOFJwQO0pPRAFtdHqRs9Kf4YanxhedB4dIUblzlIJuKsxFit6N -lgGRblagG3Vv2eBszbxzPbJjHCgVLR3RmrVezKOsZjr/2i7X+xLWIR0uD3IN1qOW -CXQxLBizEEmSNVNxsp7KPGTLnqO3bPtqFirxS9PJLIMPTPLNBY7ZYuPNTMqVIUWF -4artDmrG -=7FfJ ------END PGP PUBLIC KEY BLOCK-----` - -const invalidCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- - 
-mQENBFMYynYBCACVOZ3/e8Bm2b9KH9QyIlHGo/i1bnkpqsgXj8tpJ2MIUOnXMMAY -ztW7kKFLCmgVdLIC0vSoLA4yhaLcMojznh/2CcUglZeb6Ao8Gtelr//Rd5DRfPpG -zqcfUo+m+eO1co2Orabw0tZDfGpg5p3AYl0hmxhUyYSc/xUq93xL1UJzBFgYXY54 -QsM8dgeQgFseSk/YvdP5SMx1ev+eraUyiiUtWzWrWC1TdyRa5p4UZg6Rkoppf+WJ -QrW6BWrhAtqATHc8ozV7uJjeONjUEq24roRc/OFZdmQQGK6yrzKnnbA6MdHhqpdo -9kWDcXYb7pSE63Lc+OBa5X2GUVvXJLS/3nrtABEBAAG0F2ludmFsaWQtc2lnbmlu -Zy1zdWJrZXlziQEoBBMBAgASBQJTnKB5AhsBAgsHAhUIAh4BAAoJEO3UDQUIHpI/ -dN4H/idX4FQ1LIZCnpHS/oxoWQWfpRgdKAEM0qCqjMgiipJeEwSQbqjTCynuh5/R -JlODDz85ABR06aoF4l5ebGLQWFCYifPnJZ/Yf5OYcMGtb7dIbqxWVFL9iLMO/oDL -ioI3dotjPui5e+2hI9pVH1UHB/bZ/GvMGo6Zg0XxLPolKQODMVjpjLAQ0YJ3spew -RAmOGre6tIvbDsMBnm8qREt7a07cBJ6XK7xjxYaZHQBiHVxyEWDa6gyANONx8duW -/fhQ/zDTnyVM/ik6VO0Ty9BhPpcEYLFwh5c1ilFari1ta3e6qKo6ZGa9YMk/REhu -yBHd9nTkI+0CiQUmbckUiVjDKKe5AQ0EUxjKdgEIAIINDqlj7X6jYKc6DjwrOkjQ -UIRWbQQar0LwmNilehmt70g5DCL1SYm9q4LcgJJ2Nhxj0/5qqsYib50OSWMcKeEe -iRXpXzv1ObpcQtI5ithp0gR53YPXBib80t3bUzomQ5UyZqAAHzMp3BKC54/vUrSK -FeRaxDzNLrCeyI00+LHNUtwghAqHvdNcsIf8VRumK8oTm3RmDh0TyjASWYbrt9c8 -R1Um3zuoACOVy+mEIgIzsfHq0u7dwYwJB5+KeM7ZLx+HGIYdUYzHuUE1sLwVoELh -+SHIGHI1HDicOjzqgajShuIjj5hZTyQySVprrsLKiXS6NEwHAP20+XjayJ/R3tEA -EQEAAYkCPgQYAQIBKAUCU5ygeQIbAsBdIAQZAQIABgUCU5ygeQAKCRCpVlnFZmhO -52RJB/9uD1MSa0wjY6tHOIgquZcP3bHBvHmrHNMw9HR2wRCMO91ZkhrpdS3ZHtgb -u3/55etj0FdvDo1tb8P8FGSVtO5Vcwf5APM8sbbqoi8L951Q3i7qt847lfhu6sMl -w0LWFvPTOLHrliZHItPRjOltS1WAWfr2jUYhsU9ytaDAJmvf9DujxEOsN5G1YJep -54JCKVCkM/y585Zcnn+yxk/XwqoNQ0/iJUT9qRrZWvoeasxhl1PQcwihCwss44A+ -YXaAt3hbk+6LEQuZoYS73yR3WHj+42tfm7YxRGeubXfgCEz/brETEWXMh4pe0vCL -bfWrmfSPq2rDegYcAybxRQz0lF8PAAoJEO3UDQUIHpI/exkH/0vQfdHA8g/N4T6E -i6b1CUVBAkvtdJpCATZjWPhXmShOw62gkDw306vHPilL4SCvEEi4KzG72zkp6VsB -DSRcpxCwT4mHue+duiy53/aRMtSJ+vDfiV1Vhq+3sWAck/yUtfDU9/u4eFaiNok1 -8/Gd7reyuZt5CiJnpdPpjCwelK21l2w7sHAnJF55ITXdOxI8oG3BRKufz0z5lyDY -s2tXYmhhQIggdgelN8LbcMhWs/PBbtUr6uZlNJG2lW1yscD4aI529VjwJlCeo745 -U7pO4eF05VViUJ2mmfoivL3tkhoTUWhx8xs8xCUcCg8DoEoSIhxtOmoTPR22Z9BL -6LCg2mg= -=Dhm4 ------END PGP PUBLIC KEY BLOCK-----` - -const goodCrossSignatureKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- -Version: GnuPG v1 - -mI0EVUqeVwEEAMufHRrMPWK3gyvi0O0tABCs/oON9zV9KDZlr1a1M91ShCSFwCPo -7r80PxdWVWcj0V5h50/CJYtpN3eE/mUIgW2z1uDYQF1OzrQ8ubrksfsJvpAhENom -lTQEppv9mV8qhcM278teb7TX0pgrUHLYF5CfPdp1L957JLLXoQR/lwLVABEBAAG0 -E2dvb2Qtc2lnbmluZy1zdWJrZXmIuAQTAQIAIgUCVUqeVwIbAwYLCQgHAwIGFQgC -CQoLBBYCAwECHgECF4AACgkQNRjL95IRWP69XQQAlH6+eyXJN4DZTLX78KGjHrsw -6FCvxxClEPtPUjcJy/1KCRQmtLAt9PbbA78dvgzjDeZMZqRAwdjyJhjyg/fkU2OH -7wq4ktjUu+dLcOBb+BFMEY+YjKZhf6EJuVfxoTVr5f82XNPbYHfTho9/OABKH6kv -X70PaKZhbwnwij8Nts65AaIEVUqftREEAJ3WxZfqAX0bTDbQPf2CMT2IVMGDfhK7 -GyubOZgDFFjwUJQvHNvsrbeGLZ0xOBumLINyPO1amIfTgJNm1iiWFWfmnHReGcDl -y5mpYG60Mb79Whdcer7CMm3AqYh/dW4g6IB02NwZMKoUHo3PXmFLxMKXnWyJ0clw -R0LI/Qn509yXAKDh1SO20rqrBM+EAP2c5bfI98kyNwQAi3buu94qo3RR1ZbvfxgW -CKXDVm6N99jdZGNK7FbRifXqzJJDLcXZKLnstnC4Sd3uyfyf1uFhmDLIQRryn5m+ -LBYHfDBPN3kdm7bsZDDq9GbTHiFZUfm/tChVKXWxkhpAmHhU/tH6GGzNSMXuIWSO -aOz3Rqq0ED4NXyNKjdF9MiwD/i83S0ZBc0LmJYt4Z10jtH2B6tYdqnAK29uQaadx -yZCX2scE09UIm32/w7pV77CKr1Cp/4OzAXS1tmFzQ+bX7DR+Gl8t4wxr57VeEMvl -BGw4Vjh3X8//m3xynxycQU18Q1zJ6PkiMyPw2owZ/nss3hpSRKFJsxMLhW3fKmKr -Ey2KiOcEGAECAAkFAlVKn7UCGwIAUgkQNRjL95IRWP5HIAQZEQIABgUCVUqftQAK -CRD98VjDN10SqkWrAKDTpEY8D8HC02E/KVC5YUI01B30wgCgurpILm20kXEDCeHp -C5pygfXw1DJrhAP+NyPJ4um/bU1I+rXaHHJYroYJs8YSweiNcwiHDQn0Engh/mVZ -SqLHvbKh2dL/RXymC3+rjPvQf5cup9bPxNMa6WagdYBNAfzWGtkVISeaQW+cTEp/ -MtgVijRGXR/lGLGETPg2X3Afwn9N9bLMBkBprKgbBqU7lpaoPupxT61bL70= -=vtbN ------END PGP PUBLIC KEY BLOCK-----` 
+func TestNewEntityCorrectName(t *testing.T) {
+ entity, err := NewEntity("Golang Gopher", "Test Key", "no-reply@golang.com", nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(entity.Identities) != 1 {
+ t.Fatalf("len(entity.Identities) = %d, want 1", len(entity.Identities))
+ }
+ var got string
+ for _, i := range entity.Identities {
+ got = i.Name
+ }
+ want := "Golang Gopher (Test Key) <no-reply@golang.com>"
+ if got != want {
+ t.Fatalf("Identity.Name = %q, want %q", got, want)
+ }
+}
+
+func TestNewEntityWithPreferredSymmetric(t *testing.T) {
+ c := &packet.Config{
+ DefaultCipher: packet.CipherAES256,
+ }
+ entity, err := NewEntity("Golang Gopher", "Test Key", "no-reply@golang.com", c)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, identity := range entity.Identities {
+ if len(identity.SelfSignature.PreferredSymmetric) == 0 {
+ t.Fatal("didn't find a preferred cipher in self signature")
+ }
+ if identity.SelfSignature.PreferredSymmetric[0] != uint8(c.DefaultCipher) {
+ t.Fatalf("Expected preferred cipher to be %d, got %d", uint8(c.DefaultCipher), identity.SelfSignature.PreferredSymmetric[0])
+ }
+ }
+}
+
+func TestNewEntityWithoutPreferredSymmetric(t *testing.T) {
+ entity, err := NewEntity("Golang Gopher", "Test Key", "no-reply@golang.com", nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, identity := range entity.Identities {
+ if len(identity.SelfSignature.PreferredSymmetric) != 0 {
+ t.Fatalf("Expected preferred cipher to be empty but got length %d", len(identity.SelfSignature.PreferredSymmetric))
+ }
+ }
+}
+
+func TestNewEntityPublicSerialization(t *testing.T) {
+ entity, err := NewEntity("Golang Gopher", "Test Key", "no-reply@golang.com", nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ serializedEntity := bytes.NewBuffer(nil)
+ entity.Serialize(serializedEntity)
+
+ _, err = ReadEntity(packet.NewReader(bytes.NewBuffer(serializedEntity.Bytes())))
+ if err != nil {
+ t.Fatal(err)
+ }
+}
diff --git a/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go b/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go
index 266840d..02b372c 100644
--- a/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go
+++ b/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key.go
@@ -42,12 +42,18 @@ func (e *EncryptedKey) parse(r io.Reader) (err error) {
 switch e.Algo {
 case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly:
 e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
 case PubKeyAlgoElGamal:
 e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r)
 if err != nil {
 return
 }
 e.encryptedMPI2.bytes, e.encryptedMPI2.bitLength, err = readMPI(r)
+ if err != nil {
+ return
+ }
 }
 _, err = consumeAll(r)
 return
@@ -72,7 +78,8 @@ func (e *EncryptedKey) Decrypt(priv *PrivateKey, config *Config) error {
 // padding oracle attacks.
switch priv.PubKeyAlgo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: - b, err = rsa.DecryptPKCS1v15(config.Random(), priv.PrivateKey.(*rsa.PrivateKey), e.encryptedMPI1.bytes) + k := priv.PrivateKey.(*rsa.PrivateKey) + b, err = rsa.DecryptPKCS1v15(config.Random(), k, padToKeySize(&k.PublicKey, e.encryptedMPI1.bytes)) case PubKeyAlgoElGamal: c1 := new(big.Int).SetBytes(e.encryptedMPI1.bytes) c2 := new(big.Int).SetBytes(e.encryptedMPI2.bytes) diff --git a/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go b/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go index fee14cf..f2fcf4d 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/encrypted_key_test.go @@ -39,39 +39,44 @@ var encryptedKeyPriv = &PrivateKey{ } func TestDecryptingEncryptedKey(t *testing.T) { - const encryptedKeyHex = "c18c032a67d68660df41c70104005789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8" - const expectedKeyHex = "d930363f7e0308c333b9618617ea728963d8df993665ae7be1092d4926fd864b" - - p, err := Read(readerFromHex(encryptedKeyHex)) - if err != nil { - t.Errorf("error from Read: %s", err) - return - } - ek, ok := p.(*EncryptedKey) - if !ok { - t.Errorf("didn't parse an EncryptedKey, got %#v", p) - return - } - - if ek.KeyId != 0x2a67d68660df41c7 || ek.Algo != PubKeyAlgoRSA { - t.Errorf("unexpected EncryptedKey contents: %#v", ek) - return - } - - err = ek.Decrypt(encryptedKeyPriv, nil) - if err != nil { - t.Errorf("error from Decrypt: %s", err) - return - } - - if ek.CipherFunc != CipherAES256 { - t.Errorf("unexpected EncryptedKey contents: %#v", ek) - return - } - - keyHex := fmt.Sprintf("%x", ek.Key) - if keyHex != expectedKeyHex { - t.Errorf("bad key, got %s want %x", keyHex, expectedKeyHex) + for i, encryptedKeyHex := range []string{ + "c18c032a67d68660df41c70104005789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8", + // MPI can be shorter than the length of the key. 
+ "c18b032a67d68660df41c70103f8e520c52ae9807183c669ce26e772e482dc5d8cf60e6f59316e145be14d2e5221ee69550db1d5618a8cb002a719f1f0b9345bde21536d410ec90ba86cac37748dec7933eb7f9873873b2d61d3321d1cd44535014f6df58f7bc0c7afb5edc38e1a974428997d2f747f9a173bea9ca53079b409517d332df62d805564cffc9be6", + } { + const expectedKeyHex = "d930363f7e0308c333b9618617ea728963d8df993665ae7be1092d4926fd864b" + + p, err := Read(readerFromHex(encryptedKeyHex)) + if err != nil { + t.Errorf("#%d: error from Read: %s", i, err) + return + } + ek, ok := p.(*EncryptedKey) + if !ok { + t.Errorf("#%d: didn't parse an EncryptedKey, got %#v", i, p) + return + } + + if ek.KeyId != 0x2a67d68660df41c7 || ek.Algo != PubKeyAlgoRSA { + t.Errorf("#%d: unexpected EncryptedKey contents: %#v", i, ek) + return + } + + err = ek.Decrypt(encryptedKeyPriv, nil) + if err != nil { + t.Errorf("#%d: error from Decrypt: %s", i, err) + return + } + + if ek.CipherFunc != CipherAES256 { + t.Errorf("#%d: unexpected EncryptedKey contents: %#v", i, ek) + return + } + + keyHex := fmt.Sprintf("%x", ek.Key) + if keyHex != expectedKeyHex { + t.Errorf("#%d: bad key, got %s want %s", i, keyHex, expectedKeyHex) + } } } @@ -121,7 +126,7 @@ func TestEncryptingEncryptedKey(t *testing.T) { keyHex := fmt.Sprintf("%x", ek.Key) if keyHex != expectedKeyHex { - t.Errorf("bad key, got %s want %x", keyHex, expectedKeyHex) + t.Errorf("bad key, got %s want %s", keyHex, expectedKeyHex) } } diff --git a/vendor/golang.org/x/crypto/openpgp/packet/opaque_test.go b/vendor/golang.org/x/crypto/openpgp/packet/opaque_test.go index f27bbfe..61159f4 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/opaque_test.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/opaque_test.go @@ -32,7 +32,7 @@ func TestOpaqueParseReason(t *testing.T) { break } // try to parse opaque packet - p, err := op.Parse() + p, _ := op.Parse() switch pkt := p.(type) { case *UserId: uid = pkt diff --git a/vendor/golang.org/x/crypto/openpgp/packet/packet.go b/vendor/golang.org/x/crypto/openpgp/packet/packet.go index 3eded93..5af64c5 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/packet.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/packet.go @@ -11,10 +11,12 @@ import ( "crypto/aes" "crypto/cipher" "crypto/des" - "golang.org/x/crypto/cast5" - "golang.org/x/crypto/openpgp/errors" + "crypto/rsa" "io" "math/big" + + "golang.org/x/crypto/cast5" + "golang.org/x/crypto/openpgp/errors" ) // readFull is the same as io.ReadFull except that reading zero bytes returns @@ -402,14 +404,16 @@ const ( type PublicKeyAlgorithm uint8 const ( - PubKeyAlgoRSA PublicKeyAlgorithm = 1 - PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2 - PubKeyAlgoRSASignOnly PublicKeyAlgorithm = 3 - PubKeyAlgoElGamal PublicKeyAlgorithm = 16 - PubKeyAlgoDSA PublicKeyAlgorithm = 17 + PubKeyAlgoRSA PublicKeyAlgorithm = 1 + PubKeyAlgoElGamal PublicKeyAlgorithm = 16 + PubKeyAlgoDSA PublicKeyAlgorithm = 17 // RFC 6637, Section 5. PubKeyAlgoECDH PublicKeyAlgorithm = 18 PubKeyAlgoECDSA PublicKeyAlgorithm = 19 + + // Deprecated in RFC 4880, Section 13.5. Use key flags instead. + PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2 + PubKeyAlgoRSASignOnly PublicKeyAlgorithm = 3 ) // CanEncrypt returns true if it's possible to encrypt a message to a public @@ -500,19 +504,17 @@ func readMPI(r io.Reader) (mpi []byte, bitLength uint16, err error) { numBytes := (int(bitLength) + 7) / 8 mpi = make([]byte, numBytes) _, err = readFull(r, mpi) - return -} - -// mpiLength returns the length of the given *big.Int when serialized as an -// MPI. 
-func mpiLength(n *big.Int) (mpiLengthInBytes int) { - mpiLengthInBytes = 2 /* MPI length */ - mpiLengthInBytes += (n.BitLen() + 7) / 8 + // According to RFC 4880 3.2. we should check that the MPI has no leading + // zeroes (at least when not an encrypted MPI?), but this implementation + // does generate leading zeroes, so we keep accepting them. return } // writeMPI serializes a big integer to w. func writeMPI(w io.Writer, bitLength uint16, mpiBytes []byte) (err error) { + // Note that we can produce leading zeroes, in violation of RFC 4880 3.2. + // Implementations seem to be tolerant of them, and stripping them would + // make it complex to guarantee matching re-serialization. _, err = w.Write([]byte{byte(bitLength >> 8), byte(bitLength)}) if err == nil { _, err = w.Write(mpiBytes) @@ -525,6 +527,18 @@ func writeBig(w io.Writer, i *big.Int) error { return writeMPI(w, uint16(i.BitLen()), i.Bytes()) } +// padToKeySize left-pads a MPI with zeroes to match the length of the +// specified RSA public. +func padToKeySize(pub *rsa.PublicKey, b []byte) []byte { + k := (pub.N.BitLen() + 7) / 8 + if len(b) >= k { + return b + } + bb := make([]byte, k) + copy(bb[len(bb)-len(b):], b) + return bb +} + // CompressionAlgo Represents the different compression algorithms // supported by OpenPGP (except for BZIP2, which is not currently // supported). See Section 9.3 of RFC 4880. diff --git a/vendor/golang.org/x/crypto/openpgp/packet/private_key.go b/vendor/golang.org/x/crypto/openpgp/packet/private_key.go index 34734cc..bd31cce 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/private_key.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/private_key.go @@ -64,14 +64,19 @@ func NewECDSAPrivateKey(currentTime time.Time, priv *ecdsa.PrivateKey) *PrivateK return pk } -// NewSignerPrivateKey creates a sign-only PrivateKey from a crypto.Signer that +// NewSignerPrivateKey creates a PrivateKey from a crypto.Signer that // implements RSA or ECDSA. func NewSignerPrivateKey(currentTime time.Time, signer crypto.Signer) *PrivateKey { pk := new(PrivateKey) + // In general, the public Keys should be used as pointers. We still + // type-switch on the values, for backwards-compatibility. 
switch pubkey := signer.Public().(type) { + case *rsa.PublicKey: + pk.PublicKey = *NewRSAPublicKey(currentTime, pubkey) case rsa.PublicKey: pk.PublicKey = *NewRSAPublicKey(currentTime, &pubkey) - pk.PubKeyAlgo = PubKeyAlgoRSASignOnly + case *ecdsa.PublicKey: + pk.PublicKey = *NewECDSAPublicKey(currentTime, pubkey) case ecdsa.PublicKey: pk.PublicKey = *NewECDSAPublicKey(currentTime, &pubkey) default: diff --git a/vendor/golang.org/x/crypto/openpgp/packet/private_key_test.go b/vendor/golang.org/x/crypto/openpgp/packet/private_key_test.go index c16ef78..cc08b48 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/private_key_test.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/private_key_test.go @@ -14,7 +14,6 @@ import ( "crypto/x509" "encoding/hex" "hash" - "io" "testing" "time" ) @@ -162,15 +161,7 @@ func TestECDSAPrivateKey(t *testing.T) { } type rsaSigner struct { - priv *rsa.PrivateKey -} - -func (s *rsaSigner) Public() crypto.PublicKey { - return s.priv.PublicKey -} - -func (s *rsaSigner) Sign(rand io.Reader, msg []byte, opts crypto.SignerOpts) ([]byte, error) { - return s.priv.Sign(rand, msg, opts) + *rsa.PrivateKey } func TestRSASignerPrivateKey(t *testing.T) { @@ -181,12 +172,8 @@ func TestRSASignerPrivateKey(t *testing.T) { priv := NewSignerPrivateKey(time.Now(), &rsaSigner{rsaPriv}) - if priv.PubKeyAlgo != PubKeyAlgoRSASignOnly { - t.Fatal("NewSignerPrivateKey should have made a sign-only RSA private key") - } - sig := &Signature{ - PubKeyAlgo: PubKeyAlgoRSASignOnly, + PubKeyAlgo: PubKeyAlgoRSA, Hash: crypto.SHA256, } msg := []byte("Hello World!") @@ -208,15 +195,7 @@ func TestRSASignerPrivateKey(t *testing.T) { } type ecdsaSigner struct { - priv *ecdsa.PrivateKey -} - -func (s *ecdsaSigner) Public() crypto.PublicKey { - return s.priv.PublicKey -} - -func (s *ecdsaSigner) Sign(rand io.Reader, msg []byte, opts crypto.SignerOpts) ([]byte, error) { - return s.priv.Sign(rand, msg, opts) + *ecdsa.PrivateKey } func TestECDSASignerPrivateKey(t *testing.T) { @@ -228,7 +207,7 @@ func TestECDSASignerPrivateKey(t *testing.T) { priv := NewSignerPrivateKey(time.Now(), &ecdsaSigner{ecdsaPriv}) if priv.PubKeyAlgo != PubKeyAlgoECDSA { - t.Fatal("NewSignerPrivateKey should have made a ECSDA private key") + t.Fatal("NewSignerPrivateKey should have made an ECSDA private key") } sig := &Signature{ diff --git a/vendor/golang.org/x/crypto/openpgp/packet/public_key.go b/vendor/golang.org/x/crypto/openpgp/packet/public_key.go index ead2623..fcd5f52 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/public_key.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/public_key.go @@ -244,7 +244,12 @@ func NewECDSAPublicKey(creationTime time.Time, pub *ecdsa.PublicKey) *PublicKey } pk.ec.p.bytes = elliptic.Marshal(pub.Curve, pub.X, pub.Y) - pk.ec.p.bitLength = uint16(8 * len(pk.ec.p.bytes)) + + // The bit length is 3 (for the 0x04 specifying an uncompressed key) + // plus two field elements (for x and y), which are rounded up to the + // nearest byte. 
See https://tools.ietf.org/html/rfc6637#section-6 + fieldBytes := (pub.Curve.Params().BitSize + 7) & ^7 + pk.ec.p.bitLength = uint16(3 + fieldBytes + fieldBytes) pk.setFingerPrintAndKeyId() return pk @@ -515,7 +520,7 @@ func (pk *PublicKey) VerifySignature(signed hash.Hash, sig *Signature) (err erro switch pk.PubKeyAlgo { case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly: rsaPublicKey, _ := pk.PublicKey.(*rsa.PublicKey) - err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes) + err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, padToKeySize(rsaPublicKey, sig.RSASignature.bytes)) if err != nil { return errors.SignatureError("RSA verification failure") } @@ -566,7 +571,7 @@ func (pk *PublicKey) VerifySignatureV3(signed hash.Hash, sig *SignatureV3) (err switch pk.PubKeyAlgo { case PubKeyAlgoRSA, PubKeyAlgoRSASignOnly: rsaPublicKey := pk.PublicKey.(*rsa.PublicKey) - if err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, sig.RSASignature.bytes); err != nil { + if err = rsa.VerifyPKCS1v15(rsaPublicKey, sig.Hash, hashBytes, padToKeySize(rsaPublicKey, sig.RSASignature.bytes)); err != nil { return errors.SignatureError("RSA verification failure") } return diff --git a/vendor/golang.org/x/crypto/openpgp/packet/public_key_test.go b/vendor/golang.org/x/crypto/openpgp/packet/public_key_test.go index 7ad7d91..103696e 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/public_key_test.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/public_key_test.go @@ -6,7 +6,10 @@ package packet import ( "bytes" + "crypto/ecdsa" + "crypto/elliptic" "encoding/hex" + "math/big" "testing" "time" ) @@ -186,6 +189,29 @@ func TestEcc384Serialize(t *testing.T) { } } +func TestP256KeyID(t *testing.T) { + // Confirm that key IDs are correctly calculated for ECC keys. 
+ ecdsaPub := &ecdsa.PublicKey{ + Curve: elliptic.P256(), + X: fromHex("81fbbc20eea9e8d1c3ceabb0a8185925b113d1ac42cd5c78403bd83da19235c6"), + Y: fromHex("5ed6db13d91db34507d0129bf88981878d29adbf8fcd1720afdb767bb3fcaaff"), + } + pub := NewECDSAPublicKey(time.Unix(1297309478, 0), ecdsaPub) + + const want = uint64(0xd01055fbcadd268e) + if pub.KeyId != want { + t.Errorf("want key ID: %x, got %x", want, pub.KeyId) + } +} + +func fromHex(hex string) *big.Int { + n, ok := new(big.Int).SetString(hex, 16) + if !ok { + panic("bad hex number: " + hex) + } + return n +} + const rsaFingerprintHex = "5fb74b1d03b1e3cb31bc2f8aa34d7e18c20c31bb" const rsaPkDataHex = "988d044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd0011010001" diff --git a/vendor/golang.org/x/crypto/openpgp/packet/signature.go b/vendor/golang.org/x/crypto/openpgp/packet/signature.go index 6ce0cbe..b2a24a5 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/signature.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/signature.go @@ -542,7 +542,7 @@ func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err e r, s, err = ecdsa.Sign(config.Random(), pk, digest) } else { var b []byte - b, err = priv.PrivateKey.(crypto.Signer).Sign(config.Random(), digest, nil) + b, err = priv.PrivateKey.(crypto.Signer).Sign(config.Random(), digest, sig.Hash) if err == nil { r, s, err = unwrapECDSASig(b) } diff --git a/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go b/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go index 96a2b38..d19ffbc 100644 --- a/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go +++ b/vendor/golang.org/x/crypto/openpgp/packet/userattribute.go @@ -80,7 +80,7 @@ func (uat *UserAttribute) Serialize(w io.Writer) (err error) { // ImageData returns zero or more byte slices, each containing // JPEG File Interchange Format (JFIF), for each photo in the -// the user attribute packet. +// user attribute packet. func (uat *UserAttribute) ImageData() (imageData [][]byte) { for _, sp := range uat.Contents { if sp.SubType == UserAttrImageSubpacket && len(sp.Contents) > 16 { diff --git a/vendor/golang.org/x/crypto/openpgp/read_test.go b/vendor/golang.org/x/crypto/openpgp/read_test.go index 1fbfbac..f5bba30 100644 --- a/vendor/golang.org/x/crypto/openpgp/read_test.go +++ b/vendor/golang.org/x/crypto/openpgp/read_test.go @@ -124,7 +124,7 @@ func checkSignedMessage(t *testing.T, signedHex, expected string) { return } - if !md.IsSigned || md.SignedByKeyId != 0xa34d7e18c20c31bb || md.SignedBy == nil || md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) != 0 || md.IsSymmetricallyEncrypted { + if !md.IsSigned || md.SignedByKeyId != 0xa34d7e18c20c31bb || md.SignedBy == nil || md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) != 0 || md.DecryptedWith != (Key{}) { t.Errorf("bad MessageDetails: %#v", md) } diff --git a/vendor/golang.org/x/crypto/openpgp/write.go b/vendor/golang.org/x/crypto/openpgp/write.go index 65a304c..4ee7178 100644 --- a/vendor/golang.org/x/crypto/openpgp/write.go +++ b/vendor/golang.org/x/crypto/openpgp/write.go @@ -164,12 +164,12 @@ func hashToHashId(h crypto.Hash) uint8 { return v } -// Encrypt encrypts a message to a number of recipients and, optionally, signs -// it. 
hints contains optional information, that is also encrypted, that aids -// the recipients in processing the message. The resulting WriteCloser must -// be closed after the contents of the file have been written. -// If config is nil, sensible defaults will be used. -func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) { +// writeAndSign writes the data as a payload package and, optionally, signs +// it. hints contains optional information, that is also encrypted, +// that aids the recipients in processing the message. The resulting +// WriteCloser must be closed after the contents of the file have been +// written. If config is nil, sensible defaults will be used. +func writeAndSign(payload io.WriteCloser, candidateHashes []uint8, signed *Entity, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) { var signer *packet.PrivateKey if signed != nil { signKey, ok := signed.signingKey(config.Now()) @@ -185,6 +185,83 @@ func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHint } } + var hash crypto.Hash + for _, hashId := range candidateHashes { + if h, ok := s2k.HashIdToHash(hashId); ok && h.Available() { + hash = h + break + } + } + + // If the hash specified by config is a candidate, we'll use that. + if configuredHash := config.Hash(); configuredHash.Available() { + for _, hashId := range candidateHashes { + if h, ok := s2k.HashIdToHash(hashId); ok && h == configuredHash { + hash = h + break + } + } + } + + if hash == 0 { + hashId := candidateHashes[0] + name, ok := s2k.HashIdToString(hashId) + if !ok { + name = "#" + strconv.Itoa(int(hashId)) + } + return nil, errors.InvalidArgumentError("cannot encrypt because no candidate hash functions are compiled in. (Wanted " + name + " in this case.)") + } + + if signer != nil { + ops := &packet.OnePassSignature{ + SigType: packet.SigTypeBinary, + Hash: hash, + PubKeyAlgo: signer.PubKeyAlgo, + KeyId: signer.KeyId, + IsLast: true, + } + if err := ops.Serialize(payload); err != nil { + return nil, err + } + } + + if hints == nil { + hints = &FileHints{} + } + + w := payload + if signer != nil { + // If we need to write a signature packet after the literal + // data then we need to stop literalData from closing + // encryptedData. + w = noOpCloser{w} + + } + var epochSeconds uint32 + if !hints.ModTime.IsZero() { + epochSeconds = uint32(hints.ModTime.Unix()) + } + literalData, err := packet.SerializeLiteral(w, hints.IsBinary, hints.FileName, epochSeconds) + if err != nil { + return nil, err + } + + if signer != nil { + return signatureWriter{payload, literalData, hash, hash.New(), signer, config}, nil + } + return literalData, nil +} + +// Encrypt encrypts a message to a number of recipients and, optionally, signs +// it. hints contains optional information, that is also encrypted, that aids +// the recipients in processing the message. The resulting WriteCloser must +// be closed after the contents of the file have been written. +// If config is nil, sensible defaults will be used. +func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHints, config *packet.Config) (plaintext io.WriteCloser, err error) { + if len(to) == 0 { + return nil, errors.InvalidArgumentError("no encryption recipient provided") + } + // These are the possible ciphers that we'll use for the message. 
candidateCiphers := []uint8{ uint8(packet.CipherAES128), @@ -194,6 +271,7 @@ func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHint // These are the possible hash functions that we'll use for the signature. candidateHashes := []uint8{ hashToHashId(crypto.SHA256), + hashToHashId(crypto.SHA384), hashToHashId(crypto.SHA512), hashToHashId(crypto.SHA1), hashToHashId(crypto.RIPEMD160), @@ -241,33 +319,6 @@ func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHint } } - var hash crypto.Hash - for _, hashId := range candidateHashes { - if h, ok := s2k.HashIdToHash(hashId); ok && h.Available() { - hash = h - break - } - } - - // If the hash specified by config is a candidate, we'll use that. - if configuredHash := config.Hash(); configuredHash.Available() { - for _, hashId := range candidateHashes { - if h, ok := s2k.HashIdToHash(hashId); ok && h == configuredHash { - hash = h - break - } - } - } - - if hash == 0 { - hashId := candidateHashes[0] - name, ok := s2k.HashIdToString(hashId) - if !ok { - name = "#" + strconv.Itoa(int(hashId)) - } - return nil, errors.InvalidArgumentError("cannot encrypt because no candidate hash functions are compiled in. (Wanted " + name + " in this case.)") - } - symKey := make([]byte, cipher.KeySize()) if _, err := io.ReadFull(config.Random(), symKey); err != nil { return nil, err @@ -279,49 +330,38 @@ func Encrypt(ciphertext io.Writer, to []*Entity, signed *Entity, hints *FileHint } } - encryptedData, err := packet.SerializeSymmetricallyEncrypted(ciphertext, cipher, symKey, config) + payload, err := packet.SerializeSymmetricallyEncrypted(ciphertext, cipher, symKey, config) if err != nil { return } - if signer != nil { - ops := &packet.OnePassSignature{ - SigType: packet.SigTypeBinary, - Hash: hash, - PubKeyAlgo: signer.PubKeyAlgo, - KeyId: signer.KeyId, - IsLast: true, - } - if err := ops.Serialize(encryptedData); err != nil { - return nil, err - } - } + return writeAndSign(payload, candidateHashes, signed, hints, config) +} - if hints == nil { - hints = &FileHints{} +// Sign signs a message. The resulting WriteCloser must be closed after the +// contents of the file have been written. hints contains optional information +// that aids the recipients in processing the message. +// If config is nil, sensible defaults will be used. +func Sign(output io.Writer, signed *Entity, hints *FileHints, config *packet.Config) (input io.WriteCloser, err error) { + if signed == nil { + return nil, errors.InvalidArgumentError("no signer provided") } - w := encryptedData - if signer != nil { - // If we need to write a signature packet after the literal - // data then we need to stop literalData from closing - // encryptedData. - w = noOpCloser{encryptedData} - - } - var epochSeconds uint32 - if !hints.ModTime.IsZero() { - epochSeconds = uint32(hints.ModTime.Unix()) - } - literalData, err := packet.SerializeLiteral(w, hints.IsBinary, hints.FileName, epochSeconds) - if err != nil { - return nil, err + // These are the possible hash functions that we'll use for the signature. 
+ candidateHashes := []uint8{ + hashToHashId(crypto.SHA256), + hashToHashId(crypto.SHA384), + hashToHashId(crypto.SHA512), + hashToHashId(crypto.SHA1), + hashToHashId(crypto.RIPEMD160), } - - if signer != nil { - return signatureWriter{encryptedData, literalData, hash, hash.New(), signer, config}, nil + defaultHashes := candidateHashes[len(candidateHashes)-1:] + preferredHashes := signed.primaryIdentity().SelfSignature.PreferredHash + if len(preferredHashes) == 0 { + preferredHashes = defaultHashes } - return literalData, nil + candidateHashes = intersectPreferences(candidateHashes, preferredHashes) + return writeAndSign(noOpCloser{output}, candidateHashes, signed, hints, config) } // signatureWriter hashes the contents of a message while passing it along to diff --git a/vendor/golang.org/x/crypto/openpgp/write_test.go b/vendor/golang.org/x/crypto/openpgp/write_test.go index f2d50a0..cbc8f4d 100644 --- a/vendor/golang.org/x/crypto/openpgp/write_test.go +++ b/vendor/golang.org/x/crypto/openpgp/write_test.go @@ -271,3 +271,92 @@ func TestEncryption(t *testing.T) { } } } + +var testSigningTests = []struct { + keyRingHex string +}{ + { + testKeys1And2PrivateHex, + }, + { + dsaElGamalTestKeysHex, + }, +} + +func TestSigning(t *testing.T) { + for i, test := range testSigningTests { + kring, _ := ReadKeyRing(readerFromHex(test.keyRingHex)) + + passphrase := []byte("passphrase") + for _, entity := range kring { + if entity.PrivateKey != nil && entity.PrivateKey.Encrypted { + err := entity.PrivateKey.Decrypt(passphrase) + if err != nil { + t.Errorf("#%d: failed to decrypt key", i) + } + } + for _, subkey := range entity.Subkeys { + if subkey.PrivateKey != nil && subkey.PrivateKey.Encrypted { + err := subkey.PrivateKey.Decrypt(passphrase) + if err != nil { + t.Errorf("#%d: failed to decrypt subkey", i) + } + } + } + } + + signed := kring[0] + + buf := new(bytes.Buffer) + w, err := Sign(buf, signed, nil /* no hints */, nil) + if err != nil { + t.Errorf("#%d: error in Sign: %s", i, err) + continue + } + + const message = "testing" + _, err = w.Write([]byte(message)) + if err != nil { + t.Errorf("#%d: error writing plaintext: %s", i, err) + continue + } + err = w.Close() + if err != nil { + t.Errorf("#%d: error closing WriteCloser: %s", i, err) + continue + } + + md, err := ReadMessage(buf, kring, nil /* no prompt */, nil) + if err != nil { + t.Errorf("#%d: error reading message: %s", i, err) + continue + } + + testTime, _ := time.Parse("2006-01-02", "2013-07-01") + signKey, _ := kring[0].signingKey(testTime) + expectedKeyId := signKey.PublicKey.KeyId + if md.SignedByKeyId != expectedKeyId { + t.Errorf("#%d: message signed by wrong key id, got: %v, want: %v", i, *md.SignedBy, expectedKeyId) + } + if md.SignedBy == nil { + t.Errorf("#%d: failed to find the signing Entity", i) + } + + plaintext, err := ioutil.ReadAll(md.UnverifiedBody) + if err != nil { + t.Errorf("#%d: error reading contents: %v", i, err) + continue + } + + if string(plaintext) != message { + t.Errorf("#%d: got: %q, want: %q", i, plaintext, message) + } + + if md.SignatureError != nil { + t.Errorf("#%d: signature error: %q", i, md.SignatureError) + } + if md.Signature == nil { + t.Error("signature missing") + } + } +} diff --git a/vendor/golang.org/x/crypto/otr/otr.go b/vendor/golang.org/x/crypto/otr/otr.go index 173b753..29121e9 100644 --- a/vendor/golang.org/x/crypto/otr/otr.go +++ b/vendor/golang.org/x/crypto/otr/otr.go @@ -4,6 +4,10 @@ // Package otr implements the Off The Record protocol as specified in // 
http://www.cypherpunks.ca/otr/Protocol-v2-3.1.0.html +// +// The version of OTR implemented by this package has been deprecated +// (https://bugs.otr.im/lib/libotr/issues/140). An implementation of OTRv3 is +// available at https://github.com/coyim/otr3. package otr // import "golang.org/x/crypto/otr" import ( @@ -637,7 +641,7 @@ func (c *Conversation) serializeDHKey() []byte { } func (c *Conversation) processDHKey(in []byte) (isSame bool, err error) { - gy, in, ok := getMPI(in) + gy, _, ok := getMPI(in) if !ok { err = errors.New("otr: corrupt DH key message") return diff --git a/vendor/golang.org/x/crypto/pbkdf2/pbkdf2_test.go b/vendor/golang.org/x/crypto/pbkdf2/pbkdf2_test.go index 1379240..f83cb69 100644 --- a/vendor/golang.org/x/crypto/pbkdf2/pbkdf2_test.go +++ b/vendor/golang.org/x/crypto/pbkdf2/pbkdf2_test.go @@ -155,3 +155,22 @@ func TestWithHMACSHA1(t *testing.T) { func TestWithHMACSHA256(t *testing.T) { testHash(t, sha256.New, "SHA256", sha256TestVectors) } + +var sink uint8 + +func benchmark(b *testing.B, h func() hash.Hash) { + password := make([]byte, h().Size()) + salt := make([]byte, 8) + for i := 0; i < b.N; i++ { + password = Key(password, salt, 4096, len(password), h) + } + sink += password[0] +} + +func BenchmarkHMACSHA1(b *testing.B) { + benchmark(b, sha1.New) +} + +func BenchmarkHMACSHA256(b *testing.B) { + benchmark(b, sha256.New) +} diff --git a/vendor/golang.org/x/crypto/pkcs12/crypto.go b/vendor/golang.org/x/crypto/pkcs12/crypto.go index 4bd4470..484ca51 100644 --- a/vendor/golang.org/x/crypto/pkcs12/crypto.go +++ b/vendor/golang.org/x/crypto/pkcs12/crypto.go @@ -124,7 +124,7 @@ func pbDecrypt(info decryptable, password []byte) (decrypted []byte, err error) return } -// decryptable abstracts a object that contains ciphertext. +// decryptable abstracts an object that contains ciphertext. type decryptable interface { Algorithm() pkix.AlgorithmIdentifier Data() []byte diff --git a/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go b/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go index 8c70902..7499e3f 100644 --- a/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go +++ b/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2.go @@ -122,7 +122,6 @@ func (c *rc2Cipher) Encrypt(dst, src []byte) { r3 = r3 + c.k[r2&63] for j <= 40 { - // mix r0 r0 = r0 + c.k[j] + (r3 & r2) + ((^r3) & r1) r0 = rotl16(r0, 1) @@ -151,7 +150,6 @@ func (c *rc2Cipher) Encrypt(dst, src []byte) { r3 = r3 + c.k[r2&63] for j <= 60 { - // mix r0 r0 = r0 + c.k[j] + (r3 & r2) + ((^r3) & r1) r0 = rotl16(r0, 1) @@ -244,7 +242,6 @@ func (c *rc2Cipher) Decrypt(dst, src []byte) { r0 = r0 - c.k[r3&63] for j >= 0 { - // unmix r3 r3 = rotl16(r3, 16-5) r3 = r3 - c.k[j] - (r2 & r1) - ((^r2) & r0) diff --git a/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go b/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go index 8a49dfa..51a7efe 100644 --- a/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go +++ b/vendor/golang.org/x/crypto/pkcs12/internal/rc2/rc2_test.go @@ -11,7 +11,6 @@ import ( ) func TestEncryptDecrypt(t *testing.T) { - // TODO(dgryski): add the rest of the test vectors from the RFC var tests = []struct { key string diff --git a/vendor/golang.org/x/crypto/pkcs12/pkcs12.go b/vendor/golang.org/x/crypto/pkcs12/pkcs12.go index eff9ad3..55f7691 100644 --- a/vendor/golang.org/x/crypto/pkcs12/pkcs12.go +++ b/vendor/golang.org/x/crypto/pkcs12/pkcs12.go @@ -7,6 +7,9 @@ // This implementation is distilled from https://tools.ietf.org/html/rfc7292 // and referenced documents. 
It is intended for decoding P12/PFX-stored // certificates and keys for use with the crypto/tls package. +// +// This package is frozen. If it's missing functionality you need, consider +// an alternative like software.sslmate.com/src/go-pkcs12. package pkcs12 import ( @@ -100,7 +103,7 @@ func unmarshal(in []byte, out interface{}) error { return nil } -// ConvertToPEM converts all "safe bags" contained in pfxData to PEM blocks. +// ToPEM converts all "safe bags" contained in pfxData to PEM blocks. func ToPEM(pfxData []byte, password string) ([]*pem.Block, error) { encodedPassword, err := bmpString(password) if err != nil { @@ -208,7 +211,7 @@ func convertAttribute(attribute *pkcs12Attribute) (key, value string, err error) // Decode extracts a certificate and private key from pfxData. This function // assumes that there is only one certificate and only one private key in the -// pfxData. +// pfxData; if there are more use ToPEM instead. func Decode(pfxData []byte, password string) (privateKey interface{}, certificate *x509.Certificate, err error) { encodedPassword, err := bmpString(password) if err != nil { diff --git a/vendor/golang.org/x/crypto/poly1305/mac_noasm.go b/vendor/golang.org/x/crypto/poly1305/mac_noasm.go new file mode 100644 index 0000000..8387d29 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/mac_noasm.go @@ -0,0 +1,11 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 gccgo appengine + +package poly1305 + +type mac struct{ macGeneric } + +func newMAC(key *[32]byte) mac { return mac{newMACGeneric(key)} } diff --git a/vendor/golang.org/x/crypto/poly1305/poly1305.go b/vendor/golang.org/x/crypto/poly1305/poly1305.go index f562fa5..d076a56 100644 --- a/vendor/golang.org/x/crypto/poly1305/poly1305.go +++ b/vendor/golang.org/x/crypto/poly1305/poly1305.go @@ -2,21 +2,19 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -/* -Package poly1305 implements Poly1305 one-time message authentication code as -specified in https://cr.yp.to/mac/poly1305-20050329.pdf. - -Poly1305 is a fast, one-time authentication function. It is infeasible for an -attacker to generate an authenticator for a message without the key. However, a -key must only be used for a single message. Authenticating two different -messages with the same key allows an attacker to forge authenticators for other -messages with the same key. - -Poly1305 was originally coupled with AES in order to make Poly1305-AES. AES was -used with a fixed key in order to generate one-time keys from an nonce. -However, in this package AES isn't used and the one-time key is specified -directly. -*/ +// Package poly1305 implements Poly1305 one-time message authentication code as +// specified in https://cr.yp.to/mac/poly1305-20050329.pdf. +// +// Poly1305 is a fast, one-time authentication function. It is infeasible for an +// attacker to generate an authenticator for a message without the key. However, a +// key must only be used for a single message. Authenticating two different +// messages with the same key allows an attacker to forge authenticators for other +// messages with the same key. +// +// Poly1305 was originally coupled with AES in order to make Poly1305-AES. AES was +// used with a fixed key in order to generate one-time keys from an nonce. +// However, in this package AES isn't used and the one-time key is specified +// directly. 
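As context for the rewritten poly1305 package comment above, here is a minimal, illustrative sketch of the one-shot API (Sum and Verify, both visible in this diff). The random key only keeps the sketch self-contained; in practice the one-time key comes from the caller's own key-derivation step, and it must never be reused for a second message.

    package main

    import (
        "crypto/rand"
        "fmt"

        "golang.org/x/crypto/poly1305"
    )

    func main() {
        // A Poly1305 key authenticates exactly one message.
        var key [32]byte
        if _, err := rand.Read(key[:]); err != nil {
            panic(err)
        }

        msg := []byte("attack at dawn")

        var tag [16]byte
        poly1305.Sum(&tag, msg, &key)                 // compute the 16-byte authenticator
        fmt.Println(poly1305.Verify(&tag, msg, &key)) // true

        msg[0] ^= 1
        fmt.Println(poly1305.Verify(&tag, msg, &key)) // false: the message was altered
    }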
package poly1305 // import "golang.org/x/crypto/poly1305" import "crypto/subtle" @@ -31,3 +29,55 @@ func Verify(mac *[16]byte, m []byte, key *[32]byte) bool { Sum(&tmp, m, key) return subtle.ConstantTimeCompare(tmp[:], mac[:]) == 1 } + +// New returns a new MAC computing an authentication +// tag of all data written to it with the given key. +// This allows writing the message progressively instead +// of passing it as a single slice. Common users should use +// the Sum function instead. +// +// The key must be unique for each message, as authenticating +// two different messages with the same key allows an attacker +// to forge messages at will. +func New(key *[32]byte) *MAC { + return &MAC{ + mac: newMAC(key), + finalized: false, + } +} + +// MAC is an io.Writer computing an authentication tag +// of the data written to it. +// +// MAC cannot be used like common hash.Hash implementations, +// because using a poly1305 key twice breaks its security. +// Therefore writing data to a running MAC after calling +// Sum causes it to panic. +type MAC struct { + mac // platform-dependent implementation + + finalized bool +} + +// Size returns the number of bytes Sum will return. +func (h *MAC) Size() int { return TagSize } + +// Write adds more data to the running message authentication code. +// It never returns an error. +// +// It must not be called after the first call of Sum. +func (h *MAC) Write(p []byte) (n int, err error) { + if h.finalized { + panic("poly1305: write to MAC after Sum") + } + return h.mac.Write(p) +} + +// Sum computes the authenticator of all data written to the +// message authentication code. +func (h *MAC) Sum(b []byte) []byte { + var mac [TagSize]byte + h.mac.Sum(&mac) + h.finalized = true + return append(b, mac[:]...) +} diff --git a/vendor/golang.org/x/crypto/poly1305/poly1305_test.go b/vendor/golang.org/x/crypto/poly1305/poly1305_test.go index 017027f..e35928a 100644 --- a/vendor/golang.org/x/crypto/poly1305/poly1305_test.go +++ b/vendor/golang.org/x/crypto/poly1305/poly1305_test.go @@ -5,7 +5,6 @@ package poly1305 import ( - "bytes" "encoding/hex" "flag" "testing" @@ -14,80 +13,51 @@ import ( var stressFlag = flag.Bool("stress", false, "run slow stress tests") -var testData = []struct { - in, k, correct []byte -}{ - { - []byte("Hello world!"), - []byte("this is 32-byte key for Poly1305"), - []byte{0xa6, 0xf7, 0x45, 0x00, 0x8f, 0x81, 0xc9, 0x16, 0xa2, 0x0d, 0xcc, 0x74, 0xee, 0xf2, 0xb2, 0xf0}, - }, - { - make([]byte, 32), - []byte("this is 32-byte key for Poly1305"), - []byte{0x49, 0xec, 0x78, 0x09, 0x0e, 0x48, 0x1e, 0xc6, 0xc2, 0x6b, 0x33, 0xb9, 0x1c, 0xcc, 0x03, 0x07}, - }, - { - make([]byte, 2007), - []byte("this is 32-byte key for Poly1305"), - []byte{0xda, 0x84, 0xbc, 0xab, 0x02, 0x67, 0x6c, 0x38, 0xcd, 0xb0, 0x15, 0x60, 0x42, 0x74, 0xc2, 0xaa}, - }, - { - make([]byte, 2007), - make([]byte, 32), - make([]byte, 16), - }, - { - // This test triggers an edge-case. See https://go-review.googlesource.com/#/c/30101/. - []byte{0x81, 0xd8, 0xb2, 0xe4, 0x6a, 0x25, 0x21, 0x3b, 0x58, 0xfe, 0xe4, 0x21, 0x3a, 0x2a, 0x28, 0xe9, 0x21, 0xc1, 0x2a, 0x96, 0x32, 0x51, 0x6d, 0x3b, 0x73, 0x27, 0x27, 0x27, 0xbe, 0xcf, 0x21, 0x29}, - []byte{0x3b, 0x3a, 0x29, 0xe9, 0x3b, 0x21, 0x3a, 0x5c, 0x5c, 0x3b, 0x3b, 0x05, 0x3a, 0x3a, 0x8c, 0x0d}, - []byte{0x6d, 0xc1, 0x8b, 0x8c, 0x34, 0x4c, 0xd7, 0x99, 0x27, 0x11, 0x8b, 0xbe, 0x84, 0xb7, 0xf3, 0x14}, - }, - { - // This test generates a result of (2^130-1) % (2^130-5). 
- []byte{ - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - }, - []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - []byte{4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - }, - { - // This test generates a result of (2^130-6) % (2^130-5). - []byte{ - 0xfa, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - }, - []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - []byte{0xfa, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}, - }, - { - // This test generates a result of (2^130-5) % (2^130-5). - []byte{ - 0xfb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - }, - []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - }, +type test struct { + in string + key string + tag string } -func testSum(t *testing.T, unaligned bool) { - var out [16]byte +func (t *test) Input() []byte { + in, err := hex.DecodeString(t.in) + if err != nil { + panic(err) + } + return in +} + +func (t *test) Key() [32]byte { + buf, err := hex.DecodeString(t.key) + if err != nil { + panic(err) + } var key [32]byte + copy(key[:], buf[:32]) + return key +} +func (t *test) Tag() [16]byte { + buf, err := hex.DecodeString(t.tag) + if err != nil { + panic(err) + } + var tag [16]byte + copy(tag[:], buf[:16]) + return tag +} + +func testSum(t *testing.T, unaligned bool, sumImpl func(tag *[TagSize]byte, msg []byte, key *[32]byte)) { + var tag [16]byte for i, v := range testData { - in := v.in + in := v.Input() if unaligned { in = unalignBytes(in) } - copy(key[:], v.k) - Sum(&out, in, &key) - if !bytes.Equal(out[:], v.correct) { - t.Errorf("%d: expected %x, got %x", i, v.correct, out[:]) + key := v.Key() + sumImpl(&tag, in, &key) + if tag != v.Tag() { + t.Errorf("%d: expected %x, got %x", i, v.Tag(), tag[:]) } } } @@ -125,10 +95,55 @@ func TestBurnin(t *testing.T) { } } -func TestSum(t *testing.T) { testSum(t, false) } -func TestSumUnaligned(t *testing.T) { testSum(t, true) } +func TestSum(t *testing.T) { testSum(t, false, Sum) } +func TestSumUnaligned(t *testing.T) { testSum(t, true, Sum) } +func TestSumGeneric(t *testing.T) { testSum(t, false, sumGeneric) } +func TestSumGenericUnaligned(t *testing.T) { testSum(t, true, sumGeneric) } -func benchmark(b *testing.B, size int, unaligned bool) { +func TestWriteGeneric(t *testing.T) { testWriteGeneric(t, false) } +func TestWriteGenericUnaligned(t *testing.T) { testWriteGeneric(t, true) } +func TestWrite(t *testing.T) { testWrite(t, false) } +func TestWriteUnaligned(t *testing.T) { testWrite(t, true) } + +func testWriteGeneric(t *testing.T, unaligned bool) { + for i, v := range testData { + key := v.Key() + input := 
v.Input() + var out [16]byte + + if unaligned { + input = unalignBytes(input) + } + h := newMACGeneric(&key) + h.Write(input[:len(input)/2]) + h.Write(input[len(input)/2:]) + h.Sum(&out) + if tag := v.Tag(); out != tag { + t.Errorf("%d: expected %x, got %x", i, tag[:], out[:]) + } + } +} + +func testWrite(t *testing.T, unaligned bool) { + for i, v := range testData { + key := v.Key() + input := v.Input() + var out [16]byte + + if unaligned { + input = unalignBytes(input) + } + h := New(&key) + h.Write(input[:len(input)/2]) + h.Write(input[len(input)/2:]) + h.Sum(out[:0]) + if tag := v.Tag(); out != tag { + t.Errorf("%d: expected %x, got %x", i, tag[:], out[:]) + } + } +} + +func benchmarkSum(b *testing.B, size int, unaligned bool) { var out [16]byte var key [32]byte in := make([]byte, size) @@ -142,10 +157,33 @@ func benchmark(b *testing.B, size int, unaligned bool) { } } -func Benchmark64(b *testing.B) { benchmark(b, 64, false) } -func Benchmark1K(b *testing.B) { benchmark(b, 1024, false) } -func Benchmark64Unaligned(b *testing.B) { benchmark(b, 64, true) } -func Benchmark1KUnaligned(b *testing.B) { benchmark(b, 1024, true) } +func benchmarkWrite(b *testing.B, size int, unaligned bool) { + var key [32]byte + h := New(&key) + in := make([]byte, size) + if unaligned { + in = unalignBytes(in) + } + b.SetBytes(int64(len(in))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + h.Write(in) + } +} + +func Benchmark64(b *testing.B) { benchmarkSum(b, 64, false) } +func Benchmark1K(b *testing.B) { benchmarkSum(b, 1024, false) } +func Benchmark2M(b *testing.B) { benchmarkSum(b, 2*1024*1024, false) } +func Benchmark64Unaligned(b *testing.B) { benchmarkSum(b, 64, true) } +func Benchmark1KUnaligned(b *testing.B) { benchmarkSum(b, 1024, true) } +func Benchmark2MUnaligned(b *testing.B) { benchmarkSum(b, 2*1024*1024, true) } + +func BenchmarkWrite64(b *testing.B) { benchmarkWrite(b, 64, false) } +func BenchmarkWrite1K(b *testing.B) { benchmarkWrite(b, 1024, false) } +func BenchmarkWrite2M(b *testing.B) { benchmarkWrite(b, 2*1024*1024, false) } +func BenchmarkWrite64Unaligned(b *testing.B) { benchmarkWrite(b, 64, true) } +func BenchmarkWrite1KUnaligned(b *testing.B) { benchmarkWrite(b, 1024, true) } +func BenchmarkWrite2MUnaligned(b *testing.B) { benchmarkWrite(b, 2*1024*1024, true) } func unalignBytes(in []byte) []byte { out := make([]byte, len(in)+1) diff --git a/vendor/golang.org/x/crypto/poly1305/sum_amd64.go b/vendor/golang.org/x/crypto/poly1305/sum_amd64.go index 4dd72fe..2dbf42a 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_amd64.go +++ b/vendor/golang.org/x/crypto/poly1305/sum_amd64.go @@ -6,17 +6,63 @@ package poly1305 -// This function is implemented in sum_amd64.s //go:noescape -func poly1305(out *[16]byte, m *byte, mlen uint64, key *[32]byte) +func initialize(state *[7]uint64, key *[32]byte) + +//go:noescape +func update(state *[7]uint64, msg []byte) + +//go:noescape +func finalize(tag *[TagSize]byte, state *[7]uint64) // Sum generates an authenticator for m using a one-time key and puts the // 16-byte result into out. Authenticating two different messages with the same // key allows an attacker to forge messages at will. 
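The TestWrite/testWrite cases above exercise the incremental API (New, MAC.Write, MAC.Sum) that this change introduces alongside the one-shot Sum defined just below. A hedged usage sketch, mirroring testWrite: the message may arrive across several Write calls, Sum appends the 16-byte tag, and any Write after Sum panics by design.

    package main

    import (
        "crypto/rand"
        "fmt"

        "golang.org/x/crypto/poly1305"
    )

    func main() {
        var key [32]byte // one-time key; never reuse it for a second message
        if _, err := rand.Read(key[:]); err != nil {
            panic(err)
        }

        h := poly1305.New(&key)
        h.Write([]byte("attack ")) // the message can be fed in pieces
        h.Write([]byte("at dawn"))
        tag := h.Sum(nil) // equivalent to Sum over the concatenated message

        fmt.Printf("%x (%d bytes)\n", tag, h.Size())
    }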
func Sum(out *[16]byte, m []byte, key *[32]byte) { - var mPtr *byte - if len(m) > 0 { - mPtr = &m[0] + h := newMAC(key) + h.Write(m) + h.Sum(out) +} + +func newMAC(key *[32]byte) (h mac) { + initialize(&h.state, key) + return +} + +type mac struct { + state [7]uint64 // := uint64{ h0, h1, h2, r0, r1, pad0, pad1 } + + buffer [TagSize]byte + offset int +} + +func (h *mac) Write(p []byte) (n int, err error) { + n = len(p) + if h.offset > 0 { + remaining := TagSize - h.offset + if n < remaining { + h.offset += copy(h.buffer[h.offset:], p) + return n, nil + } + copy(h.buffer[h.offset:], p[:remaining]) + p = p[remaining:] + h.offset = 0 + update(&h.state, h.buffer[:]) + } + if nn := len(p) - (len(p) % TagSize); nn > 0 { + update(&h.state, p[:nn]) + p = p[nn:] + } + if len(p) > 0 { + h.offset += copy(h.buffer[h.offset:], p) + } + return n, nil +} + +func (h *mac) Sum(out *[16]byte) { + state := h.state + if h.offset > 0 { + update(&state, h.buffer[:h.offset]) } - poly1305(out, mPtr, uint64(len(m)), key) + finalize(out, &state) } diff --git a/vendor/golang.org/x/crypto/poly1305/sum_amd64.s b/vendor/golang.org/x/crypto/poly1305/sum_amd64.s index 2edae63..7d600f1 100644 --- a/vendor/golang.org/x/crypto/poly1305/sum_amd64.s +++ b/vendor/golang.org/x/crypto/poly1305/sum_amd64.s @@ -58,20 +58,17 @@ DATA ·poly1305Mask<>+0x00(SB)/8, $0x0FFFFFFC0FFFFFFF DATA ·poly1305Mask<>+0x08(SB)/8, $0x0FFFFFFC0FFFFFFC GLOBL ·poly1305Mask<>(SB), RODATA, $16 -// func poly1305(out *[16]byte, m *byte, mlen uint64, key *[32]key) -TEXT ·poly1305(SB), $0-32 - MOVQ out+0(FP), DI - MOVQ m+8(FP), SI - MOVQ mlen+16(FP), R15 - MOVQ key+24(FP), AX - - MOVQ 0(AX), R11 - MOVQ 8(AX), R12 - ANDQ ·poly1305Mask<>(SB), R11 // r0 - ANDQ ·poly1305Mask<>+8(SB), R12 // r1 - XORQ R8, R8 // h0 - XORQ R9, R9 // h1 - XORQ R10, R10 // h2 +// func update(state *[7]uint64, msg []byte) +TEXT ·update(SB), $0-32 + MOVQ state+0(FP), DI + MOVQ msg_base+8(FP), SI + MOVQ msg_len+16(FP), R15 + + MOVQ 0(DI), R8 // h0 + MOVQ 8(DI), R9 // h1 + MOVQ 16(DI), R10 // h2 + MOVQ 24(DI), R11 // r0 + MOVQ 32(DI), R12 // r1 CMPQ R15, $16 JB bytes_between_0_and_15 @@ -109,16 +106,42 @@ flush_buffer: JMP multiply done: - MOVQ R8, AX - MOVQ R9, BX + MOVQ R8, 0(DI) + MOVQ R9, 8(DI) + MOVQ R10, 16(DI) + RET + +// func initialize(state *[7]uint64, key *[32]byte) +TEXT ·initialize(SB), $0-16 + MOVQ state+0(FP), DI + MOVQ key+8(FP), SI + + // state[0...7] is initialized with zero + MOVOU 0(SI), X0 + MOVOU 16(SI), X1 + MOVOU ·poly1305Mask<>(SB), X2 + PAND X2, X0 + MOVOU X0, 24(DI) + MOVOU X1, 40(DI) + RET + +// func finalize(tag *[TagSize]byte, state *[7]uint64) +TEXT ·finalize(SB), $0-16 + MOVQ tag+0(FP), DI + MOVQ state+8(FP), SI + + MOVQ 0(SI), AX + MOVQ 8(SI), BX + MOVQ 16(SI), CX + MOVQ AX, R8 + MOVQ BX, R9 SUBQ $0xFFFFFFFFFFFFFFFB, AX SBBQ $0xFFFFFFFFFFFFFFFF, BX - SBBQ $3, R10 + SBBQ $3, CX CMOVQCS R8, AX CMOVQCS R9, BX - MOVQ key+24(FP), R8 - ADDQ 16(R8), AX - ADCQ 24(R8), BX + ADDQ 40(SI), AX + ADCQ 48(SI), BX MOVQ AX, 0(DI) MOVQ BX, 8(DI) diff --git a/vendor/golang.org/x/crypto/poly1305/sum_generic.go b/vendor/golang.org/x/crypto/poly1305/sum_generic.go new file mode 100644 index 0000000..bab76ef --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/sum_generic.go @@ -0,0 +1,172 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
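The new generic implementation that follows keeps the accumulator and r in five 26-bit limbs so that every per-block product fits in a uint64. Below is a small standalone sketch of the block-to-limb split performed by updateGeneric; splitLimbs is a hypothetical helper written only for illustration and is not part of the package. The final OR with 1<<24 is the 2^128 pad bit appended to every full 16-byte block (the msgBlock flag in the new file).

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    // splitLimbs loads one full 16-byte Poly1305 block into five 26-bit limbs,
    // matching the loads in updateGeneric. Overlapping 4-byte little-endian reads
    // at offsets 0, 3, 6, 9 and 12, shifted by 0, 2, 4, 6 and 8 bits, carve the
    // 128-bit block into 26-bit pieces; the top limb also carries the 2^128 pad bit.
    func splitLimbs(block [16]byte) [5]uint32 {
        return [5]uint32{
            binary.LittleEndian.Uint32(block[0:]) & 0x3ffffff,
            (binary.LittleEndian.Uint32(block[3:]) >> 2) & 0x3ffffff,
            (binary.LittleEndian.Uint32(block[6:]) >> 4) & 0x3ffffff,
            (binary.LittleEndian.Uint32(block[9:]) >> 6) & 0x3ffffff,
            (binary.LittleEndian.Uint32(block[12:]) >> 8) | (1 << 24),
        }
    }

    func main() {
        var block [16]byte
        copy(block[:], "sixteen byte blk")
        for i, limb := range splitLimbs(block) {
            fmt.Printf("limb %d: %07x\n", i, limb)
        }
    }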
+ +package poly1305 + +import "encoding/binary" + +const ( + msgBlock = uint32(1 << 24) + finalBlock = uint32(0) +) + +// sumGeneric generates an authenticator for msg using a one-time key and +// puts the 16-byte result into out. This is the generic implementation of +// Sum and should be called if no assembly implementation is available. +func sumGeneric(out *[TagSize]byte, msg []byte, key *[32]byte) { + h := newMACGeneric(key) + h.Write(msg) + h.Sum(out) +} + +func newMACGeneric(key *[32]byte) (h macGeneric) { + h.r[0] = binary.LittleEndian.Uint32(key[0:]) & 0x3ffffff + h.r[1] = (binary.LittleEndian.Uint32(key[3:]) >> 2) & 0x3ffff03 + h.r[2] = (binary.LittleEndian.Uint32(key[6:]) >> 4) & 0x3ffc0ff + h.r[3] = (binary.LittleEndian.Uint32(key[9:]) >> 6) & 0x3f03fff + h.r[4] = (binary.LittleEndian.Uint32(key[12:]) >> 8) & 0x00fffff + + h.s[0] = binary.LittleEndian.Uint32(key[16:]) + h.s[1] = binary.LittleEndian.Uint32(key[20:]) + h.s[2] = binary.LittleEndian.Uint32(key[24:]) + h.s[3] = binary.LittleEndian.Uint32(key[28:]) + return +} + +type macGeneric struct { + h, r [5]uint32 + s [4]uint32 + + buffer [TagSize]byte + offset int +} + +func (h *macGeneric) Write(p []byte) (n int, err error) { + n = len(p) + if h.offset > 0 { + remaining := TagSize - h.offset + if n < remaining { + h.offset += copy(h.buffer[h.offset:], p) + return n, nil + } + copy(h.buffer[h.offset:], p[:remaining]) + p = p[remaining:] + h.offset = 0 + updateGeneric(h.buffer[:], msgBlock, &(h.h), &(h.r)) + } + if nn := len(p) - (len(p) % TagSize); nn > 0 { + updateGeneric(p, msgBlock, &(h.h), &(h.r)) + p = p[nn:] + } + if len(p) > 0 { + h.offset += copy(h.buffer[h.offset:], p) + } + return n, nil +} + +func (h *macGeneric) Sum(out *[16]byte) { + H, R := h.h, h.r + if h.offset > 0 { + var buffer [TagSize]byte + copy(buffer[:], h.buffer[:h.offset]) + buffer[h.offset] = 1 // invariant: h.offset < TagSize + updateGeneric(buffer[:], finalBlock, &H, &R) + } + finalizeGeneric(out, &H, &(h.s)) +} + +func updateGeneric(msg []byte, flag uint32, h, r *[5]uint32) { + h0, h1, h2, h3, h4 := h[0], h[1], h[2], h[3], h[4] + r0, r1, r2, r3, r4 := uint64(r[0]), uint64(r[1]), uint64(r[2]), uint64(r[3]), uint64(r[4]) + R1, R2, R3, R4 := r1*5, r2*5, r3*5, r4*5 + + for len(msg) >= TagSize { + // h += msg + h0 += binary.LittleEndian.Uint32(msg[0:]) & 0x3ffffff + h1 += (binary.LittleEndian.Uint32(msg[3:]) >> 2) & 0x3ffffff + h2 += (binary.LittleEndian.Uint32(msg[6:]) >> 4) & 0x3ffffff + h3 += (binary.LittleEndian.Uint32(msg[9:]) >> 6) & 0x3ffffff + h4 += (binary.LittleEndian.Uint32(msg[12:]) >> 8) | flag + + // h *= r + d0 := (uint64(h0) * r0) + (uint64(h1) * R4) + (uint64(h2) * R3) + (uint64(h3) * R2) + (uint64(h4) * R1) + d1 := (d0 >> 26) + (uint64(h0) * r1) + (uint64(h1) * r0) + (uint64(h2) * R4) + (uint64(h3) * R3) + (uint64(h4) * R2) + d2 := (d1 >> 26) + (uint64(h0) * r2) + (uint64(h1) * r1) + (uint64(h2) * r0) + (uint64(h3) * R4) + (uint64(h4) * R3) + d3 := (d2 >> 26) + (uint64(h0) * r3) + (uint64(h1) * r2) + (uint64(h2) * r1) + (uint64(h3) * r0) + (uint64(h4) * R4) + d4 := (d3 >> 26) + (uint64(h0) * r4) + (uint64(h1) * r3) + (uint64(h2) * r2) + (uint64(h3) * r1) + (uint64(h4) * r0) + + // h %= p + h0 = uint32(d0) & 0x3ffffff + h1 = uint32(d1) & 0x3ffffff + h2 = uint32(d2) & 0x3ffffff + h3 = uint32(d3) & 0x3ffffff + h4 = uint32(d4) & 0x3ffffff + + h0 += uint32(d4>>26) * 5 + h1 += h0 >> 26 + h0 = h0 & 0x3ffffff + + msg = msg[TagSize:] + } + + h[0], h[1], h[2], h[3], h[4] = h0, h1, h2, h3, h4 +} + +func finalizeGeneric(out *[TagSize]byte, h 
*[5]uint32, s *[4]uint32) { + h0, h1, h2, h3, h4 := h[0], h[1], h[2], h[3], h[4] + + // h %= p reduction + h2 += h1 >> 26 + h1 &= 0x3ffffff + h3 += h2 >> 26 + h2 &= 0x3ffffff + h4 += h3 >> 26 + h3 &= 0x3ffffff + h0 += 5 * (h4 >> 26) + h4 &= 0x3ffffff + h1 += h0 >> 26 + h0 &= 0x3ffffff + + // h - p + t0 := h0 + 5 + t1 := h1 + (t0 >> 26) + t2 := h2 + (t1 >> 26) + t3 := h3 + (t2 >> 26) + t4 := h4 + (t3 >> 26) - (1 << 26) + t0 &= 0x3ffffff + t1 &= 0x3ffffff + t2 &= 0x3ffffff + t3 &= 0x3ffffff + + // select h if h < p else h - p + t_mask := (t4 >> 31) - 1 + h_mask := ^t_mask + h0 = (h0 & h_mask) | (t0 & t_mask) + h1 = (h1 & h_mask) | (t1 & t_mask) + h2 = (h2 & h_mask) | (t2 & t_mask) + h3 = (h3 & h_mask) | (t3 & t_mask) + h4 = (h4 & h_mask) | (t4 & t_mask) + + // h %= 2^128 + h0 |= h1 << 26 + h1 = ((h1 >> 6) | (h2 << 20)) + h2 = ((h2 >> 12) | (h3 << 14)) + h3 = ((h3 >> 18) | (h4 << 8)) + + // s: the s part of the key + // tag = (h + s) % (2^128) + t := uint64(h0) + uint64(s[0]) + h0 = uint32(t) + t = uint64(h1) + uint64(s[1]) + (t >> 32) + h1 = uint32(t) + t = uint64(h2) + uint64(s[2]) + (t >> 32) + h2 = uint32(t) + t = uint64(h3) + uint64(s[3]) + (t >> 32) + h3 = uint32(t) + + binary.LittleEndian.PutUint32(out[0:], h0) + binary.LittleEndian.PutUint32(out[4:], h1) + binary.LittleEndian.PutUint32(out[8:], h2) + binary.LittleEndian.PutUint32(out[12:], h3) +} diff --git a/vendor/golang.org/x/crypto/poly1305/sum_noasm.go b/vendor/golang.org/x/crypto/poly1305/sum_noasm.go new file mode 100644 index 0000000..fcdef46 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/sum_noasm.go @@ -0,0 +1,16 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,!go1.11 !arm,!amd64,!s390x gccgo appengine nacl + +package poly1305 + +// Sum generates an authenticator for msg using a one-time key and puts the +// 16-byte result into out. Authenticating two different messages with the same +// key allows an attacker to forge messages at will. +func Sum(out *[TagSize]byte, msg []byte, key *[32]byte) { + h := newMAC(key) + h.Write(msg) + h.Sum(out) +} diff --git a/vendor/golang.org/x/crypto/poly1305/sum_ref.go b/vendor/golang.org/x/crypto/poly1305/sum_ref.go deleted file mode 100644 index b2805a5..0000000 --- a/vendor/golang.org/x/crypto/poly1305/sum_ref.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !amd64,!arm gccgo appengine nacl - -package poly1305 - -import "encoding/binary" - -// Sum generates an authenticator for msg using a one-time key and puts the -// 16-byte result into out. Authenticating two different messages with the same -// key allows an attacker to forge messages at will. 
-func Sum(out *[TagSize]byte, msg []byte, key *[32]byte) { - var ( - h0, h1, h2, h3, h4 uint32 // the hash accumulators - r0, r1, r2, r3, r4 uint64 // the r part of the key - ) - - r0 = uint64(binary.LittleEndian.Uint32(key[0:]) & 0x3ffffff) - r1 = uint64((binary.LittleEndian.Uint32(key[3:]) >> 2) & 0x3ffff03) - r2 = uint64((binary.LittleEndian.Uint32(key[6:]) >> 4) & 0x3ffc0ff) - r3 = uint64((binary.LittleEndian.Uint32(key[9:]) >> 6) & 0x3f03fff) - r4 = uint64((binary.LittleEndian.Uint32(key[12:]) >> 8) & 0x00fffff) - - R1, R2, R3, R4 := r1*5, r2*5, r3*5, r4*5 - - for len(msg) >= TagSize { - // h += msg - h0 += binary.LittleEndian.Uint32(msg[0:]) & 0x3ffffff - h1 += (binary.LittleEndian.Uint32(msg[3:]) >> 2) & 0x3ffffff - h2 += (binary.LittleEndian.Uint32(msg[6:]) >> 4) & 0x3ffffff - h3 += (binary.LittleEndian.Uint32(msg[9:]) >> 6) & 0x3ffffff - h4 += (binary.LittleEndian.Uint32(msg[12:]) >> 8) | (1 << 24) - - // h *= r - d0 := (uint64(h0) * r0) + (uint64(h1) * R4) + (uint64(h2) * R3) + (uint64(h3) * R2) + (uint64(h4) * R1) - d1 := (d0 >> 26) + (uint64(h0) * r1) + (uint64(h1) * r0) + (uint64(h2) * R4) + (uint64(h3) * R3) + (uint64(h4) * R2) - d2 := (d1 >> 26) + (uint64(h0) * r2) + (uint64(h1) * r1) + (uint64(h2) * r0) + (uint64(h3) * R4) + (uint64(h4) * R3) - d3 := (d2 >> 26) + (uint64(h0) * r3) + (uint64(h1) * r2) + (uint64(h2) * r1) + (uint64(h3) * r0) + (uint64(h4) * R4) - d4 := (d3 >> 26) + (uint64(h0) * r4) + (uint64(h1) * r3) + (uint64(h2) * r2) + (uint64(h3) * r1) + (uint64(h4) * r0) - - // h %= p - h0 = uint32(d0) & 0x3ffffff - h1 = uint32(d1) & 0x3ffffff - h2 = uint32(d2) & 0x3ffffff - h3 = uint32(d3) & 0x3ffffff - h4 = uint32(d4) & 0x3ffffff - - h0 += uint32(d4>>26) * 5 - h1 += h0 >> 26 - h0 = h0 & 0x3ffffff - - msg = msg[TagSize:] - } - - if len(msg) > 0 { - var block [TagSize]byte - off := copy(block[:], msg) - block[off] = 0x01 - - // h += msg - h0 += binary.LittleEndian.Uint32(block[0:]) & 0x3ffffff - h1 += (binary.LittleEndian.Uint32(block[3:]) >> 2) & 0x3ffffff - h2 += (binary.LittleEndian.Uint32(block[6:]) >> 4) & 0x3ffffff - h3 += (binary.LittleEndian.Uint32(block[9:]) >> 6) & 0x3ffffff - h4 += (binary.LittleEndian.Uint32(block[12:]) >> 8) - - // h *= r - d0 := (uint64(h0) * r0) + (uint64(h1) * R4) + (uint64(h2) * R3) + (uint64(h3) * R2) + (uint64(h4) * R1) - d1 := (d0 >> 26) + (uint64(h0) * r1) + (uint64(h1) * r0) + (uint64(h2) * R4) + (uint64(h3) * R3) + (uint64(h4) * R2) - d2 := (d1 >> 26) + (uint64(h0) * r2) + (uint64(h1) * r1) + (uint64(h2) * r0) + (uint64(h3) * R4) + (uint64(h4) * R3) - d3 := (d2 >> 26) + (uint64(h0) * r3) + (uint64(h1) * r2) + (uint64(h2) * r1) + (uint64(h3) * r0) + (uint64(h4) * R4) - d4 := (d3 >> 26) + (uint64(h0) * r4) + (uint64(h1) * r3) + (uint64(h2) * r2) + (uint64(h3) * r1) + (uint64(h4) * r0) - - // h %= p - h0 = uint32(d0) & 0x3ffffff - h1 = uint32(d1) & 0x3ffffff - h2 = uint32(d2) & 0x3ffffff - h3 = uint32(d3) & 0x3ffffff - h4 = uint32(d4) & 0x3ffffff - - h0 += uint32(d4>>26) * 5 - h1 += h0 >> 26 - h0 = h0 & 0x3ffffff - } - - // h %= p reduction - h2 += h1 >> 26 - h1 &= 0x3ffffff - h3 += h2 >> 26 - h2 &= 0x3ffffff - h4 += h3 >> 26 - h3 &= 0x3ffffff - h0 += 5 * (h4 >> 26) - h4 &= 0x3ffffff - h1 += h0 >> 26 - h0 &= 0x3ffffff - - // h - p - t0 := h0 + 5 - t1 := h1 + (t0 >> 26) - t2 := h2 + (t1 >> 26) - t3 := h3 + (t2 >> 26) - t4 := h4 + (t3 >> 26) - (1 << 26) - t0 &= 0x3ffffff - t1 &= 0x3ffffff - t2 &= 0x3ffffff - t3 &= 0x3ffffff - - // select h if h < p else h - p - t_mask := (t4 >> 31) - 1 - h_mask := ^t_mask - h0 = (h0 & h_mask) | (t0 & 
t_mask) - h1 = (h1 & h_mask) | (t1 & t_mask) - h2 = (h2 & h_mask) | (t2 & t_mask) - h3 = (h3 & h_mask) | (t3 & t_mask) - h4 = (h4 & h_mask) | (t4 & t_mask) - - // h %= 2^128 - h0 |= h1 << 26 - h1 = ((h1 >> 6) | (h2 << 20)) - h2 = ((h2 >> 12) | (h3 << 14)) - h3 = ((h3 >> 18) | (h4 << 8)) - - // s: the s part of the key - // tag = (h + s) % (2^128) - t := uint64(h0) + uint64(binary.LittleEndian.Uint32(key[16:])) - h0 = uint32(t) - t = uint64(h1) + uint64(binary.LittleEndian.Uint32(key[20:])) + (t >> 32) - h1 = uint32(t) - t = uint64(h2) + uint64(binary.LittleEndian.Uint32(key[24:])) + (t >> 32) - h2 = uint32(t) - t = uint64(h3) + uint64(binary.LittleEndian.Uint32(key[28:])) + (t >> 32) - h3 = uint32(t) - - binary.LittleEndian.PutUint32(out[0:], h0) - binary.LittleEndian.PutUint32(out[4:], h1) - binary.LittleEndian.PutUint32(out[8:], h2) - binary.LittleEndian.PutUint32(out[12:], h3) -} diff --git a/vendor/golang.org/x/crypto/poly1305/sum_s390x.go b/vendor/golang.org/x/crypto/poly1305/sum_s390x.go new file mode 100644 index 0000000..ec99e07 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/sum_s390x.go @@ -0,0 +1,42 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,go1.11,!gccgo,!appengine + +package poly1305 + +import ( + "golang.org/x/sys/cpu" +) + +// poly1305vx is an assembly implementation of Poly1305 that uses vector +// instructions. It must only be called if the vector facility (vx) is +// available. +//go:noescape +func poly1305vx(out *[16]byte, m *byte, mlen uint64, key *[32]byte) + +// poly1305vmsl is an assembly implementation of Poly1305 that uses vector +// instructions, including VMSL. It must only be called if the vector facility (vx) is +// available and if VMSL is supported. +//go:noescape +func poly1305vmsl(out *[16]byte, m *byte, mlen uint64, key *[32]byte) + +// Sum generates an authenticator for m using a one-time key and puts the +// 16-byte result into out. Authenticating two different messages with the same +// key allows an attacker to forge messages at will. +func Sum(out *[16]byte, m []byte, key *[32]byte) { + if cpu.S390X.HasVX { + var mPtr *byte + if len(m) > 0 { + mPtr = &m[0] + } + if cpu.S390X.HasVXE && len(m) > 256 { + poly1305vmsl(out, mPtr, uint64(len(m)), key) + } else { + poly1305vx(out, mPtr, uint64(len(m)), key) + } + } else { + sumGeneric(out, m, key) + } +} diff --git a/vendor/golang.org/x/crypto/poly1305/sum_s390x.s b/vendor/golang.org/x/crypto/poly1305/sum_s390x.s new file mode 100644 index 0000000..ca5a309 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/sum_s390x.s @@ -0,0 +1,378 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,go1.11,!gccgo,!appengine + +#include "textflag.h" + +// Implementation of Poly1305 using the vector facility (vx). 
+ +// constants +#define MOD26 V0 +#define EX0 V1 +#define EX1 V2 +#define EX2 V3 + +// temporaries +#define T_0 V4 +#define T_1 V5 +#define T_2 V6 +#define T_3 V7 +#define T_4 V8 + +// key (r) +#define R_0 V9 +#define R_1 V10 +#define R_2 V11 +#define R_3 V12 +#define R_4 V13 +#define R5_1 V14 +#define R5_2 V15 +#define R5_3 V16 +#define R5_4 V17 +#define RSAVE_0 R5 +#define RSAVE_1 R6 +#define RSAVE_2 R7 +#define RSAVE_3 R8 +#define RSAVE_4 R9 +#define R5SAVE_1 V28 +#define R5SAVE_2 V29 +#define R5SAVE_3 V30 +#define R5SAVE_4 V31 + +// message block +#define F_0 V18 +#define F_1 V19 +#define F_2 V20 +#define F_3 V21 +#define F_4 V22 + +// accumulator +#define H_0 V23 +#define H_1 V24 +#define H_2 V25 +#define H_3 V26 +#define H_4 V27 + +GLOBL ·keyMask<>(SB), RODATA, $16 +DATA ·keyMask<>+0(SB)/8, $0xffffff0ffcffff0f +DATA ·keyMask<>+8(SB)/8, $0xfcffff0ffcffff0f + +GLOBL ·bswapMask<>(SB), RODATA, $16 +DATA ·bswapMask<>+0(SB)/8, $0x0f0e0d0c0b0a0908 +DATA ·bswapMask<>+8(SB)/8, $0x0706050403020100 + +GLOBL ·constants<>(SB), RODATA, $64 +// MOD26 +DATA ·constants<>+0(SB)/8, $0x3ffffff +DATA ·constants<>+8(SB)/8, $0x3ffffff +// EX0 +DATA ·constants<>+16(SB)/8, $0x0006050403020100 +DATA ·constants<>+24(SB)/8, $0x1016151413121110 +// EX1 +DATA ·constants<>+32(SB)/8, $0x060c0b0a09080706 +DATA ·constants<>+40(SB)/8, $0x161c1b1a19181716 +// EX2 +DATA ·constants<>+48(SB)/8, $0x0d0d0d0d0d0f0e0d +DATA ·constants<>+56(SB)/8, $0x1d1d1d1d1d1f1e1d + +// h = (f*g) % (2**130-5) [partial reduction] +#define MULTIPLY(f0, f1, f2, f3, f4, g0, g1, g2, g3, g4, g51, g52, g53, g54, h0, h1, h2, h3, h4) \ + VMLOF f0, g0, h0 \ + VMLOF f0, g1, h1 \ + VMLOF f0, g2, h2 \ + VMLOF f0, g3, h3 \ + VMLOF f0, g4, h4 \ + VMLOF f1, g54, T_0 \ + VMLOF f1, g0, T_1 \ + VMLOF f1, g1, T_2 \ + VMLOF f1, g2, T_3 \ + VMLOF f1, g3, T_4 \ + VMALOF f2, g53, h0, h0 \ + VMALOF f2, g54, h1, h1 \ + VMALOF f2, g0, h2, h2 \ + VMALOF f2, g1, h3, h3 \ + VMALOF f2, g2, h4, h4 \ + VMALOF f3, g52, T_0, T_0 \ + VMALOF f3, g53, T_1, T_1 \ + VMALOF f3, g54, T_2, T_2 \ + VMALOF f3, g0, T_3, T_3 \ + VMALOF f3, g1, T_4, T_4 \ + VMALOF f4, g51, h0, h0 \ + VMALOF f4, g52, h1, h1 \ + VMALOF f4, g53, h2, h2 \ + VMALOF f4, g54, h3, h3 \ + VMALOF f4, g0, h4, h4 \ + VAG T_0, h0, h0 \ + VAG T_1, h1, h1 \ + VAG T_2, h2, h2 \ + VAG T_3, h3, h3 \ + VAG T_4, h4, h4 + +// carry h0->h1 h3->h4, h1->h2 h4->h0, h0->h1 h2->h3, h3->h4 +#define REDUCE(h0, h1, h2, h3, h4) \ + VESRLG $26, h0, T_0 \ + VESRLG $26, h3, T_1 \ + VN MOD26, h0, h0 \ + VN MOD26, h3, h3 \ + VAG T_0, h1, h1 \ + VAG T_1, h4, h4 \ + VESRLG $26, h1, T_2 \ + VESRLG $26, h4, T_3 \ + VN MOD26, h1, h1 \ + VN MOD26, h4, h4 \ + VESLG $2, T_3, T_4 \ + VAG T_3, T_4, T_4 \ + VAG T_2, h2, h2 \ + VAG T_4, h0, h0 \ + VESRLG $26, h2, T_0 \ + VESRLG $26, h0, T_1 \ + VN MOD26, h2, h2 \ + VN MOD26, h0, h0 \ + VAG T_0, h3, h3 \ + VAG T_1, h1, h1 \ + VESRLG $26, h3, T_2 \ + VN MOD26, h3, h3 \ + VAG T_2, h4, h4 + +// expand in0 into d[0] and in1 into d[1] +#define EXPAND(in0, in1, d0, d1, d2, d3, d4) \ + VGBM $0x0707, d1 \ // d1=tmp + VPERM in0, in1, EX2, d4 \ + VPERM in0, in1, EX0, d0 \ + VPERM in0, in1, EX1, d2 \ + VN d1, d4, d4 \ + VESRLG $26, d0, d1 \ + VESRLG $30, d2, d3 \ + VESRLG $4, d2, d2 \ + VN MOD26, d0, d0 \ + VN MOD26, d1, d1 \ + VN MOD26, d2, d2 \ + VN MOD26, d3, d3 + +// pack h4:h0 into h1:h0 (no carry) +#define PACK(h0, h1, h2, h3, h4) \ + VESLG $26, h1, h1 \ + VESLG $26, h3, h3 \ + VO h0, h1, h0 \ + VO h2, h3, h2 \ + VESLG $4, h2, h2 \ + VLEIB $7, $48, h1 \ + VSLB h1, h2, h2 \ + VO h0, h2, h0 \ + VLEIB $7, 
$104, h1 \ + VSLB h1, h4, h3 \ + VO h3, h0, h0 \ + VLEIB $7, $24, h1 \ + VSRLB h1, h4, h1 + +// if h > 2**130-5 then h -= 2**130-5 +#define MOD(h0, h1, t0, t1, t2) \ + VZERO t0 \ + VLEIG $1, $5, t0 \ + VACCQ h0, t0, t1 \ + VAQ h0, t0, t0 \ + VONE t2 \ + VLEIG $1, $-4, t2 \ + VAQ t2, t1, t1 \ + VACCQ h1, t1, t1 \ + VONE t2 \ + VAQ t2, t1, t1 \ + VN h0, t1, t2 \ + VNC t0, t1, t1 \ + VO t1, t2, h0 + +// func poly1305vx(out *[16]byte, m *byte, mlen uint64, key *[32]key) +TEXT ·poly1305vx(SB), $0-32 + // This code processes up to 2 blocks (32 bytes) per iteration + // using the algorithm described in: + // NEON crypto, Daniel J. Bernstein & Peter Schwabe + // https://cryptojedi.org/papers/neoncrypto-20120320.pdf + LMG out+0(FP), R1, R4 // R1=out, R2=m, R3=mlen, R4=key + + // load MOD26, EX0, EX1 and EX2 + MOVD $·constants<>(SB), R5 + VLM (R5), MOD26, EX2 + + // setup r + VL (R4), T_0 + MOVD $·keyMask<>(SB), R6 + VL (R6), T_1 + VN T_0, T_1, T_0 + EXPAND(T_0, T_0, R_0, R_1, R_2, R_3, R_4) + + // setup r*5 + VLEIG $0, $5, T_0 + VLEIG $1, $5, T_0 + + // store r (for final block) + VMLOF T_0, R_1, R5SAVE_1 + VMLOF T_0, R_2, R5SAVE_2 + VMLOF T_0, R_3, R5SAVE_3 + VMLOF T_0, R_4, R5SAVE_4 + VLGVG $0, R_0, RSAVE_0 + VLGVG $0, R_1, RSAVE_1 + VLGVG $0, R_2, RSAVE_2 + VLGVG $0, R_3, RSAVE_3 + VLGVG $0, R_4, RSAVE_4 + + // skip r**2 calculation + CMPBLE R3, $16, skip + + // calculate r**2 + MULTIPLY(R_0, R_1, R_2, R_3, R_4, R_0, R_1, R_2, R_3, R_4, R5SAVE_1, R5SAVE_2, R5SAVE_3, R5SAVE_4, H_0, H_1, H_2, H_3, H_4) + REDUCE(H_0, H_1, H_2, H_3, H_4) + VLEIG $0, $5, T_0 + VLEIG $1, $5, T_0 + VMLOF T_0, H_1, R5_1 + VMLOF T_0, H_2, R5_2 + VMLOF T_0, H_3, R5_3 + VMLOF T_0, H_4, R5_4 + VLR H_0, R_0 + VLR H_1, R_1 + VLR H_2, R_2 + VLR H_3, R_3 + VLR H_4, R_4 + + // initialize h + VZERO H_0 + VZERO H_1 + VZERO H_2 + VZERO H_3 + VZERO H_4 + +loop: + CMPBLE R3, $32, b2 + VLM (R2), T_0, T_1 + SUB $32, R3 + MOVD $32(R2), R2 + EXPAND(T_0, T_1, F_0, F_1, F_2, F_3, F_4) + VLEIB $4, $1, F_4 + VLEIB $12, $1, F_4 + +multiply: + VAG H_0, F_0, F_0 + VAG H_1, F_1, F_1 + VAG H_2, F_2, F_2 + VAG H_3, F_3, F_3 + VAG H_4, F_4, F_4 + MULTIPLY(F_0, F_1, F_2, F_3, F_4, R_0, R_1, R_2, R_3, R_4, R5_1, R5_2, R5_3, R5_4, H_0, H_1, H_2, H_3, H_4) + REDUCE(H_0, H_1, H_2, H_3, H_4) + CMPBNE R3, $0, loop + +finish: + // sum vectors + VZERO T_0 + VSUMQG H_0, T_0, H_0 + VSUMQG H_1, T_0, H_1 + VSUMQG H_2, T_0, H_2 + VSUMQG H_3, T_0, H_3 + VSUMQG H_4, T_0, H_4 + + // h may be >= 2*(2**130-5) so we need to reduce it again + REDUCE(H_0, H_1, H_2, H_3, H_4) + + // carry h1->h4 + VESRLG $26, H_1, T_1 + VN MOD26, H_1, H_1 + VAQ T_1, H_2, H_2 + VESRLG $26, H_2, T_2 + VN MOD26, H_2, H_2 + VAQ T_2, H_3, H_3 + VESRLG $26, H_3, T_3 + VN MOD26, H_3, H_3 + VAQ T_3, H_4, H_4 + + // h is now < 2*(2**130-5) + // pack h into h1 (hi) and h0 (lo) + PACK(H_0, H_1, H_2, H_3, H_4) + + // if h > 2**130-5 then h -= 2**130-5 + MOD(H_0, H_1, T_0, T_1, T_2) + + // h += s + MOVD $·bswapMask<>(SB), R5 + VL (R5), T_1 + VL 16(R4), T_0 + VPERM T_0, T_0, T_1, T_0 // reverse bytes (to big) + VAQ T_0, H_0, H_0 + VPERM H_0, H_0, T_1, H_0 // reverse bytes (to little) + VST H_0, (R1) + + RET + +b2: + CMPBLE R3, $16, b1 + + // 2 blocks remaining + SUB $17, R3 + VL (R2), T_0 + VLL R3, 16(R2), T_1 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, T_1 + EXPAND(T_0, T_1, F_0, F_1, F_2, F_3, F_4) + CMPBNE R3, $16, 2(PC) + VLEIB $12, $1, F_4 + VLEIB $4, $1, F_4 + + // setup [r²,r] + VLVGG $1, RSAVE_0, R_0 + VLVGG $1, RSAVE_1, R_1 + VLVGG $1, RSAVE_2, R_2 + VLVGG $1, RSAVE_3, 
R_3 + VLVGG $1, RSAVE_4, R_4 + VPDI $0, R5_1, R5SAVE_1, R5_1 + VPDI $0, R5_2, R5SAVE_2, R5_2 + VPDI $0, R5_3, R5SAVE_3, R5_3 + VPDI $0, R5_4, R5SAVE_4, R5_4 + + MOVD $0, R3 + BR multiply + +skip: + VZERO H_0 + VZERO H_1 + VZERO H_2 + VZERO H_3 + VZERO H_4 + + CMPBEQ R3, $0, finish + +b1: + // 1 block remaining + SUB $1, R3 + VLL R3, (R2), T_0 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, T_0 + VZERO T_1 + EXPAND(T_0, T_1, F_0, F_1, F_2, F_3, F_4) + CMPBNE R3, $16, 2(PC) + VLEIB $4, $1, F_4 + VLEIG $1, $1, R_0 + VZERO R_1 + VZERO R_2 + VZERO R_3 + VZERO R_4 + VZERO R5_1 + VZERO R5_2 + VZERO R5_3 + VZERO R5_4 + + // setup [r, 1] + VLVGG $0, RSAVE_0, R_0 + VLVGG $0, RSAVE_1, R_1 + VLVGG $0, RSAVE_2, R_2 + VLVGG $0, RSAVE_3, R_3 + VLVGG $0, RSAVE_4, R_4 + VPDI $0, R5SAVE_1, R5_1, R5_1 + VPDI $0, R5SAVE_2, R5_2, R5_2 + VPDI $0, R5SAVE_3, R5_3, R5_3 + VPDI $0, R5SAVE_4, R5_4, R5_4 + + MOVD $0, R3 + BR multiply diff --git a/vendor/golang.org/x/crypto/poly1305/sum_vmsl_s390x.s b/vendor/golang.org/x/crypto/poly1305/sum_vmsl_s390x.s new file mode 100644 index 0000000..e60bbc1 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/sum_vmsl_s390x.s @@ -0,0 +1,909 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build s390x,go1.11,!gccgo,!appengine + +#include "textflag.h" + +// Implementation of Poly1305 using the vector facility (vx) and the VMSL instruction. + +// constants +#define EX0 V1 +#define EX1 V2 +#define EX2 V3 + +// temporaries +#define T_0 V4 +#define T_1 V5 +#define T_2 V6 +#define T_3 V7 +#define T_4 V8 +#define T_5 V9 +#define T_6 V10 +#define T_7 V11 +#define T_8 V12 +#define T_9 V13 +#define T_10 V14 + +// r**2 & r**4 +#define R_0 V15 +#define R_1 V16 +#define R_2 V17 +#define R5_1 V18 +#define R5_2 V19 +// key (r) +#define RSAVE_0 R7 +#define RSAVE_1 R8 +#define RSAVE_2 R9 +#define R5SAVE_1 R10 +#define R5SAVE_2 R11 + +// message block +#define M0 V20 +#define M1 V21 +#define M2 V22 +#define M3 V23 +#define M4 V24 +#define M5 V25 + +// accumulator +#define H0_0 V26 +#define H1_0 V27 +#define H2_0 V28 +#define H0_1 V29 +#define H1_1 V30 +#define H2_1 V31 + +GLOBL ·keyMask<>(SB), RODATA, $16 +DATA ·keyMask<>+0(SB)/8, $0xffffff0ffcffff0f +DATA ·keyMask<>+8(SB)/8, $0xfcffff0ffcffff0f + +GLOBL ·bswapMask<>(SB), RODATA, $16 +DATA ·bswapMask<>+0(SB)/8, $0x0f0e0d0c0b0a0908 +DATA ·bswapMask<>+8(SB)/8, $0x0706050403020100 + +GLOBL ·constants<>(SB), RODATA, $48 +// EX0 +DATA ·constants<>+0(SB)/8, $0x18191a1b1c1d1e1f +DATA ·constants<>+8(SB)/8, $0x0000050403020100 +// EX1 +DATA ·constants<>+16(SB)/8, $0x18191a1b1c1d1e1f +DATA ·constants<>+24(SB)/8, $0x00000a0908070605 +// EX2 +DATA ·constants<>+32(SB)/8, $0x18191a1b1c1d1e1f +DATA ·constants<>+40(SB)/8, $0x0000000f0e0d0c0b + +GLOBL ·c<>(SB), RODATA, $48 +// EX0 +DATA ·c<>+0(SB)/8, $0x0000050403020100 +DATA ·c<>+8(SB)/8, $0x0000151413121110 +// EX1 +DATA ·c<>+16(SB)/8, $0x00000a0908070605 +DATA ·c<>+24(SB)/8, $0x00001a1918171615 +// EX2 +DATA ·c<>+32(SB)/8, $0x0000000f0e0d0c0b +DATA ·c<>+40(SB)/8, $0x0000001f1e1d1c1b + +GLOBL ·reduce<>(SB), RODATA, $32 +// 44 bit +DATA ·reduce<>+0(SB)/8, $0x0 +DATA ·reduce<>+8(SB)/8, $0xfffffffffff +// 42 bit +DATA ·reduce<>+16(SB)/8, $0x0 +DATA ·reduce<>+24(SB)/8, $0x3ffffffffff + +// h = (f*g) % (2**130-5) [partial reduction] +// uses T_0...T_9 temporary registers +// input: m02_0, m02_1, m02_2, m13_0, m13_1, m13_2, r_0, r_1, r_2, r5_1, r5_2, m4_0, m4_1, m4_2, m5_0, 
m5_1, m5_2 +// temp: t0, t1, t2, t3, t4, t5, t6, t7, t8, t9 +// output: m02_0, m02_1, m02_2, m13_0, m13_1, m13_2 +#define MULTIPLY(m02_0, m02_1, m02_2, m13_0, m13_1, m13_2, r_0, r_1, r_2, r5_1, r5_2, m4_0, m4_1, m4_2, m5_0, m5_1, m5_2, t0, t1, t2, t3, t4, t5, t6, t7, t8, t9) \ + \ // Eliminate the dependency for the last 2 VMSLs + VMSLG m02_0, r_2, m4_2, m4_2 \ + VMSLG m13_0, r_2, m5_2, m5_2 \ // 8 VMSLs pipelined + VMSLG m02_0, r_0, m4_0, m4_0 \ + VMSLG m02_1, r5_2, V0, T_0 \ + VMSLG m02_0, r_1, m4_1, m4_1 \ + VMSLG m02_1, r_0, V0, T_1 \ + VMSLG m02_1, r_1, V0, T_2 \ + VMSLG m02_2, r5_1, V0, T_3 \ + VMSLG m02_2, r5_2, V0, T_4 \ + VMSLG m13_0, r_0, m5_0, m5_0 \ + VMSLG m13_1, r5_2, V0, T_5 \ + VMSLG m13_0, r_1, m5_1, m5_1 \ + VMSLG m13_1, r_0, V0, T_6 \ + VMSLG m13_1, r_1, V0, T_7 \ + VMSLG m13_2, r5_1, V0, T_8 \ + VMSLG m13_2, r5_2, V0, T_9 \ + VMSLG m02_2, r_0, m4_2, m4_2 \ + VMSLG m13_2, r_0, m5_2, m5_2 \ + VAQ m4_0, T_0, m02_0 \ + VAQ m4_1, T_1, m02_1 \ + VAQ m5_0, T_5, m13_0 \ + VAQ m5_1, T_6, m13_1 \ + VAQ m02_0, T_3, m02_0 \ + VAQ m02_1, T_4, m02_1 \ + VAQ m13_0, T_8, m13_0 \ + VAQ m13_1, T_9, m13_1 \ + VAQ m4_2, T_2, m02_2 \ + VAQ m5_2, T_7, m13_2 \ + +// SQUARE uses three limbs of r and r_2*5 to output square of r +// uses T_1, T_5 and T_7 temporary registers +// input: r_0, r_1, r_2, r5_2 +// temp: TEMP0, TEMP1, TEMP2 +// output: p0, p1, p2 +#define SQUARE(r_0, r_1, r_2, r5_2, p0, p1, p2, TEMP0, TEMP1, TEMP2) \ + VMSLG r_0, r_0, p0, p0 \ + VMSLG r_1, r5_2, V0, TEMP0 \ + VMSLG r_2, r5_2, p1, p1 \ + VMSLG r_0, r_1, V0, TEMP1 \ + VMSLG r_1, r_1, p2, p2 \ + VMSLG r_0, r_2, V0, TEMP2 \ + VAQ TEMP0, p0, p0 \ + VAQ TEMP1, p1, p1 \ + VAQ TEMP2, p2, p2 \ + VAQ TEMP0, p0, p0 \ + VAQ TEMP1, p1, p1 \ + VAQ TEMP2, p2, p2 \ + +// carry h0->h1->h2->h0 || h3->h4->h5->h3 +// uses T_2, T_4, T_5, T_7, T_8, T_9 +// t6, t7, t8, t9, t10, t11 +// input: h0, h1, h2, h3, h4, h5 +// temp: t0, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11 +// output: h0, h1, h2, h3, h4, h5 +#define REDUCE(h0, h1, h2, h3, h4, h5, t0, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11) \ + VLM (R12), t6, t7 \ // 44 and 42 bit clear mask + VLEIB $7, $0x28, t10 \ // 5 byte shift mask + VREPIB $4, t8 \ // 4 bit shift mask + VREPIB $2, t11 \ // 2 bit shift mask + VSRLB t10, h0, t0 \ // h0 byte shift + VSRLB t10, h1, t1 \ // h1 byte shift + VSRLB t10, h2, t2 \ // h2 byte shift + VSRLB t10, h3, t3 \ // h3 byte shift + VSRLB t10, h4, t4 \ // h4 byte shift + VSRLB t10, h5, t5 \ // h5 byte shift + VSRL t8, t0, t0 \ // h0 bit shift + VSRL t8, t1, t1 \ // h2 bit shift + VSRL t11, t2, t2 \ // h2 bit shift + VSRL t8, t3, t3 \ // h3 bit shift + VSRL t8, t4, t4 \ // h4 bit shift + VESLG $2, t2, t9 \ // h2 carry x5 + VSRL t11, t5, t5 \ // h5 bit shift + VN t6, h0, h0 \ // h0 clear carry + VAQ t2, t9, t2 \ // h2 carry x5 + VESLG $2, t5, t9 \ // h5 carry x5 + VN t6, h1, h1 \ // h1 clear carry + VN t7, h2, h2 \ // h2 clear carry + VAQ t5, t9, t5 \ // h5 carry x5 + VN t6, h3, h3 \ // h3 clear carry + VN t6, h4, h4 \ // h4 clear carry + VN t7, h5, h5 \ // h5 clear carry + VAQ t0, h1, h1 \ // h0->h1 + VAQ t3, h4, h4 \ // h3->h4 + VAQ t1, h2, h2 \ // h1->h2 + VAQ t4, h5, h5 \ // h4->h5 + VAQ t2, h0, h0 \ // h2->h0 + VAQ t5, h3, h3 \ // h5->h3 + VREPG $1, t6, t6 \ // 44 and 42 bit masks across both halves + VREPG $1, t7, t7 \ + VSLDB $8, h0, h0, h0 \ // set up [h0/1/2, h3/4/5] + VSLDB $8, h1, h1, h1 \ + VSLDB $8, h2, h2, h2 \ + VO h0, h3, h3 \ + VO h1, h4, h4 \ + VO h2, h5, h5 \ + VESRLG $44, h3, t0 \ // 44 bit shift right + VESRLG $44, h4, t1 \ + VESRLG 
$42, h5, t2 \ + VN t6, h3, h3 \ // clear carry bits + VN t6, h4, h4 \ + VN t7, h5, h5 \ + VESLG $2, t2, t9 \ // multiply carry by 5 + VAQ t9, t2, t2 \ + VAQ t0, h4, h4 \ + VAQ t1, h5, h5 \ + VAQ t2, h3, h3 \ + +// carry h0->h1->h2->h0 +// input: h0, h1, h2 +// temp: t0, t1, t2, t3, t4, t5, t6, t7, t8 +// output: h0, h1, h2 +#define REDUCE2(h0, h1, h2, t0, t1, t2, t3, t4, t5, t6, t7, t8) \ + VLEIB $7, $0x28, t3 \ // 5 byte shift mask + VREPIB $4, t4 \ // 4 bit shift mask + VREPIB $2, t7 \ // 2 bit shift mask + VGBM $0x003F, t5 \ // mask to clear carry bits + VSRLB t3, h0, t0 \ + VSRLB t3, h1, t1 \ + VSRLB t3, h2, t2 \ + VESRLG $4, t5, t5 \ // 44 bit clear mask + VSRL t4, t0, t0 \ + VSRL t4, t1, t1 \ + VSRL t7, t2, t2 \ + VESRLG $2, t5, t6 \ // 42 bit clear mask + VESLG $2, t2, t8 \ + VAQ t8, t2, t2 \ + VN t5, h0, h0 \ + VN t5, h1, h1 \ + VN t6, h2, h2 \ + VAQ t0, h1, h1 \ + VAQ t1, h2, h2 \ + VAQ t2, h0, h0 \ + VSRLB t3, h0, t0 \ + VSRLB t3, h1, t1 \ + VSRLB t3, h2, t2 \ + VSRL t4, t0, t0 \ + VSRL t4, t1, t1 \ + VSRL t7, t2, t2 \ + VN t5, h0, h0 \ + VN t5, h1, h1 \ + VESLG $2, t2, t8 \ + VN t6, h2, h2 \ + VAQ t0, h1, h1 \ + VAQ t8, t2, t2 \ + VAQ t1, h2, h2 \ + VAQ t2, h0, h0 \ + +// expands two message blocks into the lower halfs of the d registers +// moves the contents of the d registers into upper halfs +// input: in1, in2, d0, d1, d2, d3, d4, d5 +// temp: TEMP0, TEMP1, TEMP2, TEMP3 +// output: d0, d1, d2, d3, d4, d5 +#define EXPACC(in1, in2, d0, d1, d2, d3, d4, d5, TEMP0, TEMP1, TEMP2, TEMP3) \ + VGBM $0xff3f, TEMP0 \ + VGBM $0xff1f, TEMP1 \ + VESLG $4, d1, TEMP2 \ + VESLG $4, d4, TEMP3 \ + VESRLG $4, TEMP0, TEMP0 \ + VPERM in1, d0, EX0, d0 \ + VPERM in2, d3, EX0, d3 \ + VPERM in1, d2, EX2, d2 \ + VPERM in2, d5, EX2, d5 \ + VPERM in1, TEMP2, EX1, d1 \ + VPERM in2, TEMP3, EX1, d4 \ + VN TEMP0, d0, d0 \ + VN TEMP0, d3, d3 \ + VESRLG $4, d1, d1 \ + VESRLG $4, d4, d4 \ + VN TEMP1, d2, d2 \ + VN TEMP1, d5, d5 \ + VN TEMP0, d1, d1 \ + VN TEMP0, d4, d4 \ + +// expands one message block into the lower halfs of the d registers +// moves the contents of the d registers into upper halfs +// input: in, d0, d1, d2 +// temp: TEMP0, TEMP1, TEMP2 +// output: d0, d1, d2 +#define EXPACC2(in, d0, d1, d2, TEMP0, TEMP1, TEMP2) \ + VGBM $0xff3f, TEMP0 \ + VESLG $4, d1, TEMP2 \ + VGBM $0xff1f, TEMP1 \ + VPERM in, d0, EX0, d0 \ + VESRLG $4, TEMP0, TEMP0 \ + VPERM in, d2, EX2, d2 \ + VPERM in, TEMP2, EX1, d1 \ + VN TEMP0, d0, d0 \ + VN TEMP1, d2, d2 \ + VESRLG $4, d1, d1 \ + VN TEMP0, d1, d1 \ + +// pack h2:h0 into h1:h0 (no carry) +// input: h0, h1, h2 +// output: h0, h1, h2 +#define PACK(h0, h1, h2) \ + VMRLG h1, h2, h2 \ // copy h1 to upper half h2 + VESLG $44, h1, h1 \ // shift limb 1 44 bits, leaving 20 + VO h0, h1, h0 \ // combine h0 with 20 bits from limb 1 + VESRLG $20, h2, h1 \ // put top 24 bits of limb 1 into h1 + VLEIG $1, $0, h1 \ // clear h2 stuff from lower half of h1 + VO h0, h1, h0 \ // h0 now has 88 bits (limb 0 and 1) + VLEIG $0, $0, h2 \ // clear upper half of h2 + VESRLG $40, h2, h1 \ // h1 now has upper two bits of result + VLEIB $7, $88, h1 \ // for byte shift (11 bytes) + VSLB h1, h2, h2 \ // shift h2 11 bytes to the left + VO h0, h2, h0 \ // combine h0 with 20 bits from limb 1 + VLEIG $0, $0, h1 \ // clear upper half of h1 + +// if h > 2**130-5 then h -= 2**130-5 +// input: h0, h1 +// temp: t0, t1, t2 +// output: h0 +#define MOD(h0, h1, t0, t1, t2) \ + VZERO t0 \ + VLEIG $1, $5, t0 \ + VACCQ h0, t0, t1 \ + VAQ h0, t0, t0 \ + VONE t2 \ + VLEIG $1, $-4, t2 \ + VAQ t2, t1, t1 \ + VACCQ 
h1, t1, t1 \ + VONE t2 \ + VAQ t2, t1, t1 \ + VN h0, t1, t2 \ + VNC t0, t1, t1 \ + VO t1, t2, h0 \ + +// func poly1305vmsl(out *[16]byte, m *byte, mlen uint64, key *[32]key) +TEXT ·poly1305vmsl(SB), $0-32 + // This code processes 6 + up to 4 blocks (32 bytes) per iteration + // using the algorithm described in: + // NEON crypto, Daniel J. Bernstein & Peter Schwabe + // https://cryptojedi.org/papers/neoncrypto-20120320.pdf + // And as moddified for VMSL as described in + // Accelerating Poly1305 Cryptographic Message Authentication on the z14 + // O'Farrell et al, CASCON 2017, p48-55 + // https://ibm.ent.box.com/s/jf9gedj0e9d2vjctfyh186shaztavnht + + LMG out+0(FP), R1, R4 // R1=out, R2=m, R3=mlen, R4=key + VZERO V0 // c + + // load EX0, EX1 and EX2 + MOVD $·constants<>(SB), R5 + VLM (R5), EX0, EX2 // c + + // setup r + VL (R4), T_0 + MOVD $·keyMask<>(SB), R6 + VL (R6), T_1 + VN T_0, T_1, T_0 + VZERO T_2 // limbs for r + VZERO T_3 + VZERO T_4 + EXPACC2(T_0, T_2, T_3, T_4, T_1, T_5, T_7) + + // T_2, T_3, T_4: [0, r] + + // setup r*20 + VLEIG $0, $0, T_0 + VLEIG $1, $20, T_0 // T_0: [0, 20] + VZERO T_5 + VZERO T_6 + VMSLG T_0, T_3, T_5, T_5 + VMSLG T_0, T_4, T_6, T_6 + + // store r for final block in GR + VLGVG $1, T_2, RSAVE_0 // c + VLGVG $1, T_3, RSAVE_1 // c + VLGVG $1, T_4, RSAVE_2 // c + VLGVG $1, T_5, R5SAVE_1 // c + VLGVG $1, T_6, R5SAVE_2 // c + + // initialize h + VZERO H0_0 + VZERO H1_0 + VZERO H2_0 + VZERO H0_1 + VZERO H1_1 + VZERO H2_1 + + // initialize pointer for reduce constants + MOVD $·reduce<>(SB), R12 + + // calculate r**2 and 20*(r**2) + VZERO R_0 + VZERO R_1 + VZERO R_2 + SQUARE(T_2, T_3, T_4, T_6, R_0, R_1, R_2, T_1, T_5, T_7) + REDUCE2(R_0, R_1, R_2, M0, M1, M2, M3, M4, R5_1, R5_2, M5, T_1) + VZERO R5_1 + VZERO R5_2 + VMSLG T_0, R_1, R5_1, R5_1 + VMSLG T_0, R_2, R5_2, R5_2 + + // skip r**4 calculation if 3 blocks or less + CMPBLE R3, $48, b4 + + // calculate r**4 and 20*(r**4) + VZERO T_8 + VZERO T_9 + VZERO T_10 + SQUARE(R_0, R_1, R_2, R5_2, T_8, T_9, T_10, T_1, T_5, T_7) + REDUCE2(T_8, T_9, T_10, M0, M1, M2, M3, M4, T_2, T_3, M5, T_1) + VZERO T_2 + VZERO T_3 + VMSLG T_0, T_9, T_2, T_2 + VMSLG T_0, T_10, T_3, T_3 + + // put r**2 to the right and r**4 to the left of R_0, R_1, R_2 + VSLDB $8, T_8, T_8, T_8 + VSLDB $8, T_9, T_9, T_9 + VSLDB $8, T_10, T_10, T_10 + VSLDB $8, T_2, T_2, T_2 + VSLDB $8, T_3, T_3, T_3 + + VO T_8, R_0, R_0 + VO T_9, R_1, R_1 + VO T_10, R_2, R_2 + VO T_2, R5_1, R5_1 + VO T_3, R5_2, R5_2 + + CMPBLE R3, $80, load // less than or equal to 5 blocks in message + + // 6(or 5+1) blocks + SUB $81, R3 + VLM (R2), M0, M4 + VLL R3, 80(R2), M5 + ADD $1, R3 + MOVBZ $1, R0 + CMPBGE R3, $16, 2(PC) + VLVGB R3, R0, M5 + MOVD $96(R2), R2 + EXPACC(M0, M1, H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_0, T_1, T_2, T_3) + EXPACC(M2, M3, H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_0, T_1, T_2, T_3) + VLEIB $2, $1, H2_0 + VLEIB $2, $1, H2_1 + VLEIB $10, $1, H2_0 + VLEIB $10, $1, H2_1 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO T_4 + VZERO T_10 + EXPACC(M4, M5, M0, M1, M2, M3, T_4, T_10, T_0, T_1, T_2, T_3) + VLR T_4, M4 + VLEIB $10, $1, M2 + CMPBLT R3, $16, 2(PC) + VLEIB $10, $1, T_10 + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, T_10, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, M2, M3, M4, T_4, T_5, T_2, T_7, T_8, T_9) + VMRHG V0, H0_1, H0_0 + VMRHG V0, H1_1, H1_0 + VMRHG V0, H2_1, H2_0 + VMRLG V0, H0_1, H0_1 + VMRLG V0, H1_1, H1_1 + VMRLG V0, H2_1, H2_1 + + 
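+ // Descriptive note on the limb layout used throughout this file: h and r are
+ // held in three limbs of 44, 44 and 42 bits (x = x0 + x1*2^44 + x2*2^88).
+ // Whenever a carry leaves the top limb it is multiplied by 5 and folded back
+ // into the bottom limb, because 2^130 = 5 (mod 2^130-5); this is what the
+ // "carry x5" steps in REDUCE and REDUCE2 implement.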
SUB $16, R3 + CMPBLE R3, $0, square + +load: + // load EX0, EX1 and EX2 + MOVD $·c<>(SB), R5 + VLM (R5), EX0, EX2 + +loop: + CMPBLE R3, $64, add // b4 // last 4 or less blocks left + + // next 4 full blocks + VLM (R2), M2, M5 + SUB $64, R3 + MOVD $64(R2), R2 + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, T_0, T_1, T_3, T_4, T_5, T_2, T_7, T_8, T_9) + + // expacc in-lined to create [m2, m3] limbs + VGBM $0x3f3f, T_0 // 44 bit clear mask + VGBM $0x1f1f, T_1 // 40 bit clear mask + VPERM M2, M3, EX0, T_3 + VESRLG $4, T_0, T_0 // 44 bit clear mask ready + VPERM M2, M3, EX1, T_4 + VPERM M2, M3, EX2, T_5 + VN T_0, T_3, T_3 + VESRLG $4, T_4, T_4 + VN T_1, T_5, T_5 + VN T_0, T_4, T_4 + VMRHG H0_1, T_3, H0_0 + VMRHG H1_1, T_4, H1_0 + VMRHG H2_1, T_5, H2_0 + VMRLG H0_1, T_3, H0_1 + VMRLG H1_1, T_4, H1_1 + VMRLG H2_1, T_5, H2_1 + VLEIB $10, $1, H2_0 + VLEIB $10, $1, H2_1 + VPERM M4, M5, EX0, T_3 + VPERM M4, M5, EX1, T_4 + VPERM M4, M5, EX2, T_5 + VN T_0, T_3, T_3 + VESRLG $4, T_4, T_4 + VN T_1, T_5, T_5 + VN T_0, T_4, T_4 + VMRHG V0, T_3, M0 + VMRHG V0, T_4, M1 + VMRHG V0, T_5, M2 + VMRLG V0, T_3, M3 + VMRLG V0, T_4, M4 + VMRLG V0, T_5, M5 + VLEIB $10, $1, M2 + VLEIB $10, $1, M5 + + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + CMPBNE R3, $0, loop + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, M3, M4, M5, T_4, T_5, T_2, T_7, T_8, T_9) + VMRHG V0, H0_1, H0_0 + VMRHG V0, H1_1, H1_0 + VMRHG V0, H2_1, H2_0 + VMRLG V0, H0_1, H0_1 + VMRLG V0, H1_1, H1_1 + VMRLG V0, H2_1, H2_1 + + // load EX0, EX1, EX2 + MOVD $·constants<>(SB), R5 + VLM (R5), EX0, EX2 + + // sum vectors + VAQ H0_0, H0_1, H0_0 + VAQ H1_0, H1_1, H1_0 + VAQ H2_0, H2_1, H2_0 + + // h may be >= 2*(2**130-5) so we need to reduce it again + // M0...M4 are used as temps here + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, T_9, T_10, H0_1, M5) + +next: // carry h1->h2 + VLEIB $7, $0x28, T_1 + VREPIB $4, T_2 + VGBM $0x003F, T_3 + VESRLG $4, T_3 + + // byte shift + VSRLB T_1, H1_0, T_4 + + // bit shift + VSRL T_2, T_4, T_4 + + // clear h1 carry bits + VN T_3, H1_0, H1_0 + + // add carry + VAQ T_4, H2_0, H2_0 + + // h is now < 2*(2**130-5) + // pack h into h1 (hi) and h0 (lo) + PACK(H0_0, H1_0, H2_0) + + // if h > 2**130-5 then h -= 2**130-5 + MOD(H0_0, H1_0, T_0, T_1, T_2) + + // h += s + MOVD $·bswapMask<>(SB), R5 + VL (R5), T_1 + VL 16(R4), T_0 + VPERM T_0, T_0, T_1, T_0 // reverse bytes (to big) + VAQ T_0, H0_0, H0_0 + VPERM H0_0, H0_0, T_1, H0_0 // reverse bytes (to little) + VST H0_0, (R1) + RET + +add: + // load EX0, EX1, EX2 + MOVD $·constants<>(SB), R5 + VLM (R5), EX0, EX2 + + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, M3, M4, M5, T_4, T_5, T_2, T_7, T_8, T_9) + VMRHG V0, H0_1, H0_0 + VMRHG V0, H1_1, H1_0 + VMRHG V0, H2_1, H2_0 + VMRLG V0, H0_1, H0_1 + VMRLG V0, H1_1, H1_1 + VMRLG V0, H2_1, H2_1 + CMPBLE R3, $64, b4 + +b4: + CMPBLE R3, $48, b3 // 3 blocks or less + + // 4(3+1) blocks remaining + SUB $49, R3 + VLM (R2), M0, M2 + VLL R3, 48(R2), M3 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, M3 + MOVD $64(R2), R2 + EXPACC(M0, M1, H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_0, T_1, T_2, T_3) + VLEIB $10, $1, H2_0 + VLEIB $10, $1, H2_1 + VZERO M0 + VZERO M1 + VZERO M4 + VZERO M5 + VZERO T_4 + VZERO T_10 + EXPACC(M2, M3, M0, M1, M4, M5, T_4, T_10, T_0, T_1, T_2, T_3) + VLR T_4, M2 + VLEIB $10, $1, M4 + CMPBNE R3, $16, 2(PC) + VLEIB $10, $1, T_10 + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, 
R_0, R_1, R_2, R5_1, R5_2, M0, M1, M4, M5, M2, T_10, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, M3, M4, M5, T_4, T_5, T_2, T_7, T_8, T_9) + VMRHG V0, H0_1, H0_0 + VMRHG V0, H1_1, H1_0 + VMRHG V0, H2_1, H2_0 + VMRLG V0, H0_1, H0_1 + VMRLG V0, H1_1, H1_1 + VMRLG V0, H2_1, H2_1 + SUB $16, R3 + CMPBLE R3, $0, square // this condition must always hold true! + +b3: + CMPBLE R3, $32, b2 + + // 3 blocks remaining + + // setup [r²,r] + VSLDB $8, R_0, R_0, R_0 + VSLDB $8, R_1, R_1, R_1 + VSLDB $8, R_2, R_2, R_2 + VSLDB $8, R5_1, R5_1, R5_1 + VSLDB $8, R5_2, R5_2, R5_2 + + VLVGG $1, RSAVE_0, R_0 + VLVGG $1, RSAVE_1, R_1 + VLVGG $1, RSAVE_2, R_2 + VLVGG $1, R5SAVE_1, R5_1 + VLVGG $1, R5SAVE_2, R5_2 + + // setup [h0, h1] + VSLDB $8, H0_0, H0_0, H0_0 + VSLDB $8, H1_0, H1_0, H1_0 + VSLDB $8, H2_0, H2_0, H2_0 + VO H0_1, H0_0, H0_0 + VO H1_1, H1_0, H1_0 + VO H2_1, H2_0, H2_0 + VZERO H0_1 + VZERO H1_1 + VZERO H2_1 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + + // H*[r**2, r] + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, H0_1, H1_1, T_10, M5) + + SUB $33, R3 + VLM (R2), M0, M1 + VLL R3, 32(R2), M2 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, M2 + + // H += m0 + VZERO T_1 + VZERO T_2 + VZERO T_3 + EXPACC2(M0, T_1, T_2, T_3, T_4, T_5, T_6) + VLEIB $10, $1, T_3 + VAG H0_0, T_1, H0_0 + VAG H1_0, T_2, H1_0 + VAG H2_0, T_3, H2_0 + + VZERO M0 + VZERO M3 + VZERO M4 + VZERO M5 + VZERO T_10 + + // (H+m0)*r + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M3, M4, M5, V0, T_10, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M3, M4, M5, T_10, H0_1, H1_1, H2_1, T_9) + + // H += m1 + VZERO V0 + VZERO T_1 + VZERO T_2 + VZERO T_3 + EXPACC2(M1, T_1, T_2, T_3, T_4, T_5, T_6) + VLEIB $10, $1, T_3 + VAQ H0_0, T_1, H0_0 + VAQ H1_0, T_2, H1_0 + VAQ H2_0, T_3, H2_0 + REDUCE2(H0_0, H1_0, H2_0, M0, M3, M4, M5, T_9, H0_1, H1_1, H2_1, T_10) + + // [H, m2] * [r**2, r] + EXPACC2(M2, H0_0, H1_0, H2_0, T_1, T_2, T_3) + CMPBNE R3, $16, 2(PC) + VLEIB $10, $1, H2_0 + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, H0_1, H1_1, M5, T_10) + SUB $16, R3 + CMPBLE R3, $0, next // this condition must always hold true! 
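+ // Descriptive note on the tail paths below: b4, b3, b2 and b1 all evaluate the
+ // same Horner recurrence h = (h + m_i) * r (mod 2^130-5). The Poly1305 padding
+ // bit is set with VLEIB $10, $1 (bit 2^128) for full 16-byte blocks, or by
+ // appending a 0x01 byte via MOVBZ $1/VLVGB after a short final block. Once all
+ // blocks are absorbed, the "next" section adds the key half s and stores the
+ // low 128 bits of the result as the tag.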
+ +b2: + CMPBLE R3, $16, b1 + + // 2 blocks remaining + + // setup [r²,r] + VSLDB $8, R_0, R_0, R_0 + VSLDB $8, R_1, R_1, R_1 + VSLDB $8, R_2, R_2, R_2 + VSLDB $8, R5_1, R5_1, R5_1 + VSLDB $8, R5_2, R5_2, R5_2 + + VLVGG $1, RSAVE_0, R_0 + VLVGG $1, RSAVE_1, R_1 + VLVGG $1, RSAVE_2, R_2 + VLVGG $1, R5SAVE_1, R5_1 + VLVGG $1, R5SAVE_2, R5_2 + + // setup [h0, h1] + VSLDB $8, H0_0, H0_0, H0_0 + VSLDB $8, H1_0, H1_0, H1_0 + VSLDB $8, H2_0, H2_0, H2_0 + VO H0_1, H0_0, H0_0 + VO H1_1, H1_0, H1_0 + VO H2_1, H2_0, H2_0 + VZERO H0_1 + VZERO H1_1 + VZERO H2_1 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + + // H*[r**2, r] + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, T_10, M0, M1, M2, M3, M4, T_4, T_5, T_2, T_7, T_8, T_9) + VMRHG V0, H0_1, H0_0 + VMRHG V0, H1_1, H1_0 + VMRHG V0, H2_1, H2_0 + VMRLG V0, H0_1, H0_1 + VMRLG V0, H1_1, H1_1 + VMRLG V0, H2_1, H2_1 + + // move h to the left and 0s at the right + VSLDB $8, H0_0, H0_0, H0_0 + VSLDB $8, H1_0, H1_0, H1_0 + VSLDB $8, H2_0, H2_0, H2_0 + + // get message blocks and append 1 to start + SUB $17, R3 + VL (R2), M0 + VLL R3, 16(R2), M1 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, M1 + VZERO T_6 + VZERO T_7 + VZERO T_8 + EXPACC2(M0, T_6, T_7, T_8, T_1, T_2, T_3) + EXPACC2(M1, T_6, T_7, T_8, T_1, T_2, T_3) + VLEIB $2, $1, T_8 + CMPBNE R3, $16, 2(PC) + VLEIB $10, $1, T_8 + + // add [m0, m1] to h + VAG H0_0, T_6, H0_0 + VAG H1_0, T_7, H1_0 + VAG H2_0, T_8, H2_0 + + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + VZERO T_10 + VZERO M0 + + // at this point R_0 .. R5_2 look like [r**2, r] + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M2, M3, M4, M5, T_10, M0, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M2, M3, M4, M5, T_9, H0_1, H1_1, H2_1, T_10) + SUB $16, R3, R3 + CMPBLE R3, $0, next + +b1: + CMPBLE R3, $0, next + + // 1 block remaining + + // setup [r²,r] + VSLDB $8, R_0, R_0, R_0 + VSLDB $8, R_1, R_1, R_1 + VSLDB $8, R_2, R_2, R_2 + VSLDB $8, R5_1, R5_1, R5_1 + VSLDB $8, R5_2, R5_2, R5_2 + + VLVGG $1, RSAVE_0, R_0 + VLVGG $1, RSAVE_1, R_1 + VLVGG $1, RSAVE_2, R_2 + VLVGG $1, R5SAVE_1, R5_1 + VLVGG $1, R5SAVE_2, R5_2 + + // setup [h0, h1] + VSLDB $8, H0_0, H0_0, H0_0 + VSLDB $8, H1_0, H1_0, H1_0 + VSLDB $8, H2_0, H2_0, H2_0 + VO H0_1, H0_0, H0_0 + VO H1_1, H1_0, H1_0 + VO H2_1, H2_0, H2_0 + VZERO H0_1 + VZERO H1_1 + VZERO H2_1 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + + // H*[r**2, r] + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, T_9, T_10, H0_1, M5) + + // set up [0, m0] limbs + SUB $1, R3 + VLL R3, (R2), M0 + ADD $1, R3 + MOVBZ $1, R0 + CMPBEQ R3, $16, 2(PC) + VLVGB R3, R0, M0 + VZERO T_1 + VZERO T_2 + VZERO T_3 + EXPACC2(M0, T_1, T_2, T_3, T_4, T_5, T_6)// limbs: [0, m] + CMPBNE R3, $16, 2(PC) + VLEIB $10, $1, T_3 + + // h+m0 + VAQ H0_0, T_1, H0_0 + VAQ H1_0, T_2, H1_0 + VAQ H2_0, T_3, H2_0 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, T_9, T_10, H0_1, M5) + + BR next + +square: + // setup [r²,r] + VSLDB $8, R_0, R_0, 
R_0 + VSLDB $8, R_1, R_1, R_1 + VSLDB $8, R_2, R_2, R_2 + VSLDB $8, R5_1, R5_1, R5_1 + VSLDB $8, R5_2, R5_2, R5_2 + + VLVGG $1, RSAVE_0, R_0 + VLVGG $1, RSAVE_1, R_1 + VLVGG $1, RSAVE_2, R_2 + VLVGG $1, R5SAVE_1, R5_1 + VLVGG $1, R5SAVE_2, R5_2 + + // setup [h0, h1] + VSLDB $8, H0_0, H0_0, H0_0 + VSLDB $8, H1_0, H1_0, H1_0 + VSLDB $8, H2_0, H2_0, H2_0 + VO H0_1, H0_0, H0_0 + VO H1_1, H1_0, H1_0 + VO H2_1, H2_0, H2_0 + VZERO H0_1 + VZERO H1_1 + VZERO H2_1 + + VZERO M0 + VZERO M1 + VZERO M2 + VZERO M3 + VZERO M4 + VZERO M5 + + // (h0*r**2) + (h1*r) + MULTIPLY(H0_0, H1_0, H2_0, H0_1, H1_1, H2_1, R_0, R_1, R_2, R5_1, R5_2, M0, M1, M2, M3, M4, M5, T_0, T_1, T_2, T_3, T_4, T_5, T_6, T_7, T_8, T_9) + REDUCE2(H0_0, H1_0, H2_0, M0, M1, M2, M3, M4, T_9, T_10, H0_1, M5) + BR next diff --git a/vendor/golang.org/x/crypto/poly1305/vectors_test.go b/vendor/golang.org/x/crypto/poly1305/vectors_test.go new file mode 100644 index 0000000..18d7ff8 --- /dev/null +++ b/vendor/golang.org/x/crypto/poly1305/vectors_test.go @@ -0,0 +1,2943 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package poly1305 + +var testData = [...]test{ + // edge cases + { + // see https://go-review.googlesource.com/#/c/30101/ + key: "3b3a29e93b213a5c5c3b3b053a3a8c0d00000000000000000000000000000000", + tag: "6dc18b8c344cd79927118bbe84b7f314", + in: "81d8b2e46a25213b58fee4213a2a28e921c12a9632516d3b73272727becf2129", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "04000000000000000000000000000000", // (2¹³⁰-1) % (2¹³⁰-5) + in: "ffffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "faffffffffffffffffffffffffffffff", // (2¹³⁰-6) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (2¹³⁰-5) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "f9ffffffffffffffffffffffffffffff", // (2*(2¹³⁰-6)) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (2*(2¹³⁰-5)) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "f8ffffffffffffffffffffffffffffff", // (3*(2¹³⁰-6)) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + 
"00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (3*(2¹³⁰-5)) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "f7ffffffffffffffffffffffffffffff", // (4*(2¹³⁰-6)) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (4*(2¹³⁰-5)) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "f3ffffffffffffffffffffffffffffff", // (8*(2¹³⁰-6)) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (8*(2¹³⁰-5)) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + 
"00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "ebffffffffffffffffffffffffffffff", // (16*(2¹³⁰-6)) % (2¹³⁰-5) + in: "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "faffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + { + key: "0100000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", // (16*(2¹³⁰-5)) % (2¹³⁰-5) + in: "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "fbffffffffffffffffffffffffffffff" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + 
"00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000" + + "00000000000000000000000000000000", + }, + // original smoke tests + { + key: "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + tag: "a6f745008f81c916a20dcc74eef2b2f0", + in: "48656c6c6f20776f726c6421", + }, + { + key: "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + tag: "49ec78090e481ec6c26b33b91ccc0307", + in: "0000000000000000000000000000000000000000000000000000000000000000", + }, + { + key: "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + tag: "da84bcab02676c38cdb015604274c2aa", + in: "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + 
"000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000", + }, + { + key: "0000000000000000000000000000000000000000000000000000000000000000", + tag: "00000000000000000000000000000000", + in: "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + 
"000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000000000" + + "000000000000000000000000000000000000000000000000000000", + }, + // 
randomly generated + { + key: "52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649", + tag: "9566c74d10037c4d7bbb0407d1e2c649", + in: "", + }, + { + key: "81855ad8681d0d86d1e91e00167939cb6694d2c422acd208a0072939487f6999", + tag: "eaa270caaa12faa39b797374a4b8a420", + in: "eb", + }, + { + key: "9d18a44784045d87f3c67cf22746e995af5a25367951baa2ff6cd471c483f15f", + tag: "dbea66e1da48a8f822887c6162c2acf1", + in: "b90b", + }, + { + key: "adb37c5821b6d95526a41a9504680b4e7c8b763a1b1d49d4955c848621632525", + tag: "6ac09aaa88c32ee95a7198376f16abdb", + in: "3fec73", + }, + { + key: "8dd7a9e28bf921119c160f0702448615bbda08313f6a8eb668d20bf505987592", + tag: "b1443487f97fe340b04a74719ed4de68", + in: "1e668a5b", + }, + { + key: "df2c7fc4844592d2572bcd0668d2d6c52f5054e2d0836bf84c7174cb7476364c", + tag: "7463be0f9d99a5348039e4afcbf4019c", + in: "c3dbd968b0", + }, + { + key: "f7172ed85794bb358b0c3b525da1786f9fff094279db1944ebd7a19d0f7bbacb", + tag: "2edaee3bcf303fd05609e131716f8157", + in: "e0255aa5b7d4", + }, + { + key: "4bec40f84c892b9bffd43629b0223beea5f4f74391f445d15afd4294040374f6", + tag: "965f18767420c1d94a4ef657e8d15e1e", + in: "924b98cbf8713f", + }, + { + key: "8d962d7c8d019192c24224e2cafccae3a61fb586b14323a6bc8f9e7df1d92933", + tag: "2bf4a33287dd6d87e1ed4282f7342b6a", + in: "3ff993933bea6f5b", + }, + { + key: "3af6de0374366c4719e43a1b067d89bc7f01f1f573981659a44ff17a4c7215a3", + tag: "c5e987b60373a48893c5af30acf2471f", + in: "b539eb1e5849c6077d", + }, + { + key: "bb5722f5717a289a266f97647981998ebea89c0b4b373970115e82ed6f4125c8", + tag: "19f0f640b309d168ea1b480e6a4faee5", + in: "fa7311e4d7defa922daa", + }, + { + key: "e7786667f7e936cd4f24abf7df866baa56038367ad6145de1ee8f4a8b0993ebd", + tag: "de75e5565d97834b9fa84ad568d31359", + in: "f8883a0ad8be9c3978b048", + }, + { + key: "83e56a156a8de563afa467d49dec6a40e9a1d007f033c2823061bdd0eaa59f8e", + tag: "de184a5a9b826aa203c5c017986d6690", + in: "4da6430105220d0b29688b73", + }, + { + key: "4b8ea0f3ca9936e8461f10d77c96ea80a7a665f606f6a63b7f3dfd2567c18979", + tag: "7478f18d9684905aa5d1a34ee67e4c84", + in: "e4d60f26686d9bf2fb26c901ff", + }, + { + key: "354cde1607ee294b39f32b7c7822ba64f84ab43ca0c6e6b91c1fd3be89904341", + tag: "3b2008a9c52b5308f5538b789ab5506f", + in: "79d3af4491a369012db92d184fc3", + }, + { + key: "9d1734ff5716428953bb6865fcf92b0c3a17c9028be9914eb7649c6c93478009", + tag: "71c8e76a67a505b7370b562ba15ba032", + in: "79d1830356f2a54c3deab2a4b4475d", + }, + { + key: "63afbe8fb56987c77f5818526f1814be823350eab13935f31d84484517e924ae", + tag: "1dc895f74f866bdb3edf6c4430829c1c", + in: "f78ae151c00755925836b7075885650c", + }, + { + key: "30ec29a3703934bf50a28da102975deda77e758579ea3dfe4136abf752b3b827", + tag: "afca2b3ba7b0e1a928001966883e9b16", + in: "1d03e944b3c9db366b75045f8efd69d22ae5411947cb553d7694267aef4e" + + "bcea406b32d6108bd68584f57e37caac6e33feaa3263a399437024ba9c9b" + + "14678a274f01a910ae295f6efbfe5f5abf44ccde263b5606633e2bf0006f" + + "28295d7d39069f01a239c4365854c3af7f6b41d631f92b9a8d12f4125732" + + "5fff332f7576b0620556304a3e3eae14c28d0cea39d2901a52720da85ca1" + + "e4b38eaf3f", + }, + { + key: "44c6c6ef8362f2f54fc00e09d6fc25640854c15dfcacaa8a2cecce5a3aba53ab", + tag: "6f2a09aa76c9b76774e31ec02dcf7991", + in: "705b18db94b4d338a5143e63408d8724b0cf3fae17a3f79be1072fb63c35" + + "d6042c4160f38ee9e2a9f3fb4ffb0019b454d522b5ffa17604193fb89667" + + "10a7960732ca52cf53c3f520c889b79bf504cfb57c7601232d589baccea9" + + "d6e263e25c27741d3f6c62cbbb15d9afbcbf7f7da41ab0408e3969c2e2cd" + + 
"cf233438bf1774ace7709a4f091e9a83fdeae0ec55eb233a9b5394cb3c78" + + "56b546d313c8a3b4c1c0e05447f4ba370eb36dbcfdec90b302dcdc3b9ef5" + + "22e2a6f1ed0afec1f8e20faabedf6b162e717d3a748a58677a0c56348f89" + + "21a266b11d0f334c62fe52ba53af19779cb2948b6570ffa0b773963c130a" + + "d797ddea", + }, + { + key: "fe4e3ad29b5125210f0ef1c314090f07c79a6f571c246f3e9ac0b7413ef110bd", + tag: "27381e3fc2a356103fb796f107d826e7", + in: "58b00ce73bff706f7ff4b6f44090a32711f3208e4e4b89cb5165ce64002c" + + "bd9c2887aa113df2468928d5a23b9ca740f80c9382d9c6034ad2960c7965" + + "03e1ce221725f50caf1fbfe831b10b7bf5b15c47a53dbf8e7dcafc9e1386" + + "47a4b44ed4bce964ed47f74aa594468ced323cb76f0d3fac476c9fb03fc9" + + "228fbae88fd580663a0454b68312207f0a3b584c62316492b49753b5d502" + + "7ce15a4f0a58250d8fb50e77f2bf4f0152e5d49435807f9d4b97be6fb779" + + "70466a5626fe33408cf9e88e2c797408a32d29416baf206a329cfffd4a75" + + "e498320982c85aad70384859c05a4b13a1d5b2f5bfef5a6ed92da482caa9" + + "568e5b6fe9d8a9ddd9eb09277b92cef9046efa18500944cbe800a0b1527e" + + "a6", + }, + { + key: "4729a861d2f6497a3235c37f4192779ec1d96b3b1c5424fce0b727b03072e641", + tag: "0173965669fb9de88d38a827a0271271", + in: "5a761f03abaa40abc9448fddeb2191d945c04767af847afd0edb5d8857b7" + + "99acb18e4affabe3037ffe7fa68aa8af5e39cc416e734d373c5ebebc9cdc" + + "c595bcce3c7bd3d8df93fab7e125ddebafe65a31bd5d41e2d2ce9c2b1789" + + "2f0fea1931a290220777a93143dfdcbfa68406e877073ff08834e197a403" + + "4aa48afa3f85b8a62708caebbac880b5b89b93da53810164402104e648b6" + + "226a1b78021851f5d9ac0f313a89ddfc454c5f8f72ac89b38b19f53784c1" + + "9e9beac03c875a27db029de37ae37a42318813487685929359ca8c5eb94e" + + "152dc1af42ea3d1676c1bdd19ab8e2925c6daee4de5ef9f9dcf08dfcbd02" + + "b80809398585928a0f7de50be1a6dc1d5768e8537988fddce562e9b948c9" + + "18bba3e933e5c400cde5e60c5ead6fc7ae77ba1d259b188a4b21c86fbc23" + + "d728b45347eada650af24c56d0800a8691332088a805bd55c446e25eb075" + + "90bafcccbec6177536401d9a2b7f512b54bfc9d00532adf5aaa7c3a96bc5" + + "9b489f77d9042c5bce26b163defde5ee6a0fbb3e9346cef81f0ae9515ef3" + + "0fa47a364e75aea9e111d596e685a591121966e031650d510354aa845580" + + "ff560760fd36514ca197c875f1d02d9216eba7627e2398322eb5cf43d72b" + + "d2e5b887d4630fb8d4747ead6eb82acd1c5b078143ee26a586ad23139d50" + + "41723470bf24a865837c", + }, + { + key: "9123461c41f5ff99aa99ce24eb4d788576e3336e65491622558fdf297b9fa007", + tag: "1eb0cdad9237905250d30a24fe172a34", + in: "864bafd7cd4ca1b2fb5766ab431a032b72b9a7e937ed648d0801f29055d3" + + "090d2463718254f9442483c7b98b938045da519843854b0ed3f7ba951a49" + + "3f321f0966603022c1dfc579b99ed9d20d573ad53171c8fef7f1f4e4613b" + + "b365b2ebb44f0ffb6907136385cdc838f0bdd4c812f042577410aca008c2" + + "afbc4c79c62572e20f8ed94ee62b4de7aa1cc84c887e1f7c31e927dfe52a" + + "5f8f46627eb5d3a4fe16fafce23623e196c9dfff7fbaff4ffe94f4589733" + + "e563e19d3045aad3e226488ac02cca4291aed169dce5039d6ab00e40f67a" + + "ab29332de1448b35507c7c8a09c4db07105dc31003620405da3b2169f5a9" + + "10c9d0096e5e3ef1b570680746acd0cc7760331b663138d6d342b051b5df" + + "410637cf7aee9b0c8c10a8f9980630f34ce001c0ab7ac65e502d39b216cb" + + "c50e73a32eaf936401e2506bd8b82c30d346bc4b2fa319f245a8657ec122" + + "eaf4ad5425c249ee160e17b95541c2aee5df820ac85de3f8e784870fd87a" + + "36cc0d163833df636613a9cc947437b6592835b9f6f4f8c0e70dbeebae7b" + + "14cdb9bc41033aa5baf40d45e24d72eac4a28e3ca030c9937ab8409a7cbf" + + "05ae21f97425254543d94d115900b90ae703b97d9856d2441d14ba49a677" + + "de8b18cb454b99ddd9daa7ccbb7500dae4e2e5df8cf3859ebddada6745fb" + + "a6a04c5c37c7ca35036f11732ce8bc27b48868611fc73c82a491bfabd7a1" + + 
"9df50fdc78a55dbbc2fd37f9296566557fab885b039f30e706f0cd5961e1" + + "9b642221db44a69497b8ad99408fe1e037c68bf7c5e5de1d2c68192348ec" + + "1189fb2e36973cef09ff14be23922801f6eaee41409158b45f2dec82d17c" + + "aaba160cd6", + }, + { + key: "40ff73495fe4a05ce1202ca7287ed3235b95e69f571fa5e656aaa51fae1ebdd7", + tag: "2e619d8ea81b77484e4fddeb29844e4b", + in: "aa6269c2ec7f4057b33593bc84888c970fd528d4a99a1eab9d2420134537" + + "cd6d02282e0981e140232a4a87383a21d1845c408ad757043813032a0bd5" + + "a30dcca6e3aa2df04715d879279a96879a4f3690ac2025a60c7db15e0501" + + "ebc34b734355fe4a059bd3899d920e95f1c46d432f9b08e64d7f9b38965d" + + "5a77a7ac183c3833e1a3425ead69d4f975012fd1a49ed832f69e6e9c63b4" + + "53ec049c9e7a5cf944232d10353f64434abae060f6506ad3fdb1f4415b0a" + + "f9ce8c208bc20ee526741539fa3203c77ecba410fd6718f227e0b430f9bc" + + "b049a3d38540dc222969120ce80f2007cd42a708a721aa29987b45d4e428" + + "811984ecad349cc35dd93515cefe0b002cee5e71c47935e281ebfc4b8b65" + + "2b69ccb092e55a20f1b9f97d046296124621928739a86671cc180152b953" + + "e3bf9d19f825c3dd54ae1688e49efb5efe65dcdad34bc860010e7c8c997c" + + "d5f9e320ca7d39d4ba801a175b1c76f057832f3f36d7d893e216e4c7bbdb" + + "548d0ba48449330027368b34f9c69776b4591532da1c5be68ef4eebe8cb8" + + "fa7dc5483fb70c2c896334cb1f9cb5dfe044fa086197ff5dfd02f2ba3884" + + "c53dd718c8560da743a8e9d4aeae20ccef002d82ca352592b8d8f2a8df3b" + + "0c35f15b9b370dca80d4ca8e9a133eb52094f2dd5c08731f52315d828846" + + "e37df68fd10658b480f2ac84233633957e688e924ffe3713b52c76fd8a56" + + "da8bb07daa8eb4eb8f7334f99256e2766a4109150eed424f0f743543cdea" + + "66e5baaa03edc918e8305bb19fc0c6b4ddb4aa3886cb5090940fc6d4cabe" + + "2153809e4ed60a0e2af07f1b2a6bb5a6017a578a27cbdc20a1759f76b088" + + "9a83ce25ce3ca91a4eb5c2f8580819da04d02c41770c01746de44f3db6e3" + + "402e7873db7635516e87b33e4b412ba3df68544920f5ea27ec097710954f" + + "42158bdba66d4814c064b4112538676095467c89ba98e6a543758d7093a4" + + "94df", + }, + { + key: "5cc36d09c7a6472a41f29c380a987b1ecdcf84765f4e5d3ceefc1c02181f570f", + tag: "0d57b8cbea8090df0541354673dcb4e0", + in: "44fcd629f08dc1ef53c9ae0d8869fe67fdc7a2c67b425f13c5be8d9f630c" + + "1d063c02fd75cf64c1aec9d2e2ef6e6431d5f5ad0489078dc61f46494dcc" + + "f403dad7f094170d2c3e29c198b0f341e284c4be8fa60c1a478d6bd55dd2" + + "c04dad86d2053d5d25b014e3d8b64322cdcb5004faa46cfa2d6ad2ff933b" + + "c3bd9a5a74660af3d048a9a43634c0250427d9a6219197a3f3633f841753" + + "ba7c27f3619f387b6b1a6cb9c1dc227674aa020724d137da2cb87b1615d5" + + "12974fa4747dd1e17d02c9462a44fec150ca3a8f99cc1e4953365e429956" + + "5e108535b1f62e1d4ba18e17a52164418bfd1a933f7fb3a126c860830a87" + + "293d9271da736e4398c1e37fb75c4bf02786e1faf4b610cd1377fbb9ae18" + + "0655a0abefbad700c09473469f1eca5a66d53fa3dc7cd3e7c3b0411d7e14" + + "5f96eb9654ab94913dda503a50f9e773842f4d2a5faa60869bf365830511" + + "f2ededd03e0a73000edb60c9a29a5f5e194cf3b5667a694690384599d116" + + "f8d2fd93b2aed55b7d44b5b054f3f38e788e4fdf36e591568c41d1052cad" + + "0fcb68ca4c4bf5090d57df9db6f0d91dd8b11b804f331adb7efb087a5604" + + "e9e22b4d54db40bcbc6e272ff5eaddfc1471459e59f0554c58251342134a" + + "8daaef1498069ba581ef1da2510be92843487a4eb8111c79a6f0195fc38a" + + "d6aee93c1df2b5897eaa38ad8f47ab2fe0e3aa3e6accbfd4c16d46843318" + + "5fc61c861b96ca65e34d31f24d6f56ee85092314a4d7656205c15322f1c9" + + "7613c079eae292ba966e10d1e700164e518b243f424c46f9ea63db1c2c34" + + "b512c403c128ee19030a6226517b805a072512a5e4cd274b7fd1fa23f830" + + "058208ff1a063b41039c74036b5b3da8b1a0b93135a710352da0f6c31203" + + "a09d1f2329651bb3ab3984ab591f2247e71cd44835e7a1a1b66d8595f7ae" + + 
"f9bf39d1417d2d31ea3599d405ff4b5999a86f52f3259b452909b57937d8" + + "5364d6c23deb4f14e0d9fcee9184df5994fdc11f045c025c8d561adb0e7d" + + "fd4748fd4b20f84e53322471a410cdb3fd88e48b2e7eb7ae5dae994cb5ea" + + "e3eaf21cf9005db560d6d22e4d9b97d7e9e488751afcd72aa176c0fcde93" + + "16f676fd527d9c42105b851639f09ea70533d26fc60cbeb4b76ed554fc99" + + "177620b28ca6f56a716f8cb384", + }, + { + key: "811c3e356e7c793acf114c624dc86ace38e67bff2a60e5b2a6c20723c1b9f003", + tag: "c6e59044cefc43ee681c3eed872d02b3", + in: "e115b304c023792448794546a2474f04294d7a616215e5dd6c40a65bb6ed" + + "b508c3680b14c176c327fdfb1ee21962c0006b7deb4e5de87db21989d13c" + + "3ab0462d5d2a52ef4ca0d366ae06a314f50e3a21d9247f814037798cc5e1" + + "0a63de027477decdeb8a8e0c279299272490106ddf8683126f60d35772c6" + + "dfc744b0adbfd5dcf118c4f2b06cfaf077881d733a5e643b7c46976647d1" + + "c1d3f8f6237c6218fa86fb47080b1f7966137667bd6661660c43b75b6339" + + "0b514bbe491aa46b524bde1c5b7456255fb214c3f74907b7ce1cba94210b" + + "78b5e68f049fcb002b96a5d38d59df6e977d587abb42d0972d5f3ffc898b" + + "3cbec26f104255761aee1b8a232d703585dd276ee1f43c8cd7e92a993eb1" + + "5107d02f59ba75f8dd1442ee37786ddb902deb88dd0ebdbf229fb25a9dca" + + "86d0ce46a278a45f5517bff2c049cc959a227dcdd3aca677e96ce84390e9" + + "b9a28e0988777331847a59f1225b027a66c1421422683dd6081af95e16f2" + + "48ab03da494112449ce7bdace6c988292f95699bb5e4d9c8d250aa28a6df" + + "44c0c265156deb27e9476a0a4af44f34bdf631b4af1146afe34ea988fc95" + + "3e71fc21ce60b3962313000fe46d757109281f6e55bc950200d0834ceb5c" + + "41553afd12576f3fbb9a8e05883ccc51c9a1269b6d8e9d27123dce5d0bd6" + + "db649c6fea06b4e4e9dea8d2d17709dc50ae8aa38231fd409e9580e255fe" + + "2bf59e6e1b6e310610ea4881206262be76120d6c97db969e003947f08bad" + + "8fa731f149397c47d2c964e84f090e77e19046277e18cd8917c48a776c9d" + + "e627b6656203b522c60e97cc61914621c564243913ae643f1c9c9e0ad00a" + + "14f66eaa45844229ecc35abb2637317ae5d5e338c68691bea8fa1fd469b7" + + "b54d0fccd730c1284ec7e6fccdec800b8fa67e6e55ac574f1e53a65ab976" + + "4c218a404184793cc9892308e296b334c85f7097edc16927c2451c4cd7e5" + + "3f239aa4f4c83241bde178f692898b1ece2dbcb19a97e64c4710326528f2" + + "4b099d0b674bd614fad307d9b9440adab32117f0f15b1450277b00eb366e" + + "0260fca84c1d27e50a1116d2ce16c8f5eb212c77c1a84425744ea3195edb" + + "b54c970b77e090b644942d43fe8c4546a158bad7620217a40e34b9bb84d1" + + "89eff32b20ef3f015714dbb1f150015d6eeb84cbccbd3fffa63bde89", + }, + { + key: "f33691f5db2dea41e1e608af3ff39f3a6988dba204ce1b09214475ae0ea864b8", + tag: "6e50e70411201378c8d67857d7b631d2", + in: "439bc9ea10db4d2b08c7fcf2e8bd89fa9844f8061d462e28f174489e7514" + + "0f84e842040141cc59ce38f9551850cfbdfac2d75337d155090d70d0d930" + + "04340bdfe60062f17c53f3c9005b9995a0feb49f6bef8eaff80f4feb7ef3" + + "f2181733a4b43b6ac43a5130a73a9b3c2cbc93bd296cd5f48c9df022b6c8" + + "2bb752bc21e3d8379be31328aa32edc11efc8a4b4b3f370ee8c870cd281d" + + "614e6bc2c0a5ca303bc48696a3bd574ee34738de4c4c29910f8feb7557bf" + + "ffcfe7428b4703144bd6d7fe5b3f5de748918553df5453b3c6001696f3de" + + "0137e454aadf30cedfb6be36b0b908a38409f1a2dc202fc285610765e4c8" + + "6414692bf4bde20ed899e97727b7ea1d95d7c621717c560f1d260ab3624e" + + "d6168d77c483dd5ce0d234049017795f2e5a7569d7ad323c50a5b1170337" + + "4174a9977026c20cd52c10b72f14e0569a684a3dcf2ccbc148fd3db506e2" + + "8d24f6c55544cb3980a36e86747adc89ebad78d1630618d113fa445f8625" + + "b583cd7be33913c30c419d047cf3baf40fd05219a1fcec717b87a65fa022" + + "1a3aa8143062d77588168019454240ae3d37640996f2967810459bc658df" + + "e556de4d07263dc3d9158ec242008226d1c6aea7f0846e12ce2d316e80da" + + 
"522343264ec9451ec23aaaa367d640faad4af3d44d6d86544ade34c93518" + + "2843f6b4d1c934996778affa9ee962e7dfef5e70d933d4309f0f343e9606" + + "1b91b11ac380a9675e17a96099fe411bedc28a298cd78d5496e28fbbd4f5" + + "b0a27735d1144348e22be5b75724d8f125e99c4cb4e9c3a1f0b4e9da5146" + + "e6afaa33d02fda74bf58a8badee2b634b989c01755afa6ab20ee494c6ae4" + + "c2c6f17af6b53b61d2947d83a18eb3b8a1612aad5d3ea7e8e35f325c9168" + + "ac490f22cb713ddb61fbd96011c5849ac8e2fcd42db820349bdf9157dcc0" + + "0d9f9ed9c099b10c7194d48b623b0df43759734b2a2e5f8a35e7192bf9a0" + + "03dcb9d16a54bd84d922f85b6021b28aacc5264fe9e83deb48f18f864cbd" + + "367eb163d39c45b0eb907311a2a4b09fb26109088df782ce031b02f3caff" + + "d2dbe25b1cbde9f35ba7c47292a4fd49e7def7a28824f3dfda259a86c3de" + + "59257c255c712686ee47d128a55c7b9e8c546035eab7e2da420f32ed5c94" + + "bc12a34dc68eb99257a7ea03b69d6c760b0681fa24e4ca97b7c377182ab5" + + "fee30a278b08c44c988a8f925af2997883111c750d176b432735868208f4" + + "0de7137331b544f2d28040a3581d195e82811c945c3f9fde68fc21b36a44" + + "e1cfa2d8eb625f3102461539b3f13c660936a5ddb29a0ae791fbf52c2f69" + + "7bd334653f3605b362d91cd78569b41dbd09b2a5892440b5097fa08d0b4b" + + "291fc5b934585dd8d5adc80d573fdd194b2eae26dfc49f5e51c1f1607d7e" + + "87740702f244bf39ca1d52423e0ae84891dfdf4f43ef984c7a5f293a2007" + + "a1e00e39c757f064518953f55621f955986f63", + }, + { + key: "d115b6ac998a65b48b3dae5977abaf985258d3d1cfe1616cec3d6a77f7a75785", + tag: "b431c9318ec2769fc8ee8f5fc3c079c3", + in: "7e7eb43839a6d7616b8a7b1fb7144817904342a9bd34167051162941a6b1" + + "b85db5e587f76e4a53211755d5ab29c11822d7711a97b3f1ff5b21f2485d" + + "9c86241fb56cdd6796245d3112df11ad9a7344db44d09934c4efb280ed65" + + "80cfcafb5c97a32993cbbf4917183e0b7bb38f2ce2479c28e1d39f673962" + + "17a7010448dfd39a4e7f406c8bd2d804f993bb410fffa4eb57518a531ecf" + + "259a8af068230acb826d9ffc20ee0fc43885221a321e3928971bb28615f0" + + "d9f099f5b68a80503a910fdba0bc643c60b64837900be38770b6b30c362c" + + "4580722b5dbb1b9c8cd02a18fd7b5661d2c4d28aa941c50af6655c826690" + + "37312fbf9f1cf4adb0b9400532755011b40e8252bd0e3c7a22efb0ef9122" + + "1e04b4aa8316d4a4ffeaa11909d38cc264650e7ca416835ded0953f39e29" + + "b01d3a33bba454760fb0a96d9fe50b3e42c95271e57840380d1fd39a375b" + + "3e5513a31a4b80a2dad8731d4fd1ced5ff61e1fbe8ff3ff90a277e6b5631" + + "f99f046c4c3c66158554f61af2ede73aede97e94b1d1f129aaadf9b53548" + + "553cc2304103e245b77701f134d94d2a3658f2b41108c5a519c2c8f450db" + + "027824f1c0ab94010589a4139ff521938b4f0c7bf0986585f535b6e292e5" + + "b3ded23bf81cec17c8420fe67a449e508864e4cbb7eaf335975668f013e9" + + "da70b33bd52a72094a8f03762ea7440ce9fcd10e251837cfc9ccc1a8cc47" + + "0c67379f6a32f16cf70ea8c19d1a67779a9b2d2b379665e0e908a88b26e7" + + "8c9f94f17acefa6d5feb70a7095e0297c53e091cf98df132a23a5ce5aa72" + + "59f1154b92e079f0b6f95d2a38aa5d62a2fd97c12ee7b085e57cc4652863" + + "8defacc1e70c3aceab82a9fa04e6aa70f5fbfd19de075bee4e3aac4a87d0" + + "ad0226a463a554816f1ebac08f30f4c3a93fa85d79b92f0da06348b4f008" + + "880fac2df0f768d8f9d082f5a747afb0f62eb29c89d926de9fc491921474" + + "1d8647c67d57ac55f94751389ee466bbd44dbe186f2f38abbc61a0425613" + + "e9b6a64e6bcb45a2e2bb783b9103483643d5610a7e2dcdb10b5d78423285" + + "506b42a99b00a4fb7b619b4526bb4ec78299dd01ad894fde2f053e18c55b" + + "6047f86333f2690c2cb8e87d9834ab8a5e339aa346e4d9952ed62dc083e3" + + "b11a823a67f23fec099a033f127ebe8626a89fa1a5a6b3520aa0d215a8e7" + + "dea3af37907686c16521739a95d6c532cc259c497bf397fceaea49cd46b9" + + "ad5c1b39a36fdd2f0d2225fef1b6ca2bb73fe604646c10ba4c572ab13a26" + + "559ededc98f5a34c874cc25621e65ba4852529b5a4e9c1b2bf8e1a8f8ff0" + + 
"5a31095b84696c6381eb9ad37ac0db184fe5fccf3554e514946a33cabe6f" + + "4d617b549d28ad1cc4642dac96e0215ee1596481600d3619e8f45e2c9ae1" + + "da834d44aca216bba0efef6254503ca90339f2d7ca508b2722d50c08def8" + + "a736590fa44855cd9eb9979c743783aa26e633696739f2ae25ff7b72ceb2" + + "4dff4455b85bbd675c8cb71ad18386dc58c371bdf37b4b3875b98a9423ff" + + "3becfc0d0ba2aacab3ee7683cb3b345095fefcaca5751ca793da63c89428", + }, + { + key: "f3717306b9729be998cdb2c9d856306c5ae3d89da2cdcef12f86f6110c98d873", + tag: "907dba0f4849c7cf4570b5128b5f31d5", + in: "079572187d4559f24d8e48dc366441acf226a4db79e214ec3ee288acc349" + + "887e2e377419bcafa377d0151497b52e4d9cf2a02b0fc91ad9516482bdf6" + + "eccd1497954b53241bfb0bc5c04cc45045c6251f23a510060fee32721872" + + "bbc95cd8d400dff00bcac2ecce6229c7d73d8f85ed5a87afdccf6dedd299" + + "2d5c7b5b8090c47c737ded036ff0e9aedf02a2242fd9820be618b9601e73" + + "d3ba5d8f1ae9805cfd2306251704bc74e3546997f109f1dfae20c03ff31f" + + "17564769aa49f01233c9c4b79f90fa3d1433d18cdc497914046ad77d2792" + + "2588a7d0e61d4258d7d80cdab8503e3111ddca22cf7f39c1f80f1e16a68d" + + "9e21db8b53dd316dfa4233cb453a39a90101c60efc08514a3057db007e96" + + "507745bd4a0764ed8717a250bffb5fd1ea58474bdfb5b869681939693926" + + "40d832a3387ed4ac9cdab0d2af8fcb51b86e4d927097f1e79b5af96574ec" + + "d59d0dd150a0208978c41de28ad6cadf72a49279cffd6dc281c640f2e294" + + "4cde49a13ed390da1dd92e3011ce0f4a0863375a9db3f67fca1e3b8288a0" + + "78611161d7cb668ecdb932e1ff3733982c8c460eeeff2bca46c96e8a02cf" + + "b55d770940de556373a4dd676e3a0dd66f1280c8cb77a85136b3f003fab4" + + "887dad548de7bfe6488ae55e7a71da4097db03900d4b94e776a939530328" + + "83492da900b2a6c3e73d7a6f12ee30c9dd06cc34e5a3893976eb1de5864d" + + "32e792ac02e68d052d9d0cfc7cfb40b77728422f6c26cf68987c6b40fcfe" + + "9d660abc657360eb129de11bd70af5eb8fe350af2c27a6ece2cdf81b94c8" + + "0e68e8c51106497cfa5171236efe2d71d76b5dff3352af9b407dc5aab60f" + + "46b5683646f5b28732b7c750d351a08a507243d8e437cc4bef13a3edaa20" + + "5fc4e9968b4e563fa0dc965ba20b8e48bc188a321b16d3213bed69647512" + + "7a20afc1a3680ef261df6d37b017dee05cfc3a42e4130216e5540cf715c4" + + "e638d7d615c50bef576eeb19b3b15b2c2b454dfcef2b18161a143ddf52fc" + + "8e88fa71cbe34c92cd4b5a0adc81e5c33e11d2721bc1b95a9e693ac3cabc" + + "490889a8a42bf7e22375b679e8598c8faef22a006ed2da8ab1c08aaed2f5" + + "6d6f26649036335c0881bfec1e3a5346335c3b3707ee92173f1a7a3305c2" + + "933f78e995da8f1df64daf12b81ce23c8813c27fd4551103dc33561c2e80" + + "45b6b6770fa03498fd359a104884699d628020173edbcc4398b977e456e4" + + "885964840466176a490e7c513ba5d66090277c1ab1632a995a54f555a452" + + "1170a000507865b6650730aa6d6050a55959102836fff3d37e4773340e59" + + "2e56951ff9652519de4421d9c5b63edbeb30a3852a1ea110a9a29721aee3" + + "23d5a306de1624cecc87badc47aa87f489635d2fb60bff62ba67f5257999" + + "6af0a1f1a6fbcd8704e119196fcc289a6db6a4170a2cae31a1d30744b702" + + "2536d1526d41659c2dcc8b39c26aecfc0f8a707136d81b2827a158fd7386" + + "a537514471c213a8c859016748e0264cf3fbde10f40c620840ec4df99432" + + "e2b9e1e368e33f126ec40c572e841c2618d49d4eb098b9533b1f4ae00b46" + + "8d15de8c8ab6d0b650e599576f2bd90a124c9c6a0f911fd1bd8253bac272" + + "942cbdf8864f3747ff7f09d8a5a9d8599be7ee1744e5f1faf3e526cd2a06" + + "b157527272af9d38565957c9ce663c295766c0e0e464971c6282b70d4c0c" + + "1fb3b69856b34c089ad2b2c745f5a033cee1429c5b855581ee285278893c" + + "43a5968d9c28384b7abe8d072ba69089c938685cb1eab461f05314ad6d06" + + "eaa58512f8738bde35b7b15ef359dd2e8753cb1ed6", + }, + { + key: "9772c1a4b74cbf53586e5df04369b35f1fdca390565872251bc6844bc81bda88", + tag: "68eb7fc459ecc3be819485001ab438dc", + in: 
"e115cc2f33e367cb85c01a914b3a512404ad6a98b5b0c3a211d4bffd5802" + + "ee43b3fb07451c74524ec8b4eddbb41ca33dd6e49791875d716a44bec97b" + + "7c2d4546616939ffa3b1ab9b8ba1d1a637e7c985cc922606caa0453085e3" + + "5f2fe0bd2de129d1d1856ade975a3281a62965927d8bb695e54514e69558" + + "89361a2a00a1b24e62bda78d0b71a0d40147016fcdaf1a702331dda8e678" + + "d8f476dcc91698da1688c610ec0cb1d9b8fbcd45dfde6d1503ba60a01337" + + "ae5b2f5c854a82c3087779babd2e522dd92f4718cd9f8c649ac226745ca2" + + "fa1696442764758f67cd926369578ae87612790dc56ed9cda935281a490e" + + "5c984950ec7a4e930520d273a69da4ed3a330e532508e26f942961fed0e3" + + "efeed52a7b96250d723155aa39a8ae85131c255c32bf406b647de1a37fba" + + "dc61e302bb5b70adec4505ee66b3a1d1b7bfe9c58b11e53ad556d56e5807" + + "017bb30b71be94e8f86aaf1496e8b8d6db75ec0afbe1cd336c23963c745d" + + "7b4ba1787ceb30728f1762b46f6eaad5064c8029d29b86266b87f93142a2" + + "74f519f3281d8c1cb43c23eb184ae41f3f625cf624b05a48d73cd7783fdf" + + "14954a03ec1a930e9a954424eff030e3f15357de4c19983f484619a0e9e2" + + "b67221cf965e9aa8d8926595c793adfe0181050df8b845ce648a66df532f" + + "78b10c83ecc86374a4f8abf8edcc303654bafd3dcc7de9c77a0a9d1d98fb" + + "121534b47d16f75b55fdc2a5e2e6799f8a2f8000d4292282e56863ae422a" + + "5779900ad6881b78946e750d7777f33f2f013a75c19615632c0e40b98338" + + "1e9b8d35a26abe30242c45662eebb157e6d7a8a5519de60268ac289b8295" + + "5d4feb47b9eef6da65031c6f52c2c4f5baa36fce3618b6a331f1e8bdd621" + + "48954fcf0846afeeb0a6cadb495c909a7fe671b021d5b0b4669961052187" + + "d01b67d44218471bfb04c1a3d82bf7b776208013fc8adabaefb11719f7a7" + + "e6cb0b92d4cc39b403ceb56bd806cbdcc9ee75362ab4aaeb760e170fdc6a" + + "23c038d45f465d8ec8519af8b0aad2eb5fae2972c603ed35ff8e46644803" + + "fc042ff8044540280766e35d8aaddcaa81e7c0c7eba28674f710492924c6" + + "1743da4d241e12b0c519910d4e31de332c2672ea77c9a3d5c60cd78a35d7" + + "924fda105b6f0a7cc11523157982418405be0bacf554b6398aeb9a1a3b12" + + "fe411c09e9bfb66416a47dd51cbd29abf8fbbd264dd57ba21a388c7e19e8" + + "12e66768b2584ad8471bef36245881fc04a22d9900a246668592ca35cfc3" + + "a8faf77da494df65f7d5c3daa129b7c98cef57e0826dee394eb927b3d6b3" + + "a3c42fa2576dcc6efd1259b6819da9544c82728276b324a36121a519aee5" + + "ae850738a44349cdec1220a6a933808aee44ba48ce46ec8fb7d897bd9e6b" + + "c4c325a27d1b457eb6be5c1806cd301c5d874d2e863fb0a01cbd3e1f5b0f" + + "8e0c771fca0c0b14042a7b0f3ae6264294a82212119b73821dcfbbfd85bb" + + "625b6f75e4dc0ee0292ab4f17daf1d507e6c97364260480d406bd43b7d8e" + + "8c2f26672a916321b482d5fa7166e282bfeed9b3598c8f8c19d2f8c8b98d" + + "f24c2500c8ad41cd6ed3f2835737916d846f1a6406cda1125ed7740fe301" + + "d1144559b7c95fa407599ae40a795226513153f86c9b8abe7d8aa6963c99" + + "5646ec586cbf20a03a698cc0681b7bd333402d00fa8e15cb32300b5a24ea" + + "316c5e1df67de78891846cb9183a4b112c3bcc17bcaa5fecd6c1dbbf6ef8" + + "272d9269e7f0ba9f17050a6aa5f11cb28874360396ab647941f2c9a85cb0" + + "6a969919b16997b0827af8f909c614545f1ad638ebb23109f6bab6b49b22" + + "b2285cabbb998b3e1bf42771b4d4e52330b224e5a1d63169ec85fe1c7dd2" + + "46dbafa6138448420f463d547a41c2b26026d4621b854bc7786ab3a0a93a" + + "e5390dd840f2454028b7c3bb87680f04f084089bbc8786ee42cf06904d01" + + "7e405144d2fae141599e2babe71abfbe7644fb25ec8a8a44a8928ff77a59" + + "a3e235de6bd7c7b803cf3cf60435e473e3315f02d7292b1c3f5a19c93646" + + "3cc4ccd6b24961083756f86ffa107322c5c7dd8d2e4ca0466f6725e8a35b" + + "574f0439f34ca52a393b2f017d2503ba2018fb4a0991fddc1949832d370a" + + "27c42e", + }, + { + key: "d18a328b63a1d0f34e987682fe6ca3d48b4834b4312a17e99b3d88827b8d2238", + tag: "938b43b80cb3935e39b21dd8ba133cf8", + in: 
"bc2b0baf92580ee6c5efe640f2a029a791a3c77bec459be74cbc30931508" + + "d9f312c3a0944212831cbe4fc92e8f107f2f750c91bcc09f7624fa9a09b4" + + "9b7712cf5d619ea9da100fc23068ae2f4e353047e3956b215884bdb12235" + + "3f06b8ee98f36c3212493d61ae9ce151cd0453f3075b18a12d7d73da3de7" + + "dc2d98376cfb420069ca8148c511ca6bbae57572394a3c615a6fefb30c5f" + + "d727f964b4065ac9ee252bdd2bcae3e70162fe0e8069974e073f0a093d45" + + "be52d7de16a8f5f65c548aa6525822ffb00dc642530fedf355f7188ef017" + + "56384760c80afb61ad903d10119a7d615ec4fbdc79c490160bdeaf200915" + + "e405f2a921a2380c0ab9d2ac1e4fdc8ec4b907368c004458598efac13dc7" + + "2751e7faded538e3dc8b16590cac9b7ec294da0ad53e22cb9c05d8ef494f" + + "a04f6ab7c843c867fbe3cf1b4eb146d65339b0b03392259f12627a8e98e8" + + "0f4896c30b8ecd210acb2365539a872541921dcd8e1e54caf4936dfc7e1f" + + "68f3bbce61d325b447a8cce7f0fcad28494f2e47dae46b136594b5dfca7a" + + "bdafd6856f91496c05b21079aa55aa8c41628220a2cf0cdd755893375b7b" + + "b13d914c9a1d1db4a18f8fa36c55e52d0342352052032fb62d32fcd51cb1" + + "ac46f44b06e682db5d96d583cda03b966c650c03ae53542e8da1066b6884" + + "4a7e2280c664415e413f270b1fdcfbb40b9daa6131d071ee7eb1553dc5b1" + + "a50677971223dc316d2d326d57cbd529c88698facdca425e2d5c6b10d7ae" + + "cae28b8890aa44ede9b9193dbe8d1d8aa1fa580ca384b57eadcbefc96dd8" + + "bfccbe3b855a96f1fd4913035f817b75954ef1827c7718aab24d353e41cb" + + "a73748e14e0c2750d5b6a9752125708cc7ee7a498c7fbadf4186e7f8fa93" + + "bfdf281a49400f877621651b8ba87edda5231e80b758564e75139b61b1a9" + + "9fb9ec694f928ab1f47c6c4287bd4182d1b2be053380616e98da06f3ef57" + + "b570ade17c51da1d602b6ebc5a638ebde30d99bf4f91d0e01557c7dcd8f7" + + "9e5120143c935fc699eb5616ccd3cac56b5f8a53ed9e6c47ba896bfefe71" + + "2004ad908c12cf6d954b83bec8fb0e641cc261ff8f542b86e62d90e227f2" + + "a5bd59c9d390c0dd857f6da2b7624787a0bb31908bae84896890b283da61" + + "d8ec4f56eea38b22b438d6374b42243f9c1d94288874e53ab90c554cc1f1" + + "d736acde67aff55007fd4b3becc4d0f3ddd96f10dc75255cb0327aa47076" + + "2b3a3a656e33c87b02a682658b6cd2a75d9c0462803c9bbffa51441501a0" + + "3a2fbb2344aa13d27ffb9e98704ea6720b6a9992e53449688cd74d0648fa" + + "e8e776b0ea6bf048b2ec05341e5948cab0af015328b284ae7bd89a5f763c" + + "eaf5ca3e647a9f5bff7197e4d357e4359fa5fe30709545453149be510e3b" + + "ff86beeba5110c79c0215fbe9ac9339a8ac7d41f7488588ab14ac657aaf7" + + "d5c03a353932bbb2b261f0e83f3526c5e8e0c2348a10ab4eed6ecdcf9014" + + "7550abcb0a722f257e01d38bad47cdd5a64eef43ef4e741bf50da275720a" + + "0aee47adfc5cd2534b911dc269197c3c396820b303f6941e3fd85b5ed21d" + + "6d8136745c3eeb9f36b1f226434e334dc94be8a5606079cb7643136aacd2" + + "da9c38b2eb7e2b898bd8632003767bf0c87d00a3c2fcee48bbbcdd949af3" + + "3455128216709df25879b0ce894ac4f121dfca6b8c7865002b828696641d" + + "14ffc59924fbda50866fded0afaea545c8008c564a3a0b023f519a9980ea" + + "d541d91d1c07a739fd02286ea5660e473f80494236a68e84ea31aad71348" + + "e45055ded69c39941e31d51df257a4d0b0d8f025dbedee093f2b91795bc1" + + "533dc472020769a157a187abd6d8d52e1693e2ef56b2212759d0c0120e54" + + "c425d0084fdb3925e296dd6cdd8e677043a90674904057d88ebdea5998aa" + + "03562a790adecc4399352df43e5179cf8c584d95ef8e4b37295946b1d37f" + + "faf4b3b7b98869184e42ea8b304fe1059f180ff83d14a0861ca7c0682c34" + + "b48a70df8653bd8d9a26f9489e1271fa44e41b392e648d0e619ecdad2c53" + + "952094802eeb70ade4ffe096e3049867de93a824217e31364b18204e9681" + + "dd8e84ae2678aad155b238f59dd9bf9ce07e97183a690b2a46a8f3624843" + + "5b2f713e7d8dcda4dea1e3c4cf9692dda082322c51f7bb1f63d92aa987ec" + + "cf1355a043e21a7b8d60a2b97f18487f6fff4c77df92dbfdc9837540c518" + + "9fd9585731bc6e726a34ca21154b0499522c9d1016953dd0fa2eb6a92b6d" + + 
"14d6e3da5c12fabe92bd639e253983fc91041091791643", + }, + { + key: "46e8eb27acfdc8f4be622d8741c7bc414464c149e21da97ab4afbf3e07b98b0e", + tag: "56b5f49be824c7a19b19faabf0787a87", + in: "ced52b76c057872a60107194b432cf04b7be05e65209045d2952ea0284d8" + + "3e2ed5a15cfdc58071204573c18ab03765b4d5e63a601419e039c42075b2" + + "7ebb2827de9c6233d6632e6d3db9140bdb4a9291d53f33734c2dc8e24df9" + + "0764dc10e0d321d20fdf659bfa2a81bc9e04fd0f83448143276647c08bfa" + + "dcfe3bc23898eda655c9353693ed7b022f43eefa23c21db7660c5029ca64" + + "a6085d93029ea6c43197356f56b7624d4819f5008d053357d981ffbe7f40" + + "96d6c55d8417002d36189b04bbb2c637339d90f4910a400833a8d422d88d" + + "c816c1636e8d9f7f926c244a28d9e0a956cec11e81d0fd81d4b2b5d4904a" + + "d1a5f55b5ec078dcb5c2bc1112bbfd5efc8c2577fe6d9872a985ee129e5b" + + "953e9cebf28cf23c6f9c6a5e09cb09ab586c6a50e4389cd3110777591d7f" + + "0608a3fd95b99f6ba03984fb0e13c6bbbde3668c59f2f2b69d7caadffa94" + + "6f67e725d56280e59e66dca025a18d4616e81abd9801835bd94485bb2025" + + "dee81fba440005b181ee81dc1d7796cbec92e4ec1c9016c8e8073cf281ce" + + "f749993f09a618a4671d58b476feffa454600f82955c591882715148a826" + + "586f68bb50059914dce1c1c85e5e3951647c9964ec9316005209a58baeb5" + + "2c6d01e6b4c275c0050a7e2bdc52133e433b050a700b556d4314e5c041d1" + + "93ee47f47adc971aed1b63259dd5cd4f95854a71a947eae3d3d12d0d7b52" + + "c6cd2fef2d2e892607a9681d73ac3236fad21ee30a4f857010bc95c00d5f" + + "6f0c6b3fe50cd6452be6eec4f5f01542dc2cb5e2db1f52224f11348fe2a0" + + "5d1e5885f1317f2d06ce2813dc4c723008e836a2ee95d0aac66855fe4c3b" + + "1b2e02ba0700be759b1ef1c2a3123ee4ccf9200d8d4de5e0d503f04c2053" + + "66393d1e91b648392ca28389d976aa618b4796acbfe8aa356ecdce1f7786" + + "bf09af226bb9402317b6fa319bbb9248d8ce00b1f49f066c69d4df93266b" + + "938342cd7fd4b07c320c2409ef72d8a57c21d0c6d6d493f7ca94d01b9852" + + "e4fca6a9291e9060154bc38af6c86932645f53914709fc90e11db56ec471" + + "6d600ee6452041248ea8244f79534f793bfc1f2020855d817cb4ca3c48ea" + + "7f6441ce9af9bda61936c226d810086c04a35e8654fdc30d4b35701adccc" + + "016d5895b2121ba4066e44d694f6371d97911786edb73dc3020ba186a01f" + + "ee3dd6036c0e205a8d05979bad228fd12c0fd2fded6c7f1e4c11354d266e" + + "d9c2f706269c43cd90504997d93a17b39b10dab0ff083ab3bd06540ce612" + + "d08f46ce75a16ef330525737410a0d98fb3d484968f9c12edcaf50103fdc" + + "c14128ea4ad6c30b56247eab28197fe617e5f88afa5cbe003c63d423647a" + + "d3042626fafd2084a0582ff1b1efdb5baa162662048019546234e2f6b6a1" + + "d8bb971114aae41df7795b4f3598f2af9e8921a9aadc7fab6c780aaa32a3" + + "84865a4ccb02351dbc55ec92a3152d1e66ec9d478be5dca17b4a131b4a0d" + + "3d4420fc6123fef80fd56ca266407d58a7880d6b7e5ce2b6bdc9a3721071" + + "7feec573d83c83a2e3f7d4023f2f68e785cde728fdbf5054060e4c89faa6" + + "1c9dd10524a08811d15c627b3b4ada549a3fa1d8dd77c005daaf2addeb10" + + "0abf694da8dd692f113965cd6366a5a7b0c17e1f2a320243e2c90b01418e" + + "22426d0401a2c8fd02cb3129a14fdfa6cbcaa1f1c2f17706e9ac374a3458" + + "777761e986ee4c358d26f8e420d33230d198fd86704e77298dd4c40c5205" + + "7566ac0cd92993b21937c3a3b4a8b89110a97cf38c781ad758bdc28f3565" + + "60cf3acbedfa8e05b396d226ef619746e8e4fa84c8e00a7f0e6d652808c8" + + "9c9b123d9bd802624cfa949eb68af85ca459b9aa85b81dbc0b630856cb9d" + + "7e18cdc96b3c069a006dd5b716e218a5ed1f580be3e3ccf0083017607902" + + "a7967a02d0a439e7c54b3b7ca4cc9d94a7754efba0bb5e192e8d1a6e7c79" + + "4aa59e410869b21009d9443204213f7bceb880ccf1f61edb6a67c395a361" + + "ff14144262b4d90c0e715dbefce92339ff704cc4065d56118624a7e429e4" + + "cadf0b9d2e7ffc4eb31c6078474a5265beba0774209c79bf81a930b302bd" + + "0f142534a6ae402da6d355a010d8c82dc379ea16d49b9d859a7de4db6e62" + + 
"40f6976ae0f47bc583b327df7ec88f5bd68f713b5d53796e72e28c29e843" + + "6c64cd411d335623ff4f5d167f3c7b8cba411e82f03714662425c8e1bc1e" + + "fbf435d28df541a914a55317de0ded8c744a1c3a6e047590244b207bcdcb" + + "f4bd1f9f81210deddd629192c58e6fd73e83812f084ef52f21c67bea98ee" + + "17554437d9642e2e", + }, + { + key: "b41210e5ef845bd5a8128455c4e67b533e3e2b19dffc1fb754caa528c234d6a0", + tag: "72c9534aec8c1d883eef899f04e1c65e", + in: "7eeca180bb20d99635e36b9208221b2b8ef073fbf5a57f5190e19cb86c49" + + "89b0e8150d22ec3aaf56f6ed9cb6720284d13a4b0a34cd3d7f7fc7089326" + + "6d1893fa4185269fb806677ff490aec8f889896fca50d6c80d295875b1d5" + + "4a779b6d49305360b31011b48537157d0f323ff4e865d46fba6bd23a06c1" + + "46878cf9404360d325432312ff08ce495edca63a3c93c44d79c050e3f1de" + + "4b6ca5fedbbd43dbdef9ceb26d440a59c7e0be3a8e461c4f15b6b1e1dc36" + + "a71fc723ad593fb903e83d0804ce497fc49bfc6b6a602b9dc6e9891010b1" + + "4ca066cb1c68044c1ad837c638076dd3708078509cba49fdc54922cdf5d7" + + "715fb43e9b5a5942cb8950eade143577bc9dcedde58d51deddc70075e452" + + "bbceab1e95b5d003eb96bea69687faa6d50d9c605769cb4287b5d9924dd6" + + "8881c699abaa6f93e41dac7639cdbbbd0259099a3ed096f482a1fa322b15" + + "ffc379812c74e09e95f1bd3706347eac421fe56895e738a47fcd3e118773" + + "c3a7e7e264cc7ff5a53a80e436df058265dab9756fdf6913786a47e98bbc" + + "411052d58ffec9ee948e28cbaadaae471c5d828eaf3b3c87d3bfd495477b" + + "403da54f1418a15ace0d4d0df68f6a8f2b0457b127d5eae1f45ae055afa1" + + "8f058d5dd7eea559de3ae9378ca53f7d6dc9a9465ea1f945295f16ee0404" + + "7fc9dd3deda8ee32631d7af70c20edc1e12c5f8abd2e78f43dbd4cd6407f" + + "038efab144a24ea8a090a7ba3e6499345a60106220c2959a388e1a73d070" + + "1d854bfaaa86165a5aee934b615ac7f45da7c43a1e8f74613917ed10dcd2" + + "27e4b070414412e77851db5bc053e5f502bb4e2b2645bca074c18643e814" + + "4caeccb58be49ea9a552913c0616382c899635eea79a166988c206b9aaa0" + + "977c7ced89c4c7aaeaa8fb89b38030c44530a97187fda592b088198b63a5" + + "2dfad59a0a4c1aadf812bdf1881924e8b51b8fd4dbca8e73b2986b3ab484" + + "171e9d0cbb08be40ae60de8818bd7f400191b42c7b3200c27643f06720a7" + + "e0a17441f34131629388ac43955b78c31ea6602a70dd665f872e7669e865" + + "f6f40e634e8772d747608cd3a570e1726eb1ddca64f08582b022bb026eda" + + "6a913dc83f174ce3c18b9fc0503d3ac74e2fe45691d6dfb4af8c86d752a1" + + "6d6664fab4de08afe8858392fcc35cb9ea82fc42c42d48c0c0556267ea0d" + + "cc19b10f05e0318c4488ffe704b5036908f5cb938eebd3163503acaa874f" + + "592d945448fbeb93a877a26a72306a36e181745ba300afdc30cb7986919f" + + "3dbdc5c47ef1fa052a9e4aeeda3955f61ce2f30a0593a81dbaffebac5a49" + + "e5a8d1308352701d1ca9e620a67a89abdf5f0f8b1a0acfde5819981d4b77" + + "58799c0fe41030b86754837712af821c315301aa8dd50d1387b9fb92ee63" + + "10777e08229edd54e5e86b086ac281bd321082ef46ce298a6211aaa3aa4f" + + "6e55b5a4641220ec94cca73087760da1b1ac3e0da3f438214e691aa184b0" + + "535950b715a64d11485940dcaa3f72e0aa521002b1443f5e7880e2a85b83" + + "40d32db0fc4c4702e10f0fa24a35da9307850e945f608ad34d6cfdf6f2b9" + + "ff4f6b8e9eb5a883546578e2ff3cc5787322e4384640f42dc5bd05f432d9" + + "610dcf7c06cdf34762dd2a5e805e24aee8cebb3b4db9e4d1471da995bba9" + + "a72cf59ea8a040671b1d8ce24a3dce4fc86d2df85c8ab5e1eb2b0567c186" + + "4fb464f48c3ca72c7df2749542ed4d4be51b63769012ce3d06356856b2a4" + + "24995a2429a156ad93bc79c705e7b163149ce53a42c34a19680dfe4fd0f7" + + "fce38c30dffe9da9bc941d131f435c1398f8284a230e9d6e3992710074c3" + + "881d03aa309a9edd0fde7a39c33f6455dfcc5ae3fa20ea0e0d6549a43536" + + "b4cd8a2991a135b7d7a4265fb840318813091274414108f13fe191db7774" + + "6a5f4270f6d51a29ff523954f84cb76131d4abee79161dcbd97dc1ef24cf" + + 
"db1fade057dddee00a1e0de0db1afaeed1b535f7bb402afa3b297551fd14" + + "8c8f3e05f1351d3a8ee2948daaf14e7fc448c4670c906ae076eac5a7c656" + + "fd5f9cd937b91e26c9e5adb43c138f8d65e447b0022a524e059f879c6e27" + + "4ff7e671f75717233aae70853d5bd7bbb41b43c47bb08d6dc2f54f9ec606" + + "9487d1267add72403d01552a3d138abab9ca8a0d2dc32439759aa5695f70" + + "1a17d28dfb85850fdb55fddadcdde4d220e4b05821e5736d346e7dc9c945" + + "72743366488b1de8975184771361894b6520e3407c5c2e38473430969e35" + + "b106024da8618665d58c9d084824a28991a33658d6ec702139e01b65b7d0" + + "cc537a644caeee880657803d95f5f67816948d5ab362922f8ffbd531473e" + + "b0ff8fde2afc37a4abfa28dbed0be1b3d4ed48a1d02358e8403905d33b12" + + "3066e7a9fe2491ee9eb24fc9de7dbd322c8ddbc5ebcd0d92cd102ebac96b" + + "90e2fd784fd6d4b699304df23b17d963080a013794322690456be525c071" + + "b78fcd2d1148026e44ff14c4d0f942cd44d2b3263f4a93b79ec7a618b4b0" + + "d77ae7a1f6e6c7c7e2f498b825bf1954df348bae45ae1d7c87b6787f1212" + + "60c9a724429a4a2491ef989f65acfdc72fa717486dcf1984905218e11cc3" + + "970a09d71061e6df751f100abfbf", + }, + { + key: "d9b0dc303188756312c12d08488c29f43a72e78714560fe476703c1d9d3e20c1", + tag: "6b9782f2a09b59653aa448348a49291b", + in: "dbde1820035997dc8a8ff3015b4e0674e7ce7bf0c2d994b7977f2d91b49b" + + "f200995040daeb1218a0f4307b6b8211913992b070d321bdb947b4ba5017" + + "a0885e7e5502710a75cbbcb56d49e1bdc2bc2afa5a0e83851162dec41340" + + "bafc41c5e11fcbf4ea2ac45bc57def4742281bbf734777f83c9ae1ea3d5e" + + "d42380230570f59c40d5dd9a2d89b75fa3c92664f12a274d965ed8de79a8" + + "b37f3763939ad21d1703ad794f617c8b32b20cc4dd7c1b7f969a65e1bafa" + + "f6c43f30c9eba256f10201910e2cc31a9b13a46ad29257024ef8f2ee29b2" + + "ee63cc5b6230ab9f87cd5cb534f4b0bb08a790466e0d57b849fffa1ed21b" + + "fb0b27804e3ff9df7bebf14e100cf91691a493e53870abfad6321f6711c5" + + "0fbcf1f0b2c1e5231d6c0a08e710525176355f6f82bedc1f787f0d3cb41f" + + "a11e91ebf9f4cbae46035a371232d63ef0d8bda0355af8cd0a2f7d1327d8" + + "0ab769ea0f1da0f76ec99cc737b5ce84675fa8a9ac0c98342bb82b5848bf" + + "656d35327ea01a1b09d84ab974c307511af68a30cd6978b529a8f58c68a5" + + "9d476062ace8897ec0d1a90d5d167e29ebaa6f46d93d697760c8771417ce" + + "94c0f3698985a98702833d1b68641b811840ca3d935386dbd4600fbc81c8" + + "728c4fd0e4588be739a048f03bd4ac651ceecd7e2fb120fe7190011f957f" + + "cbbfdc025f1ca0b356208db8cad87fcd53c5d3a30a7c2a48140ccd4cdb49" + + "f3961cef742caedd1e848bf3cacafb0da030416bf3177877aa0bc5f9d1cc" + + "41fafcb829d5e3ace9394028683d712552579e024084a6b855830ad9f567" + + "ff58f05d3ec263eddd6f56adec378f167e8dabbeaf7d0a9e65c71660314d" + + "6c8d54beeca2711113fbc32a2ff8c0daa8373278d10085d2a0660ad53f4e" + + "1ade74a483be180180acf9e9ad3ea5bdd9162ccd69599163a451c6837d5e" + + "a5e115bd9a560f395128ea002ee739009a44fa46078b18959933fb6e866f" + + "eb4612a56ce93b1affcb95fccaa18d71a148582ba1412a5daa07404fcb39" + + "c3cb4a2519cc506c1172c6c326016ae2e5410f6a438569f35a50d45cbf3c" + + "c46188651aa22c257858f60649cee8c05c75953ce49358dfe5980445fce9" + + "614ccd16d333ad236e29d204691ca0bf46f29da954bcaae52e41016556d2" + + "f4cae1d37565bcbe84de1b49f344d0200478a38187da29c155cc98184d9d" + + "33dca088d70054e0fce321f7a90c48a14963d0ace2b4e7a24b21c14a5e67" + + "1994fe1f7d22d1135d4df9268dd18d323fde3603288735626a5449582d35" + + "30e2c2225414e05a8c7b987c873a82e272a5d83e59b90f3d7264631d6ad0" + + "4a0cf3b5e96596a66ed5bfbc24ab6e4870aeec0acbad2cc5affaee06de32" + + "dca06f175bf763cf8e7fdf95941a177e934f0078be7dbaa4c9b6f5c16b4a" + + "5607bab5d56144a6ba3c7d9a084b8d1f4b24b6f9754ed207b230d3a2cc26" + + "259ccc725e1f8a44c4df8143e13edb5ebf073e2c9d2da5f1562df4feece2" + + 
"f6480987f093f642eb7afa3aa92dce2a8b60bb925cd2d11cf6c2ae7d2153" + + "1a9c8f068d71d0e682023932fe64e956a49347aed22b21084c4a84480491" + + "244ac6b337b6d12d5551ad5684766c68bacca62bdcafab6603c81bdbd8e6" + + "80d9d8b3825eaea4df023142e840f98ee251466a0422d810a54726a9f03a" + + "7e0afeb0043e60e2ba4908f951d2e87fcbc372096f2a9f4f2a95ad5faede" + + "3796b11ecf4401c3ee3d268bd8c46476c61e0ffc5c43c0f3c58c79e20f75" + + "520c102aa3c260972a870fc50f8841fa0553a9e30bf37ad282fb51b34adc" + + "7a933ca1691a8a706605ce0b906fdccbe954f8e5f2f63c42599a483c4be7" + + "3a041ef90ad930fe60e7e6d44bab29eebde5abb111e433447825c8a46ef7" + + "070d1f65862b30418efd93bfea9c2b601a994354a2ff1fc11c383e7bc555" + + "9e7546b8bf8d44358b1ce8cb63978dd194260e00a88a8fd17df06373aa80" + + "04a89172a6051bd5b8cea41bdaf3f23fc0612197f5573f3f72bce39c9f89" + + "faf3fb48d8ca918586d4feaea7e0f2a0d7a6afca096a081af462ea5318cc" + + "898a9cc09e8258a837559570cbd5eb901e8c0e04ee88ba31c81a76b000b8" + + "0e544feba576b3eb5272b53e46e96a0b35b9c759caadcec61444f8ec47c3" + + "45a1d2304e2708eeddfbfa75a98eab3493889047d690e84431d445407fdd" + + "99560c0bdd287e0944116f8ac62ab992ed3f1e2b415aea784b03c6904795" + + "f4326ff60bc839615f2894570dc9c27cf928ef192047528a1a19ec990978" + + "3b0d1a13dd4baf4a19e49bf798975abe2ad167dd574b32b3d0c22aa4d9b5" + + "2761e8f56cf2100fe5a39fceae3d865f3724d4f299d07ff899fed6baf7fc" + + "eb7189357bf56cf94a6493e61301b43e3ed158cb9c7a0e615fd9888c2db0" + + "7f7689762f62ef6b3ad4125e06b07a422f5040c3aa8b8f205d68356c9225" + + "56fc4c976165fed9599daeb297498ecf744bf6c7dc5e30604c461ad99402" + + "2eea0fb6fe33f82a97b5c272fd24162a94b761ec7e52173e7bb42e88b343" + + "64f5fa2c141ed04a86b8d00fd9c25bf77a8dc3e63f5543331405be6bf421" + + "6a891089b316aa4f887cb4aff0dfb4e80c2ccd65ddd9daa74b17b4411c0f" + + "c849dc748d9b138279dcd9ebfc6e6759a53f5c28a41bb82107d71cc161fa" + + "81291a8290", + }, + { + key: "fb70ae7ec12264ff9f51124da188e5b11dbf53cae2671363f6054b575b1ddcc1", + tag: "d9ab81fab28b3be96fa3331714e78c9a", + in: "c62edf20b1d53962b42386eb570b10378f9764421ecbd7c4802853332747" + + "19ff4c89c06005050fa9ba6579a844060eb7ece6c43bab520e683e0f36ba" + + "49cba259edc6ae35d41e0d7812a7d5edbe4d90cd5e0504d16f4c3f70d01f" + + "5a0313de55934b661ce1ec317968c2c4de60f45c66cded8c10565a1ca6d2" + + "3a84bf182df2fcb05956ed4d46b49fc0fe3bd23961d9466fde070341ce41" + + "bc6e148449360a31634fe10e91082d82def90d9da2c250ea72c58add2058" + + "d046b4392b78bc3af5b3936ed568733e8ad5672dabbfa3130a6a535ec73b" + + "da8e7223535f49f96cd35d56ed4792c5cb7076720d5461d96a2692b2ada5" + + "2be08fb7bad15d15a0108143790024f0f15f5adc275e783aa56b70844061" + + "e30952a040e4cb9650f2a010417812790105d8f58bd25d99b0db3cb16229" + + "3f6322e86cd5b0bb1505a7b998fb0f81d1e1915faca3c2c8ddea39115507" + + "80339430a7955521839deff5b301f3fad54edd5ebd2ac4ec9b1795cb4dc0" + + "e2eb62ebca8e886c3f1e507d10a0228c3027b472a7104b815f5ec8dae55e" + + "0783ff7ae9a3e6b99e381ad788206b135520cb870ba0cdbe876feea843b8" + + "5a82adc95a6d71c555f798da92b82daf0abfcdbc82ec30b1f12d78490b06" + + "7315735017a94ac150b44dfaace151896f873923310ffcd41e91bac04de6" + + "d70ea71565948c907ab21c4a23703fbbd2a8de6d3095f3d8f901538968e3" + + "60e7bfddb9d22036b1c23f4f5f1b2ee22623426a2d5de68c1e1a38e38e08" + + "e2b5670aac1edff69e9c73c2ca56cb69c709009ef1d541aff1fdb2b40c92" + + "9b87f162f394b76cdbba1f5605993e4dd9c312321d59b0aa5c6e33be1b10" + + "bfd00b92d4c02db064d0e4a98f2913c89051b0f0ead163deb5087b6466d9" + + "84f57553b0fa53850eaa142e072fd91802eb9f0d2eb7318dd620555e6ce1" + + "86706b866d41cf6ba81f100342faa14d801dc6f3d522db38fab17a879fcb" + + 
"b6acfe922163505bd23a6842f6ef6397ae5fb6e6016421998bd43b0142b0" + + "3ca3b16d6ccb7a47891c75c687d791a930b26aaa2e3412e7aa16e2cf1501" + + "7bf6df6d2e1c289af0d7ce03954a60c1dfcee5e4b3da51eb43ddd14faf59" + + "082005d0c8b104561f66c002ff426be60be769282fc5685cfd1968df1941" + + "73667e48e9ad681d35757f1199f1d93377bbad093c8cc3efa2bcb6ecb703" + + "694422772d15aaa58cab9e9ab277ed510f684114cc4a44ccadb3eb1c9a76" + + "d8619a9b7743106df6fb6f927ac49b22ae5bb9a9a4d231e340a2cd0e3282" + + "53f6d75df694826f60e4b3e758398793eaf73ef5d4b56cd1471e16400f40" + + "4a947e9737f4f874fe09a29ad799f4525156e3abbf0585c3c3c0a3744c86" + + "5d56db3d2ecba6bcbb1adcc8bf5f3b2a2d46d3eba18cda55201598a8112f" + + "d8f14e205f0e615f081b8ff6c5aa6669da776bfc7c34d5af4d0b26d0d819" + + "f6aacc53cf3c6653138b9a962acee9d6ea01d280c35bb1f05d1509238ccf" + + "004c5013167f804d1780d9f4ef9d45742fccac346b0472bde24ff5db9ae0" + + "16455a3c02256358fcd8e6a9aae94f8a37a1a3da58a889bbe3d295e16544" + + "2e580f59bdd31c92ffcab40c49c1cdbb4db1dd4882b66edc10fcb1704203" + + "c518c1d8d4c268588ce13fc38e0210aeb47d11d2603d4b3de5c6ff5e969b" + + "9d5904abb282b699bd04a6e9f1cb323679e30400d725aab128a032745dc0" + + "be05a46b02b34b93bff02523cd8498c021fc35a488f164a70ef1ceb873d9" + + "14a681d3a3a34cc76bfd5a547e2630d7741a284511bae5897d9f7a197fc2" + + "456af5c6cd7e1a93d3388c7a990b5feacd7749cf39fdecdc20adfdd540c6" + + "9d330195db7cc0d4555ea5f5356a3647e2265399f153c34ed1e217c5dafd" + + "c2c5dd3d566c332c7ddacb0d76ecd3a0ad505a4165443aa81b0f43cabfb4" + + "62942fe74a77c22b8f68a8b1a6d712d1e9b86e6a750005a3796ba1545396" + + "13170906d228dabf572ab969c762f8b296054f23d5d4a37bff64bf9cc46f" + + "43b491b41101256018376d487fe8097f1653a7a9e99e1ef2492600598fb0" + + "bbb7df8270be8b9106126d6f491f8b342a96ab95df6133e883d3db4c6a99" + + "402aeb58d371263a32dcf76d33c8904395b9cf0016fdfc15608eb43e20b0" + + "99cbe7455f7a76f69bba058ef96f83ae752587485657f89c7f26fde7fbeb" + + "a82ede581ee92821dc13b8202930aa58bd4f1c86f68926baca0d06fee642" + + "ea8c652d226af91a9638a0244f1a03c7ce56969b87cd5c1f86110d192e0b" + + "98dd979d74acca6c1956b1127d9a1f456053d17974081ed8ced0faa4293a" + + "319e5b25ba285c1151214f52c283e39c35af51c4572c8e395b7856697bfe" + + "dfc4145ab4ed0bdbe43ba509c06a196ae6bf30d7582550cb546c63b51833" + + "cb0dfff7196d83f6a1c6d6d712cce2ec1989fd9ff5a0a22ac5022b49d566" + + "58f196703e4809e7624fe7cfa6c13b378f5aac7e66e657ed7eaa942d1a00" + + "544a947199f24d736b8976ec2cfb563433c49ba131bd08b63636854219d4" + + "c45100c98e3092773ef492dd9210bfd8f54cfe2cddafcf5c05468d90e620" + + "0c2ef99d17fa6992cc45eff3072b7cfd51cabb07ea3019582c245b3ff758" + + "0302e88edc2c13fc43646ba34de37338568baa66ecff3accfebad88d143a" + + "fd1c3b09ae39c501e3f116af33b0b720d6c2baf5acd7f31220788b2f9017" + + "3ed7a51f400054e174d3b692273fcab263eb87bc38b1f486e707d399fe8d" + + "5a3f0a7ed4f5e443d477d1ab30bc0b312b7d85754cb886e9", + }, + { + key: "f7e7affceb80a0127d9ce2f27693f447be80efc695d2e3ee9ca37c3f1b4120f4", + tag: "41c32ced08a16bb35ac8c23868f58ac9", + in: "5a3607fb98eaea52e4d642e98aa35719bfce5b7d7902950995f4a87c3dc6" + + "ad6238aadc71b7884318c2b93cd24139eed13d68773f901307a90189e272" + + "6471e4bf9e786b2e4cf144764f33c3ac3e66521f845f6f0688f09eaa227f" + + "e71033b0f74295f6ddb91fe741323f2b54f420cb9b774d4291b06219f1fb" + + "4410b55900425c5e6fcabec76a5c2424d637a1641db6f0f6cad564a36a91" + + "0f49894bfd598e91f38ceea65e8253c1284f210cf7b50a96e664e562f3cc" + + "01c4fc490fa6d4679fd63fbb3ed8995a8a05166b573e92d22ef4370c6aac" + + "74ae94c94177e5f71143c6f340efceefda679ae76f6ed7f26eaa4848a8de" + + "8c40894316efbb06400f9695b18ba279e8947c032a84a40ca647d9ace457" + + 
"6dd0082494d6bd7be4e7928e749c78110af8774a5d43e9c9479964e2fddc" + + "ee51146460eac734311225d08c60706e40f298a7cb97f369ef599be097ac" + + "3bf1c275497bbd68968a235fdf8a61bc7cfeef0fe451bb04e662ca39f34e" + + "a8e3acdd0befe9762f9eeb275c0cdd43c80fc91131d1e0e790020975ab65" + + "afbea81f303ebd86760821efb4cad7cc01fd6d6fd194ac5ffe7703d890d0" + + "169e21b444cdbaf691fc741a5d99bd47357c37785755fa72582ca4754a03" + + "b4def86ded39aa6d9eb3f38801077e6d17e3cee3fb57ae83f30c79c3cf29" + + "0e2739c6b7323612cec3a561ebeadb4faa642f150323aaa9d270658c907c" + + "4c1610a5e1834730c08be3379cf1abc50c30e2bf01ce903927c27d85e135" + + "3db9e216dda8860c45925e2bb791abe5c8281ee6d16607bdca87f60662dc" + + "bd6e20224e7f009a86db66fadd8e37e0a59559328385090c6953cd20bb61" + + "f28a734fb056714f5159977f18e5c5f11de75f7a00ba807e47a29e4da32d" + + "5c67ec76ce4d7b669b5e6ee17e1df7c673dd8a7c87fce665cda8adb9547d" + + "1dccbdbe7be44846b4b121b0bfa65e4ed530789510d79bc4477e50178060" + + "f2668ac8956f39ef422ecb0e4cf90b8ce508552eedeeefa6c7d1bccc077e" + + "8088bd7e0e6aaf0bda9f11c412c270ee2ad6912f9808f9344a4bb137bdac" + + "b5b9372b00b0de026a8f5d1fb13972e1290b5005689f7636c43aee2fd443" + + "93d390371ae573f0e064b2d7df552b9adf04bf173d71c621795b9fb503dc" + + "5e918536c6ad25ce4a76f70e6b752b6d44be321187269a19bcf33ec899ca" + + "40e88b4eb23217095a85057bf95d8a54812cae4a7d32e0c2966a21376110" + + "74c6c8c3dd45a553c43c675d23308709f91be0b235d0222aa5e1e1ce08f9" + + "c6b45ceb5b47bcd7d7b2d4380bcdbd6eced452d93e6d8cbe18123277889c" + + "7f86b15fb991364a501fbf5d8244f2e3332ea0ab49e833c6f765017a4006" + + "cc7cd1a0365945a8d8873cb21832b210c83e451c01ac949de2fb0f7a420e" + + "405bf64eb251c6f022181595d68174b91e503187d3b3f49b60c23e44ea40" + + "ca20311305b413047bb22e89672758b74d6bd1a06decf09e9556421087a4" + + "0c1d2c44c5fb13d4d9625581ac4ccef1a1b5eeb5689aac5c0291aebda276" + + "50daf9d4396a64d02c6d58bcbd609d9a0017880ae0cbaf02ad0f1fc8d1b3" + + "ec987ffe13102d77352690c9b761bf13ea0b3a8ebad4a0823817fcaab4d0" + + "9b0bf03486620761dc77a6ba007ba07153b17425c4026597473e78863cbf" + + "430c0e5e9b04a83ad11506b61b8d9be3aeb06b5114e0d53d4724863eba12" + + "4f3b974bdb0d02743520409910621cd730c97ca984fe2921c38055f83ee8" + + "c4611db92e52d8ea51d89203e89df7586c574df15f3a96ed5a10bf04cb27" + + "f9656b5b11cf35fd21360b029ab26e9a741c6b3e6357aa1a41de2cac6e85" + + "f9a49e3441e60a60e74f434e1b8cd4454b11962e5507ebf904e9d6c52a7d" + + "9722300517c434758fbd6191f4550108b143eb16c0b60094fdc29327492c" + + "18a3f36737e506fda2ae48cd48691533f525acfffb619d356bf8347a8bbb" + + "4babdc2ac866e497f192e65a694d620687cfb4f631fbd6ae5d20ac2e3a12" + + "4d85f9391a240b616d829ac2adceedf8f3451ee77e4835639b13c622ef8c" + + "48a181fc7598eacb419fa438d4046aa971942c86b36eb8e16eab67105783" + + "d27fc56f5b66f35451b2a407d4648a87ae70807e45bccf14983b3abcb198" + + "d661d562dfcb00ffc569ca967171746e4e36f839946bc7d2ea9a0eda85b5" + + "a5594f6a9c1b179f7230eaa7797a6aaf8628d67fd538050cf47aa654778c" + + "11dbdc149458c1ec2233c7ca5cb172356424eb79479b6a3eed1deb9f3278" + + "5282a1034ba165032b0d30733912e7cd775cdb7e0f2616b05d521dc407a2" + + "ae7dfcf46fbae30547b56f14dbb0ead11b3666666c45d345cd5dbfa200ae" + + "24d5d0b747cdc29dfe7d9029a3e8c94d205c0b78b56d5e18613b3169bd44" + + "1b3c31513528fe102f9bac588c400f29c515d59bbcb0725a62c2e5bfb32b" + + "5cf291d737e67f923080f52d8a79f2324e45a3bd051bd51bac2816c501af" + + "873b27f253ef9b92ba4d7a422e2fb26a35c1e99eca605acc10d2a60369d0" + + "1f52bca5850299a522b3aa126f470675fa2ec84793a31e9ac0d11beab08e" + + "2c66d989a1e1b89db8d11439ad0d0e79617eafe0160e88384f936c15eb15" + + "ece4ff00e1ba80b0f9fb7a7d6138bdf0bf48d5d2ad494deae0ccf448c4bd" + + 
"60f0788d3f2b76de8ad1456f7572bd0ffd27bc2836d704d95e9c0df34571" + + "9dab267dd805577fafda03b834dd225ad9714d2bd182b4103faa5975180f" + + "90d5d6cac1825a19b9d4c87cc825512ae9dbeb33d2759c990905050f960c" + + "db3eb364c15b593524c882902b2a1d7fe40ea3f54fb0202fd8821463c7e3" + + "4b02a1209ba0048a9805f0468a13e03d18009318ecd92042959be263a51a" + + "407f1e660632c4247419659a4e073a8e9cd4a226763a7daea464d5427270" + + "7efd053cb4efc0504602c4f63e7d247b55db2ce1c07138f585d16cec97a3" + + "0731d5aec2166cb4de41695feb76280cbae1af8a2e67c2d5a3ac5487ffe8" + + "640f308ace6137e83576b79d586b663122221c20aba7a6bf60f73958f436" + + "59f087f850ba6e2d7fd862249c5fa6b20e3e43d4f2aa10d4c9cebfcbdf02" + + "6b8d103e4f89b93dd8af172f421001c8b162bd6d0b847a58ac108b6d6cc4" + + "9c7a9ba069deee", + }, + { + key: "e3d21f9674f72ae65661aebe726a8a6496dd3cc4b3319f797e75ccbc98125caa", + tag: "3c95668130de728d24f7bca0c91588bc", + in: "baaea2b4b4cbe9dbc4fa193c376271f40a9e216836dc35ac8012476e9abd" + + "43dac6b9ce67dc6815904e6c84a5730cea0f9b4c6900a04ae2f7344fd846" + + "58a99513ffb268c6899dfe98d605c11e7dc77de77b0d30986f3051754503" + + "7c26be7b719aa9ca1140cfdf4c586b7fe726a8bc403249396a11cfee0a6a" + + "f6c5e72259785cfd13c2897384fe527100170001ea19106aed38f7d5d9a7" + + "ad43f0b41451e19989192a46b4f9734a774b6304cb74feb7d83822044a24" + + "2e51d55c0b8318e0439493bd1a57cc13f6079166cabc46877d003dcd39b2" + + "c0b90f6b32fc77acf04a6c125e11b35d91e2b18401cd53df4aff804e3c67" + + "a8bb3894b27c6e9b0070b53a85aafab0c0a253f9cfd4d3cd3be52428385b" + + "24a3f9f71660ca2c38474d14a0309e2f400e2c21af6e379099283ff241d7" + + "51da5a96a8dcbfdc43b913b29cc8cf8020eebb4a67f5bed31f2e383f8656" + + "8c815ff172382b425e95902e80f5fc219eccb51b656d37b56660f749e5b1" + + "4976a23648680a472d02ba71476e0afb29a0e084984f4eac3befbf8dd802" + + "2b7dca4dadd18bbe58e49c49ce48a06a71557a9a620c51e2623f818e4d62" + + "c2564c7ba04595cc109685869b183faeff2ac7a65049fc57cb10fb01951e" + + "a525332782d691f9759ec2ecd68bebb9c7aece5d522a08ce7830be520db4" + + "c9d60a2e490eaa0c91e37b256a97f84b39fe3c77953748c3b86fd84e9547" + + "a298c049cb28b8c85d59548b8dce635d59487c9de615802d16a8adc4c0e7" + + "80f35b9f10588a431b39b499dca929ab9d225f26e5721820627fe62427fe" + + "06d5773a50878b6effe840dc55bd3ea0c35168f6b6a972d57e8f88c5993d" + + "1ae33e0b7e9459c123753b518c184de7aaf429df078c9a18a29af77c727b" + + "796f5c1a501fa8105ee873c4e78c907142eb19690638a182fddb413adb06" + + "d66db19c7f6f46dac582bd72a6347b4427a576eb769d233febaf7be8f768" + + "337273c12253924f15653f9f3602b783703a81454a1dd7a8772a9ab1eeb8" + + "51be33e0c6c0708f3cc2012cabe8e2f0c38e35372abe27bc148fc4e1054d" + + "9d151f80aec0232a3a92dd77928a3678ebd7d09ba7b4e1d83227257292c0" + + "b8bc4a76de36bff6c9deb383029afaf4f37d5b935dc080a18665545e4acc" + + "195da0b9545d8902408886204b64f8548b32d012e0cdc520c17d9fb3be97" + + "800c2e2b945cb09a75a0a49e5d4d81c4194d91e839333b2b9b9e34d588e4" + + "e20cc1e911ca0a1429fa70ff063f0090fd842f89dfc5cc44affcce4e1e1b" + + "8b11c612f66b074c03ac2a055fd8f51ac9ed4f2e624589ff5730721d077a" + + "fb4c19e43abf8cf3ffa698362be8be51e92c2c91a4a56be64d9ac6d3fbaf" + + "5536a24c7fd0adaf74ca84c508e5e8c8bf7d4254e0c44158bd26acdf3f64" + + "e78438b3aaff89ac9986cef1e3a88d5bf2016340367a1cacd01ec167ec6d" + + "185d93a2a220d718b43ce1d429d2cb598605660b030e51e8d75fdbdd5b8f" + + "8677675e196a40a88285b18b24c5d2d594bab3d457e6f9e503e38cd470a6" + + "9ff8037c9a0a0f110a434335d954fa856a3721e0edcfb14287c3dd9639ba" + + "4db32b7da0670dd0a872e468e3819741d0d4ecf0a4f7a011bbae1493c01e" + + "642757491189f8664be3ec6437c4f3c76abfb0276e44a4d28871d3487c2c" + + 
"ce2f230452cb06184bb8620919659a7ba0a3d5c12ec25678b03403715ee4" + + "acb6a53d281036d8f3a085143cf5ecc3a0c6c92129caa7ac1f645c7bb95e" + + "4f63da38dc319e2ccff4a9006f9b9b1a38c4c39f6dc686bb82d43fb9fce4" + + "0c767d3ff22f52c5f9900130c65bb6a9cc7408a777d49b70946665f4a733" + + "5099376b276a43dc9a6382bb2d40425f6481b1846148434c672b84dd7a20" + + "33deb5140d43ba39e04ffe83659b6deb48629e1abf51e68748deffb756a3" + + "ed9e0807506b248a024cd509f539f4161366547c62c72933584e851599b6" + + "82ec16f1d79e9c6a01cff6f51ba7f46b67cdca09f3ab8496322b990a6116" + + "8d7574854a1cb1cb8f30a303dbd13a095df56dbb940dd16ce79879cd2d73" + + "80a419842fa1b34da668286de4c1ff5917b7aaa64713c349dc8f855d04ae" + + "de9a3a4d0739dfc36510b1e7bb1695418164285c44631b4b1a7c5798ecb2" + + "d976c1a3679a827bf0e8c662567e402bcc1354222036ad5959a6f0b8508c" + + "6a8c7d4a63e7dde154d778fc80a011592771d55801c7e1297b00b77f80d6" + + "314ebd1f5b3057398d1943599897cfabb65e7568d8fbdfcbecfd4b8a83ca" + + "0a7bed08ab9a656424831e0d7718c15727af7c83b2ef5eb5684aa044eca2" + + "ba896811246766248b20a325094a4b4159f9cde1ee349be6dc3c9a190453" + + "0349212a9537f65ae333c288753cd2bef6c5beb2f4164168d965a2c0fb9c" + + "c8c73d9e776e23d53ddcfb83bb7dfe2a1b8c781280f449d6f310faf8b53e" + + "89e6a611d6d3f42f2aaed5259730d149b3e7dabdc9f865bc1555374738c8" + + "456abe112e9628fb31efc2ecdc972da05987aafce728ccaed246cfcdf518" + + "3fe5dae528bbfb99d33194167e0f84d462d3d0da83e92227cf57922c7956" + + "4fe44648d87c69ad708e797972c44c4a5183fd5d1150a1182e3d39c3cd16" + + "3920f1d7ed83992bc4116d9351ae1c6c4827d1374242e374310409f32d5f" + + "0f38c78b6489c568b791c70394d29ea2516dcb10e51bdad862ce3339d5e6" + + "14fe14f150961809c36e0a2c8eb872e9f7a1c0956fbc9194cb63ff9993e5" + + "d0dcf62c0f49e81dbe99f3656c4dea57b766ae9a11254f9970618f1b33c8" + + "f339f440de240170f7a21f03ff2da42102b323ce2b9b7d0de5aae324d1ba" + + "c87b1e4c5279a566bf659778f8b03882aded57377a0f1b063af2897060e4" + + "23be7cefd4aa9a28479c16773944d254fc21d3e1acdf508b7972372b5991" + + "3b8b088e93471a7d54c6ae4c52ba465ef07f19f269677fc2f64d3fb3d7f1" + + "9069d6c7001d4b002ed6683c59bd5651a450503b68a4a00820b8c17e3263" + + "18f32c21dfbcb2a02a104edaeff67ec09533aaf3d1a7fb41aa5d506ccdbb" + + "e6e35fa0a263c0aad3acc91182addf8c5bdfbd0626702694b8d652a63c65" + + "8d6b2b7c75d015630de508195e1fca9573b61bc549ca017c4bd888194d44" + + "3e031f36170215a301f922736a819f3ffda69117170d1933300366c5f2ae" + + "1052446ef7c3b82c5868be158a881597132f51c91c80c24ebf621393dc45" + + "05fe057364a76ae67494a8a5f67acb551cfe89f447df272ed9c1509fc330" + + "2c3e16541452d4d68438f26858724012ad3b72c094b9f166c6bedb8336a3" + + "41e032988f39cf53535789b320b5424d07b6bf5f8792e3aceb0e868765b8" + + "611d7905089949e0c273e2410c72a146cd63981f420405bd883e5390e985" + + "8214a8db714e8400a21d0636d7e5d9671a3582ab9ff032170b8dd6b9d5a2" + + "144d065228fa54aea9a22654df67f3f62c5fc59d68914d8b219829b536cd" + + "2ae937ecccdb6031d94cb3", + }, + { + key: "84373472e362a356bd5c9b50f55c588d067b939009944f02564f136c62dac36b", + tag: "12dd5297cfcec53deae1dd5f9325d894", + in: "860d9b2954c3daf18fd67eb8bd9e6e3de2e4988ad9b04b1987219204dee2" + + "388db1c59a935de27bce29e7cd3ebdf038785efb35eabd4c3785a62b1d9c" + + "3ffa25e2273cfe5eb10b4ec6152cd8f21dea415421b452efc7cc4ea6bf1a" + + "b85fa6614e7f6d650125424865386ff8ab53247a63ff023b2d0753a9e5bd" + + "458d6ab0156fd3cf2d5002f902f927a847e8c4a8426b0a5191f5e237d590" + + "2659ce9be9024750d1d618a6b8dd57efb6c2bbac2930858f1132639391aa" + + "9e8a620a2a7d64bb7e943c77753401b5b619d95ef857df25a52b4eb97372" + + "a05416706b2644e2687bf1d42c0cf06e5eef8a1fc7e178440bfebb85c44a" + + 
"4837f69e43a1789728a999c5e04291576e757510f22bca11583a4e93688b" + + "442f2b2dab8d5ea9441ff09b8287862ca538ad979297cc75510a3d9ef36a" + + "662b4b7c373f184202befa5bf3f315642e6210763d033b7e2c59731cb356" + + "045e9470bf2f83cd62f11b3e904b0c0b1be99bcb805150ba7ef12b8df3ca" + + "bfc5055640687d710ab88e0fa8034b26112ebfd044a4b290b1c6f6d18c31" + + "ba9880b1cf2d81b5d02f00d6d351da5dbf47b6a5cb7b53eaf6de52c8a68d" + + "053602ccffa37ccb44a7683ab4f8a58c4bbc9e140e4e6f3cc10a5c07ebd6" + + "070818db983f9f415168606011efab6b8d7b4e61e8eadd8bfd8d028b89bf" + + "b0a16996252d7b4ee4f9ab50fc9d6e482ecf99beeabc38d70efbb9a0d4b7" + + "9a1c5d2835adf8e25111352eabd24d562644efc97637f695e4792f2049c6" + + "00f4d889ceb951cfe289adf159865d013046985d7fe2598014bf2dbbc528" + + "b4166fc2180e724ded8e7ea1c8d66338ec50d955d5594a0a7b4655338b70" + + "e8978485a722df814fdc6fd2436dbc060121fcb575672b2a5e454c1209bc" + + "2bb21a99d39dcb3c697306dbc2104d60fd8051c43ea2fce268987d0ec249" + + "a5c02f91d3b0dfee181b3cf8ef1ba9665daf7ea1f1d3b216e378943b78b6" + + "bb41e5dba095748bc776f8df6383033a1f5504955da3f42153b1c7ea83e2" + + "f90b990ea0c5bd3906b5c4060b19f447ec7762916b8766e5a23bc4d39cdf" + + "8e27752df8129b60ccee1731e47383b589d4fcad865eed4041a186df206e" + + "9fb69ab6ea092e36f186a6fea8d77bd7f3ab0fa0e29404d617317c75c832" + + "854427848237cfc18486c95f7213b9d53f324da036e8d298133b5003984a" + + "b9d71836f9f1b059db90005a9067c261bd85aaeed4d623df2220eb52b73d" + + "d683abcdee5cebd411996f853752f638bd28df6d78bec2ed3e00d7beea06" + + "2b81c19682ffb2f6abe3a3623a2e0570650c1384f1818d76fbefe3a7ef3f" + + "46138160ef897f9934e00e066e215230e719c23905dc60d7fa4d666fa52f" + + "e7737db15126d3262c3a4c385cdb23ff3b56c131e43b241f4a6062a1a248" + + "de9f13eb82c11f7b6a22c28904a1eb6513cdb11179067b13c7b5f83a58c1" + + "4f2753f19fdb356f124f52923249d6e4a2c8dadc8bb0fc91e360155a14c5" + + "c194334b9f0a566d51fad98592b59c1cc4b40eeddb34e64f337f83874884" + + "0583f853398c343dabc29b9444be1e316309fb8d81304d654b3d4bc4cff3" + + "55fc31278fe22e649324ef10acd247c0b72397edf96a1c16bbbef0640296" + + "4d219575fd23c36efc1fb8f8a34b510ba9bdfb3b478e236777ef7c6c47f5" + + "5a2bd0383d8eed3759456ffcffb15e61985b08c022658a5ffc875821bdf8" + + "83f69f096dcc72a96888c3af76db57a54be701759670bf05cc9015f5bf1a" + + "745cf755a25b1403a870875701427f820c4b29eccc260f30113629ba03e2" + + "785014bdcbf34d0c67aa6aca20d2dece811788686d5a45820d2980bf7d69" + + "d5c820a09bad7bd95166f63dcfbe8652565c285e60e2704955d69b3037d8" + + "7f5e6567d95b8891276d5cf7c59047d10a02ae4a28794405e2524ec2d595" + + "1b36ad1b9d5265fa098a033b88aa66cd9eaf01eea49c7dc4cc51c486f624" + + "507a2be23f152f43709b2cfecee44945ca506950e90e70164b77e12e1c13" + + "0b4d1021c2afa20038f190096276cd22e89b6e7dd10fd58fa033c9d42536" + + "98de3f4908203be8dbf259112f840c76726d982b4a837cae7139e27182b6" + + "1b4dfbcc50e42d5ab8532edfbd30f668879824e9ebc34b63ff1526cda81a" + + "e38352a774d79f73219500e57f0159a32326195d8895d965071834876a45" + + "c1a3c0bc4b1638535f7d40011cd5b23343fc27fa318c1aa3f9d8c43351c6" + + "6148dc2175e0e620813266da3000954dfa22048f305244629d512e852376" + + "6248a897a3ec3e2983aaa8a0f025f18feea57a5153a59b02604ebfcc7a9f" + + "b03e62443df88ead9dee955e23bcf6528c278a353f254c9484a67a7b263d" + + "a301923a4efb6866aeaaafd428e6da48781365bc49e90cd16b2388220d08" + + "bb9f79d14012b5a8299a651917b6a829488753b6ca449a14e8dd8c5fd5ef" + + "657d627b8e7773475b802655dc033694f24376e3b01e519d1aa8365d0e55" + + "92d0a4adbf555639b6d75d7ee59a7d12c6c11317b7927f11bbe75ed90508" + + "b0698420e231206704d22dd1f1740edbdcaf19a47d66ace4eecbcefb77b0" + + "85cfcfaced4d2d6048ce76434eb79990f0898adb4af2c377b581ebab3f3a" + + 
"150f40dcae002d4caa60050591c0de4ba83bfd59a08670beaa4641aa9829" + + "bdbb720d6eb8b2f3e864a98676a67271a82cffdca2b3590a0b5f97efa5d4" + + "ba062b4798707159782bedc75e5363d5f5d55ec2bef70db22955adf401fa" + + "c3b7af937816eb25d54d9f2a92e5a2a04bd8b8d7568204fd289f5ed2e033" + + "a76209d288e11e8a4dbb06b9029e90cb186446746853f02d738e06bba538" + + "894e03e2658ab3d7f9ac861d2cffdf12396004d1cd15f18812d3803ab9e0" + + "6f41c9b374d6a0678bb82ce06d9e3b9dbc8d2e90b8f64d0d040f3fa8a3fa" + + "8be71d2b3183cceae1bcbfa2353689d842f7d7052e5699dcc70ab2b58761" + + "7041e5aa1e2f41911d525505f061d3ca45152f5a7a1fab50c674e4597a52" + + "b46aafb4ba57413879cad1308321843abb7c39696fc2f2e225878bb1191e" + + "e151cc76f1a1b8d491c1672fecbf710db82dcd32554361967fc839c8e5d4" + + "e488856e1b9382eb3fc3bdc3b6886a3cd79761b02bafa080a745ef6afa26" + + "822f1d10d5e8eefb842837d82c9986e78fc3390caa142b7643de8f613e5a" + + "890a57f5883409549537f8139534f4ca1b60f33e42be25433f1d82add530" + + "6a4cfce258c0d4f1f3c9148ffb5c4b626d51f78ac20bff0393b7fdb4b9cd" + + "70fee7f69892c8a9ee089c6c5c7bee0a1b825e5b9517f2c82d6c149735fe" + + "45a8839812c2deb2a355b6230697053092eca450b7b0d3242b2689efe364" + + "09e820d91fa4932034d96495d9dd3baa4b385da815a7cb69438ff648b326" + + "e7efe8d688e88570ba59df7c439faf72c95317a10c984c5ec0043407e9fc" + + "9b46487810eac19d2bb40e0a654935f76e7d8861480c5f48419eb33084d4" + + "0e1070e5ad542c94f58b49e67dd05b6637a2c67d41451b7e00ba30eff221" + + "755d6d427ec634a2b95980d274a89579feccf1c7df3787a9435e588f2496" + + "06a93b7ac41c8aaa84b91c95cad9463d4881de7353d95b13bbde4c9da90b" + + "f1fe96257309a416407c64368b5564f022c4a493f2a39df1696f45801e42" + + "a5", + }, + { + key: "2d0035a30d19b9cbc7a27561f3ab474c01115c4499b4adec660ea06ebaa1a14c", + tag: "a2c77b55cb0c076d8ea83cfe0e64f293", + in: "4e667580ba4f38f64e5cb5566bffb486dcae10cd17acb3754251e837767f" + + "16429bba2b832f29ba538f97f3556548d163be25e69f88fff0743150623b" + + "e0a1d82af9384ca335927a0e9cacc3dadbdf1e24fa5c81f2602d109e1400" + + "33929e409b9a0fa4f2653944edcb8b3ef963ba7f8806196c73bff0ded670" + + "c6def5d240c5f3daa121f8d5bec9b2a0b0f1d62d54b013dc742d6bd46325" + + "460f692b76d4991f0796820ddebf150c7d33829795784dd2759b334d2706" + + "70a7264941be5d99d460d078a9eedc3660cb3176ad302f9365f0bd698e46" + + "9f3e63511abc81109995dba17be1abe8bcd28407c7fc8d02c14794bb033e" + + "178a94f6dc73719d5bc235f980a16eccb4121ca83b13c4e165931ae4f192" + + "4292f8cfdf1c3ed40feb71e13d919b48fa296dddb4d23114a3d86ec10f16" + + "f314de4cef813ed24b49f4c7bc44cb8424df1f70e8d77366161c7cdd709e" + + "97610aca3a24fb2202ffe15eaaa25d711cb5179212a2c6497a13e5d7c365" + + "7bc502b3d2ebde2e57b714dd9bc21e73795f3d35d620613918c4c9aa0e89" + + "031481c97a5a4c15ec6abe42d40498c33d71c823bf1d5bb5fee457e2fff0" + + "bf777c80c6e3336ab3ce793440e74b336a8f7034f6ea2e4ff5ea4ea7c350" + + "65cf2ccd2da1d6df29bde10f4cc0202b5e4cf7ed097da49b970a6db41e5e" + + "98f3845b42f46663b1d1ff01da71389a8737ba8f51eac1ef357ba5ac9a80" + + "dd2c7f9476111dcd651fc33f4c86dc8658656f3f02a8878bc38ff0d0a1af" + + "2e31fb92eaef08c50195490818661feaf90e8b6f5daa1ebedb2cdbc8d5dc" + + "16db3505f9611ac46bc37931e02c1fd6aad6e4b7e187d5e6f990fddc9563" + + "2b33f55bf68b0db3890b11113ecc839a4fa4de25160e574289aabe4d8fb7" + + "9cecf9d2fa75ac8d0195beefbdfe0815f8d7d9751c1280a29b547149ec7c" + + "2295f5afa53cfb516158086bf203357eec2a5db71143f996c81555a47f92" + + "209719a71570a5553f1ff9b4b41827dd74657b463f36623565f0c9f4d2ee" + + "8735d6af56ceb3b3d0ec516b22f0ddafbc24647481f61ab169e2616c91c0" + + "e1f6a35436598ed801670e1dba76226cbd0544959ebe70f836c8a7df575c" + + "b907d780ed5aa0d6e4e8e0d2f457efe89a777374aa49d4961db96dbb787f" + 
+ "021d99231001360d532a70ee1fb94bd6f26524dd4b7556c6d40e08723d7f" + + "9905aca66c4743f2bf8b34493bdabcfca617809a867bfe0a4f94c756a6a3" + + "dcd04ffc0a3ac671a0afefe0d5d447efcec48c6368998760db6a572676d4" + + "29b6d3d6e0c815650447748c4b27541c5447acfb8f7261b6378f3fc0fdd7" + + "375eb9d458648c7fe9cd96344f11aca912cc5098e9ee39e0b6794cc1dc2d" + + "f1b10f927102705efa20e667b63a91f935c17764650b287f5289d5790766" + + "555f31985c5aad94c652ba41fa9c0195d15405f1fcce9e23054a42c8a252" + + "da83bf6268782ba44edec5d8f94a20b1830cd1c5894cc6b9b52ad0b12a5e" + + "cf3195a32a0b02483ae3b954ac6f3af1e0f334221279d03a72138f3a2cb2" + + "1e706427c4d604674dab88d429f28a67be7a996126e077a1dcf8989d90d0" + + "8b08f4abb9a546b3c64ecaa287bf3468c59add86365b885f52afe13ed8d2" + + "69ea61832a7ecbb96ff3336f58a1eeaa6dde3611f3ff7c2cc8c9b745b0e8" + + "b5919914245a49ac192cd77d10deb9a249623f696065a532c20eef9e9b0f" + + "e706579566a9eeb14d4e8251a7750e29eaa60f034c1a7a1d51aa03a45fff" + + "89acf41080deec5506128b06f003fa46bc4021a82fad6a8052a49744ed69" + + "45bd9331b5ae80d873cd042bff079b2b9d8af8065a22c449c32a56dbbe7a" + + "80d0f3e30b9167532506915883dce0aa9cb749e4368c595c5bd33b57e36d" + + "98cc9bf91cbfa47331d69b5cbe9c92bc66c0fc9ca8717bfc108e1f710333" + + "14dba02a28b9aa05890cb01ae9175806c3c4215bd446f6cc96ec5d08982b" + + "4f83cd1646160e1d306b3cdec02d251f0901b03e8c3c35464eaa5082586b" + + "b55482db97599d513ed8d7a82e32fae302684b7ede058474c1fac7893444" + + "16fec93fb982accd162dd956ba2f31a894e9366eca00e6e997fbbf9a2980" + + "8b83a139f6432147a717381bb8baa2205715f735c1e0db273cdda6897c9f" + + "39bf0d7eb7caf93f657ef4d3fecea28baf69cf36d3cf347081df3114455e" + + "b4fe3e49ad3c3f14435e0b39b6c0d16db0fbcfd7ba8da8760d5952c03667" + + "251e7a4c3008cfb0904225e55c23b884bb09d26631650460c4240bd5a165" + + "b531ee76ba5749b3bc60adad35de519321c1672b47bc35fb59f7792a3495" + + "11b2bb3504ba4a28717823a27a1f99ce6970290b26efcf1e7a0399b10eb1" + + "0c1299c09b80f4520d00e7908d004d5b6a72a411759cfa9523f6b2912234" + + "481b1d8fe4c2365961c0528bd593d42bebb398b5836ae6ca013fe440adbb" + + "0090e8ea274f4d8bcae483e3663051a328f7c12870b40e4973a9797a2336" + + "3d3c53e1b0d1a9159bfb26158f44734b3c34b571be641bba2db937d4ae1e" + + "edc807b95b1c2a7d44804885536316ad38aedf0d83b1519661f2bb5283cb" + + "9c50dd61c3753433e988189f26962d1f4befd444257d0b6d5b819d5fd572" + + "22c9fdff032e07a4d8686d451e71de4748965309c0a2d7c422ab7cf3d96a" + + "8c0a1b0afb229debd1c9421cb828b9f2be96bb9d6b5be7ef8134bd9ccf81" + + "51620937d720d83dbdddbfaba8ecd2eab6f1974090efde0ca963e9fdd691" + + "ed0cc5e074c5780779222552fa46ddcd951763a32aa3a044ff4a73cbab41" + + "dabb3c2c03fcda68303477f0dc26f35bdb5c9bde721fba1a2db732a89629" + + "a8de3cfebc3918df1a9d5053d09da5b7316e3285bf62156ca28cb64d343e" + + "72445fd66757bf4ab374fe7932a65f3d7fb6e42cb12e5b67ddf8530383a4" + + "6c1ee7ec8883e454a467df1aa7e468a6e7035515f473901efca5d46ff358" + + "70e0cc2575bbd7f8866c8e73cb157903a1694ff3051424f28de826984dcd" + + "065dc3658df144ae3a6d37b88c367e3cf7c58169dfdedda4a2821ce22188" + + "40472ff72f0dd1a6b0100555ff188b80f835259a634405e3dad61fc299f9" + + "307e27503b2cb7714bf3b636cc64b61d2e374119c8ef8adb21f1516c7fe2" + + "38c807818065bf312003c12e02525d69d9629a99e4ac66ad2e792f302cd2" + + "a6f5f702dd28040738a084a7052f2c3ed0924c33b7a5d357b7c9a29cebd8" + + "621a4bfb7bb34676ff210d59f7f9d4eafb7c5c490c9ea48402af5bb072c4" + + "731bdebcbed4e8e08a67931b6d7342d4ef7bc4a75ca1dfbd32ed6027d8fc" + + "b71e3f55565c02e06daa8c579b69774889181291c470576a99e11f2c5acf" + + "77e091ef65ed243d4287176f7f6ac7aba6908c9ff1fa43b894a499b642ad" + + "c01b2fa1c4b58801411941bb448f1f7a04794d2cfe5db1be61f7b86d6eca" + + 
"c547ee51d4c9050f9e9f318dae958c150acc21c878f0c7df6065294eb1d9" + + "a278c920838a0db752b080a32e67ac312fa76b589a385f31847196076ed8" + + "1021fcc375bfcc8e1361878e2693860eb21ff0595e4eaaf7897f2b79367f" + + "7c4f711279bf0c93a97dcb1cd8d87e444ad5f4cb5c1de44e37868c6743f1" + + "cd72cec376726f26c8bd4836f9a9f9c68042f95ca6f9d7cde493e531c553" + + "8bf7ace6dd768db69ac7b41ce93e8ca27ff20a83ff2148ec5b89e05d8b8f" + + "5d78d0fe16b96f6eb8d3b20126a186085c6825df81aa16b3dbf57eabc360" + + "71299ccdda60e250c652408d9cd1da94d73c728440ae08fddb901aec0fac" + + "1050a778b10f94f84883bee158bc53b1c001807c43a3151fbf581b18dda2" + + "527430872834e5c380575c54b7aa50f817cf3249fb943d46933cad32092e" + + "bfc575bd31cc744b7405580a5f2eabe27a02eec31e0d7306750adbbb9f08" + + "c78cb2d4c738b2274c7310cbf8dd0e59138b6a91b8253ae9512fe3d7367e" + + "a965ac44d54a7ed664e5e5c3c6c2d942eac388cd32beffb38f", + }, + { + key: "2f29d71d73f7af98f96b34e939e1a21e2789ec6271b878bbebd14d7942d30080", + tag: "ec02f4953a9a63ab6f2bfc3501e4fab8", + in: "0e0950987f3508239063e26a13727fefcdfd2cea6a903615c64bf12d9ed3" + + "887f9b2cf7ccaa196ccc7756b09471475b9daefd4261e69abd23b9faf9c5" + + "1fd5d5788bb39d3c068fa6807d30f6201d3f6dfd31715d08b1733440cde1" + + "049608d23c4e45c5ed61f863350232f85827e7c292dc5f1eced1cbc912e3" + + "f5c420bd945911d3881ede5153d3b2cc85371fff98d2caf97cad6ef59001" + + "4017f9690cab08989851c2647e77e81401714a93ed9f938b79f8f54e3133" + + "fc2cdef259df2ba0d48f37bf9e43792e3a777214cf4aab6dde6deeb543a8" + + "813b71b5974136c1220d6218a252881f0f5677ff5b6aba127f19a5f3c5aa" + + "c988543d7839a90a3f947c4e4d5c6ae1ab48dbd40456d1aa65339a4c15eb" + + "520e8ff9f965ac4c37735937cf09942e7958f8a6cddee41707423f715903" + + "ffe0d15af8c3140d3a736d23be7485fceb9f07c6509f2c506eda4ec9d30c" + + "cc133708f48d8828e332808c84a745d337296d871b9794de1c5d06534aaf" + + "65587526a84e2521f8b332645e0e72564bb308ecf99b7bc69608474389d1" + + "686ffab8c49b7f04dadc28d2ecdd0f508dad2135843304e378b3bc7a4f25" + + "7fa4316be956e0a021edb8045f39fa9f002087f067199bd6001acaadd261" + + "4bf6aefd3f098f92a959685f24bb2206c347359d9c6adc6847117bb434ac" + + "6c40ec618f6ae8b75a5e2e4d44c332b7b06c8b4d521493b9b0bde8894209" + + "717a24b320214297b62dec741cea018ea681c9b56702068528b3726953e8" + + "c5e4ccd5029e4183e772d9834a56a88d45bf87603dfda40e03f7e894766a" + + "7623ab4dcc0dfc3086d17566945069173935916f772e2a5f8e1547348f28" + + "782400fc069ac0e2b94242e9e0f1ba2d0e76898f9b986540e61ea64d7f69" + + "1006b86ce61565da75eb16a8b4c5865ca4eebdde2190e354734bda94fe7e" + + "12ff47dcb5d5e6ad93cfadcc491cb350b09ffe391a157e14b65e3a211b5d" + + "4e447c3ff95571dbab33a83126d68dfddf9383b4359d4103ca64af1e6963" + + "d09e17eb944aa71e76711dca33168586bfc44ebe9fdc55497d83f238c66d" + + "bcb16063bc85635f0f1a6280563bca49ef971db96a41b6ac5e0642643262" + + "61eb4662f3d6ad4cac826db895de22c9b8aa35e6464a7f44e1ae7238e355" + + "068d68754ffcca76c50b7ce7ef9bfebac9eeab32c87d059cc7ef2adb5d57" + + "c7419adb394eef48441952253e8391e555730e29789d6293c3696f441449" + + "0aebe2bbe541e191a6652ffbec1192f0f9395b7ea370aefc1f1cc8438035" + + "d7681f12f1e11d6e334da188b10c302fc0f4bcf1de448090510a8f1d5683" + + "0c943a3c388b33a038c26741a4cf3487313f755fe7a28e25e44b5383c5f4" + + "cd6ef34d7dd73462226281899dc3f2e69809a0150f694673f31addc89888" + + "072a7d4ecd63d6b90540f9522ec05829a7f17d48728345ad808fb0203883" + + "3cbd018d612992a88df944b8e34a70920b3f26cda2e8bb16c3aa38b12b33" + + "b395c9ba5e809f60ff05f087112151af1b5987403cff8bb2dce79093f431" + + "2c744f911a6f3091e4f9ef9375c4dce4c241d2f6024a1797321851ca316c" + + "4e460fc060e7839deaff8ab5e8bf682c0f21ab6952eb793cffe690db911f" + + 
"50b11f56ea352942c43bfff51d4360882754faeb7cf28b6b32bf7fc9ca71" + + "fbfe1d72be05b8bac9ba513d731e2c9d13d6f2f10eb926edaaf0e3996656" + + "da8718a8e103c59326529e91ebac6ed52657c9690ccbf81028cd9fb189ec" + + "4de94fc0771e53302c8d9082835a68780cccd772660a110a1b40c57bef3a" + + "c1d69428aea549ed17663a96895a66a3bb5ff6ff61dc64908df49b760caf" + + "a5aff05e2766a418dbaa1e7d189a9edd55a04fee8c9d6e506d299abc36a9" + + "d67be035fea5d220f41d081af67615fe627c4dd04bd8659c7fa4f57f35d0" + + "db40d9684aa178d7483ed5d86f04eaea412e0ea05a4698377dbff4fc3a39" + + "1f6ce0cb833d3118d6c69319b511cce65fdc74928e270da0c537f8201eff" + + "77416155d4a39c7ad38c22cdbf7d2b7ff7d85383c178a835ec604c3f9ee3" + + "7399f7dd826e34f1a35ab75da44ba56f86097ddc0f3658ef5bd65a24f4de" + + "4255d0b03411a9d7f0ddc29e33cb865da23393471aa94e6c9e72e789206d" + + "3ba118aecd39727068f528f01b25fae2280d70033e4ee46b41b864bb922e" + + "001d8bf46d6fbaa5a594e926f45eb3a4d2f074506d7834b606f43c89699a" + + "6db00b374658d9333700894d440a712a1f25f5538f9e7c8ee57ae7e612df" + + "13292c8ba9dbede4fb77cc6c8944aaef59ea6ad3b36db398f4bb0f82d40b" + + "44879835f224d6e05992b1b8a68dd58c3dbda2fd73786492ee48c7a25f87" + + "264b766930fe9427487504fad17f8d230934f044e49ba219f26ead728856" + + "cb30eecc33a3946d3b1b781061f2458c7c46f6d96f3e06f369f97be91835" + + "f23b38347d1e381ad5be4419275772c2abd549522a0203c1ee9c96faefe1" + + "df413c4b7b2624417890e0716854b7092b3b3b368cb674035d3e6bab2357" + + "e7c262b606f7141b6dad2f6145ebc1deb7597814719784f3c17848a90ffb" + + "cb0289e2f3cc7da12442b837c4e47f468bca3eb4e944a31c48562c2f144e" + + "9e920ab5e4cf90a14ccadbae29af13db38cda911e3c8f6f525e6722809b5" + + "31a4de1926ab12f643d25af87eb8610df59eded6ec278242247dc69a4213" + + "13f7c2b26ae7a917c1bdaf66c56876e9104d40b59e6ca1431ddb77fc89f3" + + "14b46a154cf127688564a4f9e120d7b5816cd24a6e095dc8ab8b43bc3639" + + "329719f0e0f723e2f5136d82638e2249e648ebca67cf0306741e9e8d45cb" + + "903bca85485c4007397c88a1ce07266f4f611b96b7e0ace3074247a7dfb1" + + "cdbbdd66e25e172fd2bda74abde7f3b4cb5cc7ee7859f053b2f04f9de03b" + + "a8e96264117f502087c3ddbee8d850bf3618b4de90f7b3e562dfa57e4426" + + "5357236e35e71d1669226d63bca50b1b944ac07a1f794e73e80985689b25" + + "f18fc709367d63b8639d71865cee667536040be827145c08cf3e57a66678" + + "4c81115706a146eccadc7aa1a9f074b47e95bcba7db8108a13279077bef2" + + "64699fb87e5abf5b05ff3879d7c7c5169c7cae817c13f0859d4e9c05db0f" + + "74c045ecc30a51e515feea627da387ff780719395b5b9ad93179b16fad10" + + "5856049169dcebd43a7f39c549762405f807378e854b1654a1179d895ef0" + + "85aafc72c7fe1e0e1cd3abf8e20935e331145bbcece4f17ad24ebb6c64ea" + + "73bd98a7494c134859206c9422f7c4a057db0ae0770c4bcb08c1a6b9ca4b" + + "7dd8c1cdb3e4977c7ce6c1e79b9d6ad98e27d2759b53cee73ec037a8b686" + + "f1ff78eb8421f41c74ce9c62a90d38b75159ec925f232e0db71362f31e29" + + "4336f5580a34b26c5a01ee3454cba227c7f400f6889a319d7121dcea27b9" + + "584f33ac796d48a9a24cc5b6799ee12f10725fbc10d7cf83e4b87d9c444b" + + "f43e2f5ee49d8f3b531ebb58fed4234cb8bcab1b8b18bf50956506baae8b" + + "c1b7492250f3adf64294310387f1d4bcac12652895d4f2dce26f380733ce" + + "0b5820e9fcd8512a1585a49940a32fc8875ac3c9542a4270602e5e97e720" + + "90ed71b51badb775340429fdbe45b887fb9ee61cf9e091c06092cf0a2129" + + "b26572574c46910cb458bca7c63eddd29d89753d57e568323e380065794d" + + "3fa1ffb874543f5b0ddc702b087e91e22604d9600d37fa0dd90d7acb2458" + + "4cd408a4e66bb781dde5f39efda6a8fc26be0d08ffdf851e422ab1500c28" + + "bf6b4c85bdfa94e8aef5cda22870c39ad49c3c6acdbb3b0d58cd05424c65" + + "20740b5c2bce4336545eda12716317df58e6fb764fcb3004f5248c5ccd84" + + "f63abdc0dd2a64e447c0de4da4a1082a729d8ebe14810d396933085cde18" + + 
"318278481fdb9a748b637cacb491f5234bfe16b53a35da6677336baeedb7" + + "4a28c19a412e7812dace251446d40ec07afd63854c3dffbd5c0f6a9a3cac" + + "ee3bab07fba94800fd1fa0fe44f5f2ecb2b4a188cd02b8a2df0728347c50" + + "7d0cc58fcd5d54dffdbda11dd1bcc59758396ed8db77498fbe13238d3d8a" + + "0040194dfe66811542ddaa658094a9580d4e4b4e29", + }, + { + key: "1285f117bd90b70ef078ae62f37d2218419e894b7d334759ddb2d88833b287b5", + tag: "429b2b39195a10357043c9601590a277", + in: "00ef065a1adb4ce7108b497813ccc748933fa8442689a7cb8dc7c1ffdbf6" + + "c09adfe05ca2cc5ec3acb7493f3497ee8f9cd9bb8a4b332c18e33f78114a" + + "c8f9a72ddb9f13494e934ad711818909831013ba195b53f5e9e5b4689399" + + "6d0b669f3860958a32b85a21009d47fddbc8697b7c9b92dc75d5060eb4fb" + + "40aed7a1dbe69dbbeb6296f5467ea2426cd17d323671fa408855bc53e5c2" + + "d111203ae38cecac7719c0bd7f21f6bd6a1588187b3b513983627b80ac0b" + + "300b7fa038af1cc8512403ac2cea6e406595202ec3e74014d94cf8780ed0" + + "33c570e887ca7fb35ee4768202aa52427d02c24e63f7f2cede95ca9909e9" + + "dfa86246a27db757750667c198c9aff4ce348f7ac51864b36ef5695df713" + + "d17b8f561a972d0136bd9ee9aa16079c2ab5d29ac9ab472255ade05dc49c" + + "b966e0c1c04258ef9ec59ded01f402d9fdcd9a2020a2038a8c78892ca218" + + "30136069485527069132959dab2b81c73ca590fde2a7ecff761d95a54d63" + + "a2664aa5a6deec163e46b5225bc98976a4f363063b0f42e29f792d138af8" + + "eae68d3854b5c1985d5cd1c9f49f529b0b4d2c936887b5b92cdebacef992" + + "c35e0b7bbd52114aff8c6b261852e28e451b02099814f809b0289cba0586" + + "04a363e3f969aad3d982f645ec4c549f943fb360fb8fa0d5a597bf89842f" + + "8ced6014a5b2590ef71524a7ad50fe0ef0e2f81b6e26b99f9ebbc8036549" + + "f7eacbf6ab884710c6406ff59788e03ede35c30d4781ad5af171e0623e8f" + + "cf5344d71165f0475e256e9159040f702b359a2963116ed135dd6c1d111d" + + "2a1e33e15c178ca4f02c5fb15593c50cf9a8a492f01e04778dbb81d26c99" + + "0c58cf50a9bcf4fe38fbfc0fc0685d8bd422a773c7bce649f7a86c59118e" + + "f5f857b2c72508cd1ef05e1a0c0b7ab4687fdd57437092eb49bf41a9ae8b" + + "bd98272ea2f8ee2515ff267fa6ae892c266a7effe61ed54984924aefc461" + + "6cf483dec024ad666bc797beaa429a742d1b8806f67d451b6d3a85b4d474" + + "003cfe9e9dd906df47da5559c41f15afabecc3e6af279cca0f2a200eb2e8" + + "31437e034d457fc880f60f5ae635690bce82bf6d1ad6b4f5344ec042bf25" + + "7d010273c861e3ac516e9ee2bab3a255f570baa32298467bf704bf6d9076" + + "a4c0b08a528a05cd1fcbdf51f3885fbaba7891a144fc058919903b269b4a" + + "29f43926eda32c38853b814a7d528156c223748d674d8f7f5448350f011b" + + "bfab1511001b8014e20fee37ccd4a0456f638c197c86dc116b34f955c0b7" + + "dee10bac5ea0c2fec8a780ac05098b51b902ca6afff4db3c6fb4f761df79" + + "b2039dc5f16d9402442a6fcf6c4297769e6c36824d908beba8e584ea0b3a" + + "91b9017baeefac651d0307bd89f517789236c0693c65a5a20f244d39684c" + + "eb810cd2ffd3c78fe9285d2eb9f55d133b86113efb8dffcbc6d258e84c38" + + "2dd8f4d7d63b65672516d9bfcc3310a79ce244b60d380128d529487f99b7" + + "d532d5f5c28fad8b9a071fd2fab8fd98f6d7ed9dadbd2fc4396476eba6e2" + + "1a1b1cc594a31fbd3418d98e4aa736cab285a2786fbbd4650e49f9b080ed" + + "3fda34941c28d25545395e1408fc3e60730d0696061f821a4d24123cadf2" + + "3af3d37ba7ce1ba3cde1368d468f136df82c02f9be9210022192aa02117a" + + "ef5ff70bcfeffd47bc37b920826a4d3db001f956939abc0df520f3ec1613" + + "ba1c4b3385cad97e42bfd15a3150711fe86ba4562f17780cee1cdf198615" + + "ca06270db84986f33e1d53d552b0da82397c496a23c7a78ca7641a908e71" + + "89249cc657c0431f1e09ae0213f28a27e6267e9d17b5bba0ea4f3c21f266" + + "fe538e215ec62f85517ae6bd87799ac5ce68453f09cbbc50d6e2a168f0cf" + + "7166ad50cb65b6c76406c326573c00e04a3186251c6181933828c58f4198" + + "f8208c4484805639b0d428fd05b57e4356239638f458a84000c7a7a8de62" + + 
"ec25b54d1e39d2579ec9c512fec475f243576f35efc02a1cd6b0478e2dc8" + + "be5f17aa4e3849cd42e76fbffe6e7d6f912d6edf80f718f94a7e48e1fc10" + + "6cac29627d9d4b82f05a30cd7c739f7f3ef7ea368d22612f189da450e274" + + "de7b61c6361521e684d639be5af4cb11fefa5fce6f8a5065c90873e504c1" + + "2c940571ea7bd7e9221129b83039d2edb069e8b5bb68567d8fcae34c6ee0" + + "cb94474d8b056cc3c7403873f2fe6db3b567a44e702e4f4813b2a264231b" + + "0a998207b41916715ef94e5eec281589d0a711f8e74be32bc60f43d693de" + + "77f21d5f7eef892abe87725f3d2b01d9ddb6dee15f40735a8fb67766dbcd" + + "020a93b8eef4361dc3a891d521551f65dbe6e3f68c60819b0a540b0991c6" + + "4449d207cf5b1c198c17ad6caf3adc628d09fa0baae7a696d84e1879577c" + + "ffe9b3f62669d4ea5ebab6364f08c66d170ee4a94d61fb77d60b33dd6b60" + + "650f034c5c9879243d5c16f853dd7a89885a9047a341b076912d47872b3b" + + "3de49edf7451b435698ac4e182d16c339be83e18531a34aebad36c5c7c93" + + "aaf121cf99ff92d3844d40740fe001eeca9ee71300d826bc3cfc87a29d39" + + "ea108a3cf259657ec4b967fbb534e7513ef3a96bffb35abc5ce0e890696e" + + "54fab515af3d2c0be6e003747504e486c0ec6e30fa4ca79d6596ae0425f3" + + "396e40fd37432e52c74f812250dad603b3502f97ada48a26e39fd4d44584" + + "6591bfa5ffb3770d95d3dbd49e9c3a38c6305796b8f7d79bd0845170925d" + + "575774445299bdf9d3f8ad3dc2dc5cfd3ef0293b84d6e11370851af05ebf" + + "b3510a22edd930797dcb76b759a9b5a77ed8dd5130e79ff5ac44b01901bb" + + "79603cecf674202bc5d84076ff41b3c806454ce80cb9e5fa9db77294d20e" + + "6d3008ae3017aba712862ecd4b32daafef1b8cc8b19ee8f8bc3835e2372b" + + "5cec66222ad5ea9df753c033508ec43c8b5995e88c36c13ea3465c8bc462" + + "ae0a659d9767db34499e9d01fb1588410257d6f588b3fdb766a66bce28b5" + + "e0880f8cf988a2e5eb5bf80cd7d83192b7392fbb2e3a07d51aea2b6bfac0" + + "d74d304f56d5af3598a0712cb09c04c5dc14194eca8e1b9b29f88344c0ea" + + "55638c0f8ebb70b6242b797fe2525fa1bde76293dbc0a66ab4715e6f9b11" + + "f7ecd8f35a20ee4ff3552caf01bb307e257ec0576023d624d6094d43d25a" + + "aadfce939a6808f8baacb2109c3de50a1cfada9e384cdba3e97d2c9025a3" + + "2377bb195fce68c5569d2d1267e1bc68fcd925ddb4acf567fb29ea80517a" + + "7e4056fb014cdee597333ac2408157ff60cfa1afdc363a11fd4883308cab" + + "d9a8fe56c2b41c95eaef854f20bf5941ed23156d86de3bd413465a3bc74d" + + "5acffcd15722879849c261c1bbe987f89a1f00b3069453841b7da667d566" + + "e41fd894d94de44c23fed08d9bdffb723aa8449bf236261240d865efd7b1" + + "74a4460e5004ff77f4196d1d421227dff7c78f1726df7b5eebddb4bb5f57" + + "5ade25296dda2e71ab87ea2b44ef2ce8742a7ad5c1e7a40e097eb336561e" + + "865515f7ee0efbe01d5a928f208f7c9f2f58974d1c11af0e737c673dc446" + + "1795da9757010cefc6e7f2784658717938735ed8cbcbd7981a1bb8f31cab" + + "b901c87a3218dd1195c59f64d0bc3ce8b72580fe38e6dbf1181e0090e5c6" + + "d162df9f31cc52fa6a8ac61897e9b4b3cb0ca2bfb38a38d9b78e46d775d5" + + "7645d2d6da16bda8edd8675e2ba121f7f85400cf7cacb9ffcdfae583fb93" + + "753d07985a00afc3a4e26c9939a5116d9b61196502f5d774ab4c7fb6cfa6" + + "01bcfddcfabfcd28055e858d7d3c19feb6bd7c02565add3a3af61bfba8b6" + + "f4b52c072a8613e878368318383143059a98a85ba521f781a8983c2486ba" + + "b83f5b91fce02acee0be8d0dda7489975f0506c8f363b5adc48ba971adeb" + + "4e1c830b5f264ed42da36d2b5ce2fdab1e63333b1061ec5a44ec1b6e99da" + + "0f25e7f7250e788fe3f1b8e64467d3d709aeb7360720f854afe38e190cc0" + + "925c6cbd77fbfccc07d8beeb0ce68e47442fadaf13b53c30a03ce317cf79" + + "dc9155ddf96814583695f15c970fd0b6cea0b04b1825eb26e65ea9351bf2" + + "f7a841ddaa8c9f8e885b7c30b9985bac23d3ce777b", + }, + { + key: "491ebd0dddefc9f0117176772f9bab61b92a1f1de13796176091c56d1e53dfbe", + tag: "fbd3f884a3dc2a8be06ce03883282e1e", + in: "953b9a40789b206fb507ec2c5e9c88ca1baf25ad24c11a62f664db1da8bf" + + 
"dbe9b54f8e93b0bfb4adb12f8873096b8960fd91eb92a8ddb53232ac9141" + + "57caced33424cff943a8db129049af7e7b733afbec6637d8ee4f39d063e2" + + "be241cca6a339e48d72372efabceac57220692c40856532d95529adfae87" + + "a71c72f30244126d01a875375ad8836ef8db929bc81027935042a05c346f" + + "bc94dcc057db015e55c56064d2b11154596b813ee64b73bcac05d2688bf6" + + "f1fbb0cf3f8307b3df44c3e2dd1d226a4d0e9dc5f7482bada9611970f887" + + "f656dcb19ce1f8c5c86f4cbd1e4f49b18f170ecfd184028e769e79d7424f" + + "d01cb315897c21111f53f4d41c3b71402eea695272cb5b4e5f33abb9df50" + + "cbdaa55ed629d3ed7d93b43e550295502db1f2ed884afc320518e88be4c6" + + "b62a13f8d3636ba091d07dbc6c20c7e7fda016c05b2fadcfc9ea32f4ee2c" + + "4893de78ad8a1771aacf6efdbd8fb1f6ee9b0572ced3edc6313185b5d398" + + "88ce77950aa4c5201a256e3ae3e74f05b70faada14124b35b105a70e7769" + + "7184576b69708eaabd36e0ba885fc6bafd5738a67307a1181792333cddfd" + + "a4ef19c88497c82fccff05a8f9f732fc7505f0467a14e135288ee018aef3" + + "d0412f6b0760573d8ee4ab455d2789b4d22a42eebdf60616fe403627cfca" + + "fea672bd0a49e8e7b80e7b7b8feebce3381f2fc16819a8996a99ea230c3a" + + "84b510cf2e0d914610d646a2f45a14268ec1d6fca03d0aea5c9ae1c8d519" + + "b0e8b0f6fb8ad176b5d6aa620b253cc492b5e5645353fbd9b6c02bea48f0" + + "286e2c669782b5ffefa4d8f3f1037151026d9cca78e7808dfbe61df29e82" + + "951d7154f3c97606cd1e99300012578ea6a776dcef0811338b56606b51a6" + + "9893fe68f762af6c9c26066b1d503e64877d8cd988b443af66a36af8bdfa" + + "41b4dfb3721d1d81895884755b9c52527030afdfaecd66d4638fab1d1786" + + "3d5517ef7ee7d081b5555d24991810f1edde30930fd392f817cfe632b4ca" + + "6fb0460c36bde4a5620b9c369bf51c7d870c43998b8171a553d2f643fe8a" + + "58aabfce8cf7363ea978ff4d53f58284db822ca95b80306ec02a64d26a29" + + "c98520f1924c70d161682c54d08a2c48f54bb72980a8cf5babd0aaf0fd72" + + "7d5b1b9d9b731dc49bad228fe83f7347750e277a4fbd526983c206e075d6" + + "a03d68957b3e925a71bc1ea7304c77660d112a5d19fd21a785d4a8d7f2eb" + + "dc4183376d8125341eb28b2df5be0b4e04bbf95c47d2fe2aed939619cb97" + + "79548b752f57b723cf8295dfce69c9b7486b75a4e900f91926636f3fc78f" + + "7b7720a5151abdf5868fecf1e1a1d830cd6a4c5e3cd739da4432cf1fe2af" + + "a1090d6a1eeb32e7236ecfddb9d07b97220ab8e23edcc93d91abc11b0c30" + + "460d2027869d1c2487070cf60b85ad0b8bc5df566f6fdb0e58fd044da530" + + "6d277e564ca6cbfa820ca73fb6201b240a5a94c4ecd11d466cdc44046a66" + + "32478221bfa69b3a2cebd16baa302a573c90895d7f4cab453b11e3a4d8bb" + + "b5a9bf264781ce5b9796e3c47d0fa57f46b923889af4d073270a360dae8d" + + "51d85ea916f14787c6500d2d906ccaaa92d20d93edd09139f79bfeb5fcd9" + + "8c1cdbcbe9f2587e9c9094e3c4a32ab9ba56f400b929e80c0551f953896b" + + "e8eda6ecf22e6d4a541957dec21d6a9cf388ff0ba58169ab934902892a58" + + "86e1126b16118e965a271495ffa339c49466209ed3875b568a4290b7b949" + + "69d0465744a3c2a75c599c3a04ab1a3fd09125fe8f45724b2f48c7822b9f" + + "ef95af4b758ae66a8b6646df7a0a1aabe2a24c052fd6d30561cae0389263" + + "e3388c4c1effe431a04356c334aac64f36593544885c4b7295b57dc39638" + + "b665b22dcbf7dd6da867615de38c6a575cc66391135d47f8e1f0c73c6129" + + "17ada4099723933a758d83311b384364263cad5fe14bdd7c825d9601c400" + + "3537a5aca7f9da4710c132ce8b0f1464cee625633ef57f507739a0ab1cd2" + + "21ae634d4d0b3ff07e9ecb1baaef0a82a97279d46543a0464855cd62c07d" + + "5e890265612906a9eac88bec07b1dea5f67054c31ae40f8c673296cc5df7" + + "f0dd8cc9e643b44fd90dc2d1e870ad8acdbe165237642fd04c00965837cf" + + "bd2344ae830887a5719a3c16dc8ec08bd9131d055bfb959b64ff4cb638a1" + + "002a4fe02e369871cc4e3ffda17dd85343e679fab43e11970e60198b424b" + + "676ab17fb0dee10cc9c2e92b32b68d5b05b7a559176f822850c0557ed98b" + + "7454916e32af549a0027db95f02b88cfc5e7e05f28f53757dd97cc0f0594" + + 
"212f8801e58043cb17b040413c226dfce2104a172d218caa4353890de17d" + + "be1f53af6ceda24b8781801516cc51de9ca459e469b3c322be13d8c9541f" + + "755c518ca41a0ed42e44b9f87faa2a968b0292216e9f3d3e8987282103e5" + + "016fe9f7681496e1e8d663eb2d8bc30b41d735465527f19e336a98d2dc54" + + "d7c020bfab30fe6c62cbae7d09f84af69bc2c51a1839ffba15015d381ba0" + + "a44a3758771c4f18d13827f518f30bb74f4bff29a87d4b9e949f1063f63f" + + "662721cfd64ffe1dab3761852387f78fa83fb48ae2c75fc567475b673da6" + + "fa8f53770b6e5a3c9fad951ec099c6bc1e72d1c489e1ae620e7f12ddc29f" + + "ed65f29c65cef75014b999d739e2e6e015f928a30f2fee3f2e59bf65b54d" + + "89948bf2bfde98b076e5460643952befd02fc1b0f472a8b75195c53ea296" + + "6403b9028db529cd04b97231bac3068855fa211f4d976a88bc27a0088f04" + + "576e2487ac0467992066ef7667ca8429faee92db38003728e5c219c751f6" + + "6f011b5d679fdd957f4575a0cfb6b54693a9624f2c7e66c578f5f0367005" + + "c66addd1e3ab7ea1ac404e357cbdab9438b9b4f80b3a6761b864b006f1df" + + "689ae4c0434b06b686d5353d3e421b57381ea24fdcf6199195ccdb3d5cf4" + + "623a6bb1f9eba9b22fa15395f65f8093b5f90455061c1cbf8128b44a31e3" + + "910862a59e187aa7f4d22e0317ae6c177cef24eebc44171f70c25efac73b" + + "38ada0cba0b74f72d1c171277a734819c1111ebe46d5db20a6ff20e2c1a9" + + "a57edae95a3c1f80ddf2b12c86d3df0078a7bf68695b16ccf92053c727a4" + + "80586b8d87d0d1772e456fde0c20a7927f351a641bff5f22f9ee2217b6a2" + + "d0983c8102d7d5356dea60a19e105ce366b9d000987c8c33396569f97c56" + + "2d0fc0bc5859779aa10efd1f8df0909c307a9110083cc6d9748456c9bddf" + + "16dccee52b7974867cec718bb0b76b3353379a621257094277a30148ac38" + + "e5cf67ed7cc9c1bae12dbdeb99d7d880ce98e17f0dc93c5330d1824a3c9e" + + "ffd86f89e15b59a4bee5a48d4f674766896e187abaa39917b83f8d2f3265" + + "bbe7aac44c9f8d92f775fe6493e85ab44e6e28f79f28eff156c21e1abdae" + + "d10a291b88c4020b1ae8be001080870847a852d073e82bfc751028ac62d5" + + "6aeac1b18f2cff1c0c7d336bf08f8cd5099d9d3b28f9e16077e9caabab49" + + "f2d234616a7522a6bde1a3b3c608df4cc74a6c633d4c8068138abda8d26b" + + "4ca70f95d152888fb32bdee5dfad8ff4a5b002a0a327c873656db8d6fdd8" + + "ed882e47ce8e47c729e1292db9122ce2e9fa275f9bb986eb7e0a1dccb7cf" + + "abd0449c92fd35e2aedc4aa89caf53bcd28170cae85e93f93988e723a896" + + "10cefb4edb6fa545835fba3107e21dceb272c5a32da26fa77df070f41d7c" + + "ad1d68b836199ff0f1221e36b9b976b5e69bed54b5bfec67fe9cbb383484" + + "696265204797634594bc335150daea92dbc1004f613b4c27bf5c699debf9" + + "4365041b5a894701da68a93bcb61f4e546c553fe61f14ab0322b45915da6" + + "ecacaa093b0071f2516ca8c3fef2f1e3c403993d734403c47bfe5f4379e9" + + "cb5b613fde3c0d880cecef4101aad8b8b1c60a92ac5185f6c243fdf1711b" + + "0b56f0fd8e5ed6cc0f99da888e4f156455a0f0eb365b8964347eedd15d80" + + "2f297977af667ed1376dfcc610f5152421b97afaaf16f9db57a435328595" + + "b9aa00b5ed9ff106c66970fafef379f4d2f98f2c5984ea05aad64651fbf7" + + "7968c8cbc4e959859b85302a88a3c2faed37765f3f6ced59d8feb6c72e71" + + "f9d4497d98bccf95fcb650f29131e1df1bf06a5443f8af844aa1a7b5a68e" + + "bb250c7de3a65ae9b1086cf83f832050e55030d0f67c6a54ea2a1dbe18e2" + + "8a96c9e0dea2966997bfc5c5afd4244e3c8477c4f5e8bee8fc8ca9a5cde4" + + "d9c5a2c7f3d2e811b1de7ce4279229319e432674c609b4c8b70dc6172e9e" + + "653fe1969bbc2cb3685e64fd81d96d33", + }, + { + key: "b41db44465a0f0d70093f0303bbd7776017bca8461c92116595ae89f1da1e95f", + tag: "d8a111a09db22b841fa28367ce35438b", + in: "b074b0984fb83749586881e8ec2c5ce9e086cfb2aad17b42b2429d4cf43a" + + "0400fd15352d182e6c51e9338da892f886f460d40bd178d81c52e9ab9c1c" + + "bdd812594e6fe7a9bb7fb729c11328d3288604097600a0c151fa3d9e4268" + + "de75866558e9f47d8dd331994bf69f826fd4a6cb475ae5e18365f59a477a" + + 
"dde7fbcf7e40b4e3dee020a115830b86f0faae561751e9b596c07491c42d" + + "e02fc979e69071113953729d7b99f1867116d058a90f1b8c0f9ba12c6322" + + "4ebd1b563a87734f5d6e2d4e6715d5f0213e33316500cc4b23784f78a9bf" + + "13fdf99bfe149cf47aeaaeb9df1cee140c3c1264fe89bcde8acda6bde16c" + + "e3d770ba51950b67ad2c5232ae0cff048ddfda8540cf18e673582dc96987" + + "4b127f655e7d4e08859f2c6b95403cd5b4e2c21f72bb872e49e592306286" + + "48ba1b16fc9637709636b198f9a297aec364d4c3bc869dcad32b1830e434" + + "b556b429136f0012a0a0b6fb3797bc8668014b010ea51674ef8865348dcc" + + "197672047fcf72e6b6910a0e32a4f110d85e28db0e338d9cfdec715a8800" + + "b4f007a7951d09e41620815848c89f8768344c50bd522c46f64ac6c98e53" + + "92176651961c7a70b62f3d1819bfda674e2ecd3167415edc4b97419e8ae4" + + "9974b56cd8d52e1d05b82610b59606a750b34844ca33bfc9b21fb970738d" + + "b66f48928df79cf67730a30b0b612f8c15c22892120548ab460a6b9bb3ac" + + "e30554c86c9681c797821a1b1ce91d0e87fe90ad4097c974cfbdfd5c4c24" + + "a5f808f388e1b1473e858f48a387614501c8c39d6973ded69b1764663cd5" + + "166be02b596a49e392d637e3d8afc91323f7450318b79d5488c040e346cf" + + "0cee512044514b570aa66bb98d639a9ee23a7cebe28474592623d082873b" + + "73efb3eaa4721fc4761e15a390497cb13cce181107e8b1a0186b9e47a5a4" + + "b67a5be3cd88a43d341ef63f10af6970aaf56035db938655020809033a92" + + "8d4fe6d2f5424fbde2fe82adfd991d388edf293cb4e3eb68d876f225a5f1" + + "58208bcb1aaefcbc28d6763d267406aa8d6ecb413d18cff7a318ba031ba6" + + "0ac4560748c248de64eec56dd4540124b38581604f502d94a2004f9eb1d6" + + "edb009e16af6c6d3ccbea79b10743da98aee7ace407a90c6cfdde694f36b" + + "e0271e722618a457be68619b980754795f4ac95ebf4f1820b85ca8e3fbff" + + "a2430f8e01ab422d7140751f7741f2c921400dac404b04e049736738a87b" + + "6f49bd54b1b447b922c473831a65f224ab84fc96e4551a0333bc6187e15c" + + "c0f0ad628068bcd7c043bd1e3036ec01e7fdc3d157476149917baafaced0" + + "15d09fafb92181a0ec65b00c9c13631e65de184377416e04d3d93b847e0e" + + "286c1d88245d4d550d30d4fbfcb416ff26a39a94275631c2deafc7cb6780" + + "f149e4d0e9c4515b708fcd62be5252485407a6ceeb9247de34e0266ef384" + + "976f6d31284c97468b3b03e951d87a5a00836ea303a266147a79ff3431b4" + + "b382e86c74d92661e0f65e266b7d569c03994b667a8137f3080eda2ff542" + + "0f0b52b427558dc26932a22a615c9e6b1834a251c6b68fdfc0bbe0e8781e" + + "36adf669f2d78bd23509ef7e086634e526258e8d11a1e0be0a678ac09c7b" + + "b4e3c5758504011e701dc85997fe2a3e40c7af83f032bdbe7adc10ef1e4a" + + "666946c2bf31dd8e3a383211c9684d5302f89dafcf77976d5a02c14e2462" + + "09d2d99918e82402cb0eacaa12032ad8316315af1b3d3bd5058f7c935d35" + + "ef0d4e71373958fd5e4140a9a586d89c53e4144c00148a4706a524896eb0" + + "5b1479a0de5d3f57be46b3f5fa4e49bffe027c81a33e37abc01a4cafe08b" + + "8e21fa86b42be52d75d6407e6cdf399de7aedb9b61a6917b2677b211c979" + + "33536664c637a57ce2234e3319fe8b4a77d7285ae6347464dfd0aab3e6f1" + + "178e0029686770d3b0dd541490b097f001e95f27efe8eb16e4747937d643" + + "cdefd49e586ecad541270cedc3064bdb7c79f086bf1fa8c666304d977a15" + + "54ae268881e17d8bc3fe51fa9969f7e560e3d3e050424febec0998b35f2a" + + "7378b2c3e384cbfc80c4987734d76c78224cb81cc5376f88f0ceda28aa50" + + "44e956537c3ee209071d84a66173384e0aa466d989759fb1f2f17fe627a0" + + "ffeaae7c5a3884b237f5151278a07117c2e833f1815c7e0e0b1611f25058" + + "ca338d21deb1a571faf1d0486667cb7c58e2814c3722d24fb77ce1b7e018" + + "2ae5746442b5ad00208b17c0a68bab4df8a8f36edead4fbe79b4c9220dd6" + + "acea6d23c7caaf6ce7cabeeca677a1c764d610ea6c7e994d6a9c88f57fda" + + "ef160b251e7595578ea2cc1441d480c14b8b6945e76a001891b1f214979b" + + "c52ec15e9480d706a40cb6e3b259ee99a9e84e63a738f1b52cf71c8ecb04" + + "fc833c2c680bfed587aa1541e5ffe8bbd7b21302bbf745011e559f94f952" + + 
"8b7fad8a37f6d855306a5be22725859cc950bcc334179d49564af3b9c78c" + + "e1de59a9cb45086a33856ba7195c17cef573950155bea73ed16645768bf0" + + "a5cefce78ba3ff98a54a8e8afc5dfcb0d422bd811ba9b7770a663b081dbb" + + "40aefffbeabca955a9638830f0c5d70663cbf5b26067cd061c4a3f5cf8fa" + + "4b6678d82d9a2aa33f8538b7499a3466f6b0ae2a1daf280ab91a6c220684" + + "12705245f353b4b83db50bedd3bf99d42bde6363fd6212cb745467acb007" + + "b678128f6580629a06171f7f3af272f8900b801af3bf47439167871e7b0c" + + "33f198333992a6c52c32be46071738cfbf245937d48f816ebb88ff0e726a" + + "dc41de4c771ff0bd320a4c0b1fcccd9fd6c42ec9c5185943c70e9a4b7c26" + + "a980afe104bb1f99576671a254704c7d4233eaf9915e1d56c103ba9f6e8a" + + "46aff466933bf58c9842796ae9cd21f7ac6aa96ef42ca54e390203bac354" + + "b7c1de7d1887c48255201335f819020e2782a2ee8af92ceb206b651ae92b" + + "3f4fdefed05e08974aee0a353d104b1be9a5e75c7f958f1981271b0a6928" + + "05a7a2f28a0448d86102b4fadf9ab4ec2f98e31e64fcfdf2b524780b3342" + + "7a2a3100c2032fc93199f3ea7a9e8063fe73282dcb1fafaa9496c7da868f" + + "dcf33bbb761df0bfc6fef30fadd2b6efef4fd3216a8aee48a2ef28102491" + + "cf7278b567c272d1064a277eb193b3f6f01df641ddb729f72454943cbd3b" + + "671ec077f9e3548f5f57d063c653ebee4f228a78f8a128d26f7f4b44160a" + + "07e942bab87b2d043c77ecdf10c1a419e0a1c4162a99c21d4abae0558b8f" + + "4dc0b7f1ca3892a6babf71f2f70aaca26bb813ac884ee5d71abd273ff1c4" + + "add230a771b678afbb12a1ca7fbcb2c0f5589c9ce67fe8f78a8db87825b3" + + "09ca34f48ac35aa7ac69c2fb2423807650fcf47ee5529e9d79dd2628718e" + + "230ffe5b83f9d5bdfd9c5d211282e71cbcacf972995bf1b13d21419f7fa2" + + "8829ed1dcc459da35883b9269a474f7fceff01d44ab78caf1ef7d8117f50" + + "cc83eb624062b149a6ed06ddd1cd1feafccdee7122353e7b3eb82978ca69" + + "247fde52d2d6cfe7324f04af5259e1b5c2460889da4541b431ba342a1c25" + + "3a1b1b65fce7120829e5466e7ad2fe4e0f773c7c13954a9c92d906c91aa1" + + "de211f40916596bfa8245344e257e5907a2c49ebcc864cfbe28663e700d8" + + "472c50355313d5cf088e9e8a19cdd85bcfc483520498c6386050e53a3ff8" + + "1e2b77b55b116a853d71f60d621265166cd7e95ff5cb4466226d7cef68ff" + + "d0a35b61e76a43cdcfa8da7fff9558e2f89b981ec6be632b126303ca1fe8" + + "53d5c628d967d39317b60ac904d6a882beb0746f6925a86693aff4deaac2" + + "e5b64b611de86767d55a6e11221605508b1c5cc828251539b1b6f65c2c04" + + "8e65be5422c1b11194eb687d906c559068c0a810713b23b30d8b17f10df7" + + "0962c5e7e782aff7bb95adfe4cba9d90b0ebc975fa56822025100b5cb8b3" + + "8bdc8928c1a2a8034dd66e2a763696d7ce6cef4dd586b83f7d01749d37fc" + + "4fe8d7abd324d4ff1efdbdbfeb0a2fbb8b266fc2bce8e5e5b95d0089e7c5" + + "d7de4db837d1822ac8db8198889d6bfe778d0b19e842f12b5afd740aaecd" + + "e36e2cefc2cf0b082aa0c4f75684d024b8d828d8f2911fe1aae270251f62" + + "4f49584e40bb193577c9d8e04eb16c094653cdf9a15fe9210f724c7a7c73" + + "74cfd1a74abb5ceae88ea54f7e7569f8eb674529cbec965ed05bb62f1968" + + "8fdaa97297268bfeefd06eb21f700cc56f9bf7f6cecbbbe7278ada8399fb" + + "960371a2d5cdb852b11c9fa17650e614c5297bf46cb7889d52bcf49d2560" + + "720852822b75bb16524d88273cb366b84b88282da91875562e5a1fe73973" + + "afe90e5cdd3f5381612d3ba7bfa058d023a9326e403ec474d8938313fb32" + + "bdb5bf899b900c3818c43c8a0af6a061bd26e847ed75983402ee8a9cf4ef" + + "85bba5545a0d329ba81495157eda0286f1917de512fe448251697dea406d" + + "a510adcb05", + }, + { + key: "b78d5b3019688e6ef5980c17d28d7f543ca5b8f9f360f805ee459717ca0d85a1", + tag: "f01babc4901e957d0c2032a7279321e1", + in: "ba7d35b2ef8af1118bce1e78018c9314b0c8c320591e103d23f715acb05e" + + "dc98fbc618de06627661df5842dbba9f604c2d20d664e5db06e949b11d49" + + "665088dbafdb0d39d20beaca7d723f8dcdc57e9c5583d303b6cdfdbecf95" + + 
"7d8daf2f1c72b2a6fa27e3d18841f4841abafd334c110cd2b74efb6191db" + + "ab9b8fc8427ee17664082f31db98d30bf15dda967e20730a9ef525abe9f3" + + "f620e559ed22bf74d347c9869f0311f33da7f1a3dc858b3a8aa73a35989d" + + "b055a4a2c269c95e352259c57de8b94d8de48984ecde426d3ef60ec1c7b4" + + "41cc950f7764f55bd0cf52d069b9ad446d1f765f35d02ec104ffcc00bf1e" + + "dc1b951ef953acd19984ff1b41041bea0e9f5326a7c9ed97e6aab42174ee" + + "971ea1dbe2fd1c1f67f977ab215962b0195417170f6b7748fd57262424d6" + + "cf7c235b34425f4047191232722932213b3eb73904cadd6a2e9c7571d7c6" + + "6c2f705b5039ff75e5e71c5aa738bf4177653e6eb0b49303a4bc0e641e91" + + "2691f217296a3325431d578d615afddf47784e4618a2ca40ccecb05d621d" + + "a52f272b8cf84f7fd8177c83af1580d25a764cc06436d67171cb5d1e3b39" + + "367b46d9a59d849d87ab6bfcf3fb9bac2b1ebfcd1cef4459e74b0e1b7080" + + "dabd2dea79f75581a55de63c4b23ff67d986ad060102933fc6cce8d614c9" + + "c86dc84068828dd9e21ffc5665c809d83b09432fd315dfce5d7a4ebd8143" + + "181953e3f8716e47b0b30cc1f753e31a7d509f2dbd4177b6da310cf3cd02" + + "5db270adf98e96259a5ae1b81f5be4d5c76f502a612ca73c76b91e0ca695" + + "aa921f9489948619482c2956205ae71fffc3aba4476ff754e4878e36c763" + + "2c935c076857c5b90cd63ea4764efbcee53e2ddc9bdce54b1cbbcf0e7544" + + "d023e7c2b79419ad92221a1f76abe31a8236e370d38e2493cc9ca2aaa811" + + "30fc713d11f500fd071d6eba6861e8b0859b372e62fe60b627a96c377f66" + + "236aedf307e1d148a61bdad072b93d7d2a73367c595b1e048f7023e72729" + + "1ec508326f5424a5bbf4e010d0240b71fa9137e6642ab40c5e4fff79877d" + + "b3253c663a221b49b3e77ea307c7b9f3f72a0f3a54d0112c45c64a0c0034" + + "baf2b55ae36ea6f811bbb480cee663136474dacac174c73b1e8be817916c" + + "fd4eb1876582bb3a36cfbabad91776aa676305ddf568a86e3a5eb687fa81" + + "67771fca7b5ca00e974b3cc3e322b4bd9bcee2a87d0ae7976da5e04fa18c" + + "219fa988d4f6fce62f194b05c26ed3ae1b066cd9751a2d916d53426a454d" + + "58f9c3b2fb49374e5791b412fdee1b6029144f1ca787f56fece4f64f4fac" + + "bfe4cfd8ba7c807a83cf44008fe5126a283ab2631a87acd8e2a3bd10979c" + + "4b07a84a49b0687a45a4798ded0b5e9b2acce30e714d78395bfa8f33ca91" + + "e68b2138bd67d8a694cd87c88dcefcd101a3b408d7a9095cc6a4b38898ec" + + "c8b375f5a67deaaf73eb7e99b10314ca6bba824658bee85dd731d9a1475f" + + "976b7c0aed4b67b088f0db5ca5091273217f724969dff6cf184181377c45" + + "5722beb23fd9d097a82ea2d8d527ba6284acc20cb30f2e52af28800c61fd" + + "1faf9f4f619550e0162a1a63758e202533889b27420fe7d0eac9a47a6e11" + + "1d80054412340e0426cdddbb3c7b9b823b8db3ef58230fad7a3ac21a7805" + + "d30878d4ea78dda95c951b7a5dc552e9434c35e03e1dd88652d3714f8fbe" + + "a39936cc0717c2e0335371f2a751204f5d9386baaec853f019325edfd1b0" + + "719d1fdac3fbd774a64bf957fc54039501f66df94b5b9b82c2076c597065" + + "dfcfe58b2e215a3734066aeb685ef97759c704b5f32dd672ba59b74806cf" + + "ad5daeeb98d16f7332ff0ca713d541c84e4aef0750bab7477ea707e2e497" + + "e12882dbc0765106070ec6a722d08fe5c84a677817b28fa3a41a6117f2f5" + + "465c2a2f0eb2b8be4f36e676b4115008bade3573c86cfb1370c03b6b0dc4" + + "bbbb0ada4dedac10a593655068a26febc2bf10d869cac84e046c9c846ce7" + + "927431f606f07b92abdfd81260199ae05ed01dfa07088c56a6a8de9c6d51" + + "d61d6a6d3f9904c216ea8329467a006a3d2495a768a39ef99a21827d2def" + + "909bb743fed7209f7fe59ff1c1e710095b05f166c6173deef5c6ec4105c5" + + "fc3b87c8269c786bebd999af4acbf12d20453b125f338aee87e9509ee405" + + "9c9e568e336304d7be9ffe81d1700555b0800242d9b7450d7256f2b17f6e" + + "d46a39f67bb2980572ce73169e352070dbafd4c7fa5a6be78cf9b72981c0" + + "a01f1e1e30ee3736c59828b791d2373799854497a28a44bbe0e074925723" + + "4986696fbb06ef9ea83fbd49c45a583ce12ff10258ba06127c67b0f66dd1" + + "09f1366d8036853973d8884f93de54fb2a12949eefc020717eff47898cef" + + 
"306b5de068411f1e113ffdfe2556e0faedc3e27d95a45b8afc15ba0eeeff" + + "eb86da7b4324e20af80c62bf0ceb4aee1515f5912f71c6bf2febf20123e3" + + "dd3a82dc1e58a108f1039942dcdacdeb1f0ad0b2ef34488d98d6a52311ae" + + "acbd03c12f6e775e375d5979c7c295bb049f2cfd3580e3da3841ddd8e6af" + + "4de5e6512ca79cebcab9280554524881da37984d340e8f0163fe10a02ed0" + + "88682560bc6d3c4dbcf1a542ffb3dcc2ed16a2eb96896e8269697ffeb50b" + + "73f2cc354092e782a0072fc12e1eaff117c2cc8a5a1ad8b47802ac9e23fb" + + "91a0cef9e4027595e0885464e61563093ee2b1dc5f22dfd04af7de6a70d5" + + "977d3751a4b3cc0c71a71c59c0534cb1f8c0eeddcf1c0e1b3e5ad0d083b6" + + "6e8b998ddf9ae9d3b365c851d42e995b9afdf8d66b2ac40bf514ce32e456" + + "0880afd38c42c08926067eb243c4b1184e667ba756c14ace5f525eb48df7" + + "ebb429d0a23d159664f8021d27dc7167081de331c7114c9c6456e1ffdb42" + + "2172a81c06d8deca995e158c48df27261a83f83e0127f5e056a139be9b76" + + "e25dadf534d3d1ed6ebc0b5d77d51e5b90ff86f30d4023066115bc11b33c" + + "c827b1103098826d0bf8777176b2da6f1e5b580e407ccf7e614fdf4f5b53" + + "3ef6d30b20c1bee61eab90e983b1a97173a62720ffd27abb8976a948d532" + + "d06596c23b0ef31c79831bead8f8e99ad209af3658cac0cb3c3f9c88379b" + + "9bc871d8e84171d53400902da1243f664afeaff60bd96ba2639a7644676c" + + "a79f43130af12ba2c877d67f7ec030a4217a72f5368af7c9f24e643db6ac" + + "97a04adaf57dbc53762d8dfa1afd49667c4041adcb5ec303e191b786273b" + + "bb065cd9f16a3a4a399c6a7aab9c1a6604998264e8b3dbd13d8f2228b13b" + + "2c2b9fec5055d8e9f2df1d9a25e4bfe2029776389877bbef7e2c7621f06b" + + "c0b7fc0786e2b2d042483ccd4a59d2872a6c5ac73e217123e5c8401580a8" + + "d967e0895aaa28f4d25ce68c90b4394d8113bc423e9fae46ac47bc2ac191" + + "fb97b80b5a85feb2bb54f84c493235c1408662fe253c6786fcf6fdb8be87" + + "dc66a72cc847f94dfb5214af5905b7039a7363a1b23a07853daa26862783" + + "ba08a80846fbb93ce98700a4f9961115128dd67bd7d19e0c588fdf6196c1" + + "1cb0154002ae862f11421f5dc3a57b6c0870b452272be556a1d14eab1af0" + + "a91ff5b89de6bbeed6e03bc64f5efddf9e54da71c594bc5ef78e0192cfde" + + "da36e4ad1a6b0b51110c1b24d20dea1f19e18cb1184d80189f842d4f07ac" + + "834744dd009aa3771b1e5502fe4b65a403a4bb319e1880ff6ba852e90a8f" + + "4fcb52cf374c88408428cdb1255291b04ed58c992310955198d61fa1fd9d" + + "762d48f2f65a287773efc67d549981c291b427889d3e3dfc0cc6cd68415c" + + "dbed81b516786dacf431472a7dfc99688d15bb6c1b85b1a2015a106e5de8" + + "cb9eec4c80b17d00fdcf4a9c64de4643a95dade8fa9f1bc5c839037d86c1" + + "3800a244188e3b18561a74912ed72f99f2365f0126732d037dd54a3ab77f" + + "9a9f6a1c1469ea92eb707482066bd4990dec4d7614ccb4ea6dd4deb8bee2" + + "2c4dc0b9b4d4cc70a500d2c8a5ac3ef88a38439b7dc254a6d920cfd317a8" + + "4d7747148c65b6730709e43369d4c995b03c58b9df444f77f216944e70f6" + + "6446554d8d513b8f7f28ef0a2d7ad5ca2f6110304196953247a7ac184f68" + + "61fba896c2d5a59007ec2b2c8e263957e54cdc1f3b4a145228823fdf0960" + + "c33a28f59b03ee4be21001d2f56fd49ed14db33b2c4eec2c3f41b250a624" + + "99a9b6602c1e838526a54cdcd058af1c252d56009d4c7769deace53bdb66" + + "543f5a081cdde775e61efa70956fe2a7a6019a164c6e413ded314bc928b4" + + "aebccb946ffdf3eb33e187bf421febe26112b3262a526de65678cd1fa03b" + + "83513705108fe0bb87aa99aceb28af3641c46a2c4427cc1063de01aedaea" + + "fba68155d4de494a27ff6b7fcc8f5c5c3f7d3a115c397a1a295bc55aec8f" + + "7f150cbce2a8aa4706d54ec863877bb966ad441c57e612a1b5d438b98d9e" + + "fcdfe6d4f66e885f96407e038015cf974ae5a3540692b054d2ddfde59b28" + + "ede7e2f581eeb56c5b88e2779aea60c1d8ca6107b0cdda1ac93e6c7520da" + + "edc66afeed12f980e20e1e1c327d15ade4bb90de30b011a9cb33855ca3ca" + + "e2", + }, + { + key: "2b0b0fd3347e73c2fa3a9234e2787e690a11aec97a1c6d555ff7b4047b36f372", + tag: "81b1a6633f849ab0aa7baafa58a5d9b8", + 
in: "427f3a7a5f1142ffa68e83df5f917e07b2bc454f3adce068a8ae9e0908e1" + + "3e0099aaa9074697593c6d8c2528fedddeca05e3888be1a0a201c389a72d" + + "20cb661017544d95a431e70e7c6580d8fb46ea4495bc59db6ae2cd69510a" + + "02426c50de1b6110120f759960605aca718d4d0a497e003e1ea2b8ae9a53" + + "df3c1eb4f704eb32f8f05eb08cecba0fd4a94f0daa3b0984c30a38f94b7a" + + "10cde723182d30588bc40f1f9d38a3bab4800fdd5148e34e396144763696" + + "c9b3e9b8adfdb337123d54237c7413f98bb2056152b256e37a27bb947c67" + + "240fa3ce8da62ab367db540bcdd9eb873d6c71c75a08fe99b5c11ec8e6af" + + "f926d2adfcf073479de394d4aac5fdc6241824d944b8773db604c59afc01" + + "495ee755905e5616f256c8a64321d743a1c9368d46418826d99b762e2f6b" + + "f998d37a995969cdc1de85f0ce3987c6550459f5e5bfd9173bfcb9e0112a" + + "d91f092de446beba14fb3b8ce3fb2f9c941815b2cb5a3b406e2d887b7912" + + "bba07c8dc7caab9836827da93ca71fa5ada810da1e5e9b09738524564d8c" + + "923746d19c78dc9107b9f20f653e05d7f2eb6bd90cf5eb30fdd7b587eb46" + + "74a1064c70ef0af2e75373044d32b78d96eb1db3112342d38dca0e47b96e" + + "9307fcdd711b1c66355186369a28481cb47ef6bf6651c2ff7ee4665247cb" + + "12b573933d3b626d1c6264c88bd77873c2e73e73ee649216bf0b6d6615ab" + + "245c43569d0b8096596f25ceca8667661de1cd60dd575697370ebd63f7e9" + + "5333e8a2cdb829b75ea83d72cd246d50358f7c094c8a515805fda03165d5" + + "21391617c9f9a2ea562b419632df611a67912d2b369e5e505dbd5c719253" + + "16d66cd608cc4a9583a8eaa4661b7279870345fac3031631c1a220551527" + + "5be7d8d89b71960e687aace3a0e8f206e475053d6fbf97717b154c75406f" + + "2caa97d1ab66048f1c99281c188a2f37b8bfc736c25840a9130ef2031c05" + + "6acd9dc10592eddf94f5bac85319b10ae46cc136a0738aa803837287ed7e" + + "dafe08d1fcf31d5e63763e39a5e1f4d7d0edab368d44e63fdb33c28905ff" + + "d6be406a024c017081b4f2d70860776e9d2556cd008fa5017b58733da13c" + + "634938407a118827a80baa28d4e605db59430f65862b90cd8356baa287b8" + + "4e6d9199fd80abb9fa697e2c2c4c760128e4ec0438388cf407e2a2fe0f57" + + "908187ed8efd4c5cb83cc91dbe6a11444eede85099149ca82921bc28bdd6" + + "b9999594a41d97307f8854b1bf77b697e8cdd4daead2aa49fbc571aa44c0" + + "bc84a57cb5fd85f06847ad897ceaf449eec45bddd4e4eb1e1e119d15d5e7" + + "90957e686acbdda1bbe47ea935ebc4b8c2e3cf9b7157cc6dc03bcb19508d" + + "a9e19cb76d166da55559ec7e0995d9b50c6c45932d5b46eee400c56d9dee" + + "618977dcf6f76e3e86bc5207493afbc2aae9f569ec9277f33d9f61c03d59" + + "dd6d8250ee8cb3e54e5e941afb74f0735c41d52ef967610c9f55b2b52868" + + "4b549a99ae3392a7237bb52ff5f8d97327e2837268e767bed0bea51f76bf" + + "88bf0286bf22b881f93f1d54fab5cd4e3c148c96c39e7aeef375de249df0" + + "4d89d1bd97a7afb2be0cbfd3380cb861d31e4ad1ea8627721e4518b9db3c" + + "cda20273ec23549c4adc3c027e3ac9558de2010a0263c1225a77dac8be60" + + "d498b913f91391d8b2656ffddb06e748cb454dc2b7226745f11030a6b9ae" + + "09ac8ac428d9c6500801fb540650c94610ab70465b1210c6db2064dc84dd" + + "7f52573f8f40c281470e85176c85ec6de3c718663d30ad6b3dfc1a3a9606" + + "1936744357ca62fb8bb066aa1fcac6d7a2adf0a635cd546bef39fbd3ee0a" + + "8802ab0466ec9b049b5892a9befa4377cd199a887c34569b6f90852139a7" + + "86babc0049ee2b527aa96b988237a52eae8b4b49d2ee15ee5294118cee62" + + "3c3e11cecb836b21af88555f10be2eff8379beb615b7b3d6c01d545cacf6" + + "61be8ebbf7a3c58ac5e0e7b17997659a2bf15f2b2e3d680d142fd29d23a7" + + "aea9890f3ff7c337fce49ecedaf38573edfae07810ba9806723e576d687e" + + "a11700b8ccb96a6559259c367cef4e3999a05a373ab00a5672ce8b3d1dec" + + "a414187f383e449d10021b73c1f7e39ce01516b7af96193f9993036049fc" + + "72ac059ef36b2bcfbe13acf140d41592880fb8294ebffb98eb428ce9e65e" + + "1094521bcf8ecd71b84c7064539a7a1aac1ad2a8a22558fb3febe8a44b87" + + "72fc00c735773d4ce2868a0b478ee574b4f2e2ceb189221d36780b66212c" + + 
"dd8fd3627cf2faaa23a3d0b3cd7779b4d2b7f5b01eb8f1d78f5b6549c32a" + + "cc27945b5209f2dc82979324aebb5a80ab8a3b02129d358a7a98003e701c" + + "788a64de89726da470010eda8fdcf3da58b020fadc8970fafb08a29bef20" + + "2bd0707e994015258b08958fc2af4c86c3a570443fe6e1d786d7617b0c66" + + "29a6d9a97740c487622b5b8186c529d7f8af04d9f0a9f883043f08103ca4" + + "d70057ee76639f3b1046d86928d54cd79fb5bb7b46defdf15d2f8578568f" + + "1d7b73e475e798ec6812586700e038ed4791b23ac9439d679a1a4bc04cea" + + "e328330c24b065c9cdcdcedfbaf58e5299779e6f48783d29ec3b1643bc8f" + + "1095c724dea75770583b15797fc666f787510d91e65a8e2090cc1ed2013f" + + "e63ab17bc7640ee817487f4eac8326e9c4698cb4df05d01bae8c0d00fc00" + + "08919484d5e386c8f60b8ac097c93c025d74faa56e8cb688d1f0c554fc95" + + "aae30873e09aae39b2b53b1fd330b8546e82d9e09bbb80132d794c46263f" + + "4fd7b45fda61f86576dec52c49f2373e4dca31f276d033e155bbcdda82af" + + "8f823948498f4949bf23a08f4c8ca5fcc8598b89c7691a13e5aba3299ee0" + + "0b479b031463a11b97a9d0ed3189d60a6b6c2390fa5c27ce27e28384e4fb" + + "04291b476f01689292ace4db14abcb22a1a37556675c3497ac08098dfd94" + + "d682401cabec239377dff592c91aca7eb86634e9d5a2848161dc9f8c0c3a" + + "f7b6a728371fac9be057107b32634478476a34cbc8b95f83e5b7c08d28f6" + + "fb793e557513ca4c5342b124ad7808c7de9ecd2ac22d35d6d3c9ce2f8418" + + "7f16103879ed1f4827d1537f7a92b5bbd7cd12d1ecc13b91b2257ad073b7" + + "a9b1ea8f56b781bea1bddf19b3d7b5973f1065fb72105bb4aeecca5b7513" + + "ffd44d62bf41751e58490f171eb9e9eb6d57ffebedd4f77dd32f4016b769" + + "fed08dd96929e8efb39774d3c694b0d30c58610541dcfab3c1cd34970195" + + "7bf50204acd498da7e83947815e40f42338204392563a7b9039c8583a4dc" + + "faba5eaf2d0c27ada3b357b4fccd1595b9de09c607ebf20c537eb5b214b8" + + "e358cd97992fa5487bc1572c8459c583116a71e87c45c0ba2ca801931a47" + + "a18ef0785ebbe420790a30278d2d0d42a0225d211900618438d1a0b2d5be" + + "d14f8b4be850dc8cb08d775a011683a69ee1970bb114d8d5017de492f672" + + "09062d9ba3616e256d24078536f30489e4dacd6429ed37aab9b73c53fdd8" + + "a8a7aff1b914b9d82d75a46d0ccf85f48d3ce9a8d3f959b596ae9994ac3e" + + "3b4af137d0c8e07ece1b21fd8aa05522ba98f85a7ab24ed8c1e265fadf4e" + + "9a18c5ab5684d8ba8d3382ad53b415c73ebfaba35abeebaf973b6f18e0d8" + + "7f019420eb34e09bbb12afc5b149f1e9e9b6ae36ebde429d437ada1a2d52" + + "b998f7c75ef731132aafc3bb106a2ad3ae11223a355804d4869ebaa47166" + + "2df261d95d48ac6eb17c1781e81c0027ccf8f05c39e1eda7793cb16622be" + + "ce7a1ad5d2f72f8bf4bdb2f4f4dcadac3db3bf727f0d447adddad4500360" + + "09ee011bf4155e5e46c74b00d72e8e6a88de9a81a5a4685651b90e874dfe" + + "eba41698c98370fd9e99619ce59ebb8342417d03fc724f9c910ae36ac5e5" + + "b46c424141073199aaac34232a8e17ebbfdd80eb75e82290de92968f3893" + + "0ab53dc83ac433833576e86fbabfb9d7cd792c7e062811f4cb017710f841" + + "1e0fb65ea4b3cd68b0af132cb08330aa13579196ec632091476f268b44ba" + + "8f2e64b482427dfc535d40d3f58b4dee99053b35a3fed1cb245c711fa16f" + + "c141974c8db04f4c525205dad6ca23ccaebde585cd3bc91f5874452ed473" + + "08de95cb6164102744f90b3007e511e091653c97d364fe0cbd7f4cd3249c" + + "1f5c452becd722ccc8c6b4e371e2631337dff78efd903a8fc195a90ca5a2" + + "aa4513bc63cd43794ff06c5337329055c43d4fb547e63d6e4d14fbe37b52" + + "1411caf2f1b0df51a68f677db59aa227c725cf494ccb7f8cacc5a06ac5bd" + + "f135a2603175a5fd5e5af615fd2e7cea61934e6d938b9e672290aaccd99a" + + "7e26dc55efe928e56ae6354168264e61668a61f842a581cd0c4b39e0e429" + + "04631c01320857b4d7e260a39c7fbed0593875b495a76aa782b51fee4f88" + + "84ca8ddb8dda560b695323cdde78f82dd85757cadea12ef7cf205138c7ba" + + "db6a7361a8d7868c7aefa7aaf15f212f5f5ab090fd40113e5e3ad1ab04f9" + + "b7f68a12ad0c6db642d4efb3d9f54070cc80d05842272991bcdae54cd484" + + 
"9a017d2879fd2f6d6ebce27469dda28ad5c345c7f3c9738038667cc9a5bf" + + "97f8f3bc", + }, + { + key: "aa3a83a6843cec16ab9a02db3725654cb177e55ec9c0c4abd03ada0fbafca99a", + tag: "719dbe5a028d634398ce98e6702a164b", + in: "643883153c215352a4ff2bb2d6c857bafa6444f910653cacd2bbdb50ffdb" + + "cae23cc297a66e3afefbd85ab885e8ccf8d8f4930e403662fb4db5121aca" + + "82dfcc3069bd5f90be4f5bfd3c10f8038272021f155e5de0a381d1716abe" + + "0b64b6d0f73c30baf6ddfe0e6a700483cad0fa14f637afb2f72361e84915" + + "78ba117e1c03f01fd61aa8f31da6464f3d0c529524d12dc53b68f4d4b326" + + "db7fc45c63f75244002b8f9a185556f8aab85948647818f1486d32c73614" + + "b8c4763e2645bdb457721ff3901327588da01622a37ccbbd0374fec6fd1b" + + "cce62157e64c4cde22c3a5f14c54cd6db63db0bd77e14579989f1dd46461" + + "4c8691ef26406984b3f794bb7b612e8b160374be11586ec91e3dbb3d2ccc" + + "dbfd9c4b52f0069df27f04853e7cc8b2e382323345b82ce19473c30296cc" + + "453f479af9a09ec759597337221e37e395b5ef958d91767eeb2df37069a4" + + "f3a530399961b6bf01a88ce9dfcc21c573e899b7951723d76d3993666b7e" + + "24dc2570afe738cbe215272ccedb9d752e1a2da00d76adb4bc0bd05b52c3" + + "fa08445671c7c99981a1b535582e9b3228ce61662a1d90a9c79afbdcfcd4" + + "74def2b7880cac6533ba0a73fa0ba595e81fd9a72ec26965acc0f4159ba5" + + "08cd42553c23540bc582e6e9ac996a95a63309f3fa012eac14128818a377" + + "4d39936338827bbaafad7316e500a89ed0df7af81be99e2f6aae6bb62568" + + "1dfa7e100ebca5c8d70f67be3c1e534f25446738d990ee821c195c98d19c" + + "fd901e7722b4e388da90b95ac0b5b5dc5d052ad6b54f6ea34a824bcf0cd8" + + "7f1fc9a07e8f5b8aa0793e3c9c1022109a7c7ae97ee2a2867fd0cf0f8971" + + "34b3d150d3b24fcf8323de929b73cca01244df02510393f0b3905caa0268" + + "7fe35f64391e7d4b30be1cc98319716528ca4f35bb75d7e55cf7749968c5" + + "37136eddb149a9f91c456fde51937c0f35e7e524647311077e6fbe7f3c12" + + "37b9584fcf3b0f78744c7b2d3b452823aca06d144e4463eb5b01014201cc" + + "bfed1adf3414427072135d48e705b1b36ab602cae69428e7c19d39cbb4e0" + + "ca26a871d607ed4daa158b5c58a0a9f4aa935c18a66bdeff42f3dc44166b" + + "a299d71a2141877f23213b11c52d068b5afadc1fad76387cf1e76571e334" + + "0b066ade8da02fe3b0bdc575b1d9ec5d5f5a5f78599f14b62db0bef7ccc6" + + "1711482dfa4787957d42a58fdc2f99525c32962b06492229399980601bd2" + + "ee252306b1464914424de9aa414a0a6e5dadf8ffbf789e6d18a761035d3e" + + "f2ff0753becbd2dd19fc1c28f9acebec86f934f20b608a9ef735ac91f6b7" + + "83d9327cce7f4870d39bbbfb0100838dee83e6baf2b40cfc98415dd174ed" + + "72e393ad0459e8035dce7eb18eb3af2f39d2712846b9e1852cd61d06dfc3" + + "5e34fb761b67e2a711ceb4a82557371ed32ca8db2e4cd7fea0b6bd026177" + + "4057b9abc45dae6869cab1097459473a389a80a4523e5de696554f8b0bec" + + "0ca605e6acfaa00386fb5a48e0f5893860a29f35e680be979cf3bf81ee7e" + + "ed88262dc80af042b8cfe6359cf8b475560bb704728034e2bd67e590bd76" + + "1632e516e3292b564c7265d7a6dc15c75ba6f6a447b1c98c25315ac7de59" + + "9edc4993e4dc7d1dbfcea7e50ebd0b226e096500216c42de3abe352e5b09" + + "a3c9754aa35d00883906599c90a80284d172a90abbeaf7e156fe2166ada1" + + "794420fe55b1a166d752d0eb7f04e822d021c615e84777101e7c9f9dd12e" + + "565b7d093fe978f85e6142c1ca26798b45f4b8d23ecff6be836e810e314f" + + "ebd2ea66f2ac95bad84b39b7a6bac41448f237b45e9ec579235ba2bf5fa1" + + "f00286379ec107c743f06ae0d11b57a2f5b32e3bc5f1697aae812d7ca303" + + "b196a8a43259257f7697bae67adc7f121be561b2d0725982532ffc06cb22" + + "839d9066dce0e4d683d9348899089f6732de62751ca77f1c439e43054468" + + "2c531b9c61977bc221b66030f7571dfb3ddfb91d9838529dbc99612f650a" + + "d72bb78de061192068941a81d6ac341101aeb745b61bd7a87a35a2714d50" + + "c3eb2c3ea148fb9ebed948307f8b491aec277ac01903ba36e6ad54f89fe4" + + 
"280a17f8e7ae639e75aec16d56576f03c2a1efe4af995eb825ccaa6efe0f" + + "d6d878299a351591d791c286cac5cb049834580d47a9bb7720d0603e3141" + + "ad7c1ec2dd23d3002e15d73c1828a7f08062848b1b6fcf816bd954743547" + + "6f0d6f882125bd03095eb1b1a846d535730e258fc279f7095de7c2d3fcca" + + "a4640a2e2d5ce0974c1e073c60bb78171c1c88ae62c7213a95d36ea9ab17" + + "59093813b85d17ff106e69100bd739ede9656388bf47cc52730766a8a186" + + "9dcc623e09e43cfba1f83ae1d9f16789064ec73504c29686760ea02c6634" + + "a929ca10c6d334b1751494c6d143671ce8e1e7dcc9bcda25af895a193032" + + "ce27c1016ccc4d85507fd2265ebf280d3419f54f66ba2a161c068491578f" + + "be056f02f97be745db443e25ed2647c5348f278f4ad8bf5b2a2c2d56e795" + + "532e25585984a3a94f435ef2742a0413abed7230ff2e9724187c91f73a7a" + + "726ebf36bc8d0d959418dd586452664990889358c56720c1001c004ff768" + + "54b9850890ce1b31735fd9f4a3640622ef0b25c659e8a937daa0df7a21f1" + + "77be13dfdb8f729da1f48e39a05f592d8c98da416b022fd8edab8e6132eb" + + "a80c00501f5cc1e0243b6b096c8dbe7f8c6ffa2f8bcc7f309fb80b489b92" + + "c4878fabad42d91876e10ee64ccd415124461cdc7d86c7bb6bcd9133f3c0" + + "dfa8f629ddb43ab914c0ac5ecddf4398052229876fd838b9ae72523946cb" + + "bba0906a6b3ef26672c78cb24cbf691a5ec869d9fc912009d840772b7da0" + + "c7f47856037c7608705cd533918c207a744f75fdfac618a6981778e09332" + + "5c7d22170da85bdc61044b4c397919d601a30746cefefa798c58f02cb827" + + "0d130c813cbeb67b77fe67da37a1b04bf3f1e9ee95b104939220fb8a0394" + + "86ab8954b2a1468016f546406d1946d531966eadce8af3e02a1f59043ff6" + + "e1efc237dbf4dfd482c876531d131c9b120af8b8fd9662cef1a47a32da40" + + "da96c57dc4efad707a4e86d0b84262d850b451bda48e630c482ef7ede5bd" + + "c55147f69e2ff8d49262d9fe66368d1e38ecdb5c1d4e4042effff0670e69" + + "04e47d7d3047a971d65372126ff5d0426d82b12b253bb4b55005e7a22de5" + + "6fa54f1dfcce30b1e4b4f12b1e3c0de27cea30ce79b08c8c1aceb1ffa285" + + "c317d203a9f2e01d542874fc8035b7670f3648eec79561d6ff2fc20d114f" + + "ba4fbed462f1cd975ee78763c41663849b44cb2827ee875e500b445193e1" + + "4556bcccfaba833bb4ea331d24a6a3bd8ec09906c7b75598b44ce1820a49" + + "fca4a0c1501e6c67515d4fa7f88f6aa3cd7fbc6802131a7b14b219e154db" + + "9ed241133e10ace40e4d963f904dd9f3bdaaade99f19de1ddfe8af2b3cc4" + + "0a48374dd8eb559782bea5410f8f9a1cd128523c0157b6baad9ea331c273" + + "311492fa65c032d0d3b513d23b13b86201840d51759021e4133f873f2781" + + "8f54f34ba73b4f33107d49c8de1533856ec37bb440f3c67d42148765610c" + + "3296bce932c839fd866bec3762a38406ac2b39d0d93730d0c88cb8f765dc" + + "d8ee71263fc96068b538da06fc49e25dbeaa10a5111a9af8e8f8d78e6ed1" + + "3752ad021d9f2c6b5ff18a859fee9651d23a7237bd5a5c29029db3882c47" + + "0470de59fd19fb3bfbd25d116f2f13ef5c534bf3a84284ae03e3cf9cf01d" + + "9e984af9a2e63de54e030857b1a071267cc33d22843b28b64b66e4e02803" + + "c6ab5635291aefa69cfeb3958c09d0b37176842b902da26caae3f0d305e7" + + "c6ab550414e862e1d13d9bb9dc6122cb90ddb1a7bc6d31c55f146659baa9" + + "6cca4ea283e5e1639967889543ecb6849e355b6c0227572097221dd46c1d" + + "f8600b230e9644ba611ba45cd83fa4ac7df647b3be57387b6db12682018a" + + "de9be50a8ea7d5f7c743bf0c6382964bb385b3c207c0cdd63279c16130b3" + + "73ba974125291673344b35c8ef9a33be5a8a394e28dc1448f54d46af675a" + + "edc88ce85a11ad7e50058df4f3f2364abd243683d58a2b13fcb0dc0eed21" + + "380b666eb87f4be75e7f2842bae916c15af3e9658c55408537b2301faa6e" + + "42af4d94e3eda6a41d6d302be281e2a9299e9d0fb1f20cf4ca978e66bdd7" + + "4c8bea0f15c84d6513cdea787dacbd4bb529ed03528284cb12f6ecd841d3" + + "c58c3a57c6bc19b65d6d10692f4e1ad63b091137c8acacc6bc1496953f81" + + "2972bf6362cf883bb75a2d10614029596bf9f35e92addbb50315b30161b7" + + "de8867a1393d9583887a292cadceb54078c9c846ec30882e6ff987494060" + + 
"721d3c761940b91a126e8d1e0118617bdae01a7f9c1aa96bdd6c78ca06f2" + + "6c8d85664a8705334f4997c724ef98fe265985593d5a9c30798714e6de1e" + + "bd04b648be47a6b5d986a3103e738a5cd114b19b7ba99d2e2eec6181bf3d" + + "ff0fec8c54ae6118be8702c3e775d493a6fafb509712a43ee66c3f4b75b0" + + "194c88937cffa5fa17b284d2556f2b0eebf876e05f92c065515198bd5e83" + + "00d0db432cb256a4a0f9963a05694ffce3ecbd182209e0b7bb50120f6be4" + + "eeb9d268b17790ee14a2c887dc5753e0086630b3123734053aa37595aa8f" + + "31968ddae4991af4ab970c1e3cfa1146a2efd9dc42abd6af14777b8a0455" + + "3865691cbac4b4417b3fa13c154d581b498f3b8cb77adf0e42dc2f2fb521" + + "732447de97271e542c6cf8cad3ba0148cc3ba1f2983ead836a25a2c022d0" + + "43ba18fcd009d518d07b53344a5bc4d626b3b38405a114471f75dc70e015" + + "d11e8f6f57d087fa72909785573008b1", + }, + { + key: "1793bfda9c8666f0839b4b983776735a927bdaa3da99b13c9f3d1cc57d4d6b03", + tag: "bc89cfec34ab2f4f2d5308b8c1a5e70a", + in: "a09f661aa125471417d88912f0a4a14115df9a3a19c1de184878291acb0e" + + "89ee1f9d8213f62df442f8969a9a5a7c402fea09bdbe236fb832544e1f93" + + "9cdd4873802b2bb8fc35ba06b7ff96da6dc7efddfeeda84116bc525a7fc5" + + "2d84d2e63cbac00b122dc64f2d15b36595259d81a1d2a09f204c54072751" + + "dd812259df1104bb2d2ee58baee917c5d0aa2649c8a1503114501e6ed6fe" + + "239847d3d88dccd63d5f842426b600079c6bf06e80a2813b2208181163b8" + + "61dca07fa4d88254e84dac1c78c38397a016b5ad55a6b58878f99036db56" + + "89871ab3c321f6ed5895f218f8fd976c348b3f1269fcdf4d38c9492b4721" + + "6c45f499f5705830b33114d721f9731acf6c69fca681b74c2d82c92e145b" + + "7bab77110821d3a12cc818d7595a5c60c4b5e5219376c38a4dd52d435d41" + + "562802ff65ba2bba5c331c333d5adf194d29b2cd9ebb55927bb4ec17681a" + + "3f5574ad34fb4e964f2c756f6dbbb7a6876a21579a515263444de7a30a33" + + "15005458bc137ccfdff18a3892fc9f58f1de10d4de20bbcf860f5f036d8e" + + "8a188f18e5cf7ea3cd260710e7491befcb131d49a28dfb1ef688fd021a1e" + + "e4420d32fbfb03b47f5e85c37d91e49a1b0db85d966eb5434c4197433eb4" + + "9d56f2ff999c9a72230447032dc949202468261b48b6ac212e3f651d6c63" + + "03a06c90bb2d3a755ed91ba73bcdc28e1c5b0936e51e0a9f69c3ebabd3db" + + "add7abab6d8f6a44daeb3126429a01815f57444fb7022a4a510f8b564ae2" + + "dd9779b3a273fef15859a33e233724846c30d89fb78a595b6ff6c834812c" + + "00a991e405806aafd0c26a788895ad00a5e43c5426197aa8247207077548" + + "ee67db4cd6f878431a2e36e952d84b5fb89d681f553198e2c066310ea6ac" + + "3a31f5b1792620616f6c41d486fb844eeacc7fd36971abf416e8d6d50985" + + "c83cc92ea46ac37da8f0026aba30c945d8bb15080d2d95e4081bad626199" + + "3f95f57ed3252822a7caa035ae22a36c35e280cbbc82d729346cacdb1794" + + "ae9a9bb2793fd1d5c47121b135c2836063367339c5151b4e35278e97f62a" + + "fdd2f231d4b47812d083a829ebb9c374ff2ae8479cc4b76d55f9cef3ec6c" + + "4894f53e8caaeb0d8cd072960cedaf758e48e3640590d4f728626e0a08ee" + + "ebf719c96bf8ed4d0c283be09c0ae67b609e22d3b9aa6b03642854909de0" + + "5ed52b39673867bf586a632ab8072de15c637cc212cba8387515c9c9c433" + + "abd7ba6b02abd09da06a34694ad34f88515b65c0c9c247fdf9819fb05a1a" + + "ea4728c1182f8a08a64b7581cd0fb2131265edcb3d4874b009aede0e87ed" + + "463a2e4392aefd55e008eb7ba931788262f56e53193122a3555d4c08133b" + + "66020154b15643fa7f4f5e9f17621d350ede3dc70be02c59e40fea74dbbd" + + "7919d1a8d4e22ef07c916fa65e7d4b89fb11a7c24ddc4ca5f43344c753b6" + + "1331c3fa4558738ba7832b5b2a275bc9b7989b6e6888865793329806cd3b" + + "f0ba57c941d4428623e062f4ac05e7cd79ad5446f8838f2b247b66bddadf" + + "540845a1bb304a04b7edbbff579c8d37e2f6718f8690abd5231822c7e565" + + "69365ce532449a41ae963ec23a2a75e88307dc6b59cbb3fab913e43ed74d" + + "841ca9f6e4ef96dfd9f04e29e89361aece439c0b2e1943b30410a63d495c" + + 
"522ac3ec1b04ec4cb345f7f86969957ad750e5bd7dbf1d6a22eed02f70b8" + + "1cb5b2b020c0694d7f63044f9de0c3de1ede52009c858992d01ebb92ff19" + + "a9e0fbea18942fbafb77746c8e9e687dd58ccc569e767528bde43b62c7c1" + + "270a5721f1212de2b29a7aae2d6ba6cd173d7fbc78aec4356ce2e8ba9164" + + "d97dec061dd0c3a0e3c520a7611ac99739049dd5825537c70b7ef660046c" + + "1785546cd99aa400da848eb7c3c91247415c8e245d0f14c30d482c5849ae" + + "aaeab2568288229b08267818dae8f76fc674c684c99eb5faf88a0783813d" + + "f7298e0b50cb233f78471e5ca9cc3b04927c26a3871cf253798cc49aa717" + + "d8f18a1ddcbdc26497d188f15f86ec494dcf8f942c3e07e572385c6fa0ef" + + "40c0b625f1737543074a747a369482a0b342a08b3eccac9f9209be31aefe" + + "5a7794974f71ac0bc9a58026397ea3dd4f5e40511d58d2a3b45925c194ef" + + "13987037d736dd48b509d003a86471d5f161e0e5dd168b4f1ce32f703b89" + + "15004d8dfc708a5bb02b2e6fb67424b2cbcb31ddaa0114c4016b0917382d" + + "aad11815ff5b6e37d5af48daa5ef67cee3439283712bc51b5adf2356cb2a" + + "5181b8941fd78945c7c9d61497683e44fee456ad345e12b4258f15945d45" + + "b6ca4369ee792d849112d583fdb39cd4d333ee057355f0abc8d1eea4640c" + + "128cc1617982db0394233dbd416102eec1874081247d2982bbf9fed1b1b3" + + "8f4da923d68c8975c698f189a4d7840fd7aca9dceb7d91c076f85e1c546f" + + "4d5de4f60c91348455aaea30cac134c844dad93d583c139dd52b3be6346c" + + "4d2e6864125c5a2d0aed8f67930e1ebf8700ca88aacc914ea76ff17148f0" + + "777738cc126e75a2c81110faf02fefc47c91edbab7814599000ce55fe20e" + + "f313566e9b62457acf2f22e1141e220bd9d4747417d03e703d4e39282803" + + "386327fc65dd597f723ee28185c78d9195fc70a75706c36287ab9c6e00e8" + + "5cecbbd6043c6af8d30df6cdd8777be0686853b7c8a55a5b1e03e4431d39" + + "1725ff99875a85cae6926998723b36d13ad458220712209bfc5e8d2ca5d4" + + "4ea044d5ba846b4035e7ac7e9885f55d3f85c0c1b3d09fe929a74450f5d2" + + "9c9672e42d3f59be4ca9d864a4322cc454c2578493bd498a51bbe960e657" + + "3e5dd02c4a3a386d4f29e4578a39e9184024cd28d0e86ecac893b8e271bf" + + "ce3f944d130817378c74d471bd20a4086f2429ed66c5c99969fd8da358ff" + + "5c3be72bf356ae49a385aa0a631b588ddb63628fd162673e915cfc4de56e" + + "ae6ff7101df3b33125c9bab95928f6e61c60039b6cc07a66f9c733251447" + + "ef9c1ffefa2158a8ddf89dc08686a4cf9b86ea09914e79842d72a3236afc" + + "98a3afa0a1cac5590ab6a923e35a2ab8db6410a9d33cb84d1c48a054377e" + + "549774b25f50fbb343ecd5db095155cce9fb0c77d09752f62d4bbf16a770" + + "30452a75f6bdf73f7807d8f3a6bae16ad06b22175fee60549c22548de9c1" + + "3df35ef4e7bf7b66491a62b93c2c3fb0c5edc51f60f5704b56af30f1079d" + + "7c385b99f958ef8209e030e381d1ee8d67d3cb84f32e030e8ea2c1d0c77f" + + "d6b242a9f48707557c8682a08e1127f51221a55c733ab1edd00a9c2912cb" + + "36dde85f73b524e1a4f4da6414c5e4c18d9537722b2becc8a91bcc63f2b0" + + "9f32409c53c2beee0de6726dabcd6bf33118a5c23fb9c5c1810476efe658" + + "4bb6109c516b45e16b2f79f96755680374d82b91f2c519639a1815fd485b" + + "a3c00b46fbefeafcf25554ec5a6a5ae2da07c85b8a0f9fcde50263d9ed85" + + "038b2f7aadb9de765655bd201235218bfc74bcad6a9ddf4506167a649afa" + + "df400b85752d68a92b7a97f26b334dd77fce824862046b286a7c8e0adc36" + + "f713a252a673d4d995b268badf4bec8b8eefe85c25b823b6728582d35c4a" + + "60041114dab72b0623b99e2758f6a1e97365279bfba0eb1fc8952ca4f2c6" + + "fbffd9f5fd7dcad1125b18a796981b5ead0b6431141315898ace96f0d38f" + + "865698df8822ca7b65644b6b1f0a0f0d2e5850d4c93ec48ca3eba1b919e2" + + "4413a46d595ffa427715e499db3b7b9ab53c64abec7302bc737a5bd124bc" + + "da756abbca132f7f67e6989e09bfb23b497da31bf156bb9c69ae54588df1" + + "7420e8fe989f0472c8893b2bfe57cdae265a8cc7aeb39624167a567a6fbe" + + "bb1aa30c3dcfd14f2808a070994085e6e1fa79021e77c399f90ab1f995a7" + + "baff672cb693bd39b798b4c890b7d0a57978d6b9bcdc5bf3f4d205f8f24b" + + 
"2b43d3ae300a96971c9182be297618b9adceebedba1ab0f324b01d23d7e6" + + "35f009db3dbbc643c2d787567594bc639bfd78c4f3e6d948caf06f013423" + + "eb3c764666b58f886d5d28137c053c2a28535efcea400147e92ac6753574" + + "3b47f9cb48852abed1d057647d5b1c6f334eab1a813401fccd3dae332738" + + "776bb223e359f3c459b5c573ba64fa945bdd66c5ac0fcbd53b67032a7b80" + + "25f551e8d1fd2a4291bdb7941cbabe3a09765dc263e2bbb6db7077cc8fe6" + + "790d4bed5e36bd976d1e37dfdba36aafcdaa10c5f3ed51ba973379bcb8fd" + + "203d8b7282abbd271ecf947e54486e8653b7712c9df996a8ad035f41f29c" + + "ab81509f922c67dacb03f25f8f120cb1365ab3c1c286849c2722448ba9bc" + + "ff42a6b8a7a52f2c79b2bfcbdd22ef8a5651c18879a9575dac35f57d8107" + + "d6bece37b15d7dfff480c01f4461ef11f22228792accda4f7936d29d4c56" + + "cbba103b6d3e6db86e39e5f1bb9e9fd955df65b8a6e44a148620f02b5b90" + + "b2be9e5bb526d0ec75b1e723e94da933a356d7ca42d0ce8349699f730b8e" + + "59bac24a6b633759c88041d29399ce60a2ca2261c7eec1acb9a56e0e65bd" + + "e37653ce2cf7eb83a4d019c755bdc5d685b6394ecddb9006823182dd8138" + + "a1bf79a32d07a8e5e8ab221995c714e571b40bb255b79e328ab883542c16" + + "4899fffa16eb3296f310e302512352a864fd809beaab4169113027c6ccca" + + "99a92c6ce35c30f9449a3add70f10db1ed08078e8e6cbaafef630aab7e9f" + + "c8adb09c18e33fe1af3620d1e4d069ac11325e23cc18e5519a1ed249caf8" + + "ddba871c701f1287cc160019766988f63e089bd9bf1af7e6f5b9002e3b6c" + + "264d69a8bac16914ab55c418d3a8e974677cdcbea36c912e90386a839a37" + + "77b878e680c07c7cc99f42a7dd71924babf7fb0627d1f2cc60d9d390d1e1" + + "50d47386be6eefec9ddbb83b28fa7e2fd28cc3867cbe42d13b00545af8a0" + + "48cc07016ec79808b180e0b258c564739185da754f2e", + }, + { + key: "0d41cb4ac25217feb20e86fc2490e8d2ea2e8225c051252a9395cc4f56e1ae5a", + tag: "42df9f9a59d6dc05c98fd9e9577f7176", + in: "01caba7a19cdb09dc0ec6c522c61c628eacf17ef15485aa5710fed723875" + + "2e4e8e93dd4bbc414e4c5620bab596876dfbea33987e568ddabf7814b318" + + "8210a5f8d70041351e4d8410840642a29cc8d901c25fa67cc8f9664ea5e1" + + "9e433eaff7c722d0258ae112b7aca47120aa8af4420d4412a10732551db2" + + "cd3e0af6e5855d5eea61035af15a4d0d898d04033809e995706eba750a7c" + + "ac07aaa0dc71477d3020f778d0347f1a8e37c18540deb9ae967e734c0264" + + "df0e1f52b0b5334805579ea744c8784c3ae0c3ff8217cd3f53cb747f6996" + + "f3d2147699799e649061b205f97f7992e147fb20f21ff862c6c512e95534" + + "f03075e8e52f162e0d70d7a259e3618474427f400f44f75198edebae6e40" + + "a2173257d114e1bb5a13cf419c821eb124d90e89a938d91f4d2e70dfd1ab" + + "60446f1b602614930a329e98a0c30f107d342281db25b8f8259933e14d20" + + "8bbd991e42969e8b0600272f9bd408483cddfc4cb8dfe7bc19be1989c7fa" + + "129d38e1078d094b82e0a845040ddd69f220dc4aa2b236c44101d7da7779" + + "9827a7b037561b51e50fa033a045571c7267af93b96192df3bf6180c9a30" + + "7e8c8f2b1d6b9391767369625015da02730ad6070df4595eb8099bd8e484" + + "59214310cb62c3a91a4fa8ac3b3d7b2017d4254fb465f0a248e1bf45819b" + + "4f0360f37c9a79d405e2bb72e5c25a1b4df192cfd524d61e1e8b274f2fe0" + + "634c73f0653c7c9e9062c9d081f22a8b0327897eed7c6e870f2815bbac8f" + + "585c1bd868759a98dcb5c3db2f6c53244b9cc494a56f28a9ba673167cea8" + + "b799f37049ee7b0772972b3a6603f0b80eddb58ef03f916106814d72f000" + + "250b3573c97c5c105910d79b2f85ad9d56002a76a1f43d9d1c244ef56d3e" + + "032a9bab95fe3bd5dd830ad7d7e341f28b58c0440658f7fc2ca98f157708" + + "1c647e91432cb0739d9acdbf973ceb9b0047634d695279e8837b04dc5357" + + "f013fde3c55c9c53bf1d817ec59a1b18ed0ac0081ed9bbb3bcd1a5d3634f" + + "50f7506f79dc6a4ebfa640bf65682fe9aeca68088e276937669250064de1" + + "c19ad6d5c697f862114d0f81d2cc52be831ed20d3aab1e41fe6f476b5392" + + "af4799392464c51394c2d1a8325ee2e84f1635d295ee663490e538eb338c" + + 
"7126a8e731ad5c0becf144c7a9cae5c6493350b589385de29e1a0ad6716c" + + "346ec4f0a31ca5ea35c59ab6b099f65d7f0b3d00925a1da1b5777c029aea" + + "9679e895d7100645dc83f81d82a6174beab2357f7888ea640900cf3ee67a" + + "e0724a123919d78e70e05288f67e5e69ffa6f345be8a96e58bbe260184b5" + + "ec5c0c1354cfd516ebdb8d420029137d41b029641959cc07fa7b4e16b39d" + + "17f36b2367057410a42e0550e9ec1dcd2df4604d52d4f9dd1140d57af08d" + + "50e1527dad793b6d649324de799754f755818bf10e6d1ab614958dbb24ac" + + "8e2c01270a90ec3df4379c3f509b5ef721b0fd4f91a1bdb8127ae4dc74d0" + + "75f6cd8bb28319d6f8e8d8ff64fb4a42d646e9365156c6bc72cc46e9cd1c" + + "f9e735549e3df9a8e6b5fe541948b126190117db71fd1d61ad84be0f725f" + + "20b99eb141b240326d399976c4f2ce5823d94649a9580e1e8820bf49184d" + + "fc34378a60bea89b12aca69cb996c17847b7fb517cf2d51f16d78e3875ce" + + "aa33be15f6a154004f0e1134c6652c815c705efc34bcf35bd7743d28f0a2" + + "77d82dea4709dab41fbfb4e0cbc118c17aa00808872f0edc6437c357cd31" + + "74a02aee61890464e03e9458853189431bf5df6a0ad5d69951e24be7f266" + + "5bb3c904aa03f799fe7edc7bc6779d621cab7e520b5994f81505d0f01e55" + + "96e14b4c1efdf3e8aadee866c5337c1e50066b3acc039c84567b29b7d957" + + "683cadfb04fb35402acaba631e46ca83dbdd8adf28e377ec147e4d555a21" + + "e6d779d7c5a3078ab72702234d36ca65f68bd01221c9411f68f32e16ef04" + + "99a20c2d945fa31b79d9965853d38ada9d48eead9084d868c6bad974b0f4" + + "0956aa0fcbce6dac905858e46c4b62c0ee576b8db7d484a524e951f4c179" + + "decfc7d6f619e86dee808f246dd71c7e0b51d28bc958110d122fa2717148" + + "77823242711632f6e1c7c15248655ced8e451a107707cec8c84929beece4" + + "efe5503d3c1763d0ab7f139f043e26027d5e52a00d5414dd98a324a8fc2a" + + "06a1345cbde747f41099c3377b86bbdc5a17c8f6e5b773a761f78573832e" + + "4359b143810361dedc79142fffc49ddc0b32f225d50d360ceec3920fb0ba" + + "0693b644ee07fbd1ce829e223a02794b197614061c4bfa46112d105c2b7b" + + "4efea448501d146dece44f6640d674d5749db498b32969de6e165e705a18" + + "2aa1f3d8e16892b0120337640d52c9bee35e5b4b17f03eaeb31205c8ecbe" + + "1ae1b110023016e40ee87370a65c5c20bfb00f100d3c6c1de6e4a1c90162" + + "f25bddbf300ed637330206788a4ff96903f971c9618493ad074412af625c" + + "ff9e0f8f183bbd5e96c1f28307e6cae8b50cc0eb1a3a8154e44e9de947af" + + "002e4d1098d6b0ee3f2e71a10d03eb444729c42461283f37be8af2ce81ba" + + "bac246a05c2c94efacc43f0cf9ff3df38ab6fc1648c796ae7026ea95752e" + + "b70873a6da59da10d8b5316126431c4a17289466e95dc739c061d7a4b13a" + + "450809479eef421bddcdade77a6df133410328c754af8999a09b1a5c056b" + + "ecbb6fc2c339586ab92100f46d2fa1fa689994b36aa70703d76bf7738adc" + + "f0589fdfa6bd215339ad69ed983f62efce0add5a63fe7dfe4bfa006ff16e" + + "0cc06d39199ad60adcae12b75ca98d764502a783373da3a41281e03c2037" + + "e1b3ca7f7eb60e2b67427e97ec72d36670db7662c6daa505701fd279f116" + + "ac0ef569471f204e1531c25a4ac3ce19b6f68a8994b6f89b5abf034a6507" + + "32c7fad4206eb4eaa7cd9a710d866bf3c3f13c16faa268ae0cf4f69be909" + + "bb9b79aab80dd25101d4cc813a48d3f38d870f10ac0b6768005aa0e69e87" + + "dfc0424deef06414c9ba6f498c93c41c692a7a6221fb5595b390a32c70e0" + + "2cd64471c797ee8a143725849c1e054ee2043dcfc0b4cb1c00be21a14be9" + + "2d9a07f1b4e975d4c86b8a5c1387e6c42bf393e078fe86d24612d497e14b" + + "874485a3cc922b5b6d91295d7b79ab8bfa1c7f64b51e761d19bb9da82a5a" + + "a34aa469699036b6b2c55e2b84f84942f10585027ab07e2e0e562e0fc3dd" + + "36047850ded84be4416e22aa41c7a2f7d4a4d8e3dd420d746a1d8d56d87e" + + "5133a1b4380bd9a89500fd6d7e68a1ec02eb9e79e4a13edfdde1273466e4" + + "6b0e6a75f59ff6175716629da52463ad21de27f40fa2e25a566eec4b2696" + + "4af3a717dfb0170a73144c0bd9b00bed67ad8c0a146eb5a055812d071209" + + "c9d530cd4f50a41488c2238898dea8bb36b0f1496d3ea8c4ff8e263b367f" + + 
"64977679e697d88e5295bd97ac16a0420850d1ead9621e25a3f58925c266" + + "ef5246488b1c15a8fe0d8ec4291864faa5a67b2388b7786f47b6d27e8fe8" + + "46f85f85163e54155ef95cea4901e712a44404a4d3f27f28dd961ce36b84" + + "f3856770f07f20a2ebd34d77405beab04ddfc09770167d7d6340f494dc6b" + + "7e4c3df896bd974730193b1e862b58d4a5938e6e4ae8897dba8812924379" + + "e54f51a71364d39f76e24fdf2c6c704479ce85b456558ca6947b8fd76f03" + + "78273f0a7bcd1d860ef1defe4eea8fdb81c73eda028d82fdcb2248582ac4" + + "59eb7698a811e6c5823be886410f6b8577ff2e8252343b6ea890016ae846" + + "01c5894cfb988121059fd9c8fbc1596da470a149404fc67baa15383d38cb" + + "d17ac107b4ff3c1ca4c76b7930de02b240e7547d39f4978e0cc1fa37f8c1" + + "012b677f07bb4df4486196e9b0beb823a3827585475b878e3f6f0a2d3836" + + "2c7d34f9f3c91ed46c39cec95c2a0b6f0279a03a00ed5035b0725c393849" + + "cdb1ed3c0ecbcf3c2ce108017f468e1c3d469c03e8231d4195344ced70cf" + + "daa667252cc1554dce8d0c54eb4cf4da62367d77d7dcc02f81e788ce9f8d" + + "d306ba1b48192359cfe92bdbea9980f87ea0677d7d2082205a436cf514e6" + + "fde5eadd21b13dc836ce33b5dfb6118bcac79ae00fbb16d61f00a923b145" + + "f9caa9f3a2c7f0104f8b052e390987e57c8dc80cd5f0358afb0111af1fc4" + + "e31f92bd832ad35fd2e0bdf768272de52ce0b152f74d43a8973ad516b3ea" + + "f5937ec8a236ebc86adeba610de0cf7168453111f3c983b64df07678cae0" + + "a75466ae15adfb127328e716448cdbd2c1b73424cc29d93df11a765441e0" + + "0eeed72228e1099bd20569d9d0e9e5a0b3c11d0002e2896631186483db61" + + "c1a0cb407951f9b1ea6d3ebc79b37afb5a7037e957985e4955979b91fb85" + + "61ca7d5e8b9cdd5b7ce0130a880d9241027b011fea7696b0c695d4949ca2" + + "d0cf22d44b9fee073ecaef66d4981e172e03ea71a6edc7144393bfea5071" + + "2afac137f091bae2f5700bfb073a6d57fddcba674a899d7349044a10aadb" + + "2e7f547887dd2f765f394de5dc9ef5dbf1eab4d869be8cb68aad8e2614ac" + + "37bbf21ccd5a832ee09fdd07ce50a580a2af36256b1046e646fe3dff6d20" + + "0c5110f1ad1311bc39b8114cd11ecdb87f94df43d4f6468932fc0ed892d0" + + "3d8f3db3f8323ebb29776ab7d260493a36700bcda668abd62126a8189e91" + + "df2d2970ef688d4e8172fc942e69ba63941a36b79ac546fff38f5f7d1176" + + "57612a662ea38134e1090c3e903c9adacdeefd3ac2a0467e9f5125058c19" + + "7b2260d2afad2b0e627a9ae52cd579ee27168065658089e1b83a2d8cdb47" + + "e08966e4ec0018e78c4d267f9575b8fea2a42de5c2d25356fe4b8c9cb1ac" + + "daf0d1af4bf58b9704cd4bc08471e3b9a0e45a5693433ede2eb1374bce44" + + "1f1811cdc7612d7bb61f4f34aea0a44757bbcc12a55c1ba41a7901eb004e" + + "689587a38e5b4df4574ddcc7b2eda97f6e480d7d39f45247ea3b03c90a93" + + "0dd168b65d52a59ce9c2cb4e860cc6aaa0ee02a58d0c8ba990194bce80fe" + + "8c34ba5693fb0943ec2cbfc919e534cc47c04f502b6c217c2f860d1d482a" + + "a016aa02adfc2bea3171fc4e27e2a262fd37b824099aa227fccca508f778" + + "b8c6ec7aaff1d15f6497753f439daa9e52060fd6e9e056e6843d770fb057" + + "6d9e2e782db4843c0c2c7f408a17376719a3c5cf9fa08f04f8a779885a16" + + "5cf93ce404be", + }, + { + key: "ddbd5d6c5ebd61fa72b453dd849dc302c98a0f3e300f4768bf1dc698a3827dd2", + tag: "af608b71a353e63c64911558baa122f3", + in: "c67e2524b0de16483158a0232078fadcf611e4fbdb9e642e397b21222423" + + "cc2ed42ed34ffcb178448919ee337eff9d7d691f622e70fd3317cfd271df" + + "fe6a9d9b7e07db0d20813e2331164a654386db2ab06ae2983bf2460eaaa6" + + "3aa0171fb87afb82e85b40d95c8993b2039d32e9d38473dd13f41fb1ff1e" + + "261752ab004b221a4472b9b1a0e139f0c999f826a26a7e7df362b0611aac" + + "fa83c55cca2f7c0138d2c30313c2f6eb357278328ea6ebd6a5077947e18a" + + "a97c34b9dde3b6f2de4b83778ffcebc8c9cb58756691d5e2a3d15a759a2e" + + "5050b6da937a6f5551aec069a08027d60dd870d175d2a5b5f0b4f3143904" + + "7445c368a5c866370e9426abbc1a1c5a272b96731c4128aedeee93e8e00b" + + 
"b450601a6d31ea279b9450e738b4a47c0dc22d2d8ed5d44257f6318e0c59" + + "b951fb6b57746062ab95cd73c23ef0a5c000a7d14c18bfff172e59b6f6de" + + "aa61b81009e803eb05e24fb0b706870e18889a9180ac16a042d12dfff9d9" + + "1b88130f045d2342fd5ddc5f443681c31090459f262d1a65654c55251fc7" + + "d5a67bd2e62940ccd606f3e50700e4d1e992a3fdf0388b9ce3df9de6dda1" + + "5c1cd6b70622ac062dcb7ed7058872c00ff3df94032853927126cf6fa4cd" + + "c468d91c9b52dcbc272fd7ba920dcd3ea1e048af9c3286dba74d988ce9ce" + + "77174e25a87935352721dc23b60a9549322fadbe6a00dd1197dfa25b33fd" + + "9e5713afcfd0fae6dbcf27147fa58d995580d7e0a903c895752fe9819f5b" + + "b002ed752719552d0f3575312f2e618173a8ae7c147ca64a709053e5d2e1" + + "2f4d1ea337afa9ac4f9ba62760046ec1e48f4ed8f6df66786c9fd9f5bc7f" + + "9ca2526e1327b042f4657c405757690e190c91f260dee2dd3d2e6616b721" + + "e489c7c3cb828478a3d953b88f09904e7927cdf6dbd6a5419eeeb83c0be2" + + "51934a80dfe61e09442f0761aa2d013e10aeec3a32df204571ce8984a430" + + "9bbe30ccc91977790bf0305d2651ee450b749c3e7761534e45970e70a0a8" + + "473cadbc88f096970c275f188c9d2644e237fd50c2e24c1eabbf7578e80e" + + "6500762ac513fcd68cf6f8bb7a9d9eedadca059d9ecec07fe6fe7792b468" + + "9311861728dd482f087c28374cf9c5ea20b2c8630029e8485fa6fe518c74" + + "ef77d44eb7526ca764e50b5f34ed0f253a91fb2af6e59338e2af6e041e01" + + "084e1efade1aebb7d1b698ccdb8b4248ac89cd40d9517d840960c08f5e86" + + "88d8ba2b54889c1870d315498b70e0e9720f2c8c53a3377a8c0bd2d6a1c6" + + "f17c6ff847eb14def6855dc3886b99039e528b421ccbf6064e39263f8f3d" + + "340d5d20b1b14c264ac2310b5f3a0c6f0c1006d0d4f1a69af68d28ab447f" + + "cd17387e1fc98f164982a6d05dd32d6b4f0f1b04e40c6c6e0fb4467dd6b1" + + "0c5a9c92cc8c2bc97ef669b6d55cdd0aa8a15c46af954359165949012713" + + "4ea9f74181d54a300d3172c9f01db73288ef6a709c763a4891666d0baf88" + + "8531dcc77f0911412d096aef9033fa36b5c1ed283b8b5c109e45b5cde911" + + "6f3da2533fa0ab81929bd5783271d5501a9e4fce2aff9eb5a70a4215b253" + + "46885d7e4225fe34bb55b309a114a312693d60ccc61267359a8c2dd28141" + + "226e7cfd99f0f12c69df57d75dd790dbabfe3145f7fd1a24fa58e03bc2e2" + + "6ea19288af4929e5acc517d8f52a074745ff4644d94179eae6ba7d267292" + + "bbd2053167a0da9be5e4b6cd0a4200fcac5182d9957dffbefa857e662b82" + + "fc3a7cc32506e78030ed5c5d448d7f1b4fd854a735a0c50016bb85e6e716" + + "0f87527bca0de235f4b7dacb75be84919c15a5b8cf6bec035795cb67061b" + + "7855c2134c1b1bfa6affe04b7db239f73af6ea9c02bc9f7972b7f6400b6b" + + "838f4653aefc42179c21765e3ca7a5e96b4402ff544d4bc2332756a23500" + + "11241dc42ec6848afe127c00b9c333e69bb5a54ea5c7193e59ea22bd6d32" + + "af4f56b1bd2d5982ef7d9c1b02d7668525e4e81b68a400f7afc2653f0f41" + + "a03e11c7a02bd094830093481afbab96397245b9f37a568ea1c4ae248cdf" + + "afc87f88b1fb5dc300d8e9039af4e6e701b458ed3f32d693f2e869b76bb5" + + "1358cbbe5b5089013bf452734388a176cccfc1ae9b7cff603631ca48e129" + + "b5c9573d4e379547272cce8aeeeb407d3fc57f782a0eb5fcbd41e6fb13be" + + "7e4f1067cd407b42a6121b2969c384916ba2b32563e659f52aae09c8ce2e" + + "3c500fbb7e58be74cc1592dcfacd9f0d4cea1a90a18658147c81cccf6fb3" + + "078ed27f369e7646f551386a74e1b07074d93e0c1f298c761af46cdaae9f" + + "f4be86808b66d0e228016d27a3a77c843365cb847fddccb0bbcfb3b9008a" + + "1bacac59ffb0aa759a0568c72c556caf0ac1091431b574687c5fc7bd486e" + + "963e0fc3bdc828d988734a21070747c955cf8dba2df1c3a0ba8146cd58b5" + + "91b6d54712db67a9851b1607c8445bc97406eeb7488f5f85e547850d619c" + + "407f97632ca1801f52c09c2b314b4ab0f8e7fb5851fd60852f4666913ca6" + + "bc840c1ec8f8f06caefdbfbf02ce00f20b87b14ba9e651c80f40a31d0306" + + "403f541776075fbf23733a6b19e3b44d04b455b29ef8effa70cce0c59331" + + "7119abc07aa8c8d0246a760b0b36a3d87b244e83bae8a745b8277a531298" + + 
"f5d0283498a509c89898ddf0f7a7455be1f8a6889c46d323f1dd18c3babe" + + "1751a05f871f0639f50967afa46c19cb93d9c2a79c81e2436a7a62f225bc" + + "37c90698640f5b43673e1dc276de05ff1e29acdb4ace5121659db5f23c49" + + "57aae22f53e6f2cc935824fbd07c2ac87672eeeab895c3f06e09e178560e" + + "2fcfa7097f10201dfb8b1ebac08ca806c1b3ba3aff9284846a1a3beada53" + + "e9f7ade12eb89b5591f462b2543bb4090e081fee9fb53bbf821dc92d6b16" + + "fe820ab2ee4b1f6c0b6a6f19edb0bf6479e257fc73bcd60dc2261d0a4752" + + "e23a0be18abf355f3065177d8c3c14e21edc178d0abd1b39f703e6335131" + + "ec90cba3d9846cee7354a06c320a3f61b8a269abc7138831614f57ca6c19" + + "a4a621142889cd924bf4ffb82b57f871b854f3157e8874c22d43a5726900" + + "bafbb8f2260a1eba3a462e23d4def2ccf68ebaae8e52739a1ce67c039eaf" + + "9a6c3232fbb5a91d1e59a8dcd3798ba71345fbf83d09b83b41cc49d5ff5f" + + "2e809d2b1d5fbc1e7001ea76b9b2d8f896eb6609e2e1c5c562d2a6e74960" + + "2d67a0f6b43a201d5087509b8dc7b0440144e308c18ff8b96b607de2f20c" + + "6ee99bb05367a8b25947011889f724965a2b5c52c9db1e0622df9343c548" + + "d054699badeb15fc41055af0d79a2bfc1a5b4574634fa0dd9dd10a6213ed" + + "b6991187dc560facdc27440456a0a209fd7f5ee4fb350ae71f869723e5eb" + + "5338e3d1448bc993afca6957f4cc7b047a2c7c9593b7234725e66cc0eb23" + + "3824eb4cb905701cc522ec210950b871397c6c0bb3d0b839f2eb1a120f70" + + "36107246df4dfb2c24891bef0bd1dc131f2c9d7c295ee967e3184d963037" + + "fcc9e0b8c7011c8e04b4e70038150d34caab4f8c0230418cd2d8a91146e4" + + "4e11cf6707452ddc03d9b4e6380658135dfb48f62c0690ebad75167f4dd1" + + "c0df3ed555b5081a7b82616d9e501757c83c2193d0f640236d59f9c97a4a" + + "5c8bf532aea2cf5964ed2dbd8a70c01ca5c7677224cf2a37f3b24d8fe4ba" + + "91cd3b5033715de227de51deed15afb8eda9d2b9615d197b8f98322d7096" + + "79c5131eed48050fbe0145a9284e236605c25a4876e2adba42f4e35a8949" + + "3d59bbf44b3338d9d2e65a7d7ec6c863cd47cae9e23181b07298078a5e9b" + + "06a5c7e1059f474eb1a4247e8f02cdd4efdca67d22035b12abecf9b15982" + + "de4932a28e797bc4de38442cff2cba263eeddba0ab14fc706dbca04eaca1" + + "b4cc13000a10e35b32461424809b299798e4d8e66c92aa3181c5df16ab65" + + "9611cb625e895a8021af8c60960227d6f2ebeacb17b13536a5ff139734ef" + + "37cb67018ef9a410b856e6f6eddbe3f59b088d538c50a8f3f0912d06e47b" + + "88d773069aa759cc614e1f53cf6e572c127123d1ab56b79ee753a921cb22" + + "a60e4e6cae768c9966de4e2625484f2e990154da7fca84b6e6c0b59201e7" + + "fb8a729cb20b4c774381e84f1bd6e304543d952dc76ef741b72f3a4ca7a6" + + "ea7958b8b6337994ed82dcf988eb70f509610b9a279ab4d0f28cc2b2dd99" + + "3b8637a6be0cb4b5f67c79654c6b15e1b61120374ba9b974a628c547f11e" + + "52d72d39f8f9c5dbfc23a89f22d38984dd8d5c3ca72cd54e6adfe2b3d163" + + "86afdb50967846a4c311351a51e5fd322757bdb061d44c8796a61fa4db36" + + "793bc11984eac83bbcefb40d0bc7bab0ca81e7df3a7f58c6fe800396716d" + + "832acaddff6d72c8e19dc9ea838294ead800deadb6bc18d3e399fa76c46c" + + "5d88ee72a86a87399423b0578eb6e27d78156ea2abf6f08b5cbf747f2f74" + + "5301b694bfba84bfe3c5527acd50660eea5105a2644c1aa92f954a604fb6" + + "a1b3b2d0331497deafc3aaadc7040b9188a36cf607ee85a0655ae963fd32" + + "91dd58f8bb50b4e46dcf7c2957639bffa6b12d895660dc0323b7a092f999" + + "813380b820e1873c60d3e3038129c66d507862100a5d5842150869e7873d" + + "6bb6ad022350ffa3813aca26c80ccae72692bed9c77c9d4da23178c57153" + + "90b5f4505240a796ec9d10a7f280bd60a570b1b693453807707651fc0464" + + "03e4768965a6f42f112152942134f0a38c84137c7a6e086ef1ab9ad20d24" + + "3b93356b305c0996ab7d02c02c44cbaf8f7e60b8c0b8c9fece3f189b099d" + + "dbd126b7357c1c4ea1c8bc1ad93db91ea9bf043a4320acb60b502bec37b8" + + "6b2a5004b8225e549e613c6f83b97b7e4aeda1b013e0a442d7ce2f14e78e" + + "a94bab700c9ac0abba945e28f39fdadff223c4498cb204f01ddfcb450a41" + + 
"f32ae47f99a49114c6646a5cb103e9cd75f9d81dba417e48c4053e3b0295" + + "2267cd30589b0f5d993a5485a6ead1ffab9f2f4294c5853ba76383a326a6" + + "a42fb8b78948aa49f0f1f614bd0a3fbd2a58a3197daf2094605bd838285a" + + "1260f1265dca74aadd95652632335fd17cafcb73b202c3f0e5da836c2dcf" + + "2934f005935dca80154af43fa34c8ba440d1581b74ff17dfaca369dc9aa6" + + "734c03916d78e1b952691cef918fe033d33f7f4323cf724ffb8cd6c219bd" + + "046e9f268eb0601098e93daa59dde370e46269dd7c54891f71bee2829a53" + + "df86a2c7fb1046cd7c98fa21cd83597be554997a70acebe0b6e60f1f7098" + + "6f65adcae24385cb7102bdd3e01300ffd15d00f9764b3a5c51e35e5c9cdd" + + "da84f4b656fe514ec4ff8dcd774373f8a9103cf36abefe875f7084b9bbd9" + + "42e0c997ec2d860a4b622ff1a39a628582fd81f237d3d8f6843d26ac77cf" + + "bd48003e8e8c591ff813a9a897e3149ff0297ff476299d717e54d885cdd4" + + "4c3ba6ebf54bc7a1", + }, + { + key: "b15578da1020f662ada0ad4f33a180d9f8ad4991b3720bc42a22b52625c7414a", + tag: "b0e4ad4a010afd6dd41ed82868cda555", + in: "6d2afb7a9154064341bdbb533f11990d4987e7c90fbfc0167c1e58d6efff" + + "6010f7ed569dac62ad37183b0d384519ebed0bf9c6e05a070b4858e6b846" + + "547ab5e45619c866f83cce83dcdab6a8a6c36b115ac832de1c6d433b94fa" + + "35803fa1a36f1ee114f8632402a027a74ac110394f32ec4006beb0057f09" + + "a94dada8bd0d1ca9a14b1f2efb8f526d79d6438bbbaac0ca1a43935627e5" + + "d129d52c06bf6413af07513bc579447eccc3a9406645c94dae59dab98d6a" + + "f92fa90fd4efaaa4bec466806ed401d2083cda587139ad7e9ee2adbb1dfe" + + "a88b59dd788b954a0f52c3854a3fffecb4bea83debbb2f5f8883e6415d3b" + + "ac1b872df1afe185468adc59364c173082f1dd6da9d348f5f5ba2d216243" + + "23de1f623eeec875bf31d12acec40dc0c1b9562826f3105cdad4c43cf45d" + + "829aa8b14012c47847aef7a2a6e3935fd972235f5d3a7ce4ad3582785393" + + "602e2e27329914021eff38ed2926c88acec1551f17a1b818fc1c3ed4b3b6" + + "6825d55bea269d710123b52e12ca9520a069d9c6a21df3a0253b3a4a6a8c" + + "dc226d667541548834da6bdbbdc165f39e40047d4b647c507d981be17b3a" + + "836063436241a8bb46b11a2867b621413c42d838e4578b72cc1982e34bde" + + "c303b5575ef4b8dd9fea8ed5bf69539413909d03461d3853b5fbf714a61c" + + "769569f42b38fac4b849104e2f2ac1dad0e388646278789f83e0b0511571" + + "019d3bfc5b03ca4cb5564e4e75e103ea1b6000be6588e27105d7cdc2d2f1" + + "f680ad34ef823ac4bd4068146e9997834665aec7dcc7a82ff28d85d52dd6" + + "9c18dd35f326bcf709f74df5981bb90ca8e765fef9f0698a19e12220b287" + + "24a6d9e4f4c7ce93f8ca9a126689ad1df820072557ce3db246cdf41599dd" + + "44ca841bece6c7869358005536e1189aa86b764e890ef90970d6e3831def" + + "fa890bf8692381123924e7d9df804fd770a0a30ee97d5dcdca302833efe8" + + "1d4b2505b17382f0b3429b38c41269ac95e36e9f5a1dbc6e6c8963741917" + + "02a23198decb4efe6809fcbeb5d0c9098a4c300155dc841610e55c8a6e27" + + "2a38a39de3d8ebf38a750af25836ffb1bb7822bb98886280f0cab6838c01" + + "cec57961bdc2e1bf158248309ff9294adcb962252b1c24646d132a3be2c9" + + "1ff82e8e101facbdb807826cc9d1840a90874ba08692e808c336c9d280ee" + + "f36a43a75c746fb864f85711e802546ab5cc3f8f117904ba1a85d6e4b729" + + "85122c5041891e16d55b93d6fc1b7fcfdc80ed3d72d55d64b8895bbf2f8e" + + "d188684e7e89afdc1e6a7ab9bd1d3da95d68698df2cdcbb2e1a4ae70e2fd" + + "dd4760f9e5cf4255eeb1e9e8009ab507395bacb8b2177e7c5757ad02baa9" + + "a96db967d20a150d2dd7f3081d90675fe0c82f94aa3cfdf6ac5585583901" + + "7a8e122170cc817f327a3c8ef44acd6e4fa81b73bcd0bcb5792eed470481" + + "152e87f7a20c3f7c69d5a8199bf9bb7c7269b450dc37a9b22102acaa8438" + + "134d6d733d231cee9522f7d02fbb37b5818ad3ca72df4752230ee11392ef" + + "8f8219be55202bc3d476f5a9078b32fb63d42bed4cda5ef90cc62467bf5e" + + "418ecd9d5d0cf1a33eb9a930e652ce96057fef40b65588aac67621d651a0" + + 
"9003dbc3925912e385296cd3b2b386a44113308ddf2af52ca390487eb20c" + + "716b76d78ad45129e7c285d918de7107ea8c3b0cfd9e73933b87c0b2b505" + + "cb4c95794f2ee6d6d43e2e76026923a0bbfbc3bb22df9ad729452283ce62" + + "dc9b26684fd45e07650581afd73713a708869a069c58b599ab478974f206" + + "dbd3e4e563e346ff1881723c5fd440bdf9f70f761c6f746113397d7c04b6" + + "b341d7e44de7de0aae79badaaef5ed372ef629dffd52926110683ab2d4da" + + "a4be83eb86c8700703a660edd5a5029f66f1581da96fe1feefc970ab4086" + + "a83ae02e959821967bd27b3b629652f5bc3db2b7f1af674f9f3fb3a788f7" + + "88e6dc1722382971831a7ed72502f85b25888c1534d81c0a4f7351ecc40f" + + "4e0412e05718403fae5746d313a78c80ac297f1391ad389070410e1330a1" + + "b07d683d1c795bda74bde947f2cf0dc9638b5d0851cda27df030403816dd" + + "3b70f042888c9c192656cc4b9fea10b81b5347900d9199c8f0f47d42f2ee" + + "482b68acfa5ff47d9950c950a926a497d94c6a796e0b715416520bd6c59f" + + "30217718d5f1d7bf7c24039f6467214ac8783cf011b25c37c67dfddde426" + + "40afe97f94879f4586954737b86701b32d560f08caec3fc45184bc719c7c" + + "5bf699074fde814acae32c189158c737665a8f94637068322f0c23ff8860" + + "f1b1c1bd766440afee290aa6f7150c7adefa6d72a738cd2268da7c94788e" + + "bb39002e9a328a51f3a92dc5c7cd9e4faed5702d3592ad16217c4978f84e" + + "af0fd2c9e4c6f4dcdd9112c781eb41a9aacb0f7935bb5c92d41e67cfff6b" + + "991ccefbd667ffeded1de325da50c33e28e2eef2f636c9726dc5bfe753ee" + + "c7bb6e1f080c89451f81bc8c29dc9067ce83deed02769714fa9bb477aca5" + + "c09089934674a0cc8e4b2c3136b2e4af8040cc601b90a4dec898dc922ca4" + + "976ab5ae4ac5af93fa5b1854a76ac3bcc2090bdeaa49ec4f319cf7c7b674" + + "6d8e617abb3361b28b27983dd1b139ec4f5af7e116439d7ecb16534817bf" + + "264dbd8f59e80b443be12c17fa013c7f4d029504c9bb62b296c2326f4f49" + + "cc3201b70ac3f62abb683c630179594a6d4cf30fd55b163bf8d01986bb6b" + + "cb7050fd527f095c45661920268e56f760fee80a29c9d37b7fc23f608710" + + "1e723038e64ee1b91c4849d69bd95fc9bc24fc4a234f4855f2a203e3f699" + + "c32698585c83781677739f2c48697c93b3388dcc64aa61f01118495ded33" + + "21ef9a1c949481f96005f8d5b277a7d6a0d906ec304cf4292df172e72d20" + + "29ecdeb65f06267a605f376804bf7bc5b82d5c8facfe7e41dc10806d27e0" + + "bcc5a341d80b3c1532407f75088716d732632cd88b0037f0d829bf385fec" + + "b52a202956489f61f16b0f4781bf59068b33d7330571d0b4a6ed91830258" + + "e1220b308784fa155be9bc821f5c0009a33802fa66dd66d1dde997dddd97" + + "873ddf65927dc1be979af2b5f110eee627dc1e210326ac20544a757ac168" + + "1823f3dd04b1ddc4bf96677a0a87633994e7af2ec99b7d5dfe44c6192be6" + + "a6e69d17b074256da3947808fbf68c7506a7e2c99e6b64d1ffadbd6285d8" + + "e7e032e24d42dde0594bf03fd550be05e5d66c91a660cd1ab7cb1f43fa9d" + + "69885203a7aee35a28f117427d7ac02b742f53d13b818f8631081b1730d1" + + "5b4e1e283cc8e5c4fc3b4652fce05fd8db821f99fcf93e6842816a549791" + + "7f6c49cc53d733788b2fe3c687de58bfe6153c70d99380df1fd566a7c758" + + "8052c62e73340d6a9eccd2ed26b763d518f3a0c4d6362212fbecebb4ffb7" + + "dc94d29944fcc4ab37725b105aa7571f364146782356d8ef056a0be93a55" + + "0c890df8fecc178776fe40703ad1bd2443d92c420be4306d99686592c030" + + "fd3e2230c0b48d8db79002e8a832ef27edb53a45532955f1171203d38414" + + "b4692e901e9f40f918528fc494430f86cf967452f456b01846ac6a383fc0" + + "de2243c7d804e8643aabcb78e2653b145f400a999670217c8da43bbb9c11" + + "e074176424be0c116c304a420120138e901eca4b12ce68fec460b23bc0c7" + + "765a74fc66cbda0e503e7b1baf5883744e468c97c5f1c4b0acc4b87de9f1" + + "4b537405dfb28195439d1ff848d9cd28a8d375038ebb540a9075b7b5074b" + + "ebc18418a370f1d3ac5d68f5d239513002ad11bfc2b7ff53e2e41ccffc4b" + + "0503acc4967c93ae8590a43439b5e7987d10cb8d1957bd9ef717ee3d12df" + + "5d6736c1d8bd8da102337a94b7d14f830f6c403cbaf7925a8a2a7af1311c" + + 
"57224967a38f6ca374013a9819c55fd2e2a5fac4f2490be5b059f4cd9c60" + + "2d62f80789eb8d9ab893c7f44a4945e41886af218179dfa754bbb59aab68" + + "13b71d2202eb8fc8a425625d21176a28a620e21bb0dad820c0b7051ce8d1" + + "3a33f3af0958bb6cd89f9d6414ab00ddd1d2f9fdece9183d0c05fcdfd117" + + "10d250e4b2029e6992a88293d0457e73e5b1b6a1aae182c69b9cb664992f" + + "073595ef68117026ad7ea579a4043cda318931eee7b2946a34cdc7c9755f" + + "80cc79a2bfe3ed9c79dc52faa5126b824868c965eeb37e9e4e6a49600f3a" + + "cce93c0853b546edb310dcd16a5755f15b1098b2f59dbd2d90e2ea8360ba" + + "f12108236e854465456598ae2f7bc380f008f2e3cd7c98c87643cafd7c36" + + "d40e2597236428d46aa5b260f84b4212d5e26804086adcf00363ce4becb4" + + "9b57eb2847b2f18ec82c99714ad4ddfe4ff3bcac1d0fcaa32660a1dccc68" + + "5bed83254c8e2ea0ae3632a70cfbcbeadef922d78a006d43ac7ab1f8a609" + + "c6e0ebc3ca6bb8430f1a562f41010db74b9febf931ca794fa08d1bc17780" + + "532ae76f25c4ee679d788835dfa4e70ca154c9e2865c3750ffe7b837eed1" + + "972be058fdf2bdb3eb301867bb132306c7aa237f6771d60bbc56cf31cb30" + + "32a87204d454542de747418470025ab84935d3eaaca01dbbdae9ef6b5d3a" + + "ca62ce9f871a3e1272b2b671582c096a349c00f32d742ddb17993994d8ae" + + "fc178cbcf9abc03114ff2bf7db8f757c63d6898faccd822f5c2e9a7570fb" + + "9cfff148570888be24ae42644c1a5bebb6f6287147a4bcc01c7675be9e4a" + + "897519dd3132a7cc2e778f8c90d23dc8073f6fa108d7ef82d561794bd9d5" + + "f1faa306334f338ac3ba99c853f79c24f7048fa906fde87d1ed28a7b11c0" + + "66a3bb98f8d21055aaafdf7e069b77b60b3d5cbe7c5e4379c7651af955cd" + + "82a19a09caf36becb6cd3fe9e12f40379941542709991066df21b7b12dfb" + + "2416d83fcdc33bb583e3b42f24f53edf8dc7c579ad3be831c99f72bf9fb7" + + "a35b6562e824e039e6bf1adc8f5ca53846de7bae11c4317e696d887df33c" + + "525f0a9c01fc29f2c26c90b85fe82ed8bd50954cd4e9ac7c85c7f3efec75" + + "da1da4ed173cb695cee295190527edb3cb06c5dbdabe0228cc60b6455153" + + "76244f27aa56da2db10f2659090137ffb82c57233c833e0bbf22d6f647fb" + + "97b3652d2888b3ab08010b8e8a6967d560b747757806736dc98b78226634" + + "f1eecaa4a2e23ba36591acb5737d735c5bc7a2e36f1a46946927e061fdf7" + + "7a3b68ef582c26b01f5aa9a438ecc26c6941221d1590c838072f9e471fe7" + + "fd59dacb0d092d40d76ea2f7c6e954a132a015bd4cb31147f3ebe4518322" + + "916438a62836ac85a4cf4492190a85bcc8edb37e38b99ea552d749c30f74" + + "ca20c298165e8ed02d4671e0b41cac3a32a345b9349ad22c2a4bb2c16a4c" + + "e0613ca0f0518759f7d2b33cfad2fae764f410d4d9ff8a76ae02a8107e7e" + + "01d9cd0552676b85ba002f19c01ad5f416d1d08bb84fec7c3555b098dbce" + + "48e1a5d847895e54db9c5b80cc22d5b87cd41a1a94be102bdd45a3cda5d1" + + "181e10446d213d6b3fdc350d486d2011d705c5f16ccf7519065c47bad7d6" + + "89c71e5fdf9d04bfb91eb1f07fa0f001009c1d4b1f6a116a570823a8580b", + }, + { + key: "392468efccff36dade31fc1c62eb38bb61394fe448def9d9d9beec2413ddb418", + tag: "e1122e7c8e6965b90addbd46d8a548d6", + in: "6a13d37f0ec933194c227351f4a19b507d93465b1f3e88dcb5f1ed1262fa" + + "58ea99ff31e6fc85c39c04129fa69195b71b2060122fe618dd9430a63f97" + + "54b52a80b3cd099f248f91a468bae211a27bdb47ba005d29881ea5143a82" + + "967c4c30c9a4f0dba1a4975e6407fe296d40023a00efa06be763f2d73d46" + + "a2901ae28b3d8ce18009a462e223b71476d7b954c138e177d15a390847de" + + "96a7f7fd0598748e86b0f08e64d915e67c7e3cf936f3dcd60edebd36e2a1" + + "d65b6ac29530c48ab3bd52d45b4f938a19b9b31e2911105a8561600d5377" + + "905a67112ec28025aa680350ff85b808c5b4c98b7b9567d03f5ed3911ec9" + + "365a8de4b15ca62adaa69e5ba710eb1756a346016c67a297d8624f9f1ab5" + + "b3fbce98b141049f0ce26c85d2f8a9cc6ca8ab6c6e148be968931430dcc6" + + "2bf58ea9698ef52a5d271cf48e6748ac9e04bc7ae7da205a1a7535478322" + + "d820eca146cedf4b2f9aa9fcfd77ab56a7276977401dcc1f96baa1b607e0" + + 
"256bd04ec324ec67a4313e2d5a53d3a3fb5332927929b20c63bde805f637" + + "eb1050fee2a152a0405634f55c48a59fe370d54b2ab1671dae2c7fd92243" + + "10627808e553127c74f724362b4a6ee49b697daae7df3ddc5d2ed9d6befd" + + "77fb9f68fe3041f6ef13f46f34ab682ab8563e8996344f82b2ef006a8d54" + + "3dd9c1db4979d7da97bda45e722065f8a238f0873217b783a9a629a12b3a" + + "4de437445039997bd243efbf5e3b6059b9459d395290efb9081c632fb694" + + "81000dc74c395cb507422df181aba20f776ce3fd8765ac485021992c98b1" + + "67c68805662cb4356a0ee7ba6bdae51ac10cd06bb5b2f3a72841c714c8ed" + + "bc56998fe2fefb9bf69e172fdf54b2ab138ae59372c52a67e93882a3000f" + + "d966992aa2250c6ff93e9cac89645d70625d79332ade5dab7eb1adbe7dce" + + "5a013fb65ad32fe22ed16fb9bb35eca1f37a0433c320e8752f8fc4b7618c" + + "5e4df2efece832e259ad98b895c474e47d0e3fc488bea8f717a17de0dcf7" + + "597fb8fe12e62246296f9a887dcc3a700820c190a55a4931a7d44bd3bb2e" + + "ab6c8a8126f1be93790cebabc1d69e01796e6cc80e7c16bbc82fb333fb21" + + "c774ab7db843242838e82d8e1cb8ccab385e67a4271fe7031d74b6e8edcc" + + "8ed585d1c05a365c7665899c1dbc561151d3b44bceace77c4f53c0e0f6f7" + + "74d42f9ad3e56f1c2a8d53879d695f895690afb4698472a3d52d67159313" + + "133c87823fe0500eb68fe286f8b9a2f59f12785d026dc97bdbf793c7d1eb" + + "155f1f136aae66c256583e987f718afbe733e0a5ce30d021493fb84e2242" + + "5b18754d126235ef80335004fa84f88361a584753df409360cd8bd45bace" + + "8f48156bec66577bf2c685089f5ac7e7ec76c0df068fbaa47661f8517f92" + + "e14723b3b278f151816537a7212c96bd340a00c15c9c9bc9a2a5d163655d" + + "84b38073e2be9217cad97d362d89d4baf3ce0a8d8562f19a8c97a9aaf5e7" + + "77d60456360ffb77b30f177d2809052020d141697ecf9cb65f42b9190caf" + + "6540b2c82f6e5a8482934a6a1a5711a8c24546cd8ba432068404eae5a827" + + "2e09efc3c6037af4feaac0a46329229b010ecac6b9f077a9b076bb6d9ce1" + + "38401eb38d124baa11507a994185295020bf9b754fcf78430db9253f5929" + + "87c46c0f8589c4e463b15a3840b1cea795e24cf6b20f29a630136e0589b3" + + "8dd7fbe5ea21da72c88bd8e56473586822aa3765660a45a988df9b8eb8e8" + + "141939d3e4cc637c5d788064d40a9f7c734e43fdf8d7189a5d76700d9743" + + "fe0122944663afdb88c5201318ca782f6848b742ddebe7463fd4a32280ac" + + "1cf8311e9137d319de05ce9cd85abab24c5364041c14d3b4ce650400498e" + + "122166eccc12784b7ac3b262ac0b198ffc26eeed9a5da5374f7a2a53c87a" + + "78c217ea1fbf8d38f62511657b73109f31691aef14d82ce6e1010eae9e6f" + + "a419e5c1c16c0cc70651eb3374c03549a1bc7d3ed42d60f886102c798dbc" + + "ba56f0a2b3b9b412530c35f5f7ed06311ee14571f9c26ed9c81ef38ff000" + + "2f5ef3aab7351e32049a6ef8f48a43da1d84402d229df513dfaf1b2e4043" + + "6ce68c70ebeddd7477c9164f0dce45a6fc5de050f52ec269659d5854bcae" + + "f7762ed7400713c27a4d523eaf8c136c4a1ca00b9e9e55902daf6cdf8528" + + "c22ca1f2fa7ce87902d75a6850e1a5a4592497be1bb401878f18b189b0e2" + + "c59d10705bfabde3cd2da01eb452006b294108d5d42e88e9e15424d8cd0b" + + "8ab43a6c546b3dbf52e47b59cde6a3e417b0395220b6d63736d429da3458" + + "9a2524f1629320206fa7f1d8a041e17222c4a5814561937e1030e6375c77" + + "9dc988bb928bbdbe2c2eb20111639725d82b5d7192cd3e4acc27581f0ba7" + + "286cff41f97aa5a52ea0083de5057fd2ba985aa738e4d03fcf11ebab1d97" + + "e2ac77d1c2beb8799150a421a07b3777d0b850f24194b8309135b13da6c7" + + "e38653a711e407a1811290fbb7bc15d8b12efc6916e97ead41e042a44721" + + "e9cde3388073d921595bcddcac758dc675173f38242e65e4a284aaa7e8fa" + + "6adddaf00bc46428ab2d8601205b8895bcedfc80ca0aa4619ed6bb082ddf" + + "33ec04fa5d417f33fcdd238c6b11320c5a08f800e0f350b75d81e3bcbd15" + + "58a1eab87a3c8c2ffd7ba1d7e754e607cf98ba22a3fc766c45bd6f2569b4" + + "84639e6611714119d188a24a5e963089a16ed34e20b9f154cad8ac6031dd" + + "7a3a885afc2ae5e003ae8d4e4aabdb3e51dfc423b8cf4ed9ae2010072cbb" + + 
"b1108c7da1ff075e54ed827a0963ac5523ecdf3fc5eee7b4d1a6773764ec" + + "5c30f41690523fd70d895edb7ca6a1806d54240c4c7b43410da73503a323" + + "90d9070ed30da3a2fb5eccd40d083be7cf8bf40b4279f819cf795b6f075b" + + "5a67a10a06a6076d0d83c72efea05f244901c4b5fd9eb380432519311baf" + + "8c81f6325df4d37ff4d30d318f904ebb837ec76b341dd00a8f247cf0bbe9" + + "6f3784dc8f5feb344958fdf1a9ececb105f8770826db1f17a5281e997951" + + "d3c60cc28fc3e66ffeb5dbac315f98f6d240208043f28dee963d843e68ab" + + "57d847f76ae2f96ce6e37f377ef5dfef2176ecd7440ce4dadcec2231b606" + + "e4a80420fb3ed135640e1f05d6bd58b8dce062dd7d36b885d424f6318e5e" + + "a0753efbb33bbc7360d2b5dfab3ae0d5e000b8d31f2ba0f5fd8b34f96b55" + + "28fff35e769461d0f03cf3bfdf0b801dcbbf2838180cb9b108e06c353e3f" + + "0b9ef61678cfed1ea37ae76bccb5ef5957ac2c8e8f4794c8145a15f1cc88" + + "bfb0881080326c481b373c3bc9b07a9b60a0c8bd5fa4f6f90145590a5227" + + "6fcc0ccc2375d0ccb571d414d1b0c38b4e02c39db4d701c5e25e90785ef4" + + "d26f35edd8c4b96455bdca7245cfefd9cfbd2f319615e5fdf07bb9564fa0" + + "44bb35a58391d02e3927780b4076bc0893dfcb4b63a32cd7a541a4a8c253" + + "0349c6e96e378dbeb66dedf87d813d0b744452c1c4088507dca722193827" + + "9e2dfa24e4a409de494acf654f44262db9206a7717fa434ac4fdc6a6eb5b" + + "1fd5a193b6043bc4327c8c09fd6822eaa9df37bbcac1077754a295621601" + + "267b68733b62dadc2563f1700af180141f29899e2689dbbe9745ba8477f4" + + "352921900b403a01c9dd042a8c1b0e0489959fb0b0a8431c97b41e202204" + + "212ebfa00c593399dbd14d7aec07b8292d2e40b48f05fcd54a15da4a24d7" + + "2759e409f4c7b5b98fce4abac6c30e4872d92efa1f96479ec30f21699825" + + "50fa60584f5a09051a00f8e7dbb3853e66ca3f05fbfe43bef9b120a25a01" + + "eb436ba8ecda715201eda72e517d628f883386c1503aa8b8e75610f7155e" + + "9f916335ab6d6f0f9589b6220cd2b81c2c937dc065d3d14a7df8cc916cd0" + + "0ce1bb53fd9c8974298d3bd316f3658aa8cc6904f073a1472149e4b08c64" + + "5e11abe0428ccb6174df2103edd735965d6454b543d3f01410f77053f65e" + + "c1d1aee56fdd3af23bcd4e1a7fcc4e600c4831007c33fe5f0c8300f686eb" + + "9b4d1e4f08fe4ddc8a90be14dc3a5a88ff96716509341d5db24c0d016863" + + "998b1859c5021df815a6f1ca9845f1a8e99dbad132b406227c5897a1bdf3" + + "e698962f799133ff4429decbef6ce036296facf38e4812fec102b76c6d30" + + "beba1b70722254fafbc471096153478c971db7d96263660209265cb10f13" + + "b34b5fd55c4abe818a5f9715d8a85094e2946b7a001b47f629e26c636d86" + + "4968ad2ab616dfe28840bd60b4b9855c8dbe1cb873fcbc4577b5fefeb8bb" + + "4832039867dc35db9c036c83bc204396e3474ddfe806c77c65c936f488b6" + + "7c1028739562d7bb055d21441af29ae2921290e548dccf8a56021385422b" + + "15da6b232b24151309a75a00296d11aa1952a1513110b0faa93d1d8cd9ae" + + "fa9f1c59377ec9165b2c9e07cbde40db7b81bca6d58fc28bae8f473cd0e9" + + "a2420e0b943a83d284108626c24ac570b1d6c1ab971e71f43fbd6c00e171" + + "238141a6dc987a60385c3a04dd147a2f8e80dfe727b104c0fdd80b326f59" + + "0b9f86fd7b2fd1122a390979889eabd803ab57159c8509a1443eb6789382" + + "090a770ae4eba03306f96e50e19a7d44c584ccc230d104548946efca4520" + + "d61de5f473e2f4eada6c8ce9c7ee975eb4f63c0483cb775ed7d3cf690a61" + + "7d6656d683a8512707d81ca5ba176a42bcffcfa692129f292607d2a47536" + + "ccaeb464c9272d6f3816074b712af602470088b253deba18771e5f67734b" + + "587707cdd06f35264b2262fd253c25b5d38ee7db287610e5398062b7a34e" + + "6e4cf7447d00873b930ad148fd96f0ab18771bc468b874bb109924101c84" + + "c4e239ecc7687d875e4d94a1a973620ca61e35a872c2e2e61a502169f1bb" + + "4e5ff5fa2bff657be6195b3e2c7151a52fc0096d98e7f08f5a98f570aee1" + + "7b4275f1356e87e080ce0e1b9bbabe7dea48b5903bc390ce23472ad64a89" + + "41c3247bfd23ea90b2dee09085571bad85568040105e098f993bb37e43c3" + + "e6d511171c77cfc450570dfb9fc6a3930ef43c03f8213f6203d545d791c7" + + 
"d3fa42d5dde1655038d35c5dfacc12e9dee24fe833977549eda68ae8b508" + + "be277e743921b584f9dfa0eefbd8bf3c23f51efdef7f7487001d29e8097b" + + "ba63289cfca743023d1668555a46fe6d5b7421377414df1e9ef135480622" + + "22e2e9a7baa618d88f407517f6317b6a0ba3384ace16d68631d59ea169d5" + + "092d20afc1a481b82be5e734bb092953a0a94702bae1a0f48d2a22b9a05f" + + "f64493b7b2e984f27582b1eb937fddf8512c49830435d146dcc291a4118d" + + "5dc638b99cdcbcc5860de7a92c5b13cbd1e01e051f01af40afe124346320" + + "d3626bf9d8f7850744e032a993c276fd388718237740c6caf260fca60b8d" + + "d846102e3262b6e05ceca00c6affe938fac1847350865fc858d3ddd1d130" + + "71d1221ce7c5d575587fcba580e544b74d877ed5ca92763ef0ca0d7bfa08" + + "d57a0216b2a01a2b9ec74b8430051e0074862b7be25b6766ab520f2eb75d" + + "eeb979c28f03795f6f1e4b8410beab19a20febc91985b8a7c298534a6598" + + "f2c5b0dc5de9f5e55a97791507bc6373db26", + }, +} diff --git a/vendor/golang.org/x/crypto/ripemd160/ripemd160.go b/vendor/golang.org/x/crypto/ripemd160/ripemd160.go index 6c6e842..cf3eeb1 100644 --- a/vendor/golang.org/x/crypto/ripemd160/ripemd160.go +++ b/vendor/golang.org/x/crypto/ripemd160/ripemd160.go @@ -3,9 +3,13 @@ // license that can be found in the LICENSE file. // Package ripemd160 implements the RIPEMD-160 hash algorithm. +// +// Deprecated: RIPEMD-160 is a legacy hash and should not be used for new +// applications. Also, this package does not and will not provide an optimized +// implementation. Instead, use a modern hash like SHA-256 (from crypto/sha256). package ripemd160 // import "golang.org/x/crypto/ripemd160" -// RIPEMD-160 is designed by by Hans Dobbertin, Antoon Bosselaers, and Bart +// RIPEMD-160 is designed by Hans Dobbertin, Antoon Bosselaers, and Bart // Preneel with specifications available at: // http://homes.esat.kuleuven.be/~cosicart/pdf/AB-9601/AB-9601.pdf. 
diff --git a/vendor/golang.org/x/crypto/ripemd160/ripemd160_test.go b/vendor/golang.org/x/crypto/ripemd160/ripemd160_test.go index 5df1b25..a1fbffd 100644 --- a/vendor/golang.org/x/crypto/ripemd160/ripemd160_test.go +++ b/vendor/golang.org/x/crypto/ripemd160/ripemd160_test.go @@ -50,15 +50,23 @@ func TestVectors(t *testing.T) { } } -func TestMillionA(t *testing.T) { +func millionA() string { md := New() for i := 0; i < 100000; i++ { io.WriteString(md, "aaaaaaaaaa") } - out := "52783243c1697bdbe16d37f97f68f08325dc1528" - s := fmt.Sprintf("%x", md.Sum(nil)) - if s != out { + return fmt.Sprintf("%x", md.Sum(nil)) +} + +func TestMillionA(t *testing.T) { + const out = "52783243c1697bdbe16d37f97f68f08325dc1528" + if s := millionA(); s != out { t.Fatalf("RIPEMD-160 (1 million 'a') = %s, expected %s", s, out) } - md.Reset() +} + +func BenchmarkMillionA(b *testing.B) { + for i := 0; i < b.N; i++ { + millionA() + } } diff --git a/vendor/golang.org/x/crypto/ripemd160/ripemd160block.go b/vendor/golang.org/x/crypto/ripemd160/ripemd160block.go index 7bc8e6c..e0edc02 100644 --- a/vendor/golang.org/x/crypto/ripemd160/ripemd160block.go +++ b/vendor/golang.org/x/crypto/ripemd160/ripemd160block.go @@ -8,6 +8,10 @@ package ripemd160 +import ( + "math/bits" +) + // work buffer indices and roll amounts for one line var _n = [80]uint{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, @@ -59,16 +63,16 @@ func _Block(md *digest, p []byte) int { i := 0 for i < 16 { alpha = a + (b ^ c ^ d) + x[_n[i]] - s := _r[i] - alpha = (alpha<>(32-s)) + e - beta = c<<10 | c>>22 + s := int(_r[i]) + alpha = bits.RotateLeft32(alpha, s) + e + beta = bits.RotateLeft32(c, 10) a, b, c, d, e = e, alpha, b, beta, d // parallel line alpha = aa + (bb ^ (cc | ^dd)) + x[n_[i]] + 0x50a28be6 - s = r_[i] - alpha = (alpha<>(32-s)) + ee - beta = cc<<10 | cc>>22 + s = int(r_[i]) + alpha = bits.RotateLeft32(alpha, s) + ee + beta = bits.RotateLeft32(cc, 10) aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd i++ @@ -77,16 +81,16 @@ func _Block(md *digest, p []byte) int { // round 2 for i < 32 { alpha = a + (b&c | ^b&d) + x[_n[i]] + 0x5a827999 - s := _r[i] - alpha = (alpha<>(32-s)) + e - beta = c<<10 | c>>22 + s := int(_r[i]) + alpha = bits.RotateLeft32(alpha, s) + e + beta = bits.RotateLeft32(c, 10) a, b, c, d, e = e, alpha, b, beta, d // parallel line alpha = aa + (bb&dd | cc&^dd) + x[n_[i]] + 0x5c4dd124 - s = r_[i] - alpha = (alpha<>(32-s)) + ee - beta = cc<<10 | cc>>22 + s = int(r_[i]) + alpha = bits.RotateLeft32(alpha, s) + ee + beta = bits.RotateLeft32(cc, 10) aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd i++ @@ -95,16 +99,16 @@ func _Block(md *digest, p []byte) int { // round 3 for i < 48 { alpha = a + (b | ^c ^ d) + x[_n[i]] + 0x6ed9eba1 - s := _r[i] - alpha = (alpha<>(32-s)) + e - beta = c<<10 | c>>22 + s := int(_r[i]) + alpha = bits.RotateLeft32(alpha, s) + e + beta = bits.RotateLeft32(c, 10) a, b, c, d, e = e, alpha, b, beta, d // parallel line alpha = aa + (bb | ^cc ^ dd) + x[n_[i]] + 0x6d703ef3 - s = r_[i] - alpha = (alpha<>(32-s)) + ee - beta = cc<<10 | cc>>22 + s = int(r_[i]) + alpha = bits.RotateLeft32(alpha, s) + ee + beta = bits.RotateLeft32(cc, 10) aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd i++ @@ -113,16 +117,16 @@ func _Block(md *digest, p []byte) int { // round 4 for i < 64 { alpha = a + (b&d | c&^d) + x[_n[i]] + 0x8f1bbcdc - s := _r[i] - alpha = (alpha<>(32-s)) + e - beta = c<<10 | c>>22 + s := int(_r[i]) + alpha = bits.RotateLeft32(alpha, s) + e + beta = bits.RotateLeft32(c, 10) a, b, c, d, e = e, alpha, b, beta, d // parallel 
line alpha = aa + (bb&cc | ^bb&dd) + x[n_[i]] + 0x7a6d76e9 - s = r_[i] - alpha = (alpha<>(32-s)) + ee - beta = cc<<10 | cc>>22 + s = int(r_[i]) + alpha = bits.RotateLeft32(alpha, s) + ee + beta = bits.RotateLeft32(cc, 10) aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd i++ @@ -131,16 +135,16 @@ func _Block(md *digest, p []byte) int { // round 5 for i < 80 { alpha = a + (b ^ (c | ^d)) + x[_n[i]] + 0xa953fd4e - s := _r[i] - alpha = (alpha<>(32-s)) + e - beta = c<<10 | c>>22 + s := int(_r[i]) + alpha = bits.RotateLeft32(alpha, s) + e + beta = bits.RotateLeft32(c, 10) a, b, c, d, e = e, alpha, b, beta, d // parallel line alpha = aa + (bb ^ cc ^ dd) + x[n_[i]] - s = r_[i] - alpha = (alpha<>(32-s)) + ee - beta = cc<<10 | cc>>22 + s = int(r_[i]) + alpha = bits.RotateLeft32(alpha, s) + ee + beta = bits.RotateLeft32(cc, 10) aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd i++ diff --git a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go index c34d362..656e8df 100644 --- a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go +++ b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.go @@ -6,14 +6,13 @@ package salsa -// This function is implemented in salsa2020_amd64.s. - //go:noescape +// salsa2020XORKeyStream is implemented in salsa20_amd64.s. func salsa2020XORKeyStream(out, in *byte, n uint64, nonce, key *byte) // XORKeyStream crypts bytes from in to out using the given key and counters. -// In and out may be the same slice but otherwise should not overlap. Counter +// In and out must overlap entirely or not at all. Counter // contains the raw salsa20 counter bytes (both nonce and block counter). func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { if len(in) == 0 { diff --git a/vendor/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.s similarity index 99% rename from vendor/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s rename to vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.s index 22afbdc..18085d2 100644 --- a/vendor/golang.org/x/crypto/salsa20/salsa/salsa2020_amd64.s +++ b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64.s @@ -99,30 +99,24 @@ TEXT ·salsa2020XORKeyStream(SB),0,$456-40 // frame = 424 + 32 byte alignment MOVL 36 (SP),CX MOVL DX,288(SP) MOVL CX,304(SP) - ADDQ $1,DX SHLQ $32,CX ADDQ CX,DX + ADDQ $1,DX MOVQ DX,CX SHRQ $32,CX MOVL DX, 292 (SP) MOVL CX, 308 (SP) ADDQ $1,DX - SHLQ $32,CX - ADDQ CX,DX MOVQ DX,CX SHRQ $32,CX MOVL DX, 296 (SP) MOVL CX, 312 (SP) ADDQ $1,DX - SHLQ $32,CX - ADDQ CX,DX MOVQ DX,CX SHRQ $32,CX MOVL DX, 300 (SP) MOVL CX, 316 (SP) ADDQ $1,DX - SHLQ $32,CX - ADDQ CX,DX MOVQ DX,CX SHRQ $32,CX MOVL DX,16(SP) diff --git a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64_test.go b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64_test.go new file mode 100644 index 0000000..d4e779c --- /dev/null +++ b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_amd64_test.go @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build amd64,!appengine,!gccgo + +package salsa + +import ( + "bytes" + "testing" +) + +func TestCounterOverflow(t *testing.T) { + in := make([]byte, 4096) + key := &[32]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2} + for n, counter := range []*[16]byte{ + &[16]byte{0, 1, 2, 3, 4, 5, 6, 7, 0, 0, 0, 0, 0, 0, 0, 0}, // zero counter + &[16]byte{0, 1, 2, 3, 4, 5, 6, 7, 0, 0, 0, 0, 0xff, 0xff, 0xff, 0xff}, // counter about to overflow 32 bits + &[16]byte{0, 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 0xff, 0xff, 0xff, 0xff}, // counter above 32 bits + } { + out := make([]byte, 4096) + XORKeyStream(out, in, counter, key) + outGeneric := make([]byte, 4096) + genericXORKeyStream(outGeneric, in, counter, key) + if !bytes.Equal(out, outGeneric) { + t.Errorf("%d: assembly and go implementations disagree", n) + } + } +} diff --git a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_noasm.go b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_noasm.go new file mode 100644 index 0000000..8a46bd2 --- /dev/null +++ b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_noasm.go @@ -0,0 +1,14 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 appengine gccgo + +package salsa + +// XORKeyStream crypts bytes from in to out using the given key and counters. +// In and out must overlap entirely or not at all. Counter +// contains the raw salsa20 counter bytes (both nonce and block counter). +func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { + genericXORKeyStream(out, in, counter, key) +} diff --git a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go index 95f8ca5..68169c6 100644 --- a/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go +++ b/vendor/golang.org/x/crypto/salsa20/salsa/salsa20_ref.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build !amd64 appengine gccgo - package salsa const rounds = 20 @@ -202,10 +200,9 @@ func core(out *[64]byte, in *[16]byte, k *[32]byte, c *[16]byte) { out[63] = byte(x15 >> 24) } -// XORKeyStream crypts bytes from in to out using the given key and counters. -// In and out may be the same slice but otherwise should not overlap. Counter -// contains the raw salsa20 counter bytes (both nonce and block counter). -func XORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { +// genericXORKeyStream is the generic implementation of XORKeyStream to be used +// when no assembly implementation is available. +func genericXORKeyStream(out, in []byte, counter *[16]byte, key *[32]byte) { var block [64]byte var counterCopy [16]byte copy(counterCopy[:], counter[:]) diff --git a/vendor/golang.org/x/crypto/salsa20/salsa20.go b/vendor/golang.org/x/crypto/salsa20/salsa20.go index a8ddd76..6f9bb10 100644 --- a/vendor/golang.org/x/crypto/salsa20/salsa20.go +++ b/vendor/golang.org/x/crypto/salsa20/salsa20.go @@ -24,15 +24,19 @@ package salsa20 // import "golang.org/x/crypto/salsa20" // TODO(agl): implement XORKeyStream12 and XORKeyStream8 - the reduced round variants of Salsa20. import ( + "golang.org/x/crypto/internal/subtle" "golang.org/x/crypto/salsa20/salsa" ) -// XORKeyStream crypts bytes from in to out using the given key and nonce. In -// and out may be the same slice but otherwise should not overlap. 
Nonce must +// XORKeyStream crypts bytes from in to out using the given key and nonce. +// In and out must overlap entirely or not at all. Nonce must // be either 8 or 24 bytes long. func XORKeyStream(out, in []byte, nonce []byte, key *[32]byte) { if len(out) < len(in) { - in = in[:len(out)] + panic("salsa20: output smaller than input") + } + if subtle.InexactOverlap(out[:len(in)], in) { + panic("salsa20: invalid buffer overlap") } var subNonce [16]byte diff --git a/vendor/golang.org/x/crypto/scrypt/example_test.go b/vendor/golang.org/x/crypto/scrypt/example_test.go new file mode 100644 index 0000000..6736479 --- /dev/null +++ b/vendor/golang.org/x/crypto/scrypt/example_test.go @@ -0,0 +1,26 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package scrypt_test + +import ( + "encoding/base64" + "fmt" + "log" + + "golang.org/x/crypto/scrypt" +) + +func Example() { + // DO NOT use this salt value; generate your own random salt. 8 bytes is + // a good length. + salt := []byte{0xc8, 0x28, 0xf2, 0x58, 0xa7, 0x6a, 0xad, 0x7b} + + dk, err := scrypt.Key([]byte("some password"), salt, 1<<15, 8, 1, 32) + if err != nil { + log.Fatal(err) + } + fmt.Println(base64.StdEncoding.EncodeToString(dk)) + // Output: lGnMz8io0AUkfzn6Pls1qX20Vs7PGN6sbYQ2TQgY12M= +} diff --git a/vendor/golang.org/x/crypto/scrypt/scrypt.go b/vendor/golang.org/x/crypto/scrypt/scrypt.go index 14375c5..3362afd 100644 --- a/vendor/golang.org/x/crypto/scrypt/scrypt.go +++ b/vendor/golang.org/x/crypto/scrypt/scrypt.go @@ -29,7 +29,7 @@ func blockXOR(dst, src []uint32, n int) { } // salsaXOR applies Salsa20/8 to the XOR of 16 numbers from tmp and in, -// and puts the result into both both tmp and out. +// and puts the result into both tmp and out. func salsaXOR(tmp *[16]uint32, in, out []uint32) { w0 := tmp[0] ^ in[0] w1 := tmp[1] ^ in[1] @@ -218,11 +218,12 @@ func smix(b []byte, r, N int, v, xy []uint32) { // For example, you can get a derived key for e.g. AES-256 (which needs a // 32-byte key) by doing: // -// dk, err := scrypt.Key([]byte("some password"), salt, 16384, 8, 1, 32) +// dk, err := scrypt.Key([]byte("some password"), salt, 32768, 8, 1, 32) // -// The recommended parameters for interactive logins as of 2009 are N=16384, -// r=8, p=1. They should be increased as memory latency and CPU parallelism -// increases. Remember to get a good random salt. +// The recommended parameters for interactive logins as of 2017 are N=32768, r=8 +// and p=1. The parameters N, r, and p should be increased as memory latency and +// CPU parallelism increases; consider setting N to the highest power of 2 you +// can derive within 100 milliseconds. Remember to get a good random salt. 
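The rewritten Key doc comment above recommends N=32768 for interactive logins and advises raising N to the largest power of two that completes within roughly 100 milliseconds on the target hardware. A rough calibration sketch along those lines (the r=8, p=1 values, the starting point, and the time budget are illustrative assumptions, not part of the package API):

package main

import (
	"fmt"
	"time"

	"golang.org/x/crypto/scrypt"
)

// calibrateN doubles N from the documented default until a single Key call
// exceeds the budget, and returns the last value that stayed within it.
func calibrateN(password, salt []byte, budget time.Duration) int {
	best := 1 << 15 // documented 2017 recommendation, used as a floor
	for N := best; ; N <<= 1 {
		start := time.Now()
		if _, err := scrypt.Key(password, salt, N, 8, 1, 32); err != nil {
			break // invalid parameters; keep the last value that worked
		}
		if time.Since(start) > budget {
			break
		}
		best = N
	}
	return best
}

func main() {
	salt := make([]byte, 8) // placeholder; real callers need a random salt
	fmt.Println("calibrated N:", calibrateN([]byte("some password"), salt, 100*time.Millisecond))
}

The result is only meaningful on the machine that will verify passwords, so a calibration like this belongs in setup or provisioning code rather than on the login path.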
func Key(password, salt []byte, N, r, p, keyLen int) ([]byte, error) { if N <= 1 || N&(N-1) != 0 { return nil, errors.New("scrypt: N must be > 1 and a power of 2") diff --git a/vendor/golang.org/x/crypto/scrypt/scrypt_test.go b/vendor/golang.org/x/crypto/scrypt/scrypt_test.go index e096c3a..766ed8d 100644 --- a/vendor/golang.org/x/crypto/scrypt/scrypt_test.go +++ b/vendor/golang.org/x/crypto/scrypt/scrypt_test.go @@ -153,8 +153,10 @@ func TestKey(t *testing.T) { } } +var sink []byte + func BenchmarkKey(b *testing.B) { for i := 0; i < b.N; i++ { - Key([]byte("password"), []byte("salt"), 16384, 8, 1, 64) + sink, _ = Key([]byte("password"), []byte("salt"), 1<<15, 8, 1, 64) } } diff --git a/vendor/golang.org/x/crypto/sha3/doc.go b/vendor/golang.org/x/crypto/sha3/doc.go index a0ee3ae..c2fef30 100644 --- a/vendor/golang.org/x/crypto/sha3/doc.go +++ b/vendor/golang.org/x/crypto/sha3/doc.go @@ -43,7 +43,7 @@ // is then "full" and the permutation is applied to "empty" it. This process is // repeated until all the input has been "absorbed". The input is then padded. // The digest is "squeezed" from the sponge in the same way, except that output -// output is copied out instead of input being XORed in. +// is copied out instead of input being XORed in. // // A sponge is parameterized by its generic security strength, which is equal // to half its capacity; capacity + rate is equal to the permutation's width. diff --git a/vendor/golang.org/x/crypto/sha3/hashes.go b/vendor/golang.org/x/crypto/sha3/hashes.go index 2b51cf4..0d8043f 100644 --- a/vendor/golang.org/x/crypto/sha3/hashes.go +++ b/vendor/golang.org/x/crypto/sha3/hashes.go @@ -15,22 +15,54 @@ import ( // New224 creates a new SHA3-224 hash. // Its generic security strength is 224 bits against preimage attacks, // and 112 bits against collision attacks. -func New224() hash.Hash { return &state{rate: 144, outputLen: 28, dsbyte: 0x06} } +func New224() hash.Hash { + if h := new224Asm(); h != nil { + return h + } + return &state{rate: 144, outputLen: 28, dsbyte: 0x06} +} // New256 creates a new SHA3-256 hash. // Its generic security strength is 256 bits against preimage attacks, // and 128 bits against collision attacks. -func New256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x06} } +func New256() hash.Hash { + if h := new256Asm(); h != nil { + return h + } + return &state{rate: 136, outputLen: 32, dsbyte: 0x06} +} // New384 creates a new SHA3-384 hash. // Its generic security strength is 384 bits against preimage attacks, // and 192 bits against collision attacks. -func New384() hash.Hash { return &state{rate: 104, outputLen: 48, dsbyte: 0x06} } +func New384() hash.Hash { + if h := new384Asm(); h != nil { + return h + } + return &state{rate: 104, outputLen: 48, dsbyte: 0x06} +} // New512 creates a new SHA3-512 hash. // Its generic security strength is 512 bits against preimage attacks, // and 256 bits against collision attacks. -func New512() hash.Hash { return &state{rate: 72, outputLen: 64, dsbyte: 0x06} } +func New512() hash.Hash { + if h := new512Asm(); h != nil { + return h + } + return &state{rate: 72, outputLen: 64, dsbyte: 0x06} +} + +// NewLegacyKeccak256 creates a new Keccak-256 hash. +// +// Only use this function if you require compatibility with an existing cryptosystem +// that uses non-standard padding. All other users should use New256 instead. +func NewLegacyKeccak256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x01} } + +// NewLegacyKeccak512 creates a new Keccak-512 hash. 
+// +// Only use this function if you require compatibility with an existing cryptosystem +// that uses non-standard padding. All other users should use New512 instead. +func NewLegacyKeccak512() hash.Hash { return &state{rate: 72, outputLen: 64, dsbyte: 0x01} } // Sum224 returns the SHA3-224 digest of the data. func Sum224(data []byte) (digest [28]byte) { diff --git a/vendor/golang.org/x/crypto/sha3/hashes_generic.go b/vendor/golang.org/x/crypto/sha3/hashes_generic.go new file mode 100644 index 0000000..c4ff3f6 --- /dev/null +++ b/vendor/golang.org/x/crypto/sha3/hashes_generic.go @@ -0,0 +1,27 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build gccgo appengine !s390x + +package sha3 + +import ( + "hash" +) + +// new224Asm returns an assembly implementation of SHA3-224 if available, +// otherwise it returns nil. +func new224Asm() hash.Hash { return nil } + +// new256Asm returns an assembly implementation of SHA3-256 if available, +// otherwise it returns nil. +func new256Asm() hash.Hash { return nil } + +// new384Asm returns an assembly implementation of SHA3-384 if available, +// otherwise it returns nil. +func new384Asm() hash.Hash { return nil } + +// new512Asm returns an assembly implementation of SHA3-512 if available, +// otherwise it returns nil. +func new512Asm() hash.Hash { return nil } diff --git a/vendor/golang.org/x/crypto/sha3/sha3.go b/vendor/golang.org/x/crypto/sha3/sha3.go index c86167c..b12a35c 100644 --- a/vendor/golang.org/x/crypto/sha3/sha3.go +++ b/vendor/golang.org/x/crypto/sha3/sha3.go @@ -42,9 +42,8 @@ type state struct { storage [maxRate]byte // Specific to SHA-3 and SHAKE. - fixedOutput bool // whether this is a fixed-output-length instance - outputLen int // the default output size in bytes - state spongeDirection // whether the sponge is absorbing or squeezing + outputLen int // the default output size in bytes + state spongeDirection // whether the sponge is absorbing or squeezing } // BlockSize returns the rate of sponge underlying this hash function. diff --git a/vendor/golang.org/x/crypto/sha3/sha3_s390x.go b/vendor/golang.org/x/crypto/sha3/sha3_s390x.go new file mode 100644 index 0000000..b6cbc5c --- /dev/null +++ b/vendor/golang.org/x/crypto/sha3/sha3_s390x.go @@ -0,0 +1,284 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !gccgo,!appengine + +package sha3 + +// This file contains code for using the 'compute intermediate +// message digest' (KIMD) and 'compute last message digest' (KLMD) +// instructions to compute SHA-3 and SHAKE hashes on IBM Z. + +import ( + "hash" + + "golang.org/x/sys/cpu" +) + +// codes represent 7-bit KIMD/KLMD function codes as defined in +// the Principles of Operation. +type code uint64 + +const ( + // function codes for KIMD/KLMD + sha3_224 code = 32 + sha3_256 = 33 + sha3_384 = 34 + sha3_512 = 35 + shake_128 = 36 + shake_256 = 37 + nopad = 0x100 +) + +// kimd is a wrapper for the 'compute intermediate message digest' instruction. +// src must be a multiple of the rate for the given function code. +//go:noescape +func kimd(function code, chain *[200]byte, src []byte) + +// klmd is a wrapper for the 'compute last message digest' instruction. +// src padding is handled by the instruction. 
+//go:noescape +func klmd(function code, chain *[200]byte, dst, src []byte) + +type asmState struct { + a [200]byte // 1600 bit state + buf []byte // care must be taken to ensure cap(buf) is a multiple of rate + rate int // equivalent to block size + storage [3072]byte // underlying storage for buf + outputLen int // output length if fixed, 0 if not + function code // KIMD/KLMD function code + state spongeDirection // whether the sponge is absorbing or squeezing +} + +func newAsmState(function code) *asmState { + var s asmState + s.function = function + switch function { + case sha3_224: + s.rate = 144 + s.outputLen = 28 + case sha3_256: + s.rate = 136 + s.outputLen = 32 + case sha3_384: + s.rate = 104 + s.outputLen = 48 + case sha3_512: + s.rate = 72 + s.outputLen = 64 + case shake_128: + s.rate = 168 + case shake_256: + s.rate = 136 + default: + panic("sha3: unrecognized function code") + } + + // limit s.buf size to a multiple of s.rate + s.resetBuf() + return &s +} + +func (s *asmState) clone() *asmState { + c := *s + c.buf = c.storage[:len(s.buf):cap(s.buf)] + return &c +} + +// copyIntoBuf copies b into buf. It will panic if there is not enough space to +// store all of b. +func (s *asmState) copyIntoBuf(b []byte) { + bufLen := len(s.buf) + s.buf = s.buf[:len(s.buf)+len(b)] + copy(s.buf[bufLen:], b) +} + +// resetBuf points buf at storage, sets the length to 0 and sets cap to be a +// multiple of the rate. +func (s *asmState) resetBuf() { + max := (cap(s.storage) / s.rate) * s.rate + s.buf = s.storage[:0:max] +} + +// Write (via the embedded io.Writer interface) adds more data to the running hash. +// It never returns an error. +func (s *asmState) Write(b []byte) (int, error) { + if s.state != spongeAbsorbing { + panic("sha3: write to sponge after read") + } + length := len(b) + for len(b) > 0 { + if len(s.buf) == 0 && len(b) >= cap(s.buf) { + // Hash the data directly and push any remaining bytes + // into the buffer. + remainder := len(s.buf) % s.rate + kimd(s.function, &s.a, b[:len(b)-remainder]) + if remainder != 0 { + s.copyIntoBuf(b[len(b)-remainder:]) + } + return length, nil + } + + if len(s.buf) == cap(s.buf) { + // flush the buffer + kimd(s.function, &s.a, s.buf) + s.buf = s.buf[:0] + } + + // copy as much as we can into the buffer + n := len(b) + if len(b) > cap(s.buf)-len(s.buf) { + n = cap(s.buf) - len(s.buf) + } + s.copyIntoBuf(b[:n]) + b = b[n:] + } + return length, nil +} + +// Read squeezes an arbitrary number of bytes from the sponge. 
+func (s *asmState) Read(out []byte) (n int, err error) { + n = len(out) + + // need to pad if we were absorbing + if s.state == spongeAbsorbing { + s.state = spongeSqueezing + + // write hash directly into out if possible + if len(out)%s.rate == 0 { + klmd(s.function, &s.a, out, s.buf) // len(out) may be 0 + s.buf = s.buf[:0] + return + } + + // write hash into buffer + max := cap(s.buf) + if max > len(out) { + max = (len(out)/s.rate)*s.rate + s.rate + } + klmd(s.function, &s.a, s.buf[:max], s.buf) + s.buf = s.buf[:max] + } + + for len(out) > 0 { + // flush the buffer + if len(s.buf) != 0 { + c := copy(out, s.buf) + out = out[c:] + s.buf = s.buf[c:] + continue + } + + // write hash directly into out if possible + if len(out)%s.rate == 0 { + klmd(s.function|nopad, &s.a, out, nil) + return + } + + // write hash into buffer + s.resetBuf() + if cap(s.buf) > len(out) { + s.buf = s.buf[:(len(out)/s.rate)*s.rate+s.rate] + } + klmd(s.function|nopad, &s.a, s.buf, nil) + } + return +} + +// Sum appends the current hash to b and returns the resulting slice. +// It does not change the underlying hash state. +func (s *asmState) Sum(b []byte) []byte { + if s.outputLen == 0 { + panic("sha3: cannot call Sum on SHAKE functions") + } + + // Copy the state to preserve the original. + a := s.a + + // Hash the buffer. Note that we don't clear it because we + // aren't updating the state. + klmd(s.function, &a, nil, s.buf) + return append(b, a[:s.outputLen]...) +} + +// Reset resets the Hash to its initial state. +func (s *asmState) Reset() { + for i := range s.a { + s.a[i] = 0 + } + s.resetBuf() + s.state = spongeAbsorbing +} + +// Size returns the number of bytes Sum will return. +func (s *asmState) Size() int { + return s.outputLen +} + +// BlockSize returns the hash's underlying block size. +// The Write method must be able to accept any amount +// of data, but it may operate more efficiently if all writes +// are a multiple of the block size. +func (s *asmState) BlockSize() int { + return s.rate +} + +// Clone returns a copy of the ShakeHash in its current state. +func (s *asmState) Clone() ShakeHash { + return s.clone() +} + +// new224Asm returns an assembly implementation of SHA3-224 if available, +// otherwise it returns nil. +func new224Asm() hash.Hash { + if cpu.S390X.HasSHA3 { + return newAsmState(sha3_224) + } + return nil +} + +// new256Asm returns an assembly implementation of SHA3-256 if available, +// otherwise it returns nil. +func new256Asm() hash.Hash { + if cpu.S390X.HasSHA3 { + return newAsmState(sha3_256) + } + return nil +} + +// new384Asm returns an assembly implementation of SHA3-384 if available, +// otherwise it returns nil. +func new384Asm() hash.Hash { + if cpu.S390X.HasSHA3 { + return newAsmState(sha3_384) + } + return nil +} + +// new512Asm returns an assembly implementation of SHA3-512 if available, +// otherwise it returns nil. +func new512Asm() hash.Hash { + if cpu.S390X.HasSHA3 { + return newAsmState(sha3_512) + } + return nil +} + +// newShake128Asm returns an assembly implementation of SHAKE-128 if available, +// otherwise it returns nil. +func newShake128Asm() ShakeHash { + if cpu.S390X.HasSHA3 { + return newAsmState(shake_128) + } + return nil +} + +// newShake256Asm returns an assembly implementation of SHAKE-256 if available, +// otherwise it returns nil. 
+func newShake256Asm() ShakeHash { + if cpu.S390X.HasSHA3 { + return newAsmState(shake_256) + } + return nil +} diff --git a/vendor/golang.org/x/crypto/sha3/sha3_s390x.s b/vendor/golang.org/x/crypto/sha3/sha3_s390x.s new file mode 100644 index 0000000..b2ef69f --- /dev/null +++ b/vendor/golang.org/x/crypto/sha3/sha3_s390x.s @@ -0,0 +1,33 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !gccgo,!appengine + +#include "textflag.h" + +// func kimd(function code, chain *[200]byte, src []byte) +TEXT ·kimd(SB), NOFRAME|NOSPLIT, $0-40 + MOVD function+0(FP), R0 + MOVD chain+8(FP), R1 + LMG src+16(FP), R2, R3 // R2=base, R3=len + +continue: + WORD $0xB93E0002 // KIMD --, R2 + BVS continue // continue if interrupted + MOVD $0, R0 // reset R0 for pre-go1.8 compilers + RET + +// func klmd(function code, chain *[200]byte, dst, src []byte) +TEXT ·klmd(SB), NOFRAME|NOSPLIT, $0-64 + // TODO: SHAKE support + MOVD function+0(FP), R0 + MOVD chain+8(FP), R1 + LMG dst+16(FP), R2, R3 // R2=base, R3=len + LMG src+40(FP), R4, R5 // R4=base, R5=len + +continue: + WORD $0xB93F0024 // KLMD R2, R4 + BVS continue // continue if interrupted + MOVD $0, R0 // reset R0 for pre-go1.8 compilers + RET diff --git a/vendor/golang.org/x/crypto/sha3/sha3_test.go b/vendor/golang.org/x/crypto/sha3/sha3_test.go index 312e8f9..26d1549 100644 --- a/vendor/golang.org/x/crypto/sha3/sha3_test.go +++ b/vendor/golang.org/x/crypto/sha3/sha3_test.go @@ -36,15 +36,17 @@ func newHashShake256() hash.Hash { } // testDigests contains functions returning hash.Hash instances -// with output-length equal to the KAT length for both SHA-3 and -// SHAKE instances. +// with output-length equal to the KAT length for SHA-3, Keccak +// and SHAKE instances. var testDigests = map[string]func() hash.Hash{ - "SHA3-224": New224, - "SHA3-256": New256, - "SHA3-384": New384, - "SHA3-512": New512, - "SHAKE128": newHashShake128, - "SHAKE256": newHashShake256, + "SHA3-224": New224, + "SHA3-256": New256, + "SHA3-384": New384, + "SHA3-512": New512, + "Keccak-256": NewLegacyKeccak256, + "Keccak-512": NewLegacyKeccak512, + "SHAKE128": newHashShake128, + "SHAKE256": newHashShake256, } // testShakes contains functions that return ShakeHash instances for @@ -124,9 +126,39 @@ func TestKeccakKats(t *testing.T) { }) } +// TestKeccak does a basic test of the non-standardized Keccak hash functions. +func TestKeccak(t *testing.T) { + tests := []struct { + fn func() hash.Hash + data []byte + want string + }{ + { + NewLegacyKeccak256, + []byte("abc"), + "4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45", + }, + { + NewLegacyKeccak512, + []byte("abc"), + "18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", + }, + } + + for _, u := range tests { + h := u.fn() + h.Write(u.data) + got := h.Sum(nil) + want := decodeHex(u.want) + if !bytes.Equal(got, want) { + t.Errorf("unexpected hash for size %d: got '%x' want '%s'", h.Size()*8, got, u.want) + } + } +} + // TestUnalignedWrite tests that writing data in an arbitrary pattern with // small input buffers. 
-func testUnalignedWrite(t *testing.T) { +func TestUnalignedWrite(t *testing.T) { testUnalignedAndGeneric(t, func(impl string) { buf := sequentialBytes(0x10000) for alg, df := range testDigests { @@ -202,7 +234,7 @@ func TestSqueezing(t *testing.T) { d1 := newShakeHash() d1.Write([]byte(testString)) var multiple []byte - for _ = range ref { + for range ref { one := make([]byte, 1) d1.Read(one) multiple = append(multiple, one...) diff --git a/vendor/golang.org/x/crypto/sha3/shake.go b/vendor/golang.org/x/crypto/sha3/shake.go index 841f986..97c9b06 100644 --- a/vendor/golang.org/x/crypto/sha3/shake.go +++ b/vendor/golang.org/x/crypto/sha3/shake.go @@ -38,12 +38,22 @@ func (d *state) Clone() ShakeHash { // NewShake128 creates a new SHAKE128 variable-output-length ShakeHash. // Its generic security strength is 128 bits against all attacks if at // least 32 bytes of its output are used. -func NewShake128() ShakeHash { return &state{rate: 168, dsbyte: 0x1f} } +func NewShake128() ShakeHash { + if h := newShake128Asm(); h != nil { + return h + } + return &state{rate: 168, dsbyte: 0x1f} +} -// NewShake256 creates a new SHAKE128 variable-output-length ShakeHash. +// NewShake256 creates a new SHAKE256 variable-output-length ShakeHash. // Its generic security strength is 256 bits against all attacks if // at least 64 bytes of its output are used. -func NewShake256() ShakeHash { return &state{rate: 136, dsbyte: 0x1f} } +func NewShake256() ShakeHash { + if h := newShake256Asm(); h != nil { + return h + } + return &state{rate: 136, dsbyte: 0x1f} +} // ShakeSum128 writes an arbitrary-length digest of data into hash. func ShakeSum128(hash, data []byte) { diff --git a/vendor/golang.org/x/crypto/sha3/shake_generic.go b/vendor/golang.org/x/crypto/sha3/shake_generic.go new file mode 100644 index 0000000..73d0c90 --- /dev/null +++ b/vendor/golang.org/x/crypto/sha3/shake_generic.go @@ -0,0 +1,19 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build gccgo appengine !s390x + +package sha3 + +// newShake128Asm returns an assembly implementation of SHAKE-128 if available, +// otherwise it returns nil. +func newShake128Asm() ShakeHash { + return nil +} + +// newShake256Asm returns an assembly implementation of SHAKE-256 if available, +// otherwise it returns nil. +func newShake256Asm() ShakeHash { + return nil +} diff --git a/vendor/golang.org/x/crypto/ssh/agent/client.go b/vendor/golang.org/x/crypto/ssh/agent/client.go index dce7682..51f7405 100644 --- a/vendor/golang.org/x/crypto/ssh/agent/client.go +++ b/vendor/golang.org/x/crypto/ssh/agent/client.go @@ -8,7 +8,7 @@ // ssh-agent process using the sample server. // // References: -// [PROTOCOL.agent]: http://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.agent?rev=HEAD +// [PROTOCOL.agent]: https://tools.ietf.org/html/draft-miller-ssh-agent-00 package agent // import "golang.org/x/crypto/ssh/agent" import ( @@ -25,10 +25,22 @@ import ( "math/big" "sync" + "crypto" "golang.org/x/crypto/ed25519" "golang.org/x/crypto/ssh" ) +// SignatureFlags represent additional flags that can be passed to the signature +// requests an defined in [PROTOCOL.agent] section 4.5.1. +type SignatureFlags uint32 + +// SignatureFlag values as defined in [PROTOCOL.agent] section 5.3. +const ( + SignatureFlagReserved SignatureFlags = 1 << iota + SignatureFlagRsaSha256 + SignatureFlagRsaSha512 +) + // Agent represents the capabilities of an ssh-agent. 
type Agent interface { // List returns the identities known to the agent. @@ -57,6 +69,26 @@ type Agent interface { Signers() ([]ssh.Signer, error) } +type ExtendedAgent interface { + Agent + + // SignWithFlags signs like Sign, but allows for additional flags to be sent/received + SignWithFlags(key ssh.PublicKey, data []byte, flags SignatureFlags) (*ssh.Signature, error) + + // Extension processes a custom extension request. Standard-compliant agents are not + // required to support any extensions, but this method allows agents to implement + // vendor-specific methods or add experimental features. See [PROTOCOL.agent] section 4.7. + // If agent extensions are unsupported entirely this method MUST return an + // ErrExtensionUnsupported error. Similarly, if just the specific extensionType in + // the request is unsupported by the agent then ErrExtensionUnsupported MUST be + // returned. + // + // In the case of success, since [PROTOCOL.agent] section 4.7 specifies that the contents + // of the response are unspecified (including the type of the message), the complete + // response will be returned as a []byte slice, including the "type" byte of the message. + Extension(extensionType string, contents []byte) ([]byte, error) +} + // ConstraintExtension describes an optional constraint defined by users. type ConstraintExtension struct { // ExtensionName consist of a UTF-8 string suffixed by the @@ -98,7 +130,7 @@ const ( agentAddIdentity = 17 agentRemoveIdentity = 18 agentRemoveAllIdentities = 19 - agentAddIdConstrained = 25 + agentAddIDConstrained = 25 // 3.3 Key-type independent requests from client to agent agentAddSmartcardKey = 20 @@ -179,6 +211,23 @@ type constrainExtensionAgentMsg struct { Rest []byte `ssh:"rest"` } +// See [PROTOCOL.agent], section 4.7 +const agentExtension = 27 +const agentExtensionFailure = 28 + +// ErrExtensionUnsupported indicates that an extension defined in +// [PROTOCOL.agent] section 4.7 is unsupported by the agent. Specifically this +// error indicates that the agent returned a standard SSH_AGENT_FAILURE message +// as the result of a SSH_AGENTC_EXTENSION request. Note that the protocol +// specification (and therefore this error) does not distinguish between a +// specific extension being unsupported and extensions being unsupported entirely. +var ErrExtensionUnsupported = errors.New("agent: extension unsupported") + +type extensionAgentMsg struct { + ExtensionType string `sshtype:"27"` + Contents []byte +} + // Key represents a protocol 2 public key as defined in // [PROTOCOL.agent], section 2.5.2. type Key struct { @@ -260,7 +309,7 @@ type client struct { // NewClient returns an Agent that talks to an ssh-agent process over // the given connection. -func NewClient(rw io.ReadWriter) Agent { +func NewClient(rw io.ReadWriter) ExtendedAgent { return &client{conn: rw} } @@ -268,6 +317,21 @@ func NewClient(rw io.ReadWriter) Agent { // unmarshaled into reply and replyType is set to the first byte of // the reply, which contains the type of the message. func (c *client) call(req []byte) (reply interface{}, err error) { + buf, err := c.callRaw(req) + if err != nil { + return nil, err + } + reply, err = unmarshal(buf) + if err != nil { + return nil, clientErr(err) + } + return reply, nil +} + +// callRaw sends an RPC to the agent. On success, the raw +// bytes of the response are returned; no unmarshalling is +// performed on the response. 
+func (c *client) callRaw(req []byte) (reply []byte, err error) { c.mu.Lock() defer c.mu.Unlock() @@ -284,18 +348,14 @@ func (c *client) call(req []byte) (reply interface{}, err error) { } respSize := binary.BigEndian.Uint32(respSizeBuf[:]) if respSize > maxAgentResponseBytes { - return nil, clientErr(err) + return nil, clientErr(errors.New("response too large")) } buf := make([]byte, respSize) if _, err = io.ReadFull(c.conn, buf); err != nil { return nil, clientErr(err) } - reply, err = unmarshal(buf) - if err != nil { - return nil, clientErr(err) - } - return reply, err + return buf, nil } func (c *client) simpleCall(req []byte) error { @@ -369,9 +429,14 @@ func (c *client) List() ([]*Key, error) { // Sign has the agent sign the data using a protocol 2 key as defined // in [PROTOCOL.agent] section 2.6.2. func (c *client) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) { + return c.SignWithFlags(key, data, 0) +} + +func (c *client) SignWithFlags(key ssh.PublicKey, data []byte, flags SignatureFlags) (*ssh.Signature, error) { req := ssh.Marshal(signRequestAgentMsg{ KeyBlob: key.Marshal(), Data: data, + Flags: uint32(flags), }) msg, err := c.call(req) @@ -515,7 +580,7 @@ func (c *client) insertKey(s interface{}, comment string, constraints []byte) er // if constraints are present then the message type needs to be changed. if len(constraints) != 0 { - req[0] = agentAddIdConstrained + req[0] = agentAddIDConstrained } resp, err := c.call(req) @@ -577,11 +642,11 @@ func (c *client) Add(key AddedKey) error { constraints = append(constraints, agentConstrainConfirm) } - if cert := key.Certificate; cert == nil { + cert := key.Certificate + if cert == nil { return c.insertKey(key.PrivateKey, key.Comment, constraints) - } else { - return c.insertCert(key.PrivateKey, cert, key.Comment, constraints) } + return c.insertCert(key.PrivateKey, cert, key.Comment, constraints) } func (c *client) insertCert(s interface{}, cert *ssh.Certificate, comment string, constraints []byte) error { @@ -633,7 +698,7 @@ func (c *client) insertCert(s interface{}, cert *ssh.Certificate, comment string // if constraints are present then the message type needs to be changed. if len(constraints) != 0 { - req[0] = agentAddIdConstrained + req[0] = agentAddIDConstrained } signer, err := ssh.NewSignerFromKey(s) @@ -681,3 +746,44 @@ func (s *agentKeyringSigner) Sign(rand io.Reader, data []byte) (*ssh.Signature, // The agent has its own entropy source, so the rand argument is ignored. return s.agent.Sign(s.pub, data) } + +func (s *agentKeyringSigner) SignWithOpts(rand io.Reader, data []byte, opts crypto.SignerOpts) (*ssh.Signature, error) { + var flags SignatureFlags + if opts != nil { + switch opts.HashFunc() { + case crypto.SHA256: + flags = SignatureFlagRsaSha256 + case crypto.SHA512: + flags = SignatureFlagRsaSha512 + } + } + return s.agent.SignWithFlags(s.pub, data, flags) +} + +// Calls an extension method. It is up to the agent implementation as to whether or not +// any particular extension is supported and may always return an error. Because the +// type of the response is up to the implementation, this returns the bytes of the +// response and does not attempt any type of unmarshalling. 
+func (c *client) Extension(extensionType string, contents []byte) ([]byte, error) { + req := ssh.Marshal(extensionAgentMsg{ + ExtensionType: extensionType, + Contents: contents, + }) + buf, err := c.callRaw(req) + if err != nil { + return nil, err + } + if len(buf) == 0 { + return nil, errors.New("agent: failure; empty response") + } + // [PROTOCOL.agent] section 4.7 indicates that an SSH_AGENT_FAILURE message + // represents an agent that does not support the extension + if buf[0] == agentFailure { + return nil, ErrExtensionUnsupported + } + if buf[0] == agentExtensionFailure { + return nil, errors.New("agent: generic extension failure") + } + + return buf, nil +} diff --git a/vendor/golang.org/x/crypto/ssh/agent/client_test.go b/vendor/golang.org/x/crypto/ssh/agent/client_test.go index a5b20f5..c31853a 100644 --- a/vendor/golang.org/x/crypto/ssh/agent/client_test.go +++ b/vendor/golang.org/x/crypto/ssh/agent/client_test.go @@ -8,19 +8,21 @@ import ( "bytes" "crypto/rand" "errors" + "io" "net" "os" "os/exec" "path/filepath" "strconv" + "sync" "testing" "time" "golang.org/x/crypto/ssh" ) -// startOpenSSHAgent executes ssh-agent, and returns a Agent interface to it. -func startOpenSSHAgent(t *testing.T) (client Agent, socket string, cleanup func()) { +// startOpenSSHAgent executes ssh-agent, and returns an Agent interface to it. +func startOpenSSHAgent(t *testing.T) (client ExtendedAgent, socket string, cleanup func()) { if testing.Short() { // ssh-agent is not always available, and the key // types supported vary by platform. @@ -79,13 +81,12 @@ func startOpenSSHAgent(t *testing.T) (client Agent, socket string, cleanup func( } } -// startKeyringAgent uses Keyring to simulate a ssh-agent Server and returns a client. -func startKeyringAgent(t *testing.T) (client Agent, cleanup func()) { +func startAgent(t *testing.T, agent Agent) (client ExtendedAgent, cleanup func()) { c1, c2, err := netPipe() if err != nil { t.Fatalf("netPipe: %v", err) } - go ServeAgent(NewKeyring(), c2) + go ServeAgent(agent, c2) return NewClient(c1), func() { c1.Close() @@ -93,6 +94,11 @@ func startKeyringAgent(t *testing.T) (client Agent, cleanup func()) { } } +// startKeyringAgent uses Keyring to simulate a ssh-agent Server and returns a client. 
+func startKeyringAgent(t *testing.T) (client ExtendedAgent, cleanup func()) { + return startAgent(t, NewKeyring()) +} + func testOpenSSHAgent(t *testing.T, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) { agent, _, cleanup := startOpenSSHAgent(t) defer cleanup() @@ -107,7 +113,7 @@ func testKeyringAgent(t *testing.T, key interface{}, cert *ssh.Certificate, life testAgentInterface(t, agent, key, cert, lifetimeSecs) } -func testAgentInterface(t *testing.T, agent Agent, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) { +func testAgentInterface(t *testing.T, agent ExtendedAgent, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) { signer, err := ssh.NewSignerFromKey(key) if err != nil { t.Fatalf("NewSignerFromKey(%T): %v", key, err) @@ -159,6 +165,25 @@ func testAgentInterface(t *testing.T, agent Agent, key interface{}, cert *ssh.Ce t.Fatalf("Verify(%s): %v", pubKey.Type(), err) } + // For tests on RSA keys, try signing with SHA-256 and SHA-512 flags + if pubKey.Type() == "ssh-rsa" { + sshFlagTest := func(flag SignatureFlags, expectedSigFormat string) { + sig, err = agent.SignWithFlags(pubKey, data, flag) + if err != nil { + t.Fatalf("SignWithFlags(%s): %v", pubKey.Type(), err) + } + if sig.Format != expectedSigFormat { + t.Fatalf("Signature format didn't match expected value: %s != %s", sig.Format, expectedSigFormat) + } + if err := pubKey.Verify(data, sig); err != nil { + t.Fatalf("Verify(%s): %v", pubKey.Type(), err) + } + } + sshFlagTest(0, ssh.SigAlgoRSA) + sshFlagTest(SignatureFlagRsaSha256, ssh.SigAlgoRSASHA2256) + sshFlagTest(SignatureFlagRsaSha512, ssh.SigAlgoRSASHA2512) + } + // If the key has a lifetime, is it removed when it should be? if lifetimeSecs > 0 { time.Sleep(time.Second*time.Duration(lifetimeSecs) + 100*time.Millisecond) @@ -173,6 +198,63 @@ func testAgentInterface(t *testing.T, agent Agent, key interface{}, cert *ssh.Ce } +func TestMalformedRequests(t *testing.T) { + keyringAgent := NewKeyring() + listener, err := netListener() + if err != nil { + t.Fatalf("netListener: %v", err) + } + defer listener.Close() + + testCase := func(t *testing.T, requestBytes []byte, wantServerErr bool) { + var wg sync.WaitGroup + wg.Add(1) + go func() { + defer wg.Done() + c, err := listener.Accept() + if err != nil { + t.Errorf("listener.Accept: %v", err) + return + } + defer c.Close() + + err = ServeAgent(keyringAgent, c) + if err == nil { + t.Error("ServeAgent should have returned an error to malformed input") + } else { + if (err != io.EOF) != wantServerErr { + t.Errorf("ServeAgent returned expected error: %v", err) + } + } + }() + + c, err := net.Dial("tcp", listener.Addr().String()) + if err != nil { + t.Fatalf("net.Dial: %v", err) + } + _, err = c.Write(requestBytes) + if err != nil { + t.Errorf("Unexpected error writing raw bytes on connection: %v", err) + } + c.Close() + wg.Wait() + } + + var testCases = []struct { + name string + requestBytes []byte + wantServerErr bool + }{ + {"Empty request", []byte{}, false}, + {"Short header", []byte{0x00}, true}, + {"Empty body", []byte{0x00, 0x00, 0x00, 0x00}, true}, + {"Short body", []byte{0x00, 0x00, 0x00, 0x01}, false}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { testCase(t, tc.requestBytes, tc.wantServerErr) }) + } +} + func TestAgent(t *testing.T) { for _, keyType := range []string{"rsa", "dsa", "ecdsa", "ed25519"} { testOpenSSHAgent(t, testPrivateKeys[keyType], nil, 0) @@ -192,17 +274,26 @@ func TestCert(t *testing.T) { testKeyringAgent(t, testPrivateKeys["rsa"], 
cert, 0) } -// netPipe is analogous to net.Pipe, but it uses a real net.Conn, and -// therefore is buffered (net.Pipe deadlocks if both sides start with -// a write.) -func netPipe() (net.Conn, net.Conn, error) { +// netListener creates a localhost network listener. +func netListener() (net.Listener, error) { listener, err := net.Listen("tcp", "127.0.0.1:0") if err != nil { listener, err = net.Listen("tcp", "[::1]:0") if err != nil { - return nil, nil, err + return nil, err } } + return listener, nil +} + +// netPipe is analogous to net.Pipe, but it uses a real net.Conn, and +// therefore is buffered (net.Pipe deadlocks if both sides start with +// a write.) +func netPipe() (net.Conn, net.Conn, error) { + listener, err := netListener() + if err != nil { + return nil, nil, err + } defer listener.Close() c1, err := net.Dial("tcp", listener.Addr().String()) if err != nil { @@ -218,6 +309,35 @@ func netPipe() (net.Conn, net.Conn, error) { return c1, c2, nil } +func TestServerResponseTooLarge(t *testing.T) { + a, b, err := netPipe() + if err != nil { + t.Fatalf("netPipe: %v", err) + } + + defer a.Close() + defer b.Close() + + var response identitiesAnswerAgentMsg + response.NumKeys = 1 + response.Keys = make([]byte, maxAgentResponseBytes+1) + + agent := NewClient(a) + go func() { + n, _ := b.Write(ssh.Marshal(response)) + if n < 4 { + t.Fatalf("At least 4 bytes (the response size) should have been successfully written: %d < 4", n) + } + }() + _, err = agent.List() + if err == nil { + t.Fatal("Did not get error result") + } + if err.Error() != "agent: client error: response too large" { + t.Fatal("Did not get expected error result") + } +} + func TestAuth(t *testing.T) { agent, _, cleanup := startOpenSSHAgent(t) defer cleanup() @@ -377,3 +497,38 @@ func testAgentLifetime(t *testing.T, agent Agent) { t.Errorf("Want 0 keys, got %v", len(keys)) } } + +type keyringExtended struct { + *keyring +} + +func (r *keyringExtended) Extension(extensionType string, contents []byte) ([]byte, error) { + if extensionType != "my-extension@example.com" { + return []byte{agentExtensionFailure}, nil + } + return append([]byte{agentSuccess}, contents...), nil +} + +func TestAgentExtensions(t *testing.T) { + agent, _, cleanup := startOpenSSHAgent(t) + defer cleanup() + _, err := agent.Extension("my-extension@example.com", []byte{0x00, 0x01, 0x02}) + if err == nil { + t.Fatal("should have gotten agent extension failure") + } + + agent, cleanup = startAgent(t, &keyringExtended{}) + defer cleanup() + result, err := agent.Extension("my-extension@example.com", []byte{0x00, 0x01, 0x02}) + if err != nil { + t.Fatalf("agent extension failure: %v", err) + } + if len(result) != 4 || !bytes.Equal(result, []byte{agentSuccess, 0x00, 0x01, 0x02}) { + t.Fatalf("agent extension result invalid: %v", result) + } + + _, err = agent.Extension("bad-extension@example.com", []byte{0x00, 0x01, 0x02}) + if err == nil { + t.Fatal("should have gotten agent extension failure") + } +} diff --git a/vendor/golang.org/x/crypto/ssh/agent/keyring.go b/vendor/golang.org/x/crypto/ssh/agent/keyring.go index a6ba06a..c9d9794 100644 --- a/vendor/golang.org/x/crypto/ssh/agent/keyring.go +++ b/vendor/golang.org/x/crypto/ssh/agent/keyring.go @@ -102,7 +102,7 @@ func (r *keyring) Unlock(passphrase []byte) error { if !r.locked { return errors.New("agent: not locked") } - if len(passphrase) != len(r.passphrase) || 1 != subtle.ConstantTimeCompare(passphrase, r.passphrase) { + if 1 != subtle.ConstantTimeCompare(passphrase, r.passphrase) { return fmt.Errorf("agent: 
incorrect passphrase") } @@ -182,6 +182,10 @@ func (r *keyring) Add(key AddedKey) error { // Sign returns a signature for the data. func (r *keyring) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) { + return r.SignWithFlags(key, data, 0) +} + +func (r *keyring) SignWithFlags(key ssh.PublicKey, data []byte, flags SignatureFlags) (*ssh.Signature, error) { r.mu.Lock() defer r.mu.Unlock() if r.locked { @@ -192,7 +196,24 @@ func (r *keyring) Sign(key ssh.PublicKey, data []byte) (*ssh.Signature, error) { wanted := key.Marshal() for _, k := range r.keys { if bytes.Equal(k.signer.PublicKey().Marshal(), wanted) { - return k.signer.Sign(rand.Reader, data) + if flags == 0 { + return k.signer.Sign(rand.Reader, data) + } else { + if algorithmSigner, ok := k.signer.(ssh.AlgorithmSigner); !ok { + return nil, fmt.Errorf("agent: signature does not support non-default signature algorithm: %T", k.signer) + } else { + var algorithm string + switch flags { + case SignatureFlagRsaSha256: + algorithm = ssh.SigAlgoRSASHA2256 + case SignatureFlagRsaSha512: + algorithm = ssh.SigAlgoRSASHA2512 + default: + return nil, fmt.Errorf("agent: unsupported signature flags: %d", flags) + } + return algorithmSigner.SignWithAlgorithm(rand.Reader, data, algorithm) + } + } } } return nil, errors.New("not found") @@ -213,3 +234,8 @@ func (r *keyring) Signers() ([]ssh.Signer, error) { } return s, nil } + +// The keyring does not support any extensions +func (r *keyring) Extension(extensionType string, contents []byte) ([]byte, error) { + return nil, ErrExtensionUnsupported +} diff --git a/vendor/golang.org/x/crypto/ssh/agent/server.go b/vendor/golang.org/x/crypto/ssh/agent/server.go index 321e48a..6e7a1e0 100644 --- a/vendor/golang.org/x/crypto/ssh/agent/server.go +++ b/vendor/golang.org/x/crypto/ssh/agent/server.go @@ -128,7 +128,14 @@ func (s *server) processRequest(data []byte) (interface{}, error) { Blob: req.KeyBlob, } - sig, err := s.agent.Sign(k, req.Data) // TODO(hanwen): flags. + var sig *ssh.Signature + var err error + if extendedAgent, ok := s.agent.(ExtendedAgent); ok { + sig, err = extendedAgent.SignWithFlags(k, req.Data, SignatureFlags(req.Flags)) + } else { + sig, err = s.agent.Sign(k, req.Data) + } + if err != nil { return nil, err } @@ -148,8 +155,45 @@ func (s *server) processRequest(data []byte) (interface{}, error) { } return rep, nil - case agentAddIdConstrained, agentAddIdentity: + case agentAddIDConstrained, agentAddIdentity: return nil, s.insertIdentity(data) + + case agentExtension: + // Return a stub object where the whole contents of the response gets marshaled. + var responseStub struct { + Rest []byte `ssh:"rest"` + } + + if extendedAgent, ok := s.agent.(ExtendedAgent); !ok { + // If this agent doesn't implement extensions, [PROTOCOL.agent] section 4.7 + // requires that we return a standard SSH_AGENT_FAILURE message. + responseStub.Rest = []byte{agentFailure} + } else { + var req extensionAgentMsg + if err := ssh.Unmarshal(data, &req); err != nil { + return nil, err + } + res, err := extendedAgent.Extension(req.ExtensionType, req.Contents) + if err != nil { + // If agent extensions are unsupported, return a standard SSH_AGENT_FAILURE + // message as required by [PROTOCOL.agent] section 4.7. + if err == ErrExtensionUnsupported { + responseStub.Rest = []byte{agentFailure} + } else { + // As the result of any other error processing an extension request, + // [PROTOCOL.agent] section 4.7 requires that we return a + // SSH_AGENT_EXTENSION_FAILURE code. 
+ responseStub.Rest = []byte{agentExtensionFailure} + } + } else { + if len(res) == 0 { + return nil, nil + } + responseStub.Rest = res + } + } + + return responseStub, nil } return nil, fmt.Errorf("unknown opcode %d", data[0]) @@ -497,6 +541,9 @@ func ServeAgent(agent Agent, c io.ReadWriter) error { return err } l := binary.BigEndian.Uint32(length[:]) + if l == 0 { + return fmt.Errorf("agent: request size is 0") + } if l > maxAgentResponseBytes { // We also cap requests. return fmt.Errorf("agent: request too large: %d", l) diff --git a/vendor/golang.org/x/crypto/ssh/benchmark_test.go b/vendor/golang.org/x/crypto/ssh/benchmark_test.go index d9f7eb9..a13235d 100644 --- a/vendor/golang.org/x/crypto/ssh/benchmark_test.go +++ b/vendor/golang.org/x/crypto/ssh/benchmark_test.go @@ -40,7 +40,8 @@ func sshPipe() (Conn, *server, error) { } clientConf := ClientConfig{ - User: "user", + User: "user", + HostKeyCallback: InsecureIgnoreHostKey(), } serverConf := ServerConfig{ NoClientAuth: true, @@ -92,6 +93,9 @@ func BenchmarkEndToEnd(b *testing.B) { b.Fatalf("Client: %v", err) } ch, incoming, err := newCh.Accept() + if err != nil { + b.Fatalf("Accept: %v", err) + } go DiscardRequests(incoming) for i := 0; i < b.N; i++ { if _, err := io.ReadFull(ch, output); err != nil { diff --git a/vendor/golang.org/x/crypto/ssh/buffer.go b/vendor/golang.org/x/crypto/ssh/buffer.go index 6931b51..1ab07d0 100644 --- a/vendor/golang.org/x/crypto/ssh/buffer.go +++ b/vendor/golang.org/x/crypto/ssh/buffer.go @@ -51,13 +51,12 @@ func (b *buffer) write(buf []byte) { } // eof closes the buffer. Reads from the buffer once all -// the data has been consumed will receive os.EOF. -func (b *buffer) eof() error { +// the data has been consumed will receive io.EOF. +func (b *buffer) eof() { b.Cond.L.Lock() b.closed = true b.Cond.Signal() b.Cond.L.Unlock() - return nil } // Read reads data from the internal buffer in buf. Reads will block diff --git a/vendor/golang.org/x/crypto/ssh/certs.go b/vendor/golang.org/x/crypto/ssh/certs.go index b1f0220..00ed992 100644 --- a/vendor/golang.org/x/crypto/ssh/certs.go +++ b/vendor/golang.org/x/crypto/ssh/certs.go @@ -44,7 +44,9 @@ type Signature struct { const CertTimeInfinity = 1<<64 - 1 // An Certificate represents an OpenSSH certificate as defined in -// [PROTOCOL.certkeys]?rev=1.8. +// [PROTOCOL.certkeys]?rev=1.8. The Certificate type implements the +// PublicKey interface, so it can be unmarshaled using +// ParsePublicKey. type Certificate struct { Nonce []byte Key PublicKey @@ -220,6 +222,11 @@ type openSSHCertSigner struct { signer Signer } +type algorithmOpenSSHCertSigner struct { + *openSSHCertSigner + algorithmSigner AlgorithmSigner +} + // NewCertSigner returns a Signer that signs with the given Certificate, whose // private key is held by signer. It returns an error if the public key in cert // doesn't match the key used by signer. 
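Usage sketch (not part of the patch): the certs.go hunk above introduces algorithmOpenSSHCertSigner, and the next hunk makes NewCertSigner return it whenever the wrapped Signer also implements AlgorithmSigner, so certificate-backed signers keep the ability to request an RSA SHA-2 signature. A minimal illustration under assumed inputs — a PEM-encoded private key and an already-signed certificate; signWithCert and its parameters are illustrative names, not part of the diff:

package example

import (
	"crypto/rand"

	"golang.org/x/crypto/ssh"
)

// signWithCert wraps a private key in a certificate signer and, when the
// result still implements ssh.AlgorithmSigner, requests an rsa-sha2-256
// signature; otherwise it falls back to the signer's default algorithm.
func signWithCert(pemBytes []byte, cert *ssh.Certificate, data []byte) (*ssh.Signature, error) {
	signer, err := ssh.ParsePrivateKey(pemBytes)
	if err != nil {
		return nil, err
	}
	certSigner, err := ssh.NewCertSigner(cert, signer)
	if err != nil {
		return nil, err
	}
	if as, ok := certSigner.(ssh.AlgorithmSigner); ok {
		return as.SignWithAlgorithm(rand.Reader, data, ssh.SigAlgoRSASHA2256)
	}
	return certSigner.Sign(rand.Reader, data)
}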
@@ -228,7 +235,12 @@ func NewCertSigner(cert *Certificate, signer Signer) (Signer, error) { return nil, errors.New("ssh: signer and cert have different public key") } - return &openSSHCertSigner{cert, signer}, nil + if algorithmSigner, ok := signer.(AlgorithmSigner); ok { + return &algorithmOpenSSHCertSigner{ + &openSSHCertSigner{cert, signer}, algorithmSigner}, nil + } else { + return &openSSHCertSigner{cert, signer}, nil + } } func (s *openSSHCertSigner) Sign(rand io.Reader, data []byte) (*Signature, error) { @@ -239,6 +251,10 @@ func (s *openSSHCertSigner) PublicKey() PublicKey { return s.pub } +func (s *algorithmOpenSSHCertSigner) SignWithAlgorithm(rand io.Reader, data []byte, algorithm string) (*Signature, error) { + return s.algorithmSigner.SignWithAlgorithm(rand, data, algorithm) +} + const sourceAddressCriticalOption = "source-address" // CertChecker does the work of verifying a certificate. Its methods @@ -340,10 +356,10 @@ func (c *CertChecker) Authenticate(conn ConnMetadata, pubKey PublicKey) (*Permis // the signature of the certificate. func (c *CertChecker) CheckCert(principal string, cert *Certificate) error { if c.IsRevoked != nil && c.IsRevoked(cert) { - return fmt.Errorf("ssh: certicate serial %d revoked", cert.Serial) + return fmt.Errorf("ssh: certificate serial %d revoked", cert.Serial) } - for opt, _ := range cert.CriticalOptions { + for opt := range cert.CriticalOptions { // sourceAddressCriticalOption will be enforced by // serverAuthenticate if opt == sourceAddressCriticalOption { diff --git a/vendor/golang.org/x/crypto/ssh/certs_test.go b/vendor/golang.org/x/crypto/ssh/certs_test.go index 0200531..c8e7cf5 100644 --- a/vendor/golang.org/x/crypto/ssh/certs_test.go +++ b/vendor/golang.org/x/crypto/ssh/certs_test.go @@ -6,10 +6,15 @@ package ssh import ( "bytes" + "crypto/ecdsa" + "crypto/elliptic" "crypto/rand" + "net" "reflect" "testing" "time" + + "golang.org/x/crypto/ssh/testdata" ) // Cert generated by ssh-keygen 6.0p1 Debian-4. 
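Related usage sketch (not part of the patch): the certs.go doc comment earlier in this diff now notes that Certificate implements the PublicKey interface and can be unmarshaled with ParsePublicKey. A minimal round trip under that assumption — blob is an assumed wire-format certificate and parseCert is an illustrative helper name:

package example

import (
	"fmt"

	"golang.org/x/crypto/ssh"
)

// parseCert parses a wire-format public key blob and type-asserts the result
// to *ssh.Certificate, as the updated Certificate doc comment describes.
func parseCert(blob []byte) (*ssh.Certificate, error) {
	pub, err := ssh.ParsePublicKey(blob)
	if err != nil {
		return nil, err
	}
	cert, ok := pub.(*ssh.Certificate)
	if !ok {
		return nil, fmt.Errorf("not a certificate: %T", pub)
	}
	return cert, nil
}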
@@ -220,3 +225,111 @@ func TestHostKeyCert(t *testing.T) { } } } + +func TestCertTypes(t *testing.T) { + var testVars = []struct { + name string + keys func() Signer + }{ + { + name: CertAlgoECDSA256v01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["ecdsap256"]) + return s + }, + }, + { + name: CertAlgoECDSA384v01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["ecdsap384"]) + return s + }, + }, + { + name: CertAlgoECDSA521v01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["ecdsap521"]) + return s + }, + }, + { + name: CertAlgoED25519v01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["ed25519"]) + return s + }, + }, + { + name: CertAlgoRSAv01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["rsa"]) + return s + }, + }, + { + name: CertAlgoDSAv01, + keys: func() Signer { + s, _ := ParsePrivateKey(testdata.PEMBytes["dsa"]) + return s + }, + }, + } + + k, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + t.Fatalf("error generating host key: %v", err) + } + + signer, err := NewSignerFromKey(k) + if err != nil { + t.Fatalf("error generating signer for ssh listener: %v", err) + } + + conf := &ServerConfig{ + PublicKeyCallback: func(c ConnMetadata, k PublicKey) (*Permissions, error) { + return new(Permissions), nil + }, + } + conf.AddHostKey(signer) + + for _, m := range testVars { + t.Run(m.name, func(t *testing.T) { + + c1, c2, err := netPipe() + if err != nil { + t.Fatalf("netPipe: %v", err) + } + defer c1.Close() + defer c2.Close() + + go NewServerConn(c1, conf) + + priv := m.keys() + if err != nil { + t.Fatalf("error generating ssh pubkey: %v", err) + } + + cert := &Certificate{ + CertType: UserCert, + Key: priv.PublicKey(), + } + cert.SignCert(rand.Reader, priv) + + certSigner, err := NewCertSigner(cert, priv) + if err != nil { + t.Fatalf("error generating cert signer: %v", err) + } + + config := &ClientConfig{ + User: "user", + HostKeyCallback: func(h string, r net.Addr, k PublicKey) error { return nil }, + Auth: []AuthMethod{PublicKeys(certSigner)}, + } + + _, _, _, err = NewClientConn(c2, "", config) + if err != nil { + t.Fatalf("error connecting: %v", err) + } + }) + } +} diff --git a/vendor/golang.org/x/crypto/ssh/channel.go b/vendor/golang.org/x/crypto/ssh/channel.go index 195530e..c0834c0 100644 --- a/vendor/golang.org/x/crypto/ssh/channel.go +++ b/vendor/golang.org/x/crypto/ssh/channel.go @@ -205,32 +205,32 @@ type channel struct { // writePacket sends a packet. If the packet is a channel close, it updates // sentClose. This method takes the lock c.writeMu. 
-func (c *channel) writePacket(packet []byte) error { - c.writeMu.Lock() - if c.sentClose { - c.writeMu.Unlock() +func (ch *channel) writePacket(packet []byte) error { + ch.writeMu.Lock() + if ch.sentClose { + ch.writeMu.Unlock() return io.EOF } - c.sentClose = (packet[0] == msgChannelClose) - err := c.mux.conn.writePacket(packet) - c.writeMu.Unlock() + ch.sentClose = (packet[0] == msgChannelClose) + err := ch.mux.conn.writePacket(packet) + ch.writeMu.Unlock() return err } -func (c *channel) sendMessage(msg interface{}) error { +func (ch *channel) sendMessage(msg interface{}) error { if debugMux { - log.Printf("send(%d): %#v", c.mux.chanList.offset, msg) + log.Printf("send(%d): %#v", ch.mux.chanList.offset, msg) } p := Marshal(msg) - binary.BigEndian.PutUint32(p[1:], c.remoteId) - return c.writePacket(p) + binary.BigEndian.PutUint32(p[1:], ch.remoteId) + return ch.writePacket(p) } // WriteExtended writes data to a specific extended stream. These streams are // used, for example, for stderr. -func (c *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err error) { - if c.sentEOF { +func (ch *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err error) { + if ch.sentEOF { return 0, io.EOF } // 1 byte message type, 4 bytes remoteId, 4 bytes data length @@ -241,16 +241,16 @@ func (c *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err er opCode = msgChannelExtendedData } - c.writeMu.Lock() - packet := c.packetPool[extendedCode] + ch.writeMu.Lock() + packet := ch.packetPool[extendedCode] // We don't remove the buffer from packetPool, so // WriteExtended calls from different goroutines will be // flagged as errors by the race detector. - c.writeMu.Unlock() + ch.writeMu.Unlock() for len(data) > 0 { - space := min(c.maxRemotePayload, len(data)) - if space, err = c.remoteWin.reserve(space); err != nil { + space := min(ch.maxRemotePayload, len(data)) + if space, err = ch.remoteWin.reserve(space); err != nil { return n, err } if want := headerLength + space; uint32(cap(packet)) < want { @@ -262,13 +262,13 @@ func (c *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err er todo := data[:space] packet[0] = opCode - binary.BigEndian.PutUint32(packet[1:], c.remoteId) + binary.BigEndian.PutUint32(packet[1:], ch.remoteId) if extendedCode > 0 { binary.BigEndian.PutUint32(packet[5:], uint32(extendedCode)) } binary.BigEndian.PutUint32(packet[headerLength-4:], uint32(len(todo))) copy(packet[headerLength:], todo) - if err = c.writePacket(packet); err != nil { + if err = ch.writePacket(packet); err != nil { return n, err } @@ -276,14 +276,14 @@ func (c *channel) WriteExtended(data []byte, extendedCode uint32) (n int, err er data = data[len(todo):] } - c.writeMu.Lock() - c.packetPool[extendedCode] = packet - c.writeMu.Unlock() + ch.writeMu.Lock() + ch.packetPool[extendedCode] = packet + ch.writeMu.Unlock() return n, err } -func (c *channel) handleData(packet []byte) error { +func (ch *channel) handleData(packet []byte) error { headerLen := 9 isExtendedData := packet[0] == msgChannelExtendedData if isExtendedData { @@ -303,7 +303,7 @@ func (c *channel) handleData(packet []byte) error { if length == 0 { return nil } - if length > c.maxIncomingPayload { + if length > ch.maxIncomingPayload { // TODO(hanwen): should send Disconnect? 
return errors.New("ssh: incoming packet exceeds maximum payload size") } @@ -313,21 +313,21 @@ func (c *channel) handleData(packet []byte) error { return errors.New("ssh: wrong packet length") } - c.windowMu.Lock() - if c.myWindow < length { - c.windowMu.Unlock() + ch.windowMu.Lock() + if ch.myWindow < length { + ch.windowMu.Unlock() // TODO(hanwen): should send Disconnect with reason? return errors.New("ssh: remote side wrote too much") } - c.myWindow -= length - c.windowMu.Unlock() + ch.myWindow -= length + ch.windowMu.Unlock() if extended == 1 { - c.extPending.write(data) + ch.extPending.write(data) } else if extended > 0 { // discard other extended data. } else { - c.pending.write(data) + ch.pending.write(data) } return nil } @@ -384,31 +384,31 @@ func (c *channel) close() { // responseMessageReceived is called when a success or failure message is // received on a channel to check that such a message is reasonable for the // given channel. -func (c *channel) responseMessageReceived() error { - if c.direction == channelInbound { +func (ch *channel) responseMessageReceived() error { + if ch.direction == channelInbound { return errors.New("ssh: channel response message received on inbound channel") } - if c.decided { + if ch.decided { return errors.New("ssh: duplicate response received for channel") } - c.decided = true + ch.decided = true return nil } -func (c *channel) handlePacket(packet []byte) error { +func (ch *channel) handlePacket(packet []byte) error { switch packet[0] { case msgChannelData, msgChannelExtendedData: - return c.handleData(packet) + return ch.handleData(packet) case msgChannelClose: - c.sendMessage(channelCloseMsg{PeersId: c.remoteId}) - c.mux.chanList.remove(c.localId) - c.close() + ch.sendMessage(channelCloseMsg{PeersID: ch.remoteId}) + ch.mux.chanList.remove(ch.localId) + ch.close() return nil case msgChannelEOF: // RFC 4254 is mute on how EOF affects dataExt messages but // it is logical to signal EOF at the same time. 
- c.extPending.eof() - c.pending.eof() + ch.extPending.eof() + ch.pending.eof() return nil } @@ -419,24 +419,24 @@ func (c *channel) handlePacket(packet []byte) error { switch msg := decoded.(type) { case *channelOpenFailureMsg: - if err := c.responseMessageReceived(); err != nil { + if err := ch.responseMessageReceived(); err != nil { return err } - c.mux.chanList.remove(msg.PeersId) - c.msg <- msg + ch.mux.chanList.remove(msg.PeersID) + ch.msg <- msg case *channelOpenConfirmMsg: - if err := c.responseMessageReceived(); err != nil { + if err := ch.responseMessageReceived(); err != nil { return err } if msg.MaxPacketSize < minPacketLength || msg.MaxPacketSize > 1<<31 { return fmt.Errorf("ssh: invalid MaxPacketSize %d from peer", msg.MaxPacketSize) } - c.remoteId = msg.MyId - c.maxRemotePayload = msg.MaxPacketSize - c.remoteWin.add(msg.MyWindow) - c.msg <- msg + ch.remoteId = msg.MyID + ch.maxRemotePayload = msg.MaxPacketSize + ch.remoteWin.add(msg.MyWindow) + ch.msg <- msg case *windowAdjustMsg: - if !c.remoteWin.add(msg.AdditionalBytes) { + if !ch.remoteWin.add(msg.AdditionalBytes) { return fmt.Errorf("ssh: invalid window update for %d bytes", msg.AdditionalBytes) } case *channelRequestMsg: @@ -444,12 +444,12 @@ func (c *channel) handlePacket(packet []byte) error { Type: msg.Request, WantReply: msg.WantReply, Payload: msg.RequestSpecificData, - ch: c, + ch: ch, } - c.incomingRequests <- &req + ch.incomingRequests <- &req default: - c.msg <- msg + ch.msg <- msg } return nil } @@ -488,23 +488,23 @@ func (e *extChannel) Read(data []byte) (n int, err error) { return e.ch.ReadExtended(data, e.code) } -func (c *channel) Accept() (Channel, <-chan *Request, error) { - if c.decided { +func (ch *channel) Accept() (Channel, <-chan *Request, error) { + if ch.decided { return nil, nil, errDecidedAlready } - c.maxIncomingPayload = channelMaxPacket + ch.maxIncomingPayload = channelMaxPacket confirm := channelOpenConfirmMsg{ - PeersId: c.remoteId, - MyId: c.localId, - MyWindow: c.myWindow, - MaxPacketSize: c.maxIncomingPayload, + PeersID: ch.remoteId, + MyID: ch.localId, + MyWindow: ch.myWindow, + MaxPacketSize: ch.maxIncomingPayload, } - c.decided = true - if err := c.sendMessage(confirm); err != nil { + ch.decided = true + if err := ch.sendMessage(confirm); err != nil { return nil, nil, err } - return c, c.incomingRequests, nil + return ch, ch.incomingRequests, nil } func (ch *channel) Reject(reason RejectionReason, message string) error { @@ -512,7 +512,7 @@ func (ch *channel) Reject(reason RejectionReason, message string) error { return errDecidedAlready } reject := channelOpenFailureMsg{ - PeersId: ch.remoteId, + PeersID: ch.remoteId, Reason: reason, Message: message, Language: "en", @@ -541,7 +541,7 @@ func (ch *channel) CloseWrite() error { } ch.sentEOF = true return ch.sendMessage(channelEOFMsg{ - PeersId: ch.remoteId}) + PeersID: ch.remoteId}) } func (ch *channel) Close() error { @@ -550,7 +550,7 @@ func (ch *channel) Close() error { } return ch.sendMessage(channelCloseMsg{ - PeersId: ch.remoteId}) + PeersID: ch.remoteId}) } // Extended returns an io.ReadWriter that sends and receives data on the given, @@ -577,7 +577,7 @@ func (ch *channel) SendRequest(name string, wantReply bool, payload []byte) (boo } msg := channelRequestMsg{ - PeersId: ch.remoteId, + PeersID: ch.remoteId, Request: name, WantReply: wantReply, RequestSpecificData: payload, @@ -614,11 +614,11 @@ func (ch *channel) ackRequest(ok bool) error { var msg interface{} if !ok { msg = channelRequestFailureMsg{ - PeersId: ch.remoteId, + 
PeersID: ch.remoteId, } } else { msg = channelRequestSuccessMsg{ - PeersId: ch.remoteId, + PeersID: ch.remoteId, } } return ch.sendMessage(msg) diff --git a/vendor/golang.org/x/crypto/ssh/cipher.go b/vendor/golang.org/x/crypto/ssh/cipher.go index 22bb30c..67b0126 100644 --- a/vendor/golang.org/x/crypto/ssh/cipher.go +++ b/vendor/golang.org/x/crypto/ssh/cipher.go @@ -16,6 +16,10 @@ import ( "hash" "io" "io/ioutil" + "math/bits" + + "golang.org/x/crypto/internal/chacha20" + "golang.org/x/crypto/poly1305" ) const ( @@ -53,78 +57,78 @@ func newRC4(key, iv []byte) (cipher.Stream, error) { return rc4.NewCipher(key) } -type streamCipherMode struct { - keySize int - ivSize int - skip int - createFunc func(key, iv []byte) (cipher.Stream, error) +type cipherMode struct { + keySize int + ivSize int + create func(key, iv []byte, macKey []byte, algs directionAlgorithms) (packetCipher, error) } -func (c *streamCipherMode) createStream(key, iv []byte) (cipher.Stream, error) { - if len(key) < c.keySize { - panic("ssh: key length too small for cipher") - } - if len(iv) < c.ivSize { - panic("ssh: iv too small for cipher") - } - - stream, err := c.createFunc(key[:c.keySize], iv[:c.ivSize]) - if err != nil { - return nil, err - } +func streamCipherMode(skip int, createFunc func(key, iv []byte) (cipher.Stream, error)) func(key, iv []byte, macKey []byte, algs directionAlgorithms) (packetCipher, error) { + return func(key, iv, macKey []byte, algs directionAlgorithms) (packetCipher, error) { + stream, err := createFunc(key, iv) + if err != nil { + return nil, err + } - var streamDump []byte - if c.skip > 0 { - streamDump = make([]byte, 512) - } + var streamDump []byte + if skip > 0 { + streamDump = make([]byte, 512) + } - for remainingToDump := c.skip; remainingToDump > 0; { - dumpThisTime := remainingToDump - if dumpThisTime > len(streamDump) { - dumpThisTime = len(streamDump) + for remainingToDump := skip; remainingToDump > 0; { + dumpThisTime := remainingToDump + if dumpThisTime > len(streamDump) { + dumpThisTime = len(streamDump) + } + stream.XORKeyStream(streamDump[:dumpThisTime], streamDump[:dumpThisTime]) + remainingToDump -= dumpThisTime } - stream.XORKeyStream(streamDump[:dumpThisTime], streamDump[:dumpThisTime]) - remainingToDump -= dumpThisTime - } - return stream, nil + mac := macModes[algs.MAC].new(macKey) + return &streamPacketCipher{ + mac: mac, + etm: macModes[algs.MAC].etm, + macResult: make([]byte, mac.Size()), + cipher: stream, + }, nil + } } // cipherModes documents properties of supported ciphers. Ciphers not included // are not supported and will not be negotiated, even if explicitly requested in // ClientConfig.Crypto.Ciphers. -var cipherModes = map[string]*streamCipherMode{ +var cipherModes = map[string]*cipherMode{ // Ciphers from RFC4344, which introduced many CTR-based ciphers. Algorithms // are defined in the order specified in the RFC. - "aes128-ctr": {16, aes.BlockSize, 0, newAESCTR}, - "aes192-ctr": {24, aes.BlockSize, 0, newAESCTR}, - "aes256-ctr": {32, aes.BlockSize, 0, newAESCTR}, + "aes128-ctr": {16, aes.BlockSize, streamCipherMode(0, newAESCTR)}, + "aes192-ctr": {24, aes.BlockSize, streamCipherMode(0, newAESCTR)}, + "aes256-ctr": {32, aes.BlockSize, streamCipherMode(0, newAESCTR)}, // Ciphers from RFC4345, which introduces security-improved arcfour ciphers. // They are defined in the order specified in the RFC. 
- "arcfour128": {16, 0, 1536, newRC4}, - "arcfour256": {32, 0, 1536, newRC4}, + "arcfour128": {16, 0, streamCipherMode(1536, newRC4)}, + "arcfour256": {32, 0, streamCipherMode(1536, newRC4)}, // Cipher defined in RFC 4253, which describes SSH Transport Layer Protocol. // Note that this cipher is not safe, as stated in RFC 4253: "Arcfour (and // RC4) has problems with weak keys, and should be used with caution." // RFC4345 introduces improved versions of Arcfour. - "arcfour": {16, 0, 0, newRC4}, + "arcfour": {16, 0, streamCipherMode(0, newRC4)}, - // AES-GCM is not a stream cipher, so it is constructed with a - // special case. If we add any more non-stream ciphers, we - // should invest a cleaner way to do this. - gcmCipherID: {16, 12, 0, nil}, + // AEAD ciphers + gcmCipherID: {16, 12, newGCMCipher}, + chacha20Poly1305ID: {64, 0, newChaCha20Cipher}, // CBC mode is insecure and so is not included in the default config. // (See http://www.isg.rhul.ac.uk/~kp/SandPfinal.pdf). If absolutely // needed, it's possible to specify a custom Config to enable it. // You should expect that an active attacker can recover plaintext if // you do. - aes128cbcID: {16, aes.BlockSize, 0, nil}, + aes128cbcID: {16, aes.BlockSize, newAESCBCCipher}, - // 3des-cbc is insecure and is disabled by default. - tripledescbcID: {24, des.BlockSize, 0, nil}, + // 3des-cbc is insecure and is not included in the default + // config. + tripledescbcID: {24, des.BlockSize, newTripleDESCBCCipher}, } // prefixLen is the length of the packet prefix that contains the packet length @@ -304,7 +308,7 @@ type gcmCipher struct { buf []byte } -func newGCMCipher(iv, key, macKey []byte) (packetCipher, error) { +func newGCMCipher(key, iv, unusedMacKey []byte, unusedAlgs directionAlgorithms) (packetCipher, error) { c, err := aes.NewCipher(key) if err != nil { return nil, err @@ -372,7 +376,7 @@ func (c *gcmCipher) readPacket(seqNum uint32, r io.Reader) ([]byte, error) { } length := binary.BigEndian.Uint32(c.prefix[:]) if length > maxPacket { - return nil, errors.New("ssh: max packet length exceeded.") + return nil, errors.New("ssh: max packet length exceeded") } if cap(c.buf) < int(length+gcmTagSize) { @@ -422,7 +426,7 @@ type cbcCipher struct { oracleCamouflage uint32 } -func newCBCCipher(c cipher.Block, iv, key, macKey []byte, algs directionAlgorithms) (packetCipher, error) { +func newCBCCipher(c cipher.Block, key, iv, macKey []byte, algs directionAlgorithms) (packetCipher, error) { cbc := &cbcCipher{ mac: macModes[algs.MAC].new(macKey), decrypter: cipher.NewCBCDecrypter(c, iv), @@ -436,13 +440,13 @@ func newCBCCipher(c cipher.Block, iv, key, macKey []byte, algs directionAlgorith return cbc, nil } -func newAESCBCCipher(iv, key, macKey []byte, algs directionAlgorithms) (packetCipher, error) { +func newAESCBCCipher(key, iv, macKey []byte, algs directionAlgorithms) (packetCipher, error) { c, err := aes.NewCipher(key) if err != nil { return nil, err } - cbc, err := newCBCCipher(c, iv, key, macKey, algs) + cbc, err := newCBCCipher(c, key, iv, macKey, algs) if err != nil { return nil, err } @@ -450,13 +454,13 @@ func newAESCBCCipher(iv, key, macKey []byte, algs directionAlgorithms) (packetCi return cbc, nil } -func newTripleDESCBCCipher(iv, key, macKey []byte, algs directionAlgorithms) (packetCipher, error) { +func newTripleDESCBCCipher(key, iv, macKey []byte, algs directionAlgorithms) (packetCipher, error) { c, err := des.NewTripleDESCipher(key) if err != nil { return nil, err } - cbc, err := newCBCCipher(c, iv, key, macKey, algs) + cbc, err := 
newCBCCipher(c, key, iv, macKey, algs) if err != nil { return nil, err } @@ -548,11 +552,11 @@ func (c *cbcCipher) readPacketLeaky(seqNum uint32, r io.Reader) ([]byte, error) c.packetData = c.packetData[:entirePacketSize] } - if n, err := io.ReadFull(r, c.packetData[firstBlockLength:]); err != nil { + n, err := io.ReadFull(r, c.packetData[firstBlockLength:]) + if err != nil { return nil, err - } else { - c.oracleCamouflage -= uint32(n) } + c.oracleCamouflage -= uint32(n) remainingCrypted := c.packetData[firstBlockLength:macStart] c.decrypter.CryptBlocks(remainingCrypted, remainingCrypted) @@ -627,3 +631,140 @@ func (c *cbcCipher) writePacket(seqNum uint32, w io.Writer, rand io.Reader, pack return nil } + +const chacha20Poly1305ID = "chacha20-poly1305@openssh.com" + +// chacha20Poly1305Cipher implements the chacha20-poly1305@openssh.com +// AEAD, which is described here: +// +// https://tools.ietf.org/html/draft-josefsson-ssh-chacha20-poly1305-openssh-00 +// +// the methods here also implement padding, which RFC4253 Section 6 +// also requires of stream ciphers. +type chacha20Poly1305Cipher struct { + lengthKey [8]uint32 + contentKey [8]uint32 + buf []byte +} + +func newChaCha20Cipher(key, unusedIV, unusedMACKey []byte, unusedAlgs directionAlgorithms) (packetCipher, error) { + if len(key) != 64 { + panic(len(key)) + } + + c := &chacha20Poly1305Cipher{ + buf: make([]byte, 256), + } + + for i := range c.contentKey { + c.contentKey[i] = binary.LittleEndian.Uint32(key[i*4 : (i+1)*4]) + } + for i := range c.lengthKey { + c.lengthKey[i] = binary.LittleEndian.Uint32(key[(i+8)*4 : (i+9)*4]) + } + return c, nil +} + +func (c *chacha20Poly1305Cipher) readPacket(seqNum uint32, r io.Reader) ([]byte, error) { + nonce := [3]uint32{0, 0, bits.ReverseBytes32(seqNum)} + s := chacha20.New(c.contentKey, nonce) + var polyKey [32]byte + s.XORKeyStream(polyKey[:], polyKey[:]) + s.Advance() // skip next 32 bytes + + encryptedLength := c.buf[:4] + if _, err := io.ReadFull(r, encryptedLength); err != nil { + return nil, err + } + + var lenBytes [4]byte + chacha20.New(c.lengthKey, nonce).XORKeyStream(lenBytes[:], encryptedLength) + + length := binary.BigEndian.Uint32(lenBytes[:]) + if length > maxPacket { + return nil, errors.New("ssh: invalid packet length, packet too large") + } + + contentEnd := 4 + length + packetEnd := contentEnd + poly1305.TagSize + if uint32(cap(c.buf)) < packetEnd { + c.buf = make([]byte, packetEnd) + copy(c.buf[:], encryptedLength) + } else { + c.buf = c.buf[:packetEnd] + } + + if _, err := io.ReadFull(r, c.buf[4:packetEnd]); err != nil { + return nil, err + } + + var mac [poly1305.TagSize]byte + copy(mac[:], c.buf[contentEnd:packetEnd]) + if !poly1305.Verify(&mac, c.buf[:contentEnd], &polyKey) { + return nil, errors.New("ssh: MAC failure") + } + + plain := c.buf[4:contentEnd] + s.XORKeyStream(plain, plain) + + padding := plain[0] + if padding < 4 { + // padding is a byte, so it automatically satisfies + // the maximum size, which is 255. 
+ return nil, fmt.Errorf("ssh: illegal padding %d", padding) + } + + if int(padding)+1 >= len(plain) { + return nil, fmt.Errorf("ssh: padding %d too large", padding) + } + + plain = plain[1 : len(plain)-int(padding)] + + return plain, nil +} + +func (c *chacha20Poly1305Cipher) writePacket(seqNum uint32, w io.Writer, rand io.Reader, payload []byte) error { + nonce := [3]uint32{0, 0, bits.ReverseBytes32(seqNum)} + s := chacha20.New(c.contentKey, nonce) + var polyKey [32]byte + s.XORKeyStream(polyKey[:], polyKey[:]) + s.Advance() // skip next 32 bytes + + // There is no blocksize, so fall back to multiple of 8 byte + // padding, as described in RFC 4253, Sec 6. + const packetSizeMultiple = 8 + + padding := packetSizeMultiple - (1+len(payload))%packetSizeMultiple + if padding < 4 { + padding += packetSizeMultiple + } + + // size (4 bytes), padding (1), payload, padding, tag. + totalLength := 4 + 1 + len(payload) + padding + poly1305.TagSize + if cap(c.buf) < totalLength { + c.buf = make([]byte, totalLength) + } else { + c.buf = c.buf[:totalLength] + } + + binary.BigEndian.PutUint32(c.buf, uint32(1+len(payload)+padding)) + chacha20.New(c.lengthKey, nonce).XORKeyStream(c.buf, c.buf[:4]) + c.buf[4] = byte(padding) + copy(c.buf[5:], payload) + packetEnd := 5 + len(payload) + padding + if _, err := io.ReadFull(rand, c.buf[5+len(payload):packetEnd]); err != nil { + return err + } + + s.XORKeyStream(c.buf[4:], c.buf[4:packetEnd]) + + var mac [poly1305.TagSize]byte + poly1305.Sum(&mac, c.buf[:packetEnd], &polyKey) + + copy(c.buf[packetEnd:], mac[:]) + + if _, err := w.Write(c.buf); err != nil { + return err + } + return nil +} diff --git a/vendor/golang.org/x/crypto/ssh/cipher_test.go b/vendor/golang.org/x/crypto/ssh/cipher_test.go index 5cfa17a..a52d6e4 100644 --- a/vendor/golang.org/x/crypto/ssh/cipher_test.go +++ b/vendor/golang.org/x/crypto/ssh/cipher_test.go @@ -7,7 +7,6 @@ package ssh import ( "bytes" "crypto" - "crypto/aes" "crypto/rand" "testing" ) @@ -15,60 +14,63 @@ import ( func TestDefaultCiphersExist(t *testing.T) { for _, cipherAlgo := range supportedCiphers { if _, ok := cipherModes[cipherAlgo]; !ok { - t.Errorf("default cipher %q is unknown", cipherAlgo) + t.Errorf("supported cipher %q is unknown", cipherAlgo) + } + } + for _, cipherAlgo := range preferredCiphers { + if _, ok := cipherModes[cipherAlgo]; !ok { + t.Errorf("preferred cipher %q is unknown", cipherAlgo) } } } func TestPacketCiphers(t *testing.T) { - // Still test aes128cbc cipher although it's commented out. 
- cipherModes[aes128cbcID] = &streamCipherMode{16, aes.BlockSize, 0, nil} - defer delete(cipherModes, aes128cbcID) - + defaultMac := "hmac-sha2-256" + defaultCipher := "aes128-ctr" for cipher := range cipherModes { - for mac := range macModes { - kr := &kexResult{Hash: crypto.SHA1} - algs := directionAlgorithms{ - Cipher: cipher, - MAC: mac, - Compression: "none", - } - client, err := newPacketCipher(clientKeys, algs, kr) - if err != nil { - t.Errorf("newPacketCipher(client, %q, %q): %v", cipher, mac, err) - continue - } - server, err := newPacketCipher(clientKeys, algs, kr) - if err != nil { - t.Errorf("newPacketCipher(client, %q, %q): %v", cipher, mac, err) - continue - } - - want := "bla bla" - input := []byte(want) - buf := &bytes.Buffer{} - if err := client.writePacket(0, buf, rand.Reader, input); err != nil { - t.Errorf("writePacket(%q, %q): %v", cipher, mac, err) - continue - } - - packet, err := server.readPacket(0, buf) - if err != nil { - t.Errorf("readPacket(%q, %q): %v", cipher, mac, err) - continue - } - - if string(packet) != want { - t.Errorf("roundtrip(%q, %q): got %q, want %q", cipher, mac, packet, want) - } - } + t.Run("cipher="+cipher, + func(t *testing.T) { testPacketCipher(t, cipher, defaultMac) }) + } + for mac := range macModes { + t.Run("mac="+mac, + func(t *testing.T) { testPacketCipher(t, defaultCipher, mac) }) } } -func TestCBCOracleCounterMeasure(t *testing.T) { - cipherModes[aes128cbcID] = &streamCipherMode{16, aes.BlockSize, 0, nil} - defer delete(cipherModes, aes128cbcID) +func testPacketCipher(t *testing.T, cipher, mac string) { + kr := &kexResult{Hash: crypto.SHA1} + algs := directionAlgorithms{ + Cipher: cipher, + MAC: mac, + Compression: "none", + } + client, err := newPacketCipher(clientKeys, algs, kr) + if err != nil { + t.Fatalf("newPacketCipher(client, %q, %q): %v", cipher, mac, err) + } + server, err := newPacketCipher(clientKeys, algs, kr) + if err != nil { + t.Fatalf("newPacketCipher(client, %q, %q): %v", cipher, mac, err) + } + + want := "bla bla" + input := []byte(want) + buf := &bytes.Buffer{} + if err := client.writePacket(0, buf, rand.Reader, input); err != nil { + t.Fatalf("writePacket(%q, %q): %v", cipher, mac, err) + } + packet, err := server.readPacket(0, buf) + if err != nil { + t.Fatalf("readPacket(%q, %q): %v", cipher, mac, err) + } + + if string(packet) != want { + t.Errorf("roundtrip(%q, %q): got %q, want %q", cipher, mac, packet, want) + } +} + +func TestCBCOracleCounterMeasure(t *testing.T) { kr := &kexResult{Hash: crypto.SHA1} algs := directionAlgorithms{ Cipher: aes128cbcID, diff --git a/vendor/golang.org/x/crypto/ssh/client.go b/vendor/golang.org/x/crypto/ssh/client.go index a7e3263..7b00bff 100644 --- a/vendor/golang.org/x/crypto/ssh/client.go +++ b/vendor/golang.org/x/crypto/ssh/client.go @@ -9,6 +9,7 @@ import ( "errors" "fmt" "net" + "os" "sync" "time" ) @@ -18,6 +19,8 @@ import ( type Client struct { Conn + handleForwardsOnce sync.Once // guards calling (*Client).handleForwards + forwards forwardList // forwarded tcpip connections from the remote side mu sync.Mutex channelHandlers map[string]chan NewChannel @@ -59,8 +62,6 @@ func NewClient(c Conn, chans <-chan NewChannel, reqs <-chan *Request) *Client { conn.Wait() conn.forwards.closeAll() }() - go conn.forwards.handleChannels(conn.HandleChannelOpen("forwarded-tcpip")) - go conn.forwards.handleChannels(conn.HandleChannelOpen("forwarded-streamlocal@openssh.com")) return conn } @@ -184,9 +185,13 @@ func Dial(network, addr string, config *ClientConfig) (*Client, error) { // 
keys. A HostKeyCallback must return nil if the host key is OK, or // an error to reject it. It receives the hostname as passed to Dial // or NewClientConn. The remote address is the RemoteAddr of the -// net.Conn underlying the the SSH connection. +// net.Conn underlying the SSH connection. type HostKeyCallback func(hostname string, remote net.Addr, key PublicKey) error +// BannerCallback is the function type used for treat the banner sent by +// the server. A BannerCallback receives the message sent by the remote server. +type BannerCallback func(message string) error + // A ClientConfig structure is used to configure a Client. It must not be // modified after having been passed to an SSH function. type ClientConfig struct { @@ -209,6 +214,12 @@ type ClientConfig struct { // FixedHostKey can be used for simplistic host key checks. HostKeyCallback HostKeyCallback + // BannerCallback is called during the SSH dance to display a custom + // server's message. The client configuration can supply this callback to + // handle it as wished. The function BannerDisplayStderr can be used for + // simplistic display on Stderr. + BannerCallback BannerCallback + // ClientVersion contains the version identification string that will // be used for the connection. If empty, a reasonable default is used. ClientVersion string @@ -255,3 +266,13 @@ func FixedHostKey(key PublicKey) HostKeyCallback { hk := &fixedHostKey{key} return hk.check } + +// BannerDisplayStderr returns a function that can be used for +// ClientConfig.BannerCallback to display banners on os.Stderr. +func BannerDisplayStderr() BannerCallback { + return func(banner string) error { + _, err := os.Stderr.WriteString(banner) + + return err + } +} diff --git a/vendor/golang.org/x/crypto/ssh/client_auth.go b/vendor/golang.org/x/crypto/ssh/client_auth.go index b882da0..5f44b77 100644 --- a/vendor/golang.org/x/crypto/ssh/client_auth.go +++ b/vendor/golang.org/x/crypto/ssh/client_auth.go @@ -11,6 +11,14 @@ import ( "io" ) +type authResult int + +const ( + authFailure authResult = iota + authPartialSuccess + authSuccess +) + // clientAuthenticate authenticates with the remote server. See RFC 4252. func (c *connection) clientAuthenticate(config *ClientConfig) error { // initiate user auth session @@ -37,11 +45,12 @@ func (c *connection) clientAuthenticate(config *ClientConfig) error { if err != nil { return err } - if ok { + if ok == authSuccess { // success return nil + } else if ok == authFailure { + tried[auth.method()] = true } - tried[auth.method()] = true if methods == nil { methods = lastMethods } @@ -82,7 +91,7 @@ type AuthMethod interface { // If authentication is not successful, a []string of alternative // method names is returned. If the slice is nil, it will be ignored // and the previous set of possible methods will be reused. - auth(session []byte, user string, p packetConn, rand io.Reader) (bool, []string, error) + auth(session []byte, user string, p packetConn, rand io.Reader) (authResult, []string, error) // method returns the RFC 4252 method name. method() string @@ -91,13 +100,13 @@ type AuthMethod interface { // "none" authentication, RFC 4252 section 5.2. 
type noneAuth int -func (n *noneAuth) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) { +func (n *noneAuth) auth(session []byte, user string, c packetConn, rand io.Reader) (authResult, []string, error) { if err := c.writePacket(Marshal(&userAuthRequestMsg{ User: user, Service: serviceSSH, Method: "none", })); err != nil { - return false, nil, err + return authFailure, nil, err } return handleAuthResponse(c) @@ -111,7 +120,7 @@ func (n *noneAuth) method() string { // a function call, e.g. by prompting the user. type passwordCallback func() (password string, err error) -func (cb passwordCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) { +func (cb passwordCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (authResult, []string, error) { type passwordAuthMsg struct { User string `sshtype:"50"` Service string @@ -125,7 +134,7 @@ func (cb passwordCallback) auth(session []byte, user string, c packetConn, rand // The program may only find out that the user doesn't have a password // when prompting. if err != nil { - return false, nil, err + return authFailure, nil, err } if err := c.writePacket(Marshal(&passwordAuthMsg{ @@ -135,7 +144,7 @@ func (cb passwordCallback) auth(session []byte, user string, c packetConn, rand Reply: false, Password: pw, })); err != nil { - return false, nil, err + return authFailure, nil, err } return handleAuthResponse(c) @@ -178,7 +187,7 @@ func (cb publicKeyCallback) method() string { return "publickey" } -func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) { +func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand io.Reader) (authResult, []string, error) { // Authentication is performed by sending an enquiry to test if a key is // acceptable to the remote. If the key is acceptable, the client will // attempt to authenticate with the valid key. If not the client will repeat @@ -186,13 +195,13 @@ func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand signers, err := cb() if err != nil { - return false, nil, err + return authFailure, nil, err } var methods []string for _, signer := range signers { ok, err := validateKey(signer.PublicKey(), user, c) if err != nil { - return false, nil, err + return authFailure, nil, err } if !ok { continue @@ -206,7 +215,7 @@ func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand Method: cb.method(), }, []byte(pub.Type()), pubKey)) if err != nil { - return false, nil, err + return authFailure, nil, err } // manually wrap the serialized signature in a string @@ -224,24 +233,24 @@ func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand } p := Marshal(&msg) if err := c.writePacket(p); err != nil { - return false, nil, err + return authFailure, nil, err } - var success bool + var success authResult success, methods, err = handleAuthResponse(c) if err != nil { - return false, nil, err + return authFailure, nil, err } // If authentication succeeds or the list of available methods does not // contain the "publickey" method, do not attempt to authenticate with any // other keys. According to RFC 4252 Section 7, the latter can occur when // additional authentication methods are required. 
- if success || !containsMethod(methods, cb.method()) { + if success == authSuccess || !containsMethod(methods, cb.method()) { return success, methods, err } } - return false, methods, nil + return authFailure, methods, nil } func containsMethod(methods []string, method string) bool { @@ -283,7 +292,9 @@ func confirmKeyAck(key PublicKey, c packetConn) (bool, error) { } switch packet[0] { case msgUserAuthBanner: - // TODO(gpaul): add callback to present the banner to the user + if err := handleBannerResponse(c, packet); err != nil { + return false, err + } case msgUserAuthPubKeyOk: var msg userAuthPubKeyOkMsg if err := Unmarshal(packet, &msg); err != nil { @@ -316,30 +327,53 @@ func PublicKeysCallback(getSigners func() (signers []Signer, err error)) AuthMet // handleAuthResponse returns whether the preceding authentication request succeeded // along with a list of remaining authentication methods to try next and // an error if an unexpected response was received. -func handleAuthResponse(c packetConn) (bool, []string, error) { +func handleAuthResponse(c packetConn) (authResult, []string, error) { for { packet, err := c.readPacket() if err != nil { - return false, nil, err + return authFailure, nil, err } switch packet[0] { case msgUserAuthBanner: - // TODO: add callback to present the banner to the user + if err := handleBannerResponse(c, packet); err != nil { + return authFailure, nil, err + } case msgUserAuthFailure: var msg userAuthFailureMsg if err := Unmarshal(packet, &msg); err != nil { - return false, nil, err + return authFailure, nil, err + } + if msg.PartialSuccess { + return authPartialSuccess, msg.Methods, nil } - return false, msg.Methods, nil + return authFailure, msg.Methods, nil case msgUserAuthSuccess: - return true, nil, nil + return authSuccess, nil, nil default: - return false, nil, unexpectedMessageError(msgUserAuthSuccess, packet[0]) + return authFailure, nil, unexpectedMessageError(msgUserAuthSuccess, packet[0]) } } } +func handleBannerResponse(c packetConn, packet []byte) error { + var msg userAuthBannerMsg + if err := Unmarshal(packet, &msg); err != nil { + return err + } + + transport, ok := c.(*handshakeTransport) + if !ok { + return nil + } + + if transport.bannerCallback != nil { + return transport.bannerCallback(msg.Message) + } + + return nil +} + // KeyboardInteractiveChallenge should print questions, optionally // disabling echoing (e.g. for passwords), and return all the answers. // Challenge may be called multiple times in a single session. After @@ -349,7 +383,7 @@ func handleAuthResponse(c packetConn) (bool, []string, error) { // both CLI and GUI environments. type KeyboardInteractiveChallenge func(user, instruction string, questions []string, echos []bool) (answers []string, err error) -// KeyboardInteractive returns a AuthMethod using a prompt/response +// KeyboardInteractive returns an AuthMethod using a prompt/response // sequence controlled by the server. 
func KeyboardInteractive(challenge KeyboardInteractiveChallenge) AuthMethod { return challenge @@ -359,7 +393,7 @@ func (cb KeyboardInteractiveChallenge) method() string { return "keyboard-interactive" } -func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packetConn, rand io.Reader) (bool, []string, error) { +func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packetConn, rand io.Reader) (authResult, []string, error) { type initiateMsg struct { User string `sshtype:"50"` Service string @@ -373,37 +407,42 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe Service: serviceSSH, Method: "keyboard-interactive", })); err != nil { - return false, nil, err + return authFailure, nil, err } for { packet, err := c.readPacket() if err != nil { - return false, nil, err + return authFailure, nil, err } // like handleAuthResponse, but with less options. switch packet[0] { case msgUserAuthBanner: - // TODO: Print banners during userauth. + if err := handleBannerResponse(c, packet); err != nil { + return authFailure, nil, err + } continue case msgUserAuthInfoRequest: // OK case msgUserAuthFailure: var msg userAuthFailureMsg if err := Unmarshal(packet, &msg); err != nil { - return false, nil, err + return authFailure, nil, err + } + if msg.PartialSuccess { + return authPartialSuccess, msg.Methods, nil } - return false, msg.Methods, nil + return authFailure, msg.Methods, nil case msgUserAuthSuccess: - return true, nil, nil + return authSuccess, nil, nil default: - return false, nil, unexpectedMessageError(msgUserAuthInfoRequest, packet[0]) + return authFailure, nil, unexpectedMessageError(msgUserAuthInfoRequest, packet[0]) } var msg userAuthInfoRequestMsg if err := Unmarshal(packet, &msg); err != nil { - return false, nil, err + return authFailure, nil, err } // Manually unpack the prompt/echo pairs. 
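Usage sketch (not part of the patch): the client_auth.go hunks above route msgUserAuthBanner packets through handleBannerResponse to the transport's banner callback, which client.go in this diff exposes as ClientConfig.BannerCallback together with the BannerDisplayStderr helper. A minimal client configuration that consumes the banner, assuming password authentication; user and password are placeholder inputs:

package example

import (
	"golang.org/x/crypto/ssh"
)

// newBannerAwareConfig returns a ClientConfig whose BannerCallback writes any
// pre-authentication banner from the server to stderr via the helper added in
// client.go. Host key verification is disabled only to keep the sketch short;
// real code should verify host keys.
func newBannerAwareConfig(user, password string) *ssh.ClientConfig {
	return &ssh.ClientConfig{
		User:            user,
		Auth:            []ssh.AuthMethod{ssh.Password(password)},
		BannerCallback:  ssh.BannerDisplayStderr(),
		HostKeyCallback: ssh.InsecureIgnoreHostKey(),
	}
}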
@@ -413,7 +452,7 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe for i := 0; i < int(msg.NumPrompts); i++ { prompt, r, ok := parseString(rest) if !ok || len(r) == 0 { - return false, nil, errors.New("ssh: prompt format error") + return authFailure, nil, errors.New("ssh: prompt format error") } prompts = append(prompts, string(prompt)) echos = append(echos, r[0] != 0) @@ -421,16 +460,16 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe } if len(rest) != 0 { - return false, nil, errors.New("ssh: extra data following keyboard-interactive pairs") + return authFailure, nil, errors.New("ssh: extra data following keyboard-interactive pairs") } answers, err := cb(msg.User, msg.Instruction, prompts, echos) if err != nil { - return false, nil, err + return authFailure, nil, err } if len(answers) != len(prompts) { - return false, nil, errors.New("ssh: not enough answers from keyboard-interactive callback") + return authFailure, nil, errors.New("ssh: not enough answers from keyboard-interactive callback") } responseLength := 1 + 4 for _, a := range answers { @@ -446,7 +485,7 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe } if err := c.writePacket(serialized); err != nil { - return false, nil, err + return authFailure, nil, err } } } @@ -456,10 +495,10 @@ type retryableAuthMethod struct { maxTries int } -func (r *retryableAuthMethod) auth(session []byte, user string, c packetConn, rand io.Reader) (ok bool, methods []string, err error) { +func (r *retryableAuthMethod) auth(session []byte, user string, c packetConn, rand io.Reader) (ok authResult, methods []string, err error) { for i := 0; r.maxTries <= 0 || i < r.maxTries; i++ { ok, methods, err = r.authMethod.auth(session, user, c, rand) - if ok || err != nil { // either success or error terminate + if ok != authFailure || err != nil { // either success, partial success or error terminate return ok, methods, err } } diff --git a/vendor/golang.org/x/crypto/ssh/client_auth_test.go b/vendor/golang.org/x/crypto/ssh/client_auth_test.go index 145b57a..026d137 100644 --- a/vendor/golang.org/x/crypto/ssh/client_auth_test.go +++ b/vendor/golang.org/x/crypto/ssh/client_auth_test.go @@ -9,6 +9,7 @@ import ( "crypto/rand" "errors" "fmt" + "io" "os" "strings" "testing" @@ -28,8 +29,14 @@ func (cr keyboardInteractive) Challenge(user string, instruction string, questio var clientPassword = "tiger" // tryAuth runs a handshake with a given config against an SSH server -// with config serverConfig +// with config serverConfig. Returns both client and server side errors. func tryAuth(t *testing.T, config *ClientConfig) error { + err, _ := tryAuthBothSides(t, config) + return err +} + +// tryAuthBothSides runs the handshake and returns the resulting errors from both sides of the connection. 
+func tryAuthBothSides(t *testing.T, config *ClientConfig) (clientError error, serverAuthErrors []error) { c1, c2, err := netPipe() if err != nil { t.Fatalf("netPipe: %v", err) @@ -79,9 +86,13 @@ func tryAuth(t *testing.T, config *ClientConfig) error { } serverConfig.AddHostKey(testSigners["rsa"]) + serverConfig.AuthLogCallback = func(conn ConnMetadata, method string, err error) { + serverAuthErrors = append(serverAuthErrors, err) + } + go newServer(c1, serverConfig) _, _, _, err = NewClientConn(c2, "", config) - return err + return err, serverAuthErrors } func TestClientAuthPublicKey(t *testing.T) { @@ -213,6 +224,45 @@ func TestAuthMethodRSAandDSA(t *testing.T) { } } +type invalidAlgSigner struct { + Signer +} + +func (s *invalidAlgSigner) Sign(rand io.Reader, data []byte) (*Signature, error) { + sig, err := s.Signer.Sign(rand, data) + if sig != nil { + sig.Format = "invalid" + } + return sig, err +} + +func TestMethodInvalidAlgorithm(t *testing.T) { + config := &ClientConfig{ + User: "testuser", + Auth: []AuthMethod{ + PublicKeys(&invalidAlgSigner{testSigners["rsa"]}), + }, + HostKeyCallback: InsecureIgnoreHostKey(), + } + + err, serverErrors := tryAuthBothSides(t, config) + if err == nil { + t.Fatalf("login succeeded") + } + + found := false + want := "algorithm \"invalid\"" + + var errStrings []string + for _, err := range serverErrors { + found = found || (err != nil && strings.Contains(err.Error(), want)) + errStrings = append(errStrings, err.Error()) + } + if !found { + t.Errorf("server got error %q, want substring %q", errStrings, want) + } +} + func TestClientHMAC(t *testing.T) { for _, mac := range supportedMACs { config := &ClientConfig{ @@ -614,8 +664,8 @@ func TestClientAuthErrorList(t *testing.T) { for i, e := range authErrs.Errors { switch i { case 0: - if e.Error() != "no auth passed yet" { - t.Fatalf("errors: got %v, want no auth passed yet", e.Error()) + if e != ErrNoAuth { + t.Fatalf("errors: got error %v, want ErrNoAuth", e) } case 1: if e != publicKeyErr { diff --git a/vendor/golang.org/x/crypto/ssh/client_test.go b/vendor/golang.org/x/crypto/ssh/client_test.go index ccf5607..81f9599 100644 --- a/vendor/golang.org/x/crypto/ssh/client_test.go +++ b/vendor/golang.org/x/crypto/ssh/client_test.go @@ -5,41 +5,77 @@ package ssh import ( - "net" "strings" "testing" ) -func testClientVersion(t *testing.T, config *ClientConfig, expected string) { - clientConn, serverConn := net.Pipe() - defer clientConn.Close() - receivedVersion := make(chan string, 1) - config.HostKeyCallback = InsecureIgnoreHostKey() - go func() { - version, err := readVersion(serverConn) - if err != nil { - receivedVersion <- "" - } else { - receivedVersion <- string(version) - } - serverConn.Close() - }() - NewClientConn(clientConn, "", config) - actual := <-receivedVersion - if actual != expected { - t.Fatalf("got %s; want %s", actual, expected) +func TestClientVersion(t *testing.T) { + for _, tt := range []struct { + name string + version string + multiLine string + wantErr bool + }{ + { + name: "default version", + version: packageVersion, + }, + { + name: "custom version", + version: "SSH-2.0-CustomClientVersionString", + }, + { + name: "good multi line version", + version: packageVersion, + multiLine: strings.Repeat("ignored\r\n", 20), + }, + { + name: "bad multi line version", + version: packageVersion, + multiLine: "bad multi line version", + wantErr: true, + }, + { + name: "long multi line version", + version: packageVersion, + multiLine: strings.Repeat("long multi line version\r\n", 50)[:256], + 
wantErr: true, + }, + } { + t.Run(tt.name, func(t *testing.T) { + c1, c2, err := netPipe() + if err != nil { + t.Fatalf("netPipe: %v", err) + } + defer c1.Close() + defer c2.Close() + go func() { + if tt.multiLine != "" { + c1.Write([]byte(tt.multiLine)) + } + NewClientConn(c1, "", &ClientConfig{ + ClientVersion: tt.version, + HostKeyCallback: InsecureIgnoreHostKey(), + }) + c1.Close() + }() + conf := &ServerConfig{NoClientAuth: true} + conf.AddHostKey(testSigners["rsa"]) + conn, _, _, err := NewServerConn(c2, conf) + if err == nil == tt.wantErr { + t.Fatalf("got err %v; wantErr %t", err, tt.wantErr) + } + if tt.wantErr { + // Don't verify the version on an expected error. + return + } + if got := string(conn.ClientVersion()); got != tt.version { + t.Fatalf("got %q; want %q", got, tt.version) + } + }) } } -func TestCustomClientVersion(t *testing.T) { - version := "Test-Client-Version-0.0" - testClientVersion(t, &ClientConfig{ClientVersion: version}, version) -} - -func TestDefaultClientVersion(t *testing.T) { - testClientVersion(t, &ClientConfig{}, packageVersion) -} - func TestHostKeyCheck(t *testing.T) { for _, tt := range []struct { name string @@ -79,3 +115,52 @@ func TestHostKeyCheck(t *testing.T) { } } } + +func TestBannerCallback(t *testing.T) { + c1, c2, err := netPipe() + if err != nil { + t.Fatalf("netPipe: %v", err) + } + defer c1.Close() + defer c2.Close() + + serverConf := &ServerConfig{ + PasswordCallback: func(conn ConnMetadata, password []byte) (*Permissions, error) { + return &Permissions{}, nil + }, + BannerCallback: func(conn ConnMetadata) string { + return "Hello World" + }, + } + serverConf.AddHostKey(testSigners["rsa"]) + go NewServerConn(c1, serverConf) + + var receivedBanner string + var bannerCount int + clientConf := ClientConfig{ + Auth: []AuthMethod{ + Password("123"), + }, + User: "user", + HostKeyCallback: InsecureIgnoreHostKey(), + BannerCallback: func(message string) error { + bannerCount++ + receivedBanner = message + return nil + }, + } + + _, _, _, err = NewClientConn(c2, "", &clientConf) + if err != nil { + t.Fatal(err) + } + + if bannerCount != 1 { + t.Errorf("got %d banners; want 1", bannerCount) + } + + expected := "Hello World" + if receivedBanner != expected { + t.Fatalf("got %s; want %s", receivedBanner, expected) + } +} diff --git a/vendor/golang.org/x/crypto/ssh/common.go b/vendor/golang.org/x/crypto/ssh/common.go index dc39e4d..04f3620 100644 --- a/vendor/golang.org/x/crypto/ssh/common.go +++ b/vendor/golang.org/x/crypto/ssh/common.go @@ -24,11 +24,21 @@ const ( serviceSSH = "ssh-connection" ) -// supportedCiphers specifies the supported ciphers in preference order. +// supportedCiphers lists ciphers we support but might not recommend. var supportedCiphers = []string{ "aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", - "arcfour256", "arcfour128", + chacha20Poly1305ID, + "arcfour256", "arcfour128", "arcfour", + aes128cbcID, + tripledescbcID, +} + +// preferredCiphers specifies the default preference for ciphers. 
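To make the effect of this supported/preferred split concrete, here is a minimal client sketch (host, user and password are placeholders, not part of this change): with the new defaults only the CTR, GCM and ChaCha20 ciphers are offered, so a caller that still needs a CBC or arcfour cipher has to list it in Config.Ciphers explicitly.

package main

import (
	"log"

	"golang.org/x/crypto/ssh"
)

func main() {
	cfg := &ssh.ClientConfig{
		User:            "demo",                                   // placeholder
		Auth:            []ssh.AuthMethod{ssh.Password("secret")}, // placeholder
		HostKeyCallback: ssh.InsecureIgnoreHostKey(),              // verify host keys in real code
	}
	// The zero value of the embedded Config now negotiates the preferred set.
	// Legacy algorithms remain implemented but must be requested explicitly:
	cfg.Ciphers = []string{"aes128-ctr", "aes128-cbc"}

	client, err := ssh.Dial("tcp", "legacy-host.example:22", cfg)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
}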
+var preferredCiphers = []string{ + "aes128-gcm@openssh.com", + chacha20Poly1305ID, + "aes128-ctr", "aes192-ctr", "aes256-ctr", } // supportedKexAlgos specifies the supported key-exchange algorithms in @@ -211,7 +221,7 @@ func (c *Config) SetDefaults() { c.Rand = rand.Reader } if c.Ciphers == nil { - c.Ciphers = supportedCiphers + c.Ciphers = preferredCiphers } var ciphers []string for _, c := range c.Ciphers { @@ -242,7 +252,7 @@ func (c *Config) SetDefaults() { // buildDataSignedForAuth returns the data that is signed in order to prove // possession of a private key. See RFC 4252, section 7. -func buildDataSignedForAuth(sessionId []byte, req userAuthRequestMsg, algo, pubKey []byte) []byte { +func buildDataSignedForAuth(sessionID []byte, req userAuthRequestMsg, algo, pubKey []byte) []byte { data := struct { Session []byte Type byte @@ -253,7 +263,7 @@ func buildDataSignedForAuth(sessionId []byte, req userAuthRequestMsg, algo, pubK Algo []byte PubKey []byte }{ - sessionId, + sessionID, msgUserAuthRequest, req.User, req.Service, diff --git a/vendor/golang.org/x/crypto/ssh/handshake.go b/vendor/golang.org/x/crypto/ssh/handshake.go index 932ce83..4f7912e 100644 --- a/vendor/golang.org/x/crypto/ssh/handshake.go +++ b/vendor/golang.org/x/crypto/ssh/handshake.go @@ -78,6 +78,11 @@ type handshakeTransport struct { dialAddress string remoteAddr net.Addr + // bannerCallback is non-empty if we are the client and it has been set in + // ClientConfig. In that case it is called during the user authentication + // dance to handle a custom server's message. + bannerCallback BannerCallback + // Algorithms agreed in the last key exchange. algorithms *algorithms @@ -120,6 +125,7 @@ func newClientTransport(conn keyingTransport, clientVersion, serverVersion []byt t.dialAddress = dialAddr t.remoteAddr = addr t.hostKeyCallback = config.HostKeyCallback + t.bannerCallback = config.BannerCallback if config.HostKeyAlgorithms != nil { t.hostKeyAlgorithms = config.HostKeyAlgorithms } else { diff --git a/vendor/golang.org/x/crypto/ssh/kex.go b/vendor/golang.org/x/crypto/ssh/kex.go index f91c277..f34bcc0 100644 --- a/vendor/golang.org/x/crypto/ssh/kex.go +++ b/vendor/golang.org/x/crypto/ssh/kex.go @@ -119,7 +119,7 @@ func (group *dhGroup) Client(c packetConn, randSource io.Reader, magics *handsha return nil, err } - kInt, err := group.diffieHellman(kexDHReply.Y, x) + ki, err := group.diffieHellman(kexDHReply.Y, x) if err != nil { return nil, err } @@ -129,8 +129,8 @@ func (group *dhGroup) Client(c packetConn, randSource io.Reader, magics *handsha writeString(h, kexDHReply.HostKey) writeInt(h, X) writeInt(h, kexDHReply.Y) - K := make([]byte, intLength(kInt)) - marshalInt(K, kInt) + K := make([]byte, intLength(ki)) + marshalInt(K, ki) h.Write(K) return &kexResult{ @@ -164,7 +164,7 @@ func (group *dhGroup) Server(c packetConn, randSource io.Reader, magics *handsha } Y := new(big.Int).Exp(group.g, y, group.p) - kInt, err := group.diffieHellman(kexDHInit.X, y) + ki, err := group.diffieHellman(kexDHInit.X, y) if err != nil { return nil, err } @@ -177,8 +177,8 @@ func (group *dhGroup) Server(c packetConn, randSource io.Reader, magics *handsha writeInt(h, kexDHInit.X) writeInt(h, Y) - K := make([]byte, intLength(kInt)) - marshalInt(K, kInt) + K := make([]byte, intLength(ki)) + marshalInt(K, ki) h.Write(K) H := h.Sum(nil) @@ -462,9 +462,9 @@ func (kex *curve25519sha256) Client(c packetConn, rand io.Reader, magics *handsh writeString(h, kp.pub[:]) writeString(h, reply.EphemeralPubKey) - kInt := new(big.Int).SetBytes(secret[:]) - 
K := make([]byte, intLength(kInt)) - marshalInt(K, kInt) + ki := new(big.Int).SetBytes(secret[:]) + K := make([]byte, intLength(ki)) + marshalInt(K, ki) h.Write(K) return &kexResult{ @@ -510,9 +510,9 @@ func (kex *curve25519sha256) Server(c packetConn, rand io.Reader, magics *handsh writeString(h, kexInit.ClientPubKey) writeString(h, kp.pub[:]) - kInt := new(big.Int).SetBytes(secret[:]) - K := make([]byte, intLength(kInt)) - marshalInt(K, kInt) + ki := new(big.Int).SetBytes(secret[:]) + K := make([]byte, intLength(ki)) + marshalInt(K, ki) h.Write(K) H := h.Sum(nil) diff --git a/vendor/golang.org/x/crypto/ssh/keys.go b/vendor/golang.org/x/crypto/ssh/keys.go index 7a8756a..9698047 100644 --- a/vendor/golang.org/x/crypto/ssh/keys.go +++ b/vendor/golang.org/x/crypto/ssh/keys.go @@ -38,6 +38,16 @@ const ( KeyAlgoED25519 = "ssh-ed25519" ) +// These constants represent non-default signature algorithms that are supported +// as algorithm parameters to AlgorithmSigner.SignWithAlgorithm methods. See +// [PROTOCOL.agent] section 4.5.1 and +// https://tools.ietf.org/html/draft-ietf-curdle-rsa-sha2-10 +const ( + SigAlgoRSA = "ssh-rsa" + SigAlgoRSASHA2256 = "rsa-sha2-256" + SigAlgoRSASHA2512 = "rsa-sha2-512" +) + // parsePubKey parses a public key of the given algorithm. // Use ParsePublicKey for keys with prepended algorithm. func parsePubKey(in []byte, algo string) (pubKey PublicKey, rest []byte, err error) { @@ -276,7 +286,8 @@ type PublicKey interface { Type() string // Marshal returns the serialized key data in SSH wire format, - // with the name prefix. + // with the name prefix. To unmarshal the returned data, use + // the ParsePublicKey function. Marshal() []byte // Verify that sig is a signature on the given data using this @@ -300,6 +311,19 @@ type Signer interface { Sign(rand io.Reader, data []byte) (*Signature, error) } +// A AlgorithmSigner is a Signer that also supports specifying a specific +// algorithm to use for signing. +type AlgorithmSigner interface { + Signer + + // SignWithAlgorithm is like Signer.Sign, but allows specification of a + // non-default signing algorithm. See the SigAlgo* constants in this + // package for signature algorithms supported by this package. Callers may + // pass an empty string for the algorithm in which case the AlgorithmSigner + // will use its default algorithm. 
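A rough sketch of the interface just described (a throwaway RSA key is generated here purely to keep the example self-contained): signers built by this package implement AlgorithmSigner, so a caller can request rsa-sha2-256 instead of the default ssh-rsa signature.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"fmt"
	"log"

	"golang.org/x/crypto/ssh"
)

func main() {
	priv, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		log.Fatal(err)
	}
	signer, err := ssh.NewSignerFromKey(priv)
	if err != nil {
		log.Fatal(err)
	}

	data := []byte("sign me")
	as, ok := signer.(ssh.AlgorithmSigner)
	if !ok {
		log.Fatal("signer does not support algorithm selection")
	}
	// Request a non-default RSA signature algorithm; an empty string would
	// fall back to the signer's default (ssh-rsa, SHA-1).
	sig, err := as.SignWithAlgorithm(rand.Reader, data, ssh.SigAlgoRSASHA2256)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sig.Format) // rsa-sha2-256

	if err := signer.PublicKey().Verify(data, sig); err != nil {
		log.Fatal(err)
	}
}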
+ SignWithAlgorithm(rand io.Reader, data []byte, algorithm string) (*Signature, error) +} + type rsaPublicKey rsa.PublicKey func (r *rsaPublicKey) Type() string { @@ -348,13 +372,21 @@ func (r *rsaPublicKey) Marshal() []byte { } func (r *rsaPublicKey) Verify(data []byte, sig *Signature) error { - if sig.Format != r.Type() { + var hash crypto.Hash + switch sig.Format { + case SigAlgoRSA: + hash = crypto.SHA1 + case SigAlgoRSASHA2256: + hash = crypto.SHA256 + case SigAlgoRSASHA2512: + hash = crypto.SHA512 + default: return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, r.Type()) } - h := crypto.SHA1.New() + h := hash.New() h.Write(data) digest := h.Sum(nil) - return rsa.VerifyPKCS1v15((*rsa.PublicKey)(r), crypto.SHA1, digest, sig.Blob) + return rsa.VerifyPKCS1v15((*rsa.PublicKey)(r), hash, digest, sig.Blob) } func (r *rsaPublicKey) CryptoPublicKey() crypto.PublicKey { @@ -363,10 +395,21 @@ func (r *rsaPublicKey) CryptoPublicKey() crypto.PublicKey { type dsaPublicKey dsa.PublicKey -func (r *dsaPublicKey) Type() string { +func (k *dsaPublicKey) Type() string { return "ssh-dss" } +func checkDSAParams(param *dsa.Parameters) error { + // SSH specifies FIPS 186-2, which only provided a single size + // (1024 bits) DSA key. FIPS 186-3 allows for larger key + // sizes, which would confuse SSH. + if l := param.P.BitLen(); l != 1024 { + return fmt.Errorf("ssh: unsupported DSA key size %d", l) + } + + return nil +} + // parseDSA parses an DSA key according to RFC 4253, section 6.6. func parseDSA(in []byte) (out PublicKey, rest []byte, err error) { var w struct { @@ -377,13 +420,18 @@ func parseDSA(in []byte) (out PublicKey, rest []byte, err error) { return nil, nil, err } + param := dsa.Parameters{ + P: w.P, + Q: w.Q, + G: w.G, + } + if err := checkDSAParams(¶m); err != nil { + return nil, nil, err + } + key := &dsaPublicKey{ - Parameters: dsa.Parameters{ - P: w.P, - Q: w.Q, - G: w.G, - }, - Y: w.Y, + Parameters: param, + Y: w.Y, } return key, w.Rest, nil } @@ -442,6 +490,14 @@ func (k *dsaPrivateKey) PublicKey() PublicKey { } func (k *dsaPrivateKey) Sign(rand io.Reader, data []byte) (*Signature, error) { + return k.SignWithAlgorithm(rand, data, "") +} + +func (k *dsaPrivateKey) SignWithAlgorithm(rand io.Reader, data []byte, algorithm string) (*Signature, error) { + if algorithm != "" && algorithm != k.PublicKey().Type() { + return nil, fmt.Errorf("ssh: unsupported signature algorithm %s", algorithm) + } + h := crypto.SHA1.New() h.Write(data) digest := h.Sum(nil) @@ -465,12 +521,12 @@ func (k *dsaPrivateKey) Sign(rand io.Reader, data []byte) (*Signature, error) { type ecdsaPublicKey ecdsa.PublicKey -func (key *ecdsaPublicKey) Type() string { - return "ecdsa-sha2-" + key.nistID() +func (k *ecdsaPublicKey) Type() string { + return "ecdsa-sha2-" + k.nistID() } -func (key *ecdsaPublicKey) nistID() string { - switch key.Params().BitSize { +func (k *ecdsaPublicKey) nistID() string { + switch k.Params().BitSize { case 256: return "nistp256" case 384: @@ -483,7 +539,7 @@ func (key *ecdsaPublicKey) nistID() string { type ed25519PublicKey ed25519.PublicKey -func (key ed25519PublicKey) Type() string { +func (k ed25519PublicKey) Type() string { return KeyAlgoED25519 } @@ -502,23 +558,23 @@ func parseED25519(in []byte) (out PublicKey, rest []byte, err error) { return (ed25519PublicKey)(key), w.Rest, nil } -func (key ed25519PublicKey) Marshal() []byte { +func (k ed25519PublicKey) Marshal() []byte { w := struct { Name string KeyBytes []byte }{ KeyAlgoED25519, - []byte(key), + []byte(k), } return 
Marshal(&w) } -func (key ed25519PublicKey) Verify(b []byte, sig *Signature) error { - if sig.Format != key.Type() { - return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, key.Type()) +func (k ed25519PublicKey) Verify(b []byte, sig *Signature) error { + if sig.Format != k.Type() { + return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, k.Type()) } - edKey := (ed25519.PublicKey)(key) + edKey := (ed25519.PublicKey)(k) if ok := ed25519.Verify(edKey, b, sig.Blob); !ok { return errors.New("ssh: signature did not verify") } @@ -579,9 +635,9 @@ func parseECDSA(in []byte) (out PublicKey, rest []byte, err error) { return (*ecdsaPublicKey)(key), w.Rest, nil } -func (key *ecdsaPublicKey) Marshal() []byte { +func (k *ecdsaPublicKey) Marshal() []byte { // See RFC 5656, section 3.1. - keyBytes := elliptic.Marshal(key.Curve, key.X, key.Y) + keyBytes := elliptic.Marshal(k.Curve, k.X, k.Y) // ECDSA publickey struct layout should match the struct used by // parseECDSACert in the x/crypto/ssh/agent package. w := struct { @@ -589,20 +645,20 @@ func (key *ecdsaPublicKey) Marshal() []byte { ID string Key []byte }{ - key.Type(), - key.nistID(), + k.Type(), + k.nistID(), keyBytes, } return Marshal(&w) } -func (key *ecdsaPublicKey) Verify(data []byte, sig *Signature) error { - if sig.Format != key.Type() { - return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, key.Type()) +func (k *ecdsaPublicKey) Verify(data []byte, sig *Signature) error { + if sig.Format != k.Type() { + return fmt.Errorf("ssh: signature type %s for key type %s", sig.Format, k.Type()) } - h := ecHash(key.Curve).New() + h := ecHash(k.Curve).New() h.Write(data) digest := h.Sum(nil) @@ -619,7 +675,7 @@ func (key *ecdsaPublicKey) Verify(data []byte, sig *Signature) error { return err } - if ecdsa.Verify((*ecdsa.PublicKey)(key), digest, ecSig.R, ecSig.S) { + if ecdsa.Verify((*ecdsa.PublicKey)(k), digest, ecSig.R, ecSig.S) { return nil } return errors.New("ssh: signature did not verify") @@ -630,19 +686,28 @@ func (k *ecdsaPublicKey) CryptoPublicKey() crypto.PublicKey { } // NewSignerFromKey takes an *rsa.PrivateKey, *dsa.PrivateKey, -// *ecdsa.PrivateKey or any other crypto.Signer and returns a corresponding -// Signer instance. ECDSA keys must use P-256, P-384 or P-521. +// *ecdsa.PrivateKey or any other crypto.Signer and returns a +// corresponding Signer instance. ECDSA keys must use P-256, P-384 or +// P-521. DSA keys must use parameter size L1024N160. 
func NewSignerFromKey(key interface{}) (Signer, error) { switch key := key.(type) { case crypto.Signer: return NewSignerFromSigner(key) case *dsa.PrivateKey: - return &dsaPrivateKey{key}, nil + return newDSAPrivateKey(key) default: return nil, fmt.Errorf("ssh: unsupported key type %T", key) } } +func newDSAPrivateKey(key *dsa.PrivateKey) (Signer, error) { + if err := checkDSAParams(&key.PublicKey.Parameters); err != nil { + return nil, err + } + + return &dsaPrivateKey{key}, nil +} + type wrappedSigner struct { signer crypto.Signer pubKey PublicKey @@ -665,16 +730,42 @@ func (s *wrappedSigner) PublicKey() PublicKey { } func (s *wrappedSigner) Sign(rand io.Reader, data []byte) (*Signature, error) { + return s.SignWithAlgorithm(rand, data, "") +} + +func (s *wrappedSigner) SignWithAlgorithm(rand io.Reader, data []byte, algorithm string) (*Signature, error) { var hashFunc crypto.Hash - switch key := s.pubKey.(type) { - case *rsaPublicKey, *dsaPublicKey: - hashFunc = crypto.SHA1 - case *ecdsaPublicKey: - hashFunc = ecHash(key.Curve) - case ed25519PublicKey: - default: - return nil, fmt.Errorf("ssh: unsupported key type %T", key) + if _, ok := s.pubKey.(*rsaPublicKey); ok { + // RSA keys support a few hash functions determined by the requested signature algorithm + switch algorithm { + case "", SigAlgoRSA: + algorithm = SigAlgoRSA + hashFunc = crypto.SHA1 + case SigAlgoRSASHA2256: + hashFunc = crypto.SHA256 + case SigAlgoRSASHA2512: + hashFunc = crypto.SHA512 + default: + return nil, fmt.Errorf("ssh: unsupported signature algorithm %s", algorithm) + } + } else { + // The only supported algorithm for all other key types is the same as the type of the key + if algorithm == "" { + algorithm = s.pubKey.Type() + } else if algorithm != s.pubKey.Type() { + return nil, fmt.Errorf("ssh: unsupported signature algorithm %s", algorithm) + } + + switch key := s.pubKey.(type) { + case *dsaPublicKey: + hashFunc = crypto.SHA1 + case *ecdsaPublicKey: + hashFunc = ecHash(key.Curve) + case ed25519PublicKey: + default: + return nil, fmt.Errorf("ssh: unsupported key type %T", key) + } } var digest []byte @@ -719,7 +810,7 @@ func (s *wrappedSigner) Sign(rand io.Reader, data []byte) (*Signature, error) { } return &Signature{ - Format: s.pubKey.Type(), + Format: algorithm, Blob: signature, }, nil } @@ -733,7 +824,7 @@ func NewPublicKey(key interface{}) (PublicKey, error) { return (*rsaPublicKey)(key), nil case *ecdsa.PublicKey: if !supportedEllipticCurve(key.Curve) { - return nil, errors.New("ssh: only P-256, P-384 and P-521 EC keys are supported.") + return nil, errors.New("ssh: only P-256, P-384 and P-521 EC keys are supported") } return (*ecdsaPublicKey)(key), nil case *dsa.PublicKey: @@ -777,7 +868,7 @@ func encryptedBlock(block *pem.Block) bool { } // ParseRawPrivateKey returns a private key from a PEM encoded private key. It -// supports RSA (PKCS#1), DSA (OpenSSL), and ECDSA private keys. +// supports RSA (PKCS#1), PKCS#8, DSA (OpenSSL), and ECDSA private keys. 
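A small sketch of the new PKCS#8 path (the ECDSA key is generated and PEM-encoded in place only so the example is self-contained): a "PRIVATE KEY" block can now be fed straight to ParseRawPrivateKey and wrapped into a Signer.

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"

	"golang.org/x/crypto/ssh"
)

func main() {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		log.Fatal(err)
	}
	der, err := x509.MarshalPKCS8PrivateKey(key)
	if err != nil {
		log.Fatal(err)
	}
	pemBytes := pem.EncodeToMemory(&pem.Block{Type: "PRIVATE KEY", Bytes: der})

	// ParseRawPrivateKey now understands PKCS#8 blocks as well.
	raw, err := ssh.ParseRawPrivateKey(pemBytes)
	if err != nil {
		log.Fatal(err)
	}
	signer, err := ssh.NewSignerFromKey(raw)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(signer.PublicKey().Type()) // ecdsa-sha2-nistp256
}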
func ParseRawPrivateKey(pemBytes []byte) (interface{}, error) { block, _ := pem.Decode(pemBytes) if block == nil { @@ -791,6 +882,9 @@ func ParseRawPrivateKey(pemBytes []byte) (interface{}, error) { switch block.Type { case "RSA PRIVATE KEY": return x509.ParsePKCS1PrivateKey(block.Bytes) + // RFC5208 - https://tools.ietf.org/html/rfc5208 + case "PRIVATE KEY": + return x509.ParsePKCS8PrivateKey(block.Bytes) case "EC PRIVATE KEY": return x509.ParseECPrivateKey(block.Bytes) case "DSA PRIVATE KEY": @@ -874,8 +968,8 @@ func ParseDSAPrivateKey(der []byte) (*dsa.PrivateKey, error) { // Implemented based on the documentation at // https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key func parseOpenSSHPrivateKey(key []byte) (crypto.PrivateKey, error) { - magic := append([]byte("openssh-key-v1"), 0) - if !bytes.Equal(magic, key[0:len(magic)]) { + const magic = "openssh-key-v1\x00" + if len(key) < len(magic) || string(key[:len(magic)]) != magic { return nil, errors.New("ssh: invalid openssh private key format") } remaining := key[len(magic):] diff --git a/vendor/golang.org/x/crypto/ssh/keys_test.go b/vendor/golang.org/x/crypto/ssh/keys_test.go index 20ab954..3847b3b 100644 --- a/vendor/golang.org/x/crypto/ssh/keys_test.go +++ b/vendor/golang.org/x/crypto/ssh/keys_test.go @@ -13,7 +13,9 @@ import ( "crypto/rsa" "crypto/x509" "encoding/base64" + "encoding/pem" "fmt" + "io" "reflect" "strings" "testing" @@ -107,6 +109,49 @@ func TestKeySignVerify(t *testing.T) { } } +func TestKeySignWithAlgorithmVerify(t *testing.T) { + for _, priv := range testSigners { + if algorithmSigner, ok := priv.(AlgorithmSigner); !ok { + t.Errorf("Signers constructed by ssh package should always implement the AlgorithmSigner interface: %T", priv) + } else { + pub := priv.PublicKey() + data := []byte("sign me") + + signWithAlgTestCase := func(algorithm string, expectedAlg string) { + sig, err := algorithmSigner.SignWithAlgorithm(rand.Reader, data, algorithm) + if err != nil { + t.Fatalf("Sign(%T): %v", priv, err) + } + if sig.Format != expectedAlg { + t.Errorf("signature format did not match requested signature algorithm: %s != %s", sig.Format, expectedAlg) + } + + if err := pub.Verify(data, sig); err != nil { + t.Errorf("publicKey.Verify(%T): %v", priv, err) + } + sig.Blob[5]++ + if err := pub.Verify(data, sig); err == nil { + t.Errorf("publicKey.Verify on broken sig did not fail") + } + } + + // Using the empty string as the algorithm name should result in the same signature format as the algorithm-free Sign method. 
+ defaultSig, err := priv.Sign(rand.Reader, data) + if err != nil { + t.Fatalf("Sign(%T): %v", priv, err) + } + signWithAlgTestCase("", defaultSig.Format) + + // RSA keys are the only ones which currently support more than one signing algorithm + if pub.Type() == KeyAlgoRSA { + for _, algorithm := range []string{SigAlgoRSA, SigAlgoRSASHA2256, SigAlgoRSASHA2512} { + signWithAlgTestCase(algorithm, algorithm) + } + } + } + } +} + func TestParseRSAPrivateKey(t *testing.T) { key := testPrivateKeys["rsa"] @@ -234,7 +279,7 @@ func TestMarshalParsePublicKey(t *testing.T) { } } -type authResult struct { +type testAuthResult struct { pubKey PublicKey options []string comments string @@ -242,11 +287,11 @@ type authResult struct { ok bool } -func testAuthorizedKeys(t *testing.T, authKeys []byte, expected []authResult) { +func testAuthorizedKeys(t *testing.T, authKeys []byte, expected []testAuthResult) { rest := authKeys - var values []authResult + var values []testAuthResult for len(rest) > 0 { - var r authResult + var r testAuthResult var err error r.pubKey, r.comments, r.options, rest, err = ParseAuthorizedKey(rest) r.ok = (err == nil) @@ -264,7 +309,7 @@ func TestAuthorizedKeyBasic(t *testing.T) { pub, pubSerialized := getTestKey() line := "ssh-rsa " + pubSerialized + " user@host" testAuthorizedKeys(t, []byte(line), - []authResult{ + []testAuthResult{ {pub, nil, "user@host", "", true}, }) } @@ -286,7 +331,7 @@ func TestAuth(t *testing.T) { authOptions := strings.Join(authWithOptions, eol) rest2 := strings.Join(authWithOptions[3:], eol) rest3 := strings.Join(authWithOptions[6:], eol) - testAuthorizedKeys(t, []byte(authOptions), []authResult{ + testAuthorizedKeys(t, []byte(authOptions), []testAuthResult{ {pub, []string{`env="HOME=/home/root"`, "no-port-forwarding"}, "user@host", rest2, true}, {pub, []string{`env="HOME=/home/root2"`}, "user2@host2", rest3, true}, {nil, nil, "", "", false}, @@ -297,7 +342,7 @@ func TestAuth(t *testing.T) { func TestAuthWithQuotedSpaceInEnv(t *testing.T) { pub, pubSerialized := getTestKey() authWithQuotedSpaceInEnv := []byte(`env="HOME=/home/root dir",no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host`) - testAuthorizedKeys(t, []byte(authWithQuotedSpaceInEnv), []authResult{ + testAuthorizedKeys(t, []byte(authWithQuotedSpaceInEnv), []testAuthResult{ {pub, []string{`env="HOME=/home/root dir"`, "no-port-forwarding"}, "user@host", "", true}, }) } @@ -305,7 +350,7 @@ func TestAuthWithQuotedSpaceInEnv(t *testing.T) { func TestAuthWithQuotedCommaInEnv(t *testing.T) { pub, pubSerialized := getTestKey() authWithQuotedCommaInEnv := []byte(`env="HOME=/home/root,dir",no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host`) - testAuthorizedKeys(t, []byte(authWithQuotedCommaInEnv), []authResult{ + testAuthorizedKeys(t, []byte(authWithQuotedCommaInEnv), []testAuthResult{ {pub, []string{`env="HOME=/home/root,dir"`, "no-port-forwarding"}, "user@host", "", true}, }) } @@ -314,11 +359,11 @@ func TestAuthWithQuotedQuoteInEnv(t *testing.T) { pub, pubSerialized := getTestKey() authWithQuotedQuoteInEnv := []byte(`env="HOME=/home/\"root dir",no-port-forwarding` + "\t" + `ssh-rsa` + "\t" + pubSerialized + ` user@host`) authWithDoubleQuotedQuote := []byte(`no-port-forwarding,env="HOME=/home/ \"root dir\"" ssh-rsa ` + pubSerialized + "\t" + `user@host`) - testAuthorizedKeys(t, []byte(authWithQuotedQuoteInEnv), []authResult{ + testAuthorizedKeys(t, []byte(authWithQuotedQuoteInEnv), []testAuthResult{ {pub, []string{`env="HOME=/home/\"root dir"`, "no-port-forwarding"}, "user@host", "", 
true}, }) - testAuthorizedKeys(t, []byte(authWithDoubleQuotedQuote), []authResult{ + testAuthorizedKeys(t, []byte(authWithDoubleQuotedQuote), []testAuthResult{ {pub, []string{"no-port-forwarding", `env="HOME=/home/ \"root dir\""`}, "user@host", "", true}, }) } @@ -327,7 +372,7 @@ func TestAuthWithInvalidSpace(t *testing.T) { _, pubSerialized := getTestKey() authWithInvalidSpace := []byte(`env="HOME=/home/root dir", no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host #more to follow but still no valid keys`) - testAuthorizedKeys(t, []byte(authWithInvalidSpace), []authResult{ + testAuthorizedKeys(t, []byte(authWithInvalidSpace), []testAuthResult{ {nil, nil, "", "", false}, }) } @@ -337,7 +382,7 @@ func TestAuthWithMissingQuote(t *testing.T) { authWithMissingQuote := []byte(`env="HOME=/home/root,no-port-forwarding ssh-rsa ` + pubSerialized + ` user@host env="HOME=/home/root",shared-control ssh-rsa ` + pubSerialized + ` user@host`) - testAuthorizedKeys(t, []byte(authWithMissingQuote), []authResult{ + testAuthorizedKeys(t, []byte(authWithMissingQuote), []testAuthResult{ {pub, []string{`env="HOME=/home/root"`, `shared-control`}, "user@host", "", true}, }) } @@ -498,3 +543,32 @@ func TestFingerprintSHA256(t *testing.T) { t.Errorf("got fingerprint %q want %q", fingerprint, want) } } + +func TestInvalidKeys(t *testing.T) { + keyTypes := []string{ + "RSA PRIVATE KEY", + "PRIVATE KEY", + "EC PRIVATE KEY", + "DSA PRIVATE KEY", + "OPENSSH PRIVATE KEY", + } + + for _, keyType := range keyTypes { + for _, dataLen := range []int{0, 1, 2, 5, 10, 20} { + data := make([]byte, dataLen) + if _, err := io.ReadFull(rand.Reader, data); err != nil { + t.Fatal(err) + } + + var buf bytes.Buffer + pem.Encode(&buf, &pem.Block{ + Type: keyType, + Bytes: data, + }) + + // This test is just to ensure that the function + // doesn't panic so the return value is ignored. + ParseRawPrivateKey(buf.Bytes()) + } + } +} diff --git a/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts.go b/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts.go index ea92b29..260cfe5 100644 --- a/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts.go +++ b/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts.go @@ -2,8 +2,9 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package knownhosts implements a parser for the OpenSSH -// known_hosts host key database. +// Package knownhosts implements a parser for the OpenSSH known_hosts +// host key database, and provides utility functions for writing +// OpenSSH compliant known_hosts files. 
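For context, a minimal client sketch using this package as the HostKeyCallback (the known_hosts path, user, password and server address are placeholders):

package main

import (
	"log"

	"golang.org/x/crypto/ssh"
	"golang.org/x/crypto/ssh/knownhosts"
)

func main() {
	// Any OpenSSH-format known_hosts file works here.
	hostKeyCallback, err := knownhosts.New("/home/user/.ssh/known_hosts")
	if err != nil {
		log.Fatal(err)
	}
	cfg := &ssh.ClientConfig{
		User:            "demo",
		Auth:            []ssh.AuthMethod{ssh.Password("secret")},
		HostKeyCallback: hostKeyCallback,
	}
	client, err := ssh.Dial("tcp", "server.org:22", cfg)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
}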
package knownhosts import ( @@ -38,7 +39,7 @@ func (a *addr) String() string { } type matcher interface { - match([]addr) bool + match(addr) bool } type hostPattern struct { @@ -57,19 +58,16 @@ func (p *hostPattern) String() string { type hostPatterns []hostPattern -func (ps hostPatterns) match(addrs []addr) bool { +func (ps hostPatterns) match(a addr) bool { matched := false for _, p := range ps { - for _, a := range addrs { - m := p.match(a) - if !m { - continue - } - if p.negate { - return false - } - matched = true + if !p.match(a) { + continue } + if p.negate { + return false + } + matched = true } return matched } @@ -108,8 +106,8 @@ func wildcardMatch(pat []byte, str []byte) bool { } } -func (l *hostPattern) match(a addr) bool { - return wildcardMatch([]byte(l.addr.host), []byte(a.host)) && l.addr.port == a.port +func (p *hostPattern) match(a addr) bool { + return wildcardMatch([]byte(p.addr.host), []byte(a.host)) && p.addr.port == a.port } type keyDBLine struct { @@ -122,8 +120,8 @@ func serialize(k ssh.PublicKey) string { return k.Type() + " " + base64.StdEncoding.EncodeToString(k.Marshal()) } -func (l *keyDBLine) match(addrs []addr) bool { - return l.matcher.match(addrs) +func (l *keyDBLine) match(a addr) bool { + return l.matcher.match(a) } type hostKeyDB struct { @@ -153,7 +151,7 @@ func (db *hostKeyDB) IsHostAuthority(remote ssh.PublicKey, address string) bool a := addr{host: h, port: p} for _, l := range db.lines { - if l.cert && keyEq(l.knownKey.Key, remote) && l.match([]addr{a}) { + if l.cert && keyEq(l.knownKey.Key, remote) && l.match(a) { return true } } @@ -338,26 +336,24 @@ func (db *hostKeyDB) check(address string, remote net.Addr, remoteKey ssh.Public return fmt.Errorf("knownhosts: SplitHostPort(%s): %v", remote, err) } - addrs := []addr{ - {host, port}, - } - + hostToCheck := addr{host, port} if address != "" { + // Give preference to the hostname if available. host, port, err := net.SplitHostPort(address) if err != nil { return fmt.Errorf("knownhosts: SplitHostPort(%s): %v", address, err) } - addrs = append(addrs, addr{host, port}) + hostToCheck = addr{host, port} } - return db.checkAddrs(addrs, remoteKey) + return db.checkAddr(hostToCheck, remoteKey) } -// checkAddrs checks if we can find the given public key for any of -// the given addresses. If we only find an entry for the IP address, +// checkAddr checks if we can find the given public key for the +// given address. If we only find an entry for the IP address, // or only the hostname, then this still succeeds. -func (db *hostKeyDB) checkAddrs(addrs []addr, remoteKey ssh.PublicKey) error { +func (db *hostKeyDB) checkAddr(a addr, remoteKey ssh.PublicKey) error { // TODO(hanwen): are these the right semantics? What if there // is just a key for the IP address, but not for the // hostname? @@ -365,7 +361,7 @@ func (db *hostKeyDB) checkAddrs(addrs []addr, remoteKey ssh.PublicKey) error { // Algorithm => key. knownKeys := map[string]KnownKey{} for _, l := range db.lines { - if l.match(addrs) { + if l.match(a) { typ := l.knownKey.Key.Type() if _, ok := knownKeys[typ]; !ok { knownKeys[typ] = l.knownKey @@ -414,7 +410,10 @@ func (db *hostKeyDB) Read(r io.Reader, filename string) error { // New creates a host key callback from the given OpenSSH host key // files. The returned callback is for use in -// ssh.ClientConfig.HostKeyCallback. Hashed hostnames are not supported. +// ssh.ClientConfig.HostKeyCallback. By preference, the key check +// operates on the hostname if available, i.e. 
if a server changes its +// IP address, the host key check will still succeed, even though a +// record of the new IP address is not available. func New(files ...string) (ssh.HostKeyCallback, error) { db := newHostKeyDB() for _, fn := range files { @@ -536,11 +535,6 @@ func newHashedHost(encoded string) (*hashedHost, error) { return &hashedHost{salt: salt, hash: hash}, nil } -func (h *hashedHost) match(addrs []addr) bool { - for _, a := range addrs { - if bytes.Equal(hashHost(Normalize(a.String()), h.salt), h.hash) { - return true - } - } - return false +func (h *hashedHost) match(a addr) bool { + return bytes.Equal(hashHost(Normalize(a.String()), h.salt), h.hash) } diff --git a/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts_test.go b/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts_test.go index be7cc0e..464dd59 100644 --- a/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts_test.go +++ b/vendor/golang.org/x/crypto/ssh/knownhosts/knownhosts_test.go @@ -166,7 +166,7 @@ func TestBasic(t *testing.T) { str := fmt.Sprintf("#comment\n\nserver.org,%s %s\notherhost %s", testAddr, edKeyStr, ecKeyStr) db := testDB(t, str) if err := db.check("server.org:22", testAddr, edKey); err != nil { - t.Errorf("got error %q, want none", err) + t.Errorf("got error %v, want none", err) } want := KnownKey{ @@ -185,6 +185,33 @@ func TestBasic(t *testing.T) { } } +func TestHostNamePrecedence(t *testing.T) { + var evilAddr = &net.TCPAddr{ + IP: net.IP{66, 66, 66, 66}, + Port: 22, + } + + str := fmt.Sprintf("server.org,%s %s\nevil.org,%s %s", testAddr, edKeyStr, evilAddr, ecKeyStr) + db := testDB(t, str) + + if err := db.check("server.org:22", evilAddr, ecKey); err == nil { + t.Errorf("check succeeded") + } else if _, ok := err.(*KeyError); !ok { + t.Errorf("got %T, want *KeyError", err) + } +} + +func TestDBOrderingPrecedenceKeyType(t *testing.T) { + str := fmt.Sprintf("server.org,%s %s\nserver.org,%s %s", testAddr, edKeyStr, testAddr, alternateEdKeyStr) + db := testDB(t, str) + + if err := db.check("server.org:22", testAddr, alternateEdKey); err == nil { + t.Errorf("check succeeded") + } else if _, ok := err.(*KeyError); !ok { + t.Errorf("got %T, want *KeyError", err) + } +} + func TestNegate(t *testing.T) { str := fmt.Sprintf("%s,!server.org %s", testAddr, edKeyStr) db := testDB(t, str) diff --git a/vendor/golang.org/x/crypto/ssh/messages.go b/vendor/golang.org/x/crypto/ssh/messages.go index e6ecd3a..08d2811 100644 --- a/vendor/golang.org/x/crypto/ssh/messages.go +++ b/vendor/golang.org/x/crypto/ssh/messages.go @@ -23,10 +23,6 @@ const ( msgUnimplemented = 3 msgDebug = 4 msgNewKeys = 21 - - // Standard authentication messages - msgUserAuthSuccess = 52 - msgUserAuthBanner = 53 ) // SSH messages: @@ -137,6 +133,18 @@ type userAuthFailureMsg struct { PartialSuccess bool } +// See RFC 4252, section 5.1 +const msgUserAuthSuccess = 52 + +// See RFC 4252, section 5.4 +const msgUserAuthBanner = 53 + +type userAuthBannerMsg struct { + Message string `sshtype:"53"` + // unused, but required to allow message parsing + Language string +} + // See RFC 4256, section 3.2 const msgUserAuthInfoRequest = 60 const msgUserAuthInfoResponse = 61 @@ -154,7 +162,7 @@ const msgChannelOpen = 90 type channelOpenMsg struct { ChanType string `sshtype:"90"` - PeersId uint32 + PeersID uint32 PeersWindow uint32 MaxPacketSize uint32 TypeSpecificData []byte `ssh:"rest"` @@ -165,7 +173,7 @@ const msgChannelData = 94 // Used for debug print outs of packets. 
type channelDataMsg struct { - PeersId uint32 `sshtype:"94"` + PeersID uint32 `sshtype:"94"` Length uint32 Rest []byte `ssh:"rest"` } @@ -174,8 +182,8 @@ type channelDataMsg struct { const msgChannelOpenConfirm = 91 type channelOpenConfirmMsg struct { - PeersId uint32 `sshtype:"91"` - MyId uint32 + PeersID uint32 `sshtype:"91"` + MyID uint32 MyWindow uint32 MaxPacketSize uint32 TypeSpecificData []byte `ssh:"rest"` @@ -185,7 +193,7 @@ type channelOpenConfirmMsg struct { const msgChannelOpenFailure = 92 type channelOpenFailureMsg struct { - PeersId uint32 `sshtype:"92"` + PeersID uint32 `sshtype:"92"` Reason RejectionReason Message string Language string @@ -194,7 +202,7 @@ type channelOpenFailureMsg struct { const msgChannelRequest = 98 type channelRequestMsg struct { - PeersId uint32 `sshtype:"98"` + PeersID uint32 `sshtype:"98"` Request string WantReply bool RequestSpecificData []byte `ssh:"rest"` @@ -204,28 +212,28 @@ type channelRequestMsg struct { const msgChannelSuccess = 99 type channelRequestSuccessMsg struct { - PeersId uint32 `sshtype:"99"` + PeersID uint32 `sshtype:"99"` } // See RFC 4254, section 5.4. const msgChannelFailure = 100 type channelRequestFailureMsg struct { - PeersId uint32 `sshtype:"100"` + PeersID uint32 `sshtype:"100"` } // See RFC 4254, section 5.3 const msgChannelClose = 97 type channelCloseMsg struct { - PeersId uint32 `sshtype:"97"` + PeersID uint32 `sshtype:"97"` } // See RFC 4254, section 5.3 const msgChannelEOF = 96 type channelEOFMsg struct { - PeersId uint32 `sshtype:"96"` + PeersID uint32 `sshtype:"96"` } // See RFC 4254, section 4 @@ -255,7 +263,7 @@ type globalRequestFailureMsg struct { const msgChannelWindowAdjust = 93 type windowAdjustMsg struct { - PeersId uint32 `sshtype:"93"` + PeersID uint32 `sshtype:"93"` AdditionalBytes uint32 } diff --git a/vendor/golang.org/x/crypto/ssh/mux.go b/vendor/golang.org/x/crypto/ssh/mux.go index 27a527c..f190162 100644 --- a/vendor/golang.org/x/crypto/ssh/mux.go +++ b/vendor/golang.org/x/crypto/ssh/mux.go @@ -278,7 +278,7 @@ func (m *mux) handleChannelOpen(packet []byte) error { if msg.MaxPacketSize < minPacketLength || msg.MaxPacketSize > 1<<31 { failMsg := channelOpenFailureMsg{ - PeersId: msg.PeersId, + PeersID: msg.PeersID, Reason: ConnectionFailed, Message: "invalid request", Language: "en_US.UTF-8", @@ -287,7 +287,7 @@ func (m *mux) handleChannelOpen(packet []byte) error { } c := m.newChannel(msg.ChanType, channelInbound, msg.TypeSpecificData) - c.remoteId = msg.PeersId + c.remoteId = msg.PeersID c.maxRemotePayload = msg.MaxPacketSize c.remoteWin.add(msg.PeersWindow) m.incomingChannels <- c @@ -313,7 +313,7 @@ func (m *mux) openChannel(chanType string, extra []byte) (*channel, error) { PeersWindow: ch.myWindow, MaxPacketSize: ch.maxIncomingPayload, TypeSpecificData: extra, - PeersId: ch.localId, + PeersID: ch.localId, } if err := m.sendMessage(open); err != nil { return nil, err diff --git a/vendor/golang.org/x/crypto/ssh/mux_test.go b/vendor/golang.org/x/crypto/ssh/mux_test.go index 25d2181..94596ec 100644 --- a/vendor/golang.org/x/crypto/ssh/mux_test.go +++ b/vendor/golang.org/x/crypto/ssh/mux_test.go @@ -20,7 +20,7 @@ func muxPair() (*mux, *mux) { return s, c } -// Returns both ends of a channel, and the mux for the the 2nd +// Returns both ends of a channel, and the mux for the 2nd // channel. 
func channelPair(t *testing.T) (*channel, *channel, *mux) { c, s := muxPair() @@ -108,10 +108,6 @@ func TestMuxReadWrite(t *testing.T) { if err != nil { t.Fatalf("Write: %v", err) } - err = s.Close() - if err != nil { - t.Fatalf("Close: %v", err) - } }() var buf [1024]byte diff --git a/vendor/golang.org/x/crypto/ssh/server.go b/vendor/golang.org/x/crypto/ssh/server.go index 8a78b7c..e86e896 100644 --- a/vendor/golang.org/x/crypto/ssh/server.go +++ b/vendor/golang.org/x/crypto/ssh/server.go @@ -95,6 +95,10 @@ type ServerConfig struct { // Note that RFC 4253 section 4.2 requires that this string start with // "SSH-2.0-". ServerVersion string + + // BannerCallback, if present, is called and the return string is sent to + // the client after key exchange completed but before authentication. + BannerCallback func(conn ConnMetadata) string } // AddHostKey adds a private key as a host key. If an existing host @@ -162,6 +166,9 @@ type ServerConn struct { // unsuccessful, it closes the connection and returns an error. The // Request and NewChannel channels must be serviced, or the connection // will hang. +// +// The returned error may be of type *ServerAuthError for +// authentication errors. func NewServerConn(c net.Conn, config *ServerConfig) (*ServerConn, <-chan NewChannel, <-chan *Request, error) { fullConf := *config fullConf.SetDefaults() @@ -252,7 +259,7 @@ func (s *connection) serverHandshake(config *ServerConfig) (*Permissions, error) func isAcceptableAlgo(algo string) bool { switch algo { case KeyAlgoRSA, KeyAlgoDSA, KeyAlgoECDSA256, KeyAlgoECDSA384, KeyAlgoECDSA521, KeyAlgoED25519, - CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01, CertAlgoECDSA384v01, CertAlgoECDSA521v01: + CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01, CertAlgoECDSA384v01, CertAlgoECDSA521v01, CertAlgoED25519v01: return true } return false @@ -288,12 +295,13 @@ func checkSourceAddress(addr net.Addr, sourceAddrs string) error { return fmt.Errorf("ssh: remote address %v is not allowed because of source-address restriction", addr) } -// ServerAuthError implements the error interface. It appends any authentication -// errors that may occur, and is returned if all of the authentication methods -// provided by the user failed to authenticate. +// ServerAuthError represents server authentication errors and is +// sometimes returned by NewServerConn. It appends any authentication +// errors that may occur, and is returned if all of the authentication +// methods provided by the user failed to authenticate. type ServerAuthError struct { // Errors contains authentication errors returned by the authentication - // callback methods. + // callback methods. The first entry is typically ErrNoAuth. Errors []error } @@ -305,6 +313,13 @@ func (l ServerAuthError) Error() string { return "[" + strings.Join(errs, ", ") + "]" } +// ErrNoAuth is the error value returned if no +// authentication method has been passed yet. This happens as a normal +// part of the authentication loop, since the client first tries +// 'none' authentication to discover available methods. +// It is returned in ServerAuthError.Errors from NewServerConn. 
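A hedged server-side sketch of how that sentinel can be used (listener setup, host keys and auth callbacks are assumed to exist elsewhere): when NewServerConn fails, the *ServerAuthError list can be filtered so the routine "none" probe is not logged as a real failure.

package sshserver

import (
	"log"
	"net"

	"golang.org/x/crypto/ssh"
)

// handleConn reports genuine authentication failures while ignoring the
// client's initial "none" probe, which surfaces as ErrNoAuth.
func handleConn(conn net.Conn, config *ssh.ServerConfig) {
	_, chans, reqs, err := ssh.NewServerConn(conn, config)
	if err != nil {
		if authErr, ok := err.(*ssh.ServerAuthError); ok {
			for _, e := range authErr.Errors {
				if e == ssh.ErrNoAuth {
					continue // expected first attempt, not a real failure
				}
				log.Printf("auth attempt failed: %v", e)
			}
		}
		return
	}
	go ssh.DiscardRequests(reqs)
	for newChan := range chans {
		newChan.Reject(ssh.UnknownChannelType, "channels not handled in this sketch")
	}
}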
+var ErrNoAuth = errors.New("ssh: no auth passed yet") + func (s *connection) serverAuthenticate(config *ServerConfig) (*Permissions, error) { sessionID := s.transport.getSessionID() var cache pubKeyCache @@ -312,6 +327,7 @@ func (s *connection) serverAuthenticate(config *ServerConfig) (*Permissions, err authFailures := 0 var authErrs []error + var displayedBanner bool userAuthLoop: for { @@ -343,8 +359,22 @@ userAuthLoop: } s.user = userAuthReq.User + + if !displayedBanner && config.BannerCallback != nil { + displayedBanner = true + msg := config.BannerCallback(s) + if msg != "" { + bannerMsg := &userAuthBannerMsg{ + Message: msg, + } + if err := s.transport.writePacket(Marshal(bannerMsg)); err != nil { + return nil, err + } + } + } + perms = nil - authErr := errors.New("no auth passed yet") + authErr := ErrNoAuth switch userAuthReq.Method { case "none": @@ -374,7 +404,7 @@ userAuthLoop: perms, authErr = config.PasswordCallback(s, password) case "keyboard-interactive": if config.KeyboardInteractiveCallback == nil { - authErr = errors.New("ssh: keyboard-interactive auth not configubred") + authErr = errors.New("ssh: keyboard-interactive auth not configured") break } @@ -454,6 +484,7 @@ userAuthLoop: // sig.Format. This is usually the same, but // for certs, the names differ. if !isAcceptableAlgo(sig.Format) { + authErr = fmt.Errorf("ssh: algorithm %q not accepted", sig.Format) break } signedData := buildDataSignedForAuth(sessionID, userAuthReq, algoBytes, pubKeyData) diff --git a/vendor/golang.org/x/crypto/ssh/session.go b/vendor/golang.org/x/crypto/ssh/session.go index cc06e03..d3321f6 100644 --- a/vendor/golang.org/x/crypto/ssh/session.go +++ b/vendor/golang.org/x/crypto/ssh/session.go @@ -406,7 +406,7 @@ func (s *Session) Wait() error { s.stdinPipeWriter.Close() } var copyError error - for _ = range s.copyFuncs { + for range s.copyFuncs { if err := <-s.errors; err != nil && copyError == nil { copyError = err } diff --git a/vendor/golang.org/x/crypto/ssh/streamlocal.go b/vendor/golang.org/x/crypto/ssh/streamlocal.go index a2dccc6..b171b33 100644 --- a/vendor/golang.org/x/crypto/ssh/streamlocal.go +++ b/vendor/golang.org/x/crypto/ssh/streamlocal.go @@ -32,6 +32,7 @@ type streamLocalChannelForwardMsg struct { // ListenUnix is similar to ListenTCP but uses a Unix domain socket. func (c *Client) ListenUnix(socketPath string) (net.Listener, error) { + c.handleForwardsOnce.Do(c.handleForwards) m := streamLocalChannelForwardMsg{ socketPath, } diff --git a/vendor/golang.org/x/crypto/ssh/tcpip.go b/vendor/golang.org/x/crypto/ssh/tcpip.go index acf1717..80d35f5 100644 --- a/vendor/golang.org/x/crypto/ssh/tcpip.go +++ b/vendor/golang.org/x/crypto/ssh/tcpip.go @@ -90,10 +90,19 @@ type channelForwardMsg struct { rport uint32 } +// handleForwards starts goroutines handling forwarded connections. +// It's called on first use by (*Client).ListenTCP to not launch +// goroutines until needed. +func (c *Client) handleForwards() { + go c.forwards.handleChannels(c.HandleChannelOpen("forwarded-tcpip")) + go c.forwards.handleChannels(c.HandleChannelOpen("forwarded-streamlocal@openssh.com")) +} + // ListenTCP requests the remote peer open a listening socket // on laddr. Incoming connections will be available by calling // Accept on the returned net.Listener. 
func (c *Client) ListenTCP(laddr *net.TCPAddr) (net.Listener, error) { + c.handleForwardsOnce.Do(c.handleForwards) if laddr.Port == 0 && isBrokenOpenSSHVersion(string(c.ServerVersion())) { return c.autoPortListenWorkaround(laddr) } diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go index 18379a9..2f04ee5 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go @@ -7,6 +7,7 @@ package terminal import ( "bytes" "io" + "strconv" "sync" "unicode/utf8" ) @@ -159,6 +160,10 @@ func bytesToKey(b []byte, pasteActive bool) (rune, []byte) { return keyClearScreen, b[1:] case 23: // ^W return keyDeleteWord, b[1:] + case 14: // ^N + return keyDown, b[1:] + case 16: // ^P + return keyUp, b[1:] } } @@ -267,34 +272,44 @@ func (t *Terminal) moveCursorToPos(pos int) { } func (t *Terminal) move(up, down, left, right int) { - movement := make([]rune, 3*(up+down+left+right)) - m := movement - for i := 0; i < up; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'A' - m = m[3:] + m := []rune{} + + // 1 unit up can be expressed as ^[[A or ^[A + // 5 units up can be expressed as ^[[5A + + if up == 1 { + m = append(m, keyEscape, '[', 'A') + } else if up > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(up))...) + m = append(m, 'A') } - for i := 0; i < down; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'B' - m = m[3:] + + if down == 1 { + m = append(m, keyEscape, '[', 'B') + } else if down > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(down))...) + m = append(m, 'B') } - for i := 0; i < left; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'D' - m = m[3:] + + if right == 1 { + m = append(m, keyEscape, '[', 'C') + } else if right > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(right))...) + m = append(m, 'C') } - for i := 0; i < right; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'C' - m = m[3:] + + if left == 1 { + m = append(m, keyEscape, '[', 'D') + } else if left > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(left))...) + m = append(m, 'D') } - t.queue(movement) + t.queue(m) } func (t *Terminal) clearLineToRight() { @@ -617,7 +632,7 @@ func writeWithCRLF(w io.Writer, buf []byte) (n int, err error) { if _, err = w.Write(crlf); err != nil { return n, err } - n += 1 + n++ buf = buf[1:] } } diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go index 901c72a..4e7a0c6 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go @@ -2,12 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// +build aix darwin dragonfly freebsd linux,!appengine netbsd openbsd windows plan9 solaris + package terminal import ( "bytes" "io" "os" + "runtime" "testing" ) @@ -88,6 +91,12 @@ var keyPressTests = []struct { { in: "\x1b[B\r", // down }, + { + in: "\016\r", // ^P + }, + { + in: "\014\r", // ^N + }, { in: "line\x1b[A\x1b[B\r", // up then down line: "line", @@ -228,6 +237,49 @@ func TestKeyPresses(t *testing.T) { } } +var renderTests = []struct { + in string + received string + err error +}{ + { + // Cursor move after keyHome (left 4) then enter (right 4, newline) + in: "abcd\x1b[H\r", + received: "> abcd\x1b[4D\x1b[4C\r\n", + }, + { + // Write, home, prepend, enter. Prepends rewrites the line. 
+ in: "cdef\x1b[Hab\r", + received: "> cdef" + // Initial input + "\x1b[4Da" + // Move cursor back, insert first char + "cdef" + // Copy over original string + "\x1b[4Dbcdef" + // Repeat for second char with copy + "\x1b[4D" + // Put cursor back in position to insert again + "\x1b[4C\r\n", // Put cursor at the end of the line and newline. + }, +} + +func TestRender(t *testing.T) { + for i, test := range renderTests { + for j := 1; j < len(test.in); j++ { + c := &MockTerminal{ + toSend: []byte(test.in), + bytesPerRead: j, + } + ss := NewTerminal(c, "> ") + _, err := ss.ReadLine() + if err != test.err { + t.Errorf("Error resulting from test %d (%d bytes per read) was '%v', expected '%v'", i, j, err, test.err) + break + } + if test.received != string(c.received) { + t.Errorf("Results rendered from test %d (%d bytes per read) was '%s', expected '%s'", i, j, c.received, test.received) + break + } + } + } +} + func TestPasswordNotSaved(t *testing.T) { c := &MockTerminal{ toSend: []byte("password\r\x1b[A\r"), @@ -324,6 +376,11 @@ func TestMakeRawState(t *testing.T) { if err != nil { t.Fatalf("failed to get terminal state from GetState: %s", err) } + + if runtime.GOOS == "darwin" && (runtime.GOARCH == "arm" || runtime.GOARCH == "arm64") { + t.Skip("MakeRaw not allowed on iOS; skipping test") + } + defer Restore(fd, st) raw, err := MakeRaw(fd) if err != nil { diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util.go b/vendor/golang.org/x/crypto/ssh/terminal/util.go index d019196..3911040 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/util.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build darwin dragonfly freebsd linux,!appengine netbsd openbsd +// +build aix darwin dragonfly freebsd linux,!appengine netbsd openbsd // Package terminal provides support functions for dealing with terminals, as // commonly found on UNIX systems. @@ -17,40 +17,41 @@ package terminal // import "golang.org/x/crypto/ssh/terminal" import ( - "syscall" - "unsafe" + "golang.org/x/sys/unix" ) // State contains the state of a terminal. type State struct { - termios syscall.Termios + termios unix.Termios } -// IsTerminal returns true if the given file descriptor is a terminal. +// IsTerminal returns whether the given file descriptor is a terminal. func IsTerminal(fd int) bool { - var termios syscall.Termios - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) - return err == 0 + _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + return err == nil } // MakeRaw put the terminal connected to the given file descriptor into raw // mode and returns the previous state of the terminal so that it can be // restored. func MakeRaw(fd int) (*State, error) { - var oldState State - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 { + termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + if err != nil { return nil, err } - newState := oldState.termios + oldState := State{termios: *termios} + // This attempts to replicate the behaviour documented for cfmakeraw in // the termios(3) manpage. 
- newState.Iflag &^= syscall.IGNBRK | syscall.BRKINT | syscall.PARMRK | syscall.ISTRIP | syscall.INLCR | syscall.IGNCR | syscall.ICRNL | syscall.IXON - newState.Oflag &^= syscall.OPOST - newState.Lflag &^= syscall.ECHO | syscall.ECHONL | syscall.ICANON | syscall.ISIG | syscall.IEXTEN - newState.Cflag &^= syscall.CSIZE | syscall.PARENB - newState.Cflag |= syscall.CS8 - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 { + termios.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP | unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON + termios.Oflag &^= unix.OPOST + termios.Lflag &^= unix.ECHO | unix.ECHONL | unix.ICANON | unix.ISIG | unix.IEXTEN + termios.Cflag &^= unix.CSIZE | unix.PARENB + termios.Cflag |= unix.CS8 + termios.Cc[unix.VMIN] = 1 + termios.Cc[unix.VTIME] = 0 + if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, termios); err != nil { return nil, err } @@ -60,60 +61,54 @@ func MakeRaw(fd int) (*State, error) { // GetState returns the current state of a terminal which may be useful to // restore the terminal after a signal. func GetState(fd int) (*State, error) { - var oldState State - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 { + termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + if err != nil { return nil, err } - return &oldState, nil + return &State{termios: *termios}, nil } // Restore restores the terminal connected to the given file descriptor to a // previous state. func Restore(fd int, state *State) error { - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&state.termios)), 0, 0, 0); err != 0 { - return err - } - return nil + return unix.IoctlSetTermios(fd, ioctlWriteTermios, &state.termios) } // GetSize returns the dimensions of the given terminal. func GetSize(fd int) (width, height int, err error) { - var dimensions [4]uint16 - - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(syscall.TIOCGWINSZ), uintptr(unsafe.Pointer(&dimensions)), 0, 0, 0); err != 0 { + ws, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ) + if err != nil { return -1, -1, err } - return int(dimensions[1]), int(dimensions[0]), nil + return int(ws.Col), int(ws.Row), nil } // passwordReader is an io.Reader that reads from a specific file descriptor. type passwordReader int func (r passwordReader) Read(buf []byte) (int, error) { - return syscall.Read(int(r), buf) + return unix.Read(int(r), buf) } // ReadPassword reads a line of input from a terminal without local echo. This // is commonly used for inputting passwords and other sensitive data. The slice // returned does not include the \n. 
func ReadPassword(fd int) ([]byte, error) { - var oldState syscall.Termios - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0); err != 0 { + termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + if err != nil { return nil, err } - newState := oldState - newState.Lflag &^= syscall.ECHO - newState.Lflag |= syscall.ICANON | syscall.ISIG - newState.Iflag |= syscall.ICRNL - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 { + newState := *termios + newState.Lflag &^= unix.ECHO + newState.Lflag |= unix.ICANON | unix.ISIG + newState.Iflag |= unix.ICRNL + if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, &newState); err != nil { return nil, err } - defer func() { - syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0) - }() + defer unix.IoctlSetTermios(fd, ioctlWriteTermios, termios) return readPasswordLine(passwordReader(fd)) } diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_aix.go b/vendor/golang.org/x/crypto/ssh/terminal/util_aix.go new file mode 100644 index 0000000..dfcd627 --- /dev/null +++ b/vendor/golang.org/x/crypto/ssh/terminal/util_aix.go @@ -0,0 +1,12 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build aix + +package terminal + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TCGETS +const ioctlWriteTermios = unix.TCSETS diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go b/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go index 799f049..9317ac7 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go @@ -21,7 +21,7 @@ import ( type State struct{} -// IsTerminal returns true if the given file descriptor is a terminal. +// IsTerminal returns whether the given file descriptor is a terminal. func IsTerminal(fd int) bool { return false } diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go b/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go index a2e1b57..3d5f06a 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go @@ -14,10 +14,10 @@ import ( // State contains the state of a terminal. type State struct { - state *unix.Termios + termios unix.Termios } -// IsTerminal returns true if the given file descriptor is a terminal. +// IsTerminal returns whether the given file descriptor is a terminal. func IsTerminal(fd int) bool { _, err := unix.IoctlGetTermio(fd, unix.TCGETA) return err == nil @@ -75,47 +75,43 @@ func ReadPassword(fd int) ([]byte, error) { // restored. 
// see http://cr.illumos.org/~webrev/andy_js/1060/ func MakeRaw(fd int) (*State, error) { - oldTermiosPtr, err := unix.IoctlGetTermios(fd, unix.TCGETS) + termios, err := unix.IoctlGetTermios(fd, unix.TCGETS) if err != nil { return nil, err } - oldTermios := *oldTermiosPtr - - newTermios := oldTermios - newTermios.Iflag &^= syscall.IGNBRK | syscall.BRKINT | syscall.PARMRK | syscall.ISTRIP | syscall.INLCR | syscall.IGNCR | syscall.ICRNL | syscall.IXON - newTermios.Oflag &^= syscall.OPOST - newTermios.Lflag &^= syscall.ECHO | syscall.ECHONL | syscall.ICANON | syscall.ISIG | syscall.IEXTEN - newTermios.Cflag &^= syscall.CSIZE | syscall.PARENB - newTermios.Cflag |= syscall.CS8 - newTermios.Cc[unix.VMIN] = 1 - newTermios.Cc[unix.VTIME] = 0 - - if err := unix.IoctlSetTermios(fd, unix.TCSETS, &newTermios); err != nil { + + oldState := State{termios: *termios} + + termios.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP | unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON + termios.Oflag &^= unix.OPOST + termios.Lflag &^= unix.ECHO | unix.ECHONL | unix.ICANON | unix.ISIG | unix.IEXTEN + termios.Cflag &^= unix.CSIZE | unix.PARENB + termios.Cflag |= unix.CS8 + termios.Cc[unix.VMIN] = 1 + termios.Cc[unix.VTIME] = 0 + + if err := unix.IoctlSetTermios(fd, unix.TCSETS, termios); err != nil { return nil, err } - return &State{ - state: oldTermiosPtr, - }, nil + return &oldState, nil } // Restore restores the terminal connected to the given file descriptor to a // previous state. func Restore(fd int, oldState *State) error { - return unix.IoctlSetTermios(fd, unix.TCSETS, oldState.state) + return unix.IoctlSetTermios(fd, unix.TCSETS, &oldState.termios) } // GetState returns the current state of a terminal which may be useful to // restore the terminal after a signal. func GetState(fd int) (*State, error) { - oldTermiosPtr, err := unix.IoctlGetTermios(fd, unix.TCGETS) + termios, err := unix.IoctlGetTermios(fd, unix.TCGETS) if err != nil { return nil, err } - return &State{ - state: oldTermiosPtr, - }, nil + return &State{termios: *termios}, nil } // GetSize returns the dimensions of the given terminal. diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go b/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go index 60979cc..5cfdf8f 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go @@ -17,6 +17,8 @@ package terminal import ( + "os" + "golang.org/x/sys/windows" ) @@ -24,7 +26,7 @@ type State struct { mode uint32 } -// IsTerminal returns true if the given file descriptor is a terminal. +// IsTerminal returns whether the given file descriptor is a terminal. func IsTerminal(fd int) bool { var st uint32 err := windows.GetConsoleMode(windows.Handle(fd), &st) @@ -62,20 +64,15 @@ func Restore(fd int, state *State) error { return windows.SetConsoleMode(windows.Handle(fd), state.mode) } -// GetSize returns the dimensions of the given terminal. +// GetSize returns the visible dimensions of the given terminal. +// +// These dimensions don't include any scrollback buffer height. func GetSize(fd int) (width, height int, err error) { var info windows.ConsoleScreenBufferInfo if err := windows.GetConsoleScreenBufferInfo(windows.Handle(fd), &info); err != nil { return 0, 0, err } - return int(info.Size.X), int(info.Size.Y), nil -} - -// passwordReader is an io.Reader that reads from a specific Windows HANDLE. 
-type passwordReader int - -func (r passwordReader) Read(buf []byte) (int, error) { - return windows.Read(windows.Handle(r), buf) + return int(info.Window.Right - info.Window.Left + 1), int(info.Window.Bottom - info.Window.Top + 1), nil } // ReadPassword reads a line of input from a terminal without local echo. This @@ -94,9 +91,15 @@ func ReadPassword(fd int) ([]byte, error) { return nil, err } - defer func() { - windows.SetConsoleMode(windows.Handle(fd), old) - }() + defer windows.SetConsoleMode(windows.Handle(fd), old) + + var h windows.Handle + p, _ := windows.GetCurrentProcess() + if err := windows.DuplicateHandle(p, windows.Handle(fd), p, &h, 0, false, windows.DUPLICATE_SAME_ACCESS); err != nil { + return nil, err + } - return readPasswordLine(passwordReader(fd)) + f := os.NewFile(uintptr(h), "stdin") + defer f.Close() + return readPasswordLine(f) } diff --git a/vendor/golang.org/x/crypto/ssh/test/banner_test.go b/vendor/golang.org/x/crypto/ssh/test/banner_test.go new file mode 100644 index 0000000..d3b21ac --- /dev/null +++ b/vendor/golang.org/x/crypto/ssh/test/banner_test.go @@ -0,0 +1,32 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build darwin dragonfly freebsd linux netbsd openbsd + +package test + +import ( + "testing" +) + +func TestBannerCallbackAgainstOpenSSH(t *testing.T) { + server := newServer(t) + defer server.Shutdown() + + clientConf := clientConfig() + + var receivedBanner string + clientConf.BannerCallback = func(message string) error { + receivedBanner = message + return nil + } + + conn := server.Dial(clientConf) + defer conn.Close() + + expected := "Server Banner" + if receivedBanner != expected { + t.Fatalf("got %v; want %v", receivedBanner, expected) + } +} diff --git a/vendor/golang.org/x/crypto/ssh/test/doc.go b/vendor/golang.org/x/crypto/ssh/test/doc.go index 3f9b334..198f0ca 100644 --- a/vendor/golang.org/x/crypto/ssh/test/doc.go +++ b/vendor/golang.org/x/crypto/ssh/test/doc.go @@ -2,6 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This package contains integration tests for the +// Package test contains integration tests for the // golang.org/x/crypto/ssh package. package test // import "golang.org/x/crypto/ssh/test" diff --git a/vendor/golang.org/x/crypto/ssh/test/multi_auth_test.go b/vendor/golang.org/x/crypto/ssh/test/multi_auth_test.go new file mode 100644 index 0000000..f594d36 --- /dev/null +++ b/vendor/golang.org/x/crypto/ssh/test/multi_auth_test.go @@ -0,0 +1,144 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Tests for ssh client multi-auth +// +// These tests run a simple go ssh client against OpenSSH server +// over unix domain sockets. The tests use multiple combinations +// of password, keyboard-interactive and publickey authentication +// methods. +// +// A wrapper library for making sshd PAM authentication use test +// passwords is required in ./sshd_test_pw.so. If the library does +// not exist these tests will be skipped. See compile instructions +// (for linux) in file ./sshd_test_pw.c. 
+ +// +build linux + +package test + +import ( + "fmt" + "strings" + "testing" + + "golang.org/x/crypto/ssh" +) + +// test cases +type multiAuthTestCase struct { + authMethods []string + expectedPasswordCbs int + expectedKbdIntCbs int +} + +// test context +type multiAuthTestCtx struct { + password string + numPasswordCbs int + numKbdIntCbs int +} + +// create test context +func newMultiAuthTestCtx(t *testing.T) *multiAuthTestCtx { + password, err := randomPassword() + if err != nil { + t.Fatalf("Failed to generate random test password: %s", err.Error()) + } + + return &multiAuthTestCtx{ + password: password, + } +} + +// password callback +func (ctx *multiAuthTestCtx) passwordCb() (secret string, err error) { + ctx.numPasswordCbs++ + return ctx.password, nil +} + +// keyboard-interactive callback +func (ctx *multiAuthTestCtx) kbdIntCb(user, instruction string, questions []string, echos []bool) (answers []string, err error) { + if len(questions) == 0 { + return nil, nil + } + + ctx.numKbdIntCbs++ + if len(questions) == 1 { + return []string{ctx.password}, nil + } + + return nil, fmt.Errorf("unsupported keyboard-interactive flow") +} + +// TestMultiAuth runs several subtests for different combinations of password, keyboard-interactive and publickey authentication methods +func TestMultiAuth(t *testing.T) { + testCases := []multiAuthTestCase{ + // Test password,publickey authentication, assert that password callback is called 1 time + multiAuthTestCase{ + authMethods: []string{"password", "publickey"}, + expectedPasswordCbs: 1, + }, + // Test keyboard-interactive,publickey authentication, assert that keyboard-interactive callback is called 1 time + multiAuthTestCase{ + authMethods: []string{"keyboard-interactive", "publickey"}, + expectedKbdIntCbs: 1, + }, + // Test publickey,password authentication, assert that password callback is called 1 time + multiAuthTestCase{ + authMethods: []string{"publickey", "password"}, + expectedPasswordCbs: 1, + }, + // Test publickey,keyboard-interactive authentication, assert that keyboard-interactive callback is called 1 time + multiAuthTestCase{ + authMethods: []string{"publickey", "keyboard-interactive"}, + expectedKbdIntCbs: 1, + }, + // Test password,password authentication, assert that password callback is called 2 times + multiAuthTestCase{ + authMethods: []string{"password", "password"}, + expectedPasswordCbs: 2, + }, + } + + for _, testCase := range testCases { + t.Run(strings.Join(testCase.authMethods, ","), func(t *testing.T) { + ctx := newMultiAuthTestCtx(t) + + server := newServerForConfig(t, "MultiAuth", map[string]string{"AuthMethods": strings.Join(testCase.authMethods, ",")}) + defer server.Shutdown() + + clientConfig := clientConfig() + server.setTestPassword(clientConfig.User, ctx.password) + + publicKeyAuthMethod := clientConfig.Auth[0] + clientConfig.Auth = nil + for _, authMethod := range testCase.authMethods { + switch authMethod { + case "publickey": + clientConfig.Auth = append(clientConfig.Auth, publicKeyAuthMethod) + case "password": + clientConfig.Auth = append(clientConfig.Auth, + ssh.RetryableAuthMethod(ssh.PasswordCallback(ctx.passwordCb), 5)) + case "keyboard-interactive": + clientConfig.Auth = append(clientConfig.Auth, + ssh.RetryableAuthMethod(ssh.KeyboardInteractive(ctx.kbdIntCb), 5)) + default: + t.Fatalf("Unknown authentication method %s", authMethod) + } + } + + conn := server.Dial(clientConfig) + defer conn.Close() + + if ctx.numPasswordCbs != testCase.expectedPasswordCbs { + t.Fatalf("passwordCallback was called %d 
times, expected %d times", ctx.numPasswordCbs, testCase.expectedPasswordCbs) + } + + if ctx.numKbdIntCbs != testCase.expectedKbdIntCbs { + t.Fatalf("keyboardInteractiveCallback was called %d times, expected %d times", ctx.numKbdIntCbs, testCase.expectedKbdIntCbs) + } + }) + } +} diff --git a/vendor/golang.org/x/crypto/ssh/test/session_test.go b/vendor/golang.org/x/crypto/ssh/test/session_test.go index 8238d9d..4eb7afd 100644 --- a/vendor/golang.org/x/crypto/ssh/test/session_test.go +++ b/vendor/golang.org/x/crypto/ssh/test/session_test.go @@ -11,6 +11,7 @@ package test import ( "bytes" "errors" + "fmt" "io" "strings" "testing" @@ -324,27 +325,56 @@ func TestWindowChange(t *testing.T) { } } +func testOneCipher(t *testing.T, cipher string, cipherOrder []string) { + server := newServer(t) + defer server.Shutdown() + conf := clientConfig() + conf.Ciphers = []string{cipher} + // Don't fail if sshd doesn't have the cipher. + conf.Ciphers = append(conf.Ciphers, cipherOrder...) + conn, err := server.TryDial(conf) + if err != nil { + t.Fatalf("TryDial: %v", err) + } + defer conn.Close() + + numBytes := 4096 + + // Exercise sending data to the server + if _, _, err := conn.Conn.SendRequest("drop-me", false, make([]byte, numBytes)); err != nil { + t.Fatalf("SendRequest: %v", err) + } + + // Exercise receiving data from the server + session, err := conn.NewSession() + if err != nil { + t.Fatalf("NewSession: %v", err) + } + + out, err := session.Output(fmt.Sprintf("dd if=/dev/zero of=/dev/stdout bs=%d count=1", numBytes)) + if err != nil { + t.Fatalf("Output: %v", err) + } + + if len(out) != numBytes { + t.Fatalf("got %d bytes, want %d bytes", len(out), numBytes) + } +} + +var deprecatedCiphers = []string{ + "aes128-cbc", "3des-cbc", + "arcfour128", "arcfour256", +} + func TestCiphers(t *testing.T) { var config ssh.Config config.SetDefaults() - cipherOrder := config.Ciphers - // These ciphers will not be tested when commented out in cipher.go it will - // fallback to the next available as per line 292. - cipherOrder = append(cipherOrder, "aes128-cbc", "3des-cbc") + cipherOrder := append(config.Ciphers, deprecatedCiphers...) for _, ciph := range cipherOrder { - server := newServer(t) - defer server.Shutdown() - conf := clientConfig() - conf.Ciphers = []string{ciph} - // Don't fail if sshd doesn't have the cipher. - conf.Ciphers = append(conf.Ciphers, cipherOrder...) - conn, err := server.TryDial(conf) - if err == nil { - conn.Close() - } else { - t.Fatalf("failed for cipher %q", ciph) - } + t.Run(ciph, func(t *testing.T) { + testOneCipher(t, ciph, cipherOrder) + }) } } diff --git a/vendor/golang.org/x/crypto/ssh/test/sshd_test_pw.c b/vendor/golang.org/x/crypto/ssh/test/sshd_test_pw.c new file mode 100644 index 0000000..2794a56 --- /dev/null +++ b/vendor/golang.org/x/crypto/ssh/test/sshd_test_pw.c @@ -0,0 +1,173 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// sshd_test_pw.c +// Wrapper to inject test password data for sshd PAM authentication +// +// This wrapper implements custom versions of getpwnam, getpwnam_r, +// getspnam and getspnam_r. These functions first call their real +// libc versions, then check if the requested user matches test user +// specified in env variable TEST_USER and if so replace the password +// with crypted() value of TEST_PASSWD env variable. 
+// +// Compile: +// gcc -Wall -shared -o sshd_test_pw.so -fPIC sshd_test_pw.c +// +// Compile with debug: +// gcc -DVERBOSE -Wall -shared -o sshd_test_pw.so -fPIC sshd_test_pw.c +// +// Run sshd: +// LD_PRELOAD="sshd_test_pw.so" TEST_USER="..." TEST_PASSWD="..." sshd ... + +// +build ignore + +#define _GNU_SOURCE +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <dlfcn.h> +#include <pwd.h> +#include <shadow.h> +#include <crypt.h> + +#ifdef VERBOSE +#define DEBUG(X...) fprintf(stderr, X) +#else +#define DEBUG(X...) while (0) { } +#endif + +/* crypt() password */ +static char * +pwhash(char *passwd) { + return strdup(crypt(passwd, "$6$")); +} + +/* Pointers to real functions in libc */ +static struct passwd * (*real_getpwnam)(const char *) = NULL; +static int (*real_getpwnam_r)(const char *, struct passwd *, char *, size_t, struct passwd **) = NULL; +static struct spwd * (*real_getspnam)(const char *) = NULL; +static int (*real_getspnam_r)(const char *, struct spwd *, char *, size_t, struct spwd **) = NULL; + +/* Cached test user and test password */ +static char *test_user = NULL; +static char *test_passwd_hash = NULL; + +static void +init(void) { + /* Fetch real libc function pointers */ + real_getpwnam = dlsym(RTLD_NEXT, "getpwnam"); + real_getpwnam_r = dlsym(RTLD_NEXT, "getpwnam_r"); + real_getspnam = dlsym(RTLD_NEXT, "getspnam"); + real_getspnam_r = dlsym(RTLD_NEXT, "getspnam_r"); + + /* abort if env variables are not defined */ + if (getenv("TEST_USER") == NULL || getenv("TEST_PASSWD") == NULL) { + fprintf(stderr, "env variables TEST_USER and TEST_PASSWD are missing\n"); + abort(); + } + + /* Fetch test user and test password from env */ + test_user = strdup(getenv("TEST_USER")); + test_passwd_hash = pwhash(getenv("TEST_PASSWD")); + + DEBUG("sshd_test_pw init():\n"); + DEBUG("\treal_getpwnam: %p\n", real_getpwnam); + DEBUG("\treal_getpwnam_r: %p\n", real_getpwnam_r); + DEBUG("\treal_getspnam: %p\n", real_getspnam); + DEBUG("\treal_getspnam_r: %p\n", real_getspnam_r); + DEBUG("\tTEST_USER: '%s'\n", test_user); + DEBUG("\tTEST_PASSWD: '%s'\n", getenv("TEST_PASSWD")); + DEBUG("\tTEST_PASSWD_HASH: '%s'\n", test_passwd_hash); +} + +static int +is_test_user(const char *name) { + if (test_user != NULL && strcmp(test_user, name) == 0) + return 1; + return 0; +} + +/* getpwnam */ + +struct passwd * +getpwnam(const char *name) { + struct passwd *pw; + + DEBUG("sshd_test_pw getpwnam(%s)\n", name); + + if (real_getpwnam == NULL) + init(); + if ((pw = real_getpwnam(name)) == NULL) + return NULL; + + if (is_test_user(name)) + pw->pw_passwd = strdup(test_passwd_hash); + + return pw; +} + +/* getpwnam_r */ + +int +getpwnam_r(const char *name, + struct passwd *pwd, + char *buf, + size_t buflen, + struct passwd **result) { + int r; + + DEBUG("sshd_test_pw getpwnam_r(%s)\n", name); + + if (real_getpwnam_r == NULL) + init(); + if ((r = real_getpwnam_r(name, pwd, buf, buflen, result)) != 0 || *result == NULL) + return r; + + if (is_test_user(name)) + pwd->pw_passwd = strdup(test_passwd_hash); + + return 0; +} + +/* getspnam */ + +struct spwd * +getspnam(const char *name) { + struct spwd *sp; + + DEBUG("sshd_test_pw getspnam(%s)\n", name); + + if (real_getspnam == NULL) + init(); + if ((sp = real_getspnam(name)) == NULL) + return NULL; + + if (is_test_user(name)) + sp->sp_pwdp = strdup(test_passwd_hash); + + return sp; +} + +/* getspnam_r */ + +int +getspnam_r(const char *name, + struct spwd *spbuf, + char *buf, + size_t buflen, + struct spwd **spbufp) { + int r; + + DEBUG("sshd_test_pw getspnam_r(%s)\n", name); + + if (real_getspnam_r == NULL) + init();
+ if ((r = real_getspnam_r(name, spbuf, buf, buflen, spbufp)) != 0) + return r; + + if (is_test_user(name)) + spbuf->sp_pwdp = strdup(test_passwd_hash); + + return r; +} diff --git a/vendor/golang.org/x/crypto/ssh/test/test_unix_test.go b/vendor/golang.org/x/crypto/ssh/test/test_unix_test.go index e673536..2fbe880 100644 --- a/vendor/golang.org/x/crypto/ssh/test/test_unix_test.go +++ b/vendor/golang.org/x/crypto/ssh/test/test_unix_test.go @@ -10,6 +10,8 @@ package test import ( "bytes" + "crypto/rand" + "encoding/base64" "fmt" "io/ioutil" "log" @@ -25,8 +27,10 @@ import ( "golang.org/x/crypto/ssh/testdata" ) -const sshd_config = ` +const ( + defaultSshdConfig = ` Protocol 2 +Banner {{.Dir}}/banner HostKey {{.Dir}}/id_rsa HostKey {{.Dir}}/id_dsa HostKey {{.Dir}}/id_ecdsa @@ -49,8 +53,17 @@ RhostsRSAAuthentication no HostbasedAuthentication no PubkeyAcceptedKeyTypes=* ` + multiAuthSshdConfigTail = ` +UsePAM yes +PasswordAuthentication yes +ChallengeResponseAuthentication yes +AuthenticationMethods {{.AuthMethods}} +` +) -var configTmpl = template.Must(template.New("").Parse(sshd_config)) +var configTmpl = map[string]*template.Template{ + "default": template.Must(template.New("").Parse(defaultSshdConfig)), + "MultiAuth": template.Must(template.New("").Parse(defaultSshdConfig + multiAuthSshdConfigTail))} type server struct { t *testing.T @@ -59,6 +72,10 @@ type server struct { cmd *exec.Cmd output bytes.Buffer // holds stderr from sshd process + testUser string // test username for sshd + testPasswd string // test password for sshd + sshdTestPwSo string // dynamic library to inject a custom password into sshd + // Client half of the network connection. clientConn net.Conn } @@ -185,6 +202,20 @@ func (s *server) TryDialWithAddr(config *ssh.ClientConfig, addr string) (*ssh.Cl s.cmd.Stdin = f s.cmd.Stdout = f s.cmd.Stderr = &s.output + + if s.sshdTestPwSo != "" { + if s.testUser == "" { + s.t.Fatal("user missing from sshd_test_pw.so config") + } + if s.testPasswd == "" { + s.t.Fatal("password missing from sshd_test_pw.so config") + } + s.cmd.Env = append(os.Environ(), + fmt.Sprintf("LD_PRELOAD=%s", s.sshdTestPwSo), + fmt.Sprintf("TEST_USER=%s", s.testUser), + fmt.Sprintf("TEST_PASSWD=%s", s.testPasswd)) + } + if err := s.cmd.Start(); err != nil { s.t.Fail() s.Shutdown() @@ -235,11 +266,49 @@ func writeFile(path string, contents []byte) { } } +// generate random password +func randomPassword() (string, error) { + b := make([]byte, 12) + _, err := rand.Read(b) + if err != nil { + return "", err + } + return base64.RawURLEncoding.EncodeToString(b), nil +} + +// setTestPassword is used for setting user and password data for sshd_test_pw.so +// This function also checks that ./sshd_test_pw.so exists and if not calls s.t.Skip() +func (s *server) setTestPassword(user, passwd string) error { + wd, _ := os.Getwd() + wrapper := filepath.Join(wd, "sshd_test_pw.so") + if _, err := os.Stat(wrapper); err != nil { + s.t.Skip(fmt.Errorf("sshd_test_pw.so is not available")) + return err + } + + s.sshdTestPwSo = wrapper + s.testUser = user + s.testPasswd = passwd + return nil +} + // newServer returns a new mock ssh server. func newServer(t *testing.T) *server { + return newServerForConfig(t, "default", map[string]string{}) +} + +// newServerForConfig returns a new mock ssh server. 
+func newServerForConfig(t *testing.T, config string, configVars map[string]string) *server { if testing.Short() { t.Skip("skipping test due to -short") } + u, err := user.Current() + if err != nil { + t.Fatalf("user.Current: %v", err) + } + if u.Name == "root" { + t.Skip("skipping test because current user is root") + } dir, err := ioutil.TempDir("", "sshtest") if err != nil { t.Fatal(err) @@ -248,14 +317,18 @@ func newServer(t *testing.T) *server { if err != nil { t.Fatal(err) } - err = configTmpl.Execute(f, map[string]string{ - "Dir": dir, - }) + if _, ok := configTmpl[config]; ok == false { + t.Fatal(fmt.Errorf("Invalid server config '%s'", config)) + } + configVars["Dir"] = dir + err = configTmpl[config].Execute(f, configVars) if err != nil { t.Fatal(err) } f.Close() + writeFile(filepath.Join(dir, "banner"), []byte("Server Banner")) + for k, v := range testdata.PEMBytes { filename := "id_" + k writeFile(filepath.Join(dir, filename), v) @@ -268,7 +341,7 @@ func newServer(t *testing.T) *server { } var authkeys bytes.Buffer - for k, _ := range testdata.PEMBytes { + for k := range testdata.PEMBytes { authkeys.Write(ssh.MarshalAuthorizedKey(testPublicKeys[k])) } writeFile(filepath.Join(dir, "authorized_keys"), authkeys.Bytes()) diff --git a/vendor/golang.org/x/crypto/ssh/testdata/keys.go b/vendor/golang.org/x/crypto/ssh/testdata/keys.go index 3b3d26c..bfae85f 100644 --- a/vendor/golang.org/x/crypto/ssh/testdata/keys.go +++ b/vendor/golang.org/x/crypto/ssh/testdata/keys.go @@ -23,6 +23,27 @@ MHcCAQEEINGWx0zo6fhJ/0EAfrPzVFyFC9s18lBt3cRoEDhS3ARooAoGCCqGSM49 AwEHoUQDQgAEi9Hdw6KvZcWxfg2IDhA7UkpDtzzt6ZqJXSsFdLd+Kx4S3Sx4cVO+ 6/ZOXRnPmNAlLUqjShUsUBBngG0u2fqEqA== -----END EC PRIVATE KEY----- +`), + "ecdsap256": []byte(`-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIAPCE25zK0PQSnsgVcEbM1mbKTASH4pqb5QJajplDwDZoAoGCCqGSM49 +AwEHoUQDQgAEWy8TxGcIHRh5XGpO4dFVfDjeNY+VkgubQrf/eyFJZHxAn1SKraXU +qJUjTKj1z622OxYtJ5P7s9CfAEVsTzLCzg== +-----END EC PRIVATE KEY----- +`), + "ecdsap384": []byte(`-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDBWfSnMuNKq8J9rQLzzEkx3KAoEohSXqhE/4CdjEYtoU2i22HW80DDS +qQhYNHRAduygBwYFK4EEACKhZANiAAQWaDMAd0HUd8ZiXCX7mYDDnC54gwH/nG43 +VhCUEYmF7HMZm/B9Yn3GjFk3qYEDEvuF/52+NvUKBKKaLbh32AWxMv0ibcoba4cz +hL9+hWYhUD9XIUlzMWiZ2y6eBE9PdRI= +-----END EC PRIVATE KEY----- +`), + "ecdsap521": []byte(`-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIBrkYpQcy8KTVHNiAkjlFZwee90224Bu6wz94R4OBo+Ts0eoAQG7SF +iaygEDMUbx6kTgXTBcKZ0jrWPKakayNZ/kigBwYFK4EEACOhgYkDgYYABADFuvLV +UoaCDGHcw5uNfdRIsvaLKuWSpLsl48eWGZAwdNG432GDVKduO+pceuE+8XzcyJb+ +uMv+D2b11Q/LQUcHJwE6fqbm8m3EtDKPsoKs0u/XUJb0JsH4J8lkZzbUTjvGYamn +FFlRjzoB3Oxu8UQgb+MWPedtH9XYBbg9biz4jJLkXQ== +-----END EC PRIVATE KEY----- `), "rsa": []byte(`-----BEGIN RSA PRIVATE KEY----- MIICXAIBAAKBgQC8A6FGHDiWCSREAXCq6yBfNVr0xCVG2CzvktFNRpue+RXrGs/2 @@ -39,6 +60,35 @@ NDvRS0rjwt6lJGv7zPZoqDc65VfrK2aNyHx2PgFyzwrEOtuF57bu7pnvEIxpLTeM z26i6XVMeYXAWZMTloMCQBbpGgEERQpeUknLBqUHhg/wXF6+lFA+vEGnkY+Dwab2 KCXFGd+SQ5GdUcEMe9isUH6DYj/6/yCDoFrXXmpQb+M= -----END RSA PRIVATE KEY----- +`), + "pkcs8": []byte(`-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCitzS2KiRQTccf +VApb0mbPpo1lt29JjeLBYAehXHWfQ+w8sXpd8e04n/020spx1R94yg+v0NjXyh2R +NFXNBYdhNei33VJxUeKNlExaecvW2yxfuZqka+ZxT1aI8zrAsjh3Rwc6wayAJS4R +wZuzlDv4jZitWqwD+mb/22Zwq/WSs4YX5dUHDklfdWSVnoBfue8K/00n8f5yMTdJ +vFF0qAJwf9spPEHla0lYcozJk64CO5lRkqfLor4UnsXXOiA7aRIoaUSKa+rlhiqt +1EMGYiBjblPt4SwMelGGU2UfywPb4d85gpQ/s8SBARbpPxNVs2IbHDMwj70P3uZc +74M3c4VJAgMBAAECggEAFIzY3mziGzZHgMBncoNXMsCRORh6uKpvygZr0EhSHqRA 
+cMXlc3n7gNxL6aGjqc7F48Z5RrY0vMQtCcq3T2Z0W6WoV5hfMiqqV0E0h3S8ds1F +hG13h26NMyBXCILXl8Cqev4Afr45IBISCHIQTRTaoiCX+MTr1rDIU2YNQQumvzkz +fMw2XiFTFTgxAtJUAgKoTqLtm7/T+az7TKw+Hesgbx7yaJoMh9DWGBh4Y61DnIDA +fcxJboAfxxnFiXvdBVmzo72pCsRXrWOsjW6WxQmCKuXHvyB1FZTmMaEFNCGSJDa6 +U+OCzA3m65loAZAE7ffFHhYgssz/h9TBaOjKO0BX1QKBgQDZiCBvu+bFh9pEodcS +VxaI+ATlsYcmGdLtnZw5pxuEdr60iNWhpEcV6lGkbdiv5aL43QaGFDLagqeHI77b ++ITFbPPdCiYNaqlk6wyiXv4pdN7V683EDmGWSQlPeC9IhUilt2c+fChK2EB/XlkO +q8c3Vk1MsC6JOxDXNgJxylNpswKBgQC/fYBTb9iD+uM2n3SzJlct/ZlPaONKnNDR +pbTOdxBFHsu2VkfY858tfnEPkmSRX0yKmjHni6e8/qIzfzLwWBY4NmxhNZE5v+qJ +qZF26ULFdrZB4oWXAOliy/1S473OpQnp2MZp2asd0LPcg/BNaMuQrz44hxHb76R7 +qWD0ebIfEwKBgQCRCIiP1pjbVGN7ZOgPS080DSC+wClahtcyI+ZYLglTvRQTLDQ7 +LFtUykCav748MIADKuJBnM/3DiuCF5wV71EejDDfS/fo9BdyuKBY1brhixFTUX+E +Ww5Hc/SoLnpgALVZ/7jvWTpIBHykLxRziqYtR/YLzl+IkX/97P2ePoZ0rwKBgHNC +/7M5Z4JJyepfIMeVFHTCaT27TNTkf20x6Rs937U7TDN8y9JzEiU4LqXI4HAAhPoI +xnExRs4kF04YCnlRDE7Zs3Lv43J3ap1iTATfcymYwyv1RaQXEGQ/lUQHgYCZJtZz +fTrJoo5XyWu6nzJ5Gc8FLNaptr5ECSXGVm3Rsr2xAoGBAJWqEEQS/ejhO05QcPqh +y4cUdLr0269ILVsvic4Ot6zgfPIntXAK6IsHGKcg57kYm6W9k1CmmlA4ENGryJnR +vxyyqA9eyTFc1CQNuc2frKFA9It49JzjXahKc0aDHEHmTR787Tmk1LbuT0/gm9kA +L4INU6g+WqF0fatJxd+IJPrp +-----END PRIVATE KEY----- `), "ed25519": []byte(`-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW @@ -105,7 +155,7 @@ var SSHCertificates = map[string][]byte{ // Generated by the following commands: // // 1. Assumes "rsa" key above in file named "rsa", write out the public key to "rsa.pub": - // ssh-keygen -y -f rsa > rsa.pu + // ssh-keygen -y -f rsa > rsa.pub // // 2. Assumes "ca" key above in file named "ca", sign a cert for "rsa.pub": // ssh-keygen -s ca -h -n host.example.com -V +500w -I host.example.com-key rsa.pub diff --git a/vendor/golang.org/x/crypto/ssh/transport.go b/vendor/golang.org/x/crypto/ssh/transport.go index f9780e0..f6fae1d 100644 --- a/vendor/golang.org/x/crypto/ssh/transport.go +++ b/vendor/golang.org/x/crypto/ssh/transport.go @@ -6,6 +6,7 @@ package ssh import ( "bufio" + "bytes" "errors" "io" "log" @@ -76,17 +77,17 @@ type connectionState struct { // both directions are triggered by reading and writing a msgNewKey packet // respectively. func (t *transport) prepareKeyChange(algs *algorithms, kexResult *kexResult) error { - if ciph, err := newPacketCipher(t.reader.dir, algs.r, kexResult); err != nil { + ciph, err := newPacketCipher(t.reader.dir, algs.r, kexResult) + if err != nil { return err - } else { - t.reader.pendingKeyChange <- ciph } + t.reader.pendingKeyChange <- ciph - if ciph, err := newPacketCipher(t.writer.dir, algs.w, kexResult); err != nil { + ciph, err = newPacketCipher(t.writer.dir, algs.w, kexResult) + if err != nil { return err - } else { - t.writer.pendingKeyChange <- ciph } + t.writer.pendingKeyChange <- ciph return nil } @@ -139,7 +140,7 @@ func (s *connectionState) readPacket(r *bufio.Reader) ([]byte, error) { case cipher := <-s.pendingKeyChange: s.packetCipher = cipher default: - return nil, errors.New("ssh: got bogus newkeys message.") + return nil, errors.New("ssh: got bogus newkeys message") } case msgDisconnect: @@ -232,52 +233,22 @@ var ( clientKeys = direction{[]byte{'A'}, []byte{'C'}, []byte{'E'}} ) -// generateKeys generates key material for IV, MAC and encryption. -func generateKeys(d direction, algs directionAlgorithms, kex *kexResult) (iv, key, macKey []byte) { +// setupKeys sets the cipher and MAC keys from kex.K, kex.H and sessionId, as +// described in RFC 4253, section 6.4. 
direction should either be serverKeys +// (to setup server->client keys) or clientKeys (for client->server keys). +func newPacketCipher(d direction, algs directionAlgorithms, kex *kexResult) (packetCipher, error) { cipherMode := cipherModes[algs.Cipher] macMode := macModes[algs.MAC] - iv = make([]byte, cipherMode.ivSize) - key = make([]byte, cipherMode.keySize) - macKey = make([]byte, macMode.keySize) + iv := make([]byte, cipherMode.ivSize) + key := make([]byte, cipherMode.keySize) + macKey := make([]byte, macMode.keySize) generateKeyMaterial(iv, d.ivTag, kex) generateKeyMaterial(key, d.keyTag, kex) generateKeyMaterial(macKey, d.macKeyTag, kex) - return -} - -// setupKeys sets the cipher and MAC keys from kex.K, kex.H and sessionId, as -// described in RFC 4253, section 6.4. direction should either be serverKeys -// (to setup server->client keys) or clientKeys (for client->server keys). -func newPacketCipher(d direction, algs directionAlgorithms, kex *kexResult) (packetCipher, error) { - iv, key, macKey := generateKeys(d, algs, kex) - - if algs.Cipher == gcmCipherID { - return newGCMCipher(iv, key, macKey) - } - - if algs.Cipher == aes128cbcID { - return newAESCBCCipher(iv, key, macKey, algs) - } - - if algs.Cipher == tripledescbcID { - return newTripleDESCBCCipher(iv, key, macKey, algs) - } - c := &streamPacketCipher{ - mac: macModes[algs.MAC].new(macKey), - etm: macModes[algs.MAC].etm, - } - c.macResult = make([]byte, c.mac.Size()) - - var err error - c.cipher, err = cipherModes[algs.Cipher].createStream(key, iv) - if err != nil { - return nil, err - } - - return c, nil + return cipherModes[algs.Cipher].create(key, iv, macKey, algs) } // generateKeyMaterial fills out with key material generated from tag, K, H @@ -342,7 +313,7 @@ func readVersion(r io.Reader) ([]byte, error) { var ok bool var buf [1]byte - for len(versionString) < maxVersionStringBytes { + for length := 0; length < maxVersionStringBytes; length++ { _, err := io.ReadFull(r, buf[:]) if err != nil { return nil, err @@ -350,6 +321,13 @@ func readVersion(r io.Reader) ([]byte, error) { // The RFC says that the version should be terminated with \r\n // but several SSH servers actually only send a \n. if buf[0] == '\n' { + if !bytes.HasPrefix(versionString, []byte("SSH-")) { + // RFC 4253 says we need to ignore all version string lines + // except the one containing the SSH version (provided that + // all the lines do not exceed 255 bytes in total). 
+ versionString = versionString[:0] + continue + } ok = true break } diff --git a/vendor/golang.org/x/crypto/ssh/transport_test.go b/vendor/golang.org/x/crypto/ssh/transport_test.go index 92d83ab..8445e1e 100644 --- a/vendor/golang.org/x/crypto/ssh/transport_test.go +++ b/vendor/golang.org/x/crypto/ssh/transport_test.go @@ -13,11 +13,13 @@ import ( ) func TestReadVersion(t *testing.T) { - longversion := strings.Repeat("SSH-2.0-bla", 50)[:253] + longVersion := strings.Repeat("SSH-2.0-bla", 50)[:253] + multiLineVersion := strings.Repeat("ignored\r\n", 20) + "SSH-2.0-bla\r\n" cases := map[string]string{ "SSH-2.0-bla\r\n": "SSH-2.0-bla", "SSH-2.0-bla\n": "SSH-2.0-bla", - longversion + "\r\n": longversion, + multiLineVersion: "SSH-2.0-bla", + longVersion + "\r\n": longVersion, } for in, want := range cases { @@ -33,9 +35,11 @@ func TestReadVersion(t *testing.T) { } func TestReadVersionError(t *testing.T) { - longversion := strings.Repeat("SSH-2.0-bla", 50)[:253] + longVersion := strings.Repeat("SSH-2.0-bla", 50)[:253] + multiLineVersion := strings.Repeat("ignored\r\n", 50) + "SSH-2.0-bla\r\n" cases := []string{ - longversion + "too-long\r\n", + longVersion + "too-long\r\n", + multiLineVersion, } for _, in := range cases { if _, err := readVersion(bytes.NewBufferString(in)); err == nil { @@ -60,7 +64,7 @@ func TestExchangeVersionsBasic(t *testing.T) { func TestExchangeVersions(t *testing.T) { cases := []string{ "not\x000allowed", - "not allowed\n", + "not allowed\x01\r\n", } for _, c := range cases { buf := bytes.NewBufferString("SSH-2.0-bla\r\n") diff --git a/vendor/golang.org/x/crypto/tea/cipher.go b/vendor/golang.org/x/crypto/tea/cipher.go index 9c13d12..c1ff90e 100644 --- a/vendor/golang.org/x/crypto/tea/cipher.go +++ b/vendor/golang.org/x/crypto/tea/cipher.go @@ -5,7 +5,14 @@ // Package tea implements the TEA algorithm, as defined in Needham and // Wheeler's 1994 technical report, “TEA, a Tiny Encryption Algorithm”. See // http://www.cix.co.uk/~klockstone/tea.pdf for details. - +// +// TEA is a legacy cipher and its short block size makes it vulnerable to +// birthday bound attacks (see https://sweet32.info). It should only be used +// where compatibility with legacy systems, not security, is the goal. +// +// Deprecated: any new system should use AES (from crypto/aes, if necessary in +// an AEAD mode like crypto/cipher.NewGCM) or XChaCha20-Poly1305 (from +// golang.org/x/crypto/chacha20poly1305). package tea import ( diff --git a/vendor/golang.org/x/crypto/twofish/twofish.go b/vendor/golang.org/x/crypto/twofish/twofish.go index 6db01fc..1197d75 100644 --- a/vendor/golang.org/x/crypto/twofish/twofish.go +++ b/vendor/golang.org/x/crypto/twofish/twofish.go @@ -3,6 +3,12 @@ // license that can be found in the LICENSE file. // Package twofish implements Bruce Schneier's Twofish encryption algorithm. +// +// Deprecated: Twofish is a legacy cipher and should not be used for new +// applications. Also, this package does not and will not provide an optimized +// implementation. Instead, use AES (from crypto/aes, if necessary in an AEAD +// mode like crypto/cipher.NewGCM) or XChaCha20-Poly1305 (from +// golang.org/x/crypto/chacha20poly1305). 
package twofish // import "golang.org/x/crypto/twofish" // Twofish is defined in https://www.schneier.com/paper-twofish-paper.pdf [TWOFISH] diff --git a/vendor/golang.org/x/crypto/xtea/block.go b/vendor/golang.org/x/crypto/xtea/block.go index bf5d245..fcb4e4d 100644 --- a/vendor/golang.org/x/crypto/xtea/block.go +++ b/vendor/golang.org/x/crypto/xtea/block.go @@ -50,7 +50,7 @@ func encryptBlock(c *Cipher, dst, src []byte) { uint32ToBlock(v0, v1, dst) } -// decryptBlock decrypt a single 8 byte block using XTEA. +// decryptBlock decrypts a single 8 byte block using XTEA. func decryptBlock(c *Cipher, dst, src []byte) { v0, v1 := blockToUint32(src) diff --git a/vendor/golang.org/x/crypto/xtea/cipher.go b/vendor/golang.org/x/crypto/xtea/cipher.go index 108b426..a4c2fd0 100644 --- a/vendor/golang.org/x/crypto/xtea/cipher.go +++ b/vendor/golang.org/x/crypto/xtea/cipher.go @@ -4,6 +4,14 @@ // Package xtea implements XTEA encryption, as defined in Needham and Wheeler's // 1997 technical report, "Tea extensions." +// +// XTEA is a legacy cipher and its short block size makes it vulnerable to +// birthday bound attacks (see https://sweet32.info). It should only be used +// where compatibility with legacy systems, not security, is the goal. +// +// Deprecated: any new system should use AES (from crypto/aes, if necessary in +// an AEAD mode like crypto/cipher.NewGCM) or XChaCha20-Poly1305 (from +// golang.org/x/crypto/chacha20poly1305). package xtea // import "golang.org/x/crypto/xtea" // For details, see http://www.cix.co.uk/~klockstone/xtea.pdf @@ -14,8 +22,8 @@ import "strconv" const BlockSize = 8 // A Cipher is an instance of an XTEA cipher using a particular key. -// table contains a series of precalculated values that are used each round. type Cipher struct { + // table contains a series of precalculated values that are used each round. table [64]uint32 } @@ -54,7 +62,7 @@ func (c *Cipher) BlockSize() int { return BlockSize } // instead, use an encryption mode like CBC (see crypto/cipher/cbc.go). func (c *Cipher) Encrypt(dst, src []byte) { encryptBlock(c, dst, src) } -// Decrypt decrypts the 8 byte buffer src using the key k and stores the result in dst. +// Decrypt decrypts the 8 byte buffer src using the key and stores the result in dst. func (c *Cipher) Decrypt(dst, src []byte) { decryptBlock(c, dst, src) } // initCipher initializes the cipher context by creating a look up table @@ -69,7 +77,7 @@ func initCipher(c *Cipher, key []byte) { // Precalculate the table const delta = 0x9E3779B9 - var sum uint32 = 0 + var sum uint32 // Two rounds of XTEA applied per loop for i := 0; i < numRounds; { diff --git a/vendor/golang.org/x/crypto/xts/xts.go b/vendor/golang.org/x/crypto/xts/xts.go index a7643fd..b51308e 100644 --- a/vendor/golang.org/x/crypto/xts/xts.go +++ b/vendor/golang.org/x/crypto/xts/xts.go @@ -15,20 +15,25 @@ // effectively create a unique key for each sector. // // XTS does not provide any authentication. An attacker can manipulate the -// ciphertext and randomise a block (16 bytes) of the plaintext. +// ciphertext and randomise a block (16 bytes) of the plaintext. This package +// does not implement ciphertext-stealing so sectors must be a multiple of 16 +// bytes. // -// (Note: this package does not implement ciphertext-stealing so sectors must -// be a multiple of 16 bytes.) +// Note that XTS is usually not appropriate for any use besides disk encryption. +// Most users should use an AEAD mode like GCM (from crypto/cipher.NewGCM) instead. 
package xts // import "golang.org/x/crypto/xts" import ( "crypto/cipher" "encoding/binary" "errors" + "sync" + + "golang.org/x/crypto/internal/subtle" ) -// Cipher contains an expanded key structure. It doesn't contain mutable state -// and therefore can be used concurrently. +// Cipher contains an expanded key structure. It is safe for concurrent use if +// the underlying block cipher is safe for concurrent use. type Cipher struct { k1, k2 cipher.Block } @@ -37,6 +42,12 @@ type Cipher struct { // only defined for 16-byte ciphers. const blockSize = 16 +var tweakPool = sync.Pool{ + New: func() interface{} { + return new([blockSize]byte) + }, +} + // NewCipher creates a Cipher given a function for creating the underlying // block cipher (which must have a block size of 16 bytes). The key must be // twice the length of the underlying cipher's key. @@ -55,7 +66,7 @@ func NewCipher(cipherFunc func([]byte) (cipher.Block, error), key []byte) (c *Ci } // Encrypt encrypts a sector of plaintext and puts the result into ciphertext. -// Plaintext and ciphertext may be the same slice but should not overlap. +// Plaintext and ciphertext must overlap entirely or not at all. // Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes. func (c *Cipher) Encrypt(ciphertext, plaintext []byte, sectorNum uint64) { if len(ciphertext) < len(plaintext) { @@ -64,8 +75,14 @@ func (c *Cipher) Encrypt(ciphertext, plaintext []byte, sectorNum uint64) { if len(plaintext)%blockSize != 0 { panic("xts: plaintext is not a multiple of the block size") } + if subtle.InexactOverlap(ciphertext[:len(plaintext)], plaintext) { + panic("xts: invalid buffer overlap") + } - var tweak [blockSize]byte + tweak := tweakPool.Get().(*[blockSize]byte) + for i := range tweak { + tweak[i] = 0 + } binary.LittleEndian.PutUint64(tweak[:8], sectorNum) c.k2.Encrypt(tweak[:], tweak[:]) @@ -81,12 +98,14 @@ func (c *Cipher) Encrypt(ciphertext, plaintext []byte, sectorNum uint64) { plaintext = plaintext[blockSize:] ciphertext = ciphertext[blockSize:] - mul2(&tweak) + mul2(tweak) } + + tweakPool.Put(tweak) } // Decrypt decrypts a sector of ciphertext and puts the result into plaintext. -// Plaintext and ciphertext may be the same slice but should not overlap. +// Plaintext and ciphertext must overlap entirely or not at all. // Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes. 
func (c *Cipher) Decrypt(plaintext, ciphertext []byte, sectorNum uint64) { if len(plaintext) < len(ciphertext) { @@ -95,8 +114,14 @@ func (c *Cipher) Decrypt(plaintext, ciphertext []byte, sectorNum uint64) { if len(ciphertext)%blockSize != 0 { panic("xts: ciphertext is not a multiple of the block size") } + if subtle.InexactOverlap(plaintext[:len(ciphertext)], ciphertext) { + panic("xts: invalid buffer overlap") + } - var tweak [blockSize]byte + tweak := tweakPool.Get().(*[blockSize]byte) + for i := range tweak { + tweak[i] = 0 + } binary.LittleEndian.PutUint64(tweak[:8], sectorNum) c.k2.Encrypt(tweak[:], tweak[:]) @@ -112,8 +137,10 @@ func (c *Cipher) Decrypt(plaintext, ciphertext []byte, sectorNum uint64) { plaintext = plaintext[blockSize:] ciphertext = ciphertext[blockSize:] - mul2(&tweak) + mul2(tweak) } + + tweakPool.Put(tweak) } // mul2 multiplies tweak by 2 in GF(2¹²⁸) with an irreducible polynomial of diff --git a/vendor/golang.org/x/crypto/xts/xts_test.go b/vendor/golang.org/x/crypto/xts/xts_test.go index 96d3b6c..75db1c5 100644 --- a/vendor/golang.org/x/crypto/xts/xts_test.go +++ b/vendor/golang.org/x/crypto/xts/xts_test.go @@ -103,3 +103,19 @@ func TestShorterCiphertext(t *testing.T) { t.Errorf("En/Decryption is not inverse") } } + +func BenchmarkXTS(b *testing.B) { + b.ReportAllocs() + c, err := NewCipher(aes.NewCipher, make([]byte, 32)) + if err != nil { + b.Fatalf("NewCipher failed: %s", err) + } + plaintext := make([]byte, 32) + encrypted := make([]byte, 48) + decrypted := make([]byte, 48) + + for i := 0; i < b.N; i++ { + c.Encrypt(encrypted, plaintext, 0) + c.Decrypt(decrypted, encrypted[:len(plaintext)], 0) + } +} diff --git a/vendor/golang.org/x/net/CONTRIBUTING.md b/vendor/golang.org/x/net/CONTRIBUTING.md index 88dff59..d0485e8 100644 --- a/vendor/golang.org/x/net/CONTRIBUTING.md +++ b/vendor/golang.org/x/net/CONTRIBUTING.md @@ -4,16 +4,15 @@ Go is an open source project. It is the work of hundreds of contributors. We appreciate your help! - ## Filing issues When [filing an issue](https://golang.org/issue/new), make sure to answer these five questions: -1. What version of Go are you using (`go version`)? -2. What operating system and processor architecture are you using? -3. What did you do? -4. What did you expect to see? -5. What did you see instead? +1. What version of Go are you using (`go version`)? +2. What operating system and processor architecture are you using? +3. What did you do? +4. What did you expect to see? +5. What did you see instead? General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker. The gophers there will answer or ask you to file an issue if you've tripped over a bug. @@ -23,9 +22,5 @@ The gophers there will answer or ask you to file an issue if you've tripped over Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html) before sending patches. -**We do not accept GitHub pull requests** -(we use [Gerrit](https://code.google.com/p/gerrit/) instead for code review). - Unless otherwise noted, the Go source files are distributed under the BSD-style license found in the LICENSE file. - diff --git a/vendor/golang.org/x/net/README b/vendor/golang.org/x/net/README deleted file mode 100644 index 6b13d8e..0000000 --- a/vendor/golang.org/x/net/README +++ /dev/null @@ -1,3 +0,0 @@ -This repository holds supplementary Go networking libraries. - -To submit changes to this repository, see http://golang.org/doc/contribute.html. 
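Note on the xts changes above: Encrypt and Decrypt now panic on partially overlapping buffers, sectors must be a multiple of 16 bytes (no ciphertext stealing), and tweak buffers are recycled through a sync.Pool. For reference, a minimal usage sketch of the vendored package, assuming crypto/aes as the underlying cipher (as in the package's own tests); the buffer sizes and sector number are illustrative only, not part of the patch:

package main

import (
	"crypto/aes"
	"fmt"

	"golang.org/x/crypto/xts"
)

func main() {
	// The key must be twice the underlying cipher's key length (2 x 16 bytes for AES-128).
	c, err := xts.NewCipher(aes.NewCipher, make([]byte, 32))
	if err != nil {
		panic(err)
	}

	// Sectors must be a multiple of the 16-byte block size.
	plaintext := make([]byte, 64)
	ciphertext := make([]byte, 64)
	decrypted := make([]byte, 64)

	const sectorNum = 42
	c.Encrypt(ciphertext, plaintext, sectorNum) // buffers must overlap entirely or not at all
	c.Decrypt(decrypted, ciphertext, sectorNum)
	fmt.Println(len(ciphertext), len(decrypted))
}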
diff --git a/vendor/golang.org/x/net/README.md b/vendor/golang.org/x/net/README.md new file mode 100644 index 0000000..00a9b6e --- /dev/null +++ b/vendor/golang.org/x/net/README.md @@ -0,0 +1,16 @@ +# Go Networking + +This repository holds supplementary Go networking libraries. + +## Download/Install + +The easiest way to install is to run `go get -u golang.org/x/net`. You can +also manually git clone the repository to `$GOPATH/src/golang.org/x/net`. + +## Report Issues / Send Patches + +This repository uses Gerrit for code changes. To learn how to submit +changes to this repository, see https://golang.org/doc/contribute.html. +The main issue tracker for the net repository is located at +https://github.com/golang/go/issues. Prefix your issue with "x/net:" in the +subject line, so it is easy to find. diff --git a/vendor/golang.org/x/net/bpf/constants.go b/vendor/golang.org/x/net/bpf/constants.go index b89ca35..12f3ee8 100644 --- a/vendor/golang.org/x/net/bpf/constants.go +++ b/vendor/golang.org/x/net/bpf/constants.go @@ -38,6 +38,7 @@ const ( type JumpTest uint16 // Supported operators for conditional jumps. +// K can be RegX for JumpIfX const ( // K == A JumpEqual JumpTest = iota @@ -134,12 +135,9 @@ const ( opMaskLoadDest = 0x01 opMaskLoadWidth = 0x18 opMaskLoadMode = 0xe0 - // opClsALU - opMaskOperandSrc = 0x08 - opMaskOperator = 0xf0 - // opClsJump - opMaskJumpConst = 0x0f - opMaskJumpCond = 0xf0 + // opClsALU & opClsJump + opMaskOperand = 0x08 + opMaskOperator = 0xf0 ) const ( @@ -192,15 +190,21 @@ const ( opLoadWidth1 ) -// Operator defined by ALUOp* +// Operand for ALU and Jump instructions +type opOperand uint16 +// Supported operand sources. const ( - opALUSrcConstant uint16 = iota << 3 - opALUSrcX + opOperandConstant opOperand = iota << 3 + opOperandX ) +// An jumpOp is a conditional jump condition. +type jumpOp uint16 + +// Supported jump conditions. 
const ( - opJumpAlways = iota << 4 + opJumpAlways jumpOp = iota << 4 opJumpEqual opJumpGT opJumpGE diff --git a/vendor/golang.org/x/net/bpf/instructions.go b/vendor/golang.org/x/net/bpf/instructions.go index 3b4fd08..3cffcaa 100644 --- a/vendor/golang.org/x/net/bpf/instructions.go +++ b/vendor/golang.org/x/net/bpf/instructions.go @@ -89,10 +89,14 @@ func (ri RawInstruction) Disassemble() Instruction { case opClsALU: switch op := ALUOp(ri.Op & opMaskOperator); op { case ALUOpAdd, ALUOpSub, ALUOpMul, ALUOpDiv, ALUOpOr, ALUOpAnd, ALUOpShiftLeft, ALUOpShiftRight, ALUOpMod, ALUOpXor: - if ri.Op&opMaskOperandSrc != 0 { + switch operand := opOperand(ri.Op & opMaskOperand); operand { + case opOperandX: return ALUOpX{Op: op} + case opOperandConstant: + return ALUOpConstant{Op: op, Val: ri.K} + default: + return ri } - return ALUOpConstant{Op: op, Val: ri.K} case aluOpNeg: return NegateA{} default: @@ -100,63 +104,18 @@ func (ri RawInstruction) Disassemble() Instruction { } case opClsJump: - if ri.Op&opMaskJumpConst != opClsJump { - return ri - } - switch ri.Op & opMaskJumpCond { + switch op := jumpOp(ri.Op & opMaskOperator); op { case opJumpAlways: return Jump{Skip: ri.K} - case opJumpEqual: - if ri.Jt == 0 { - return JumpIf{ - Cond: JumpNotEqual, - Val: ri.K, - SkipTrue: ri.Jf, - SkipFalse: 0, - } - } - return JumpIf{ - Cond: JumpEqual, - Val: ri.K, - SkipTrue: ri.Jt, - SkipFalse: ri.Jf, - } - case opJumpGT: - if ri.Jt == 0 { - return JumpIf{ - Cond: JumpLessOrEqual, - Val: ri.K, - SkipTrue: ri.Jf, - SkipFalse: 0, - } - } - return JumpIf{ - Cond: JumpGreaterThan, - Val: ri.K, - SkipTrue: ri.Jt, - SkipFalse: ri.Jf, - } - case opJumpGE: - if ri.Jt == 0 { - return JumpIf{ - Cond: JumpLessThan, - Val: ri.K, - SkipTrue: ri.Jf, - SkipFalse: 0, - } - } - return JumpIf{ - Cond: JumpGreaterOrEqual, - Val: ri.K, - SkipTrue: ri.Jt, - SkipFalse: ri.Jf, - } - case opJumpSet: - return JumpIf{ - Cond: JumpBitsSet, - Val: ri.K, - SkipTrue: ri.Jt, - SkipFalse: ri.Jf, + case opJumpEqual, opJumpGT, opJumpGE, opJumpSet: + cond, skipTrue, skipFalse := jumpOpToTest(op, ri.Jt, ri.Jf) + switch operand := opOperand(ri.Op & opMaskOperand); operand { + case opOperandX: + return JumpIfX{Cond: cond, SkipTrue: skipTrue, SkipFalse: skipFalse} + case opOperandConstant: + return JumpIf{Cond: cond, Val: ri.K, SkipTrue: skipTrue, SkipFalse: skipFalse} + default: + return ri } default: return ri @@ -187,6 +146,41 @@ func (ri RawInstruction) Disassemble() Instruction { } } +func jumpOpToTest(op jumpOp, skipTrue uint8, skipFalse uint8) (JumpTest, uint8, uint8) { + var test JumpTest + + // Decode "fake" jump conditions that don't appear in machine code + // Ensures the Assemble -> Disassemble stage recreates the same instructions + // See https://github.com/golang/go/issues/18470 + if skipTrue == 0 { + switch op { + case opJumpEqual: + test = JumpNotEqual + case opJumpGT: + test = JumpLessOrEqual + case opJumpGE: + test = JumpLessThan + case opJumpSet: + test = JumpBitsNotSet + } + + return test, skipFalse, 0 + } + + switch op { + case opJumpEqual: + test = JumpEqual + case opJumpGT: + test = JumpGreaterThan + case opJumpGE: + test = JumpGreaterOrEqual + case opJumpSet: + test = JumpBitsSet + } + + return test, skipTrue, skipFalse +} + // LoadConstant loads Val into register Dst. type LoadConstant struct { Dst Register @@ -198,7 +192,7 @@ func (a LoadConstant) Assemble() (RawInstruction, error) { return assembleLoad(a.Dst, 4, opAddrModeImmediate, a.Val) } -// String returns the the instruction in assembler notation. 
+// String returns the instruction in assembler notation. func (a LoadConstant) String() string { switch a.Dst { case RegA: @@ -224,7 +218,7 @@ func (a LoadScratch) Assemble() (RawInstruction, error) { return assembleLoad(a.Dst, 4, opAddrModeScratch, uint32(a.N)) } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a LoadScratch) String() string { switch a.Dst { case RegA: @@ -248,7 +242,7 @@ func (a LoadAbsolute) Assemble() (RawInstruction, error) { return assembleLoad(RegA, a.Size, opAddrModeAbsolute, a.Off) } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a LoadAbsolute) String() string { switch a.Size { case 1: // byte @@ -277,7 +271,7 @@ func (a LoadIndirect) Assemble() (RawInstruction, error) { return assembleLoad(RegA, a.Size, opAddrModeIndirect, a.Off) } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a LoadIndirect) String() string { switch a.Size { case 1: // byte @@ -306,7 +300,7 @@ func (a LoadMemShift) Assemble() (RawInstruction, error) { return assembleLoad(RegX, 1, opAddrModeMemShift, a.Off) } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a LoadMemShift) String() string { return fmt.Sprintf("ldx 4*([%d]&0xf)", a.Off) } @@ -325,7 +319,7 @@ func (a LoadExtension) Assemble() (RawInstruction, error) { return assembleLoad(RegA, 4, opAddrModeAbsolute, uint32(extOffset+a.Num)) } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a LoadExtension) String() string { switch a.Num { case ExtLen: @@ -392,7 +386,7 @@ func (a StoreScratch) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a StoreScratch) String() string { switch a.Src { case RegA: @@ -413,12 +407,12 @@ type ALUOpConstant struct { // Assemble implements the Instruction Assemble method. func (a ALUOpConstant) Assemble() (RawInstruction, error) { return RawInstruction{ - Op: opClsALU | opALUSrcConstant | uint16(a.Op), + Op: opClsALU | uint16(opOperandConstant) | uint16(a.Op), K: a.Val, }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a ALUOpConstant) String() string { switch a.Op { case ALUOpAdd: @@ -454,11 +448,11 @@ type ALUOpX struct { // Assemble implements the Instruction Assemble method. func (a ALUOpX) Assemble() (RawInstruction, error) { return RawInstruction{ - Op: opClsALU | opALUSrcX | uint16(a.Op), + Op: opClsALU | uint16(opOperandX) | uint16(a.Op), }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a ALUOpX) String() string { switch a.Op { case ALUOpAdd: @@ -496,7 +490,7 @@ func (a NegateA) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a NegateA) String() string { return fmt.Sprintf("neg") } @@ -509,12 +503,12 @@ type Jump struct { // Assemble implements the Instruction Assemble method. 
func (a Jump) Assemble() (RawInstruction, error) { return RawInstruction{ - Op: opClsJump | opJumpAlways, + Op: opClsJump | uint16(opJumpAlways), K: a.Skip, }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a Jump) String() string { return fmt.Sprintf("ja %d", a.Skip) } @@ -530,11 +524,39 @@ type JumpIf struct { // Assemble implements the Instruction Assemble method. func (a JumpIf) Assemble() (RawInstruction, error) { + return jumpToRaw(a.Cond, opOperandConstant, a.Val, a.SkipTrue, a.SkipFalse) +} + +// String returns the instruction in assembler notation. +func (a JumpIf) String() string { + return jumpToString(a.Cond, fmt.Sprintf("#%d", a.Val), a.SkipTrue, a.SkipFalse) +} + +// JumpIfX skips the following Skip instructions in the program if A <Cond> +// X is true. +type JumpIfX struct { + Cond JumpTest + SkipTrue uint8 + SkipFalse uint8 +} + +// Assemble implements the Instruction Assemble method. +func (a JumpIfX) Assemble() (RawInstruction, error) { + return jumpToRaw(a.Cond, opOperandX, 0, a.SkipTrue, a.SkipFalse) +} + +// String returns the instruction in assembler notation. +func (a JumpIfX) String() string { + return jumpToString(a.Cond, "x", a.SkipTrue, a.SkipFalse) +} + +// jumpToRaw assembles a jump instruction into a RawInstruction +func jumpToRaw(test JumpTest, operand opOperand, k uint32, skipTrue, skipFalse uint8) (RawInstruction, error) { var ( - cond uint16 + cond jumpOp flip bool ) - switch a.Cond { + switch test { case JumpEqual: cond = opJumpEqual case JumpNotEqual: @@ -552,63 +574,63 @@ func (a JumpIf) Assemble() (RawInstruction, error) { case JumpBitsNotSet: cond, flip = opJumpSet, true default: - return RawInstruction{}, fmt.Errorf("unknown JumpTest %v", a.Cond) + return RawInstruction{}, fmt.Errorf("unknown JumpTest %v", test) } - jt, jf := a.SkipTrue, a.SkipFalse + jt, jf := skipTrue, skipFalse if flip { jt, jf = jf, jt } return RawInstruction{ - Op: opClsJump | cond, + Op: opClsJump | uint16(cond) | uint16(operand), Jt: jt, Jf: jf, - K: a.Val, + K: k, }, nil } -// String returns the the instruction in assembler notation.
-func (a JumpIf) String() string { - switch a.Cond { +// jumpToString converts a jump instruction to assembler notation +func jumpToString(cond JumpTest, operand string, skipTrue, skipFalse uint8) string { + switch cond { // K == A case JumpEqual: - return conditionalJump(a, "jeq", "jneq") + return conditionalJump(operand, skipTrue, skipFalse, "jeq", "jneq") // K != A case JumpNotEqual: - return fmt.Sprintf("jneq #%d,%d", a.Val, a.SkipTrue) + return fmt.Sprintf("jneq %s,%d", operand, skipTrue) // K > A case JumpGreaterThan: - return conditionalJump(a, "jgt", "jle") + return conditionalJump(operand, skipTrue, skipFalse, "jgt", "jle") // K < A case JumpLessThan: - return fmt.Sprintf("jlt #%d,%d", a.Val, a.SkipTrue) + return fmt.Sprintf("jlt %s,%d", operand, skipTrue) // K >= A case JumpGreaterOrEqual: - return conditionalJump(a, "jge", "jlt") + return conditionalJump(operand, skipTrue, skipFalse, "jge", "jlt") // K <= A case JumpLessOrEqual: - return fmt.Sprintf("jle #%d,%d", a.Val, a.SkipTrue) + return fmt.Sprintf("jle %s,%d", operand, skipTrue) // K & A != 0 case JumpBitsSet: - if a.SkipFalse > 0 { - return fmt.Sprintf("jset #%d,%d,%d", a.Val, a.SkipTrue, a.SkipFalse) + if skipFalse > 0 { + return fmt.Sprintf("jset %s,%d,%d", operand, skipTrue, skipFalse) } - return fmt.Sprintf("jset #%d,%d", a.Val, a.SkipTrue) + return fmt.Sprintf("jset %s,%d", operand, skipTrue) // K & A == 0, there is no assembler instruction for JumpBitNotSet, use JumpBitSet and invert skips case JumpBitsNotSet: - return JumpIf{Cond: JumpBitsSet, SkipTrue: a.SkipFalse, SkipFalse: a.SkipTrue, Val: a.Val}.String() + return jumpToString(JumpBitsSet, operand, skipFalse, skipTrue) default: - return fmt.Sprintf("unknown instruction: %#v", a) + return fmt.Sprintf("unknown JumpTest %#v", cond) } } -func conditionalJump(inst JumpIf, positiveJump, negativeJump string) string { - if inst.SkipTrue > 0 { - if inst.SkipFalse > 0 { - return fmt.Sprintf("%s #%d,%d,%d", positiveJump, inst.Val, inst.SkipTrue, inst.SkipFalse) +func conditionalJump(operand string, skipTrue, skipFalse uint8, positiveJump, negativeJump string) string { + if skipTrue > 0 { + if skipFalse > 0 { + return fmt.Sprintf("%s %s,%d,%d", positiveJump, operand, skipTrue, skipFalse) } - return fmt.Sprintf("%s #%d,%d", positiveJump, inst.Val, inst.SkipTrue) + return fmt.Sprintf("%s %s,%d", positiveJump, operand, skipTrue) } - return fmt.Sprintf("%s #%d,%d", negativeJump, inst.Val, inst.SkipFalse) + return fmt.Sprintf("%s %s,%d", negativeJump, operand, skipFalse) } // RetA exits the BPF program, returning the value of register A. @@ -621,7 +643,7 @@ func (a RetA) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a RetA) String() string { return fmt.Sprintf("ret a") } @@ -639,7 +661,7 @@ func (a RetConstant) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a RetConstant) String() string { return fmt.Sprintf("ret #%d", a.Val) } @@ -654,7 +676,7 @@ func (a TXA) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. +// String returns the instruction in assembler notation. func (a TXA) String() string { return fmt.Sprintf("txa") } @@ -669,7 +691,7 @@ func (a TAX) Assemble() (RawInstruction, error) { }, nil } -// String returns the the instruction in assembler notation. 
+// String returns the instruction in assembler notation. func (a TAX) String() string { return fmt.Sprintf("tax") } diff --git a/vendor/golang.org/x/net/bpf/instructions_test.go b/vendor/golang.org/x/net/bpf/instructions_test.go index dde474a..69b25c5 100644 --- a/vendor/golang.org/x/net/bpf/instructions_test.go +++ b/vendor/golang.org/x/net/bpf/instructions_test.go @@ -64,14 +64,22 @@ var allInstructions = []Instruction{ NegateA{}, - Jump{Skip: 10}, - JumpIf{Cond: JumpEqual, Val: 42, SkipTrue: 8, SkipFalse: 9}, - JumpIf{Cond: JumpNotEqual, Val: 42, SkipTrue: 8}, - JumpIf{Cond: JumpLessThan, Val: 42, SkipTrue: 7}, - JumpIf{Cond: JumpLessOrEqual, Val: 42, SkipTrue: 6}, - JumpIf{Cond: JumpGreaterThan, Val: 42, SkipTrue: 4, SkipFalse: 5}, - JumpIf{Cond: JumpGreaterOrEqual, Val: 42, SkipTrue: 3, SkipFalse: 4}, - JumpIf{Cond: JumpBitsSet, Val: 42, SkipTrue: 2, SkipFalse: 3}, + Jump{Skip: 17}, + JumpIf{Cond: JumpEqual, Val: 42, SkipTrue: 15, SkipFalse: 16}, + JumpIf{Cond: JumpNotEqual, Val: 42, SkipTrue: 15}, + JumpIf{Cond: JumpLessThan, Val: 42, SkipTrue: 14}, + JumpIf{Cond: JumpLessOrEqual, Val: 42, SkipTrue: 13}, + JumpIf{Cond: JumpGreaterThan, Val: 42, SkipTrue: 11, SkipFalse: 12}, + JumpIf{Cond: JumpGreaterOrEqual, Val: 42, SkipTrue: 10, SkipFalse: 11}, + JumpIf{Cond: JumpBitsSet, Val: 42, SkipTrue: 9, SkipFalse: 10}, + + JumpIfX{Cond: JumpEqual, SkipTrue: 8, SkipFalse: 9}, + JumpIfX{Cond: JumpNotEqual, SkipTrue: 8}, + JumpIfX{Cond: JumpLessThan, SkipTrue: 7}, + JumpIfX{Cond: JumpLessOrEqual, SkipTrue: 6}, + JumpIfX{Cond: JumpGreaterThan, SkipTrue: 4, SkipFalse: 5}, + JumpIfX{Cond: JumpGreaterOrEqual, SkipTrue: 3, SkipFalse: 4}, + JumpIfX{Cond: JumpBitsSet, SkipTrue: 2, SkipFalse: 3}, TAX{}, TXA{}, @@ -487,7 +495,67 @@ func TestString(t *testing.T) { }, { instruction: JumpIf{Cond: 0xffff, Val: 42, SkipTrue: 1, SkipFalse: 2}, - assembler: "unknown instruction: bpf.JumpIf{Cond:0xffff, Val:0x2a, SkipTrue:0x1, SkipFalse:0x2}", + assembler: "unknown JumpTest 0xffff", + }, + { + instruction: JumpIfX{Cond: JumpEqual, SkipTrue: 8, SkipFalse: 9}, + assembler: "jeq x,8,9", + }, + { + instruction: JumpIfX{Cond: JumpEqual, SkipTrue: 8}, + assembler: "jeq x,8", + }, + { + instruction: JumpIfX{Cond: JumpEqual, SkipFalse: 8}, + assembler: "jneq x,8", + }, + { + instruction: JumpIfX{Cond: JumpNotEqual, SkipTrue: 8}, + assembler: "jneq x,8", + }, + { + instruction: JumpIfX{Cond: JumpLessThan, SkipTrue: 7}, + assembler: "jlt x,7", + }, + { + instruction: JumpIfX{Cond: JumpLessOrEqual, SkipTrue: 6}, + assembler: "jle x,6", + }, + { + instruction: JumpIfX{Cond: JumpGreaterThan, SkipTrue: 4, SkipFalse: 5}, + assembler: "jgt x,4,5", + }, + { + instruction: JumpIfX{Cond: JumpGreaterThan, SkipTrue: 4}, + assembler: "jgt x,4", + }, + { + instruction: JumpIfX{Cond: JumpGreaterOrEqual, SkipTrue: 3, SkipFalse: 4}, + assembler: "jge x,3,4", + }, + { + instruction: JumpIfX{Cond: JumpGreaterOrEqual, SkipTrue: 3}, + assembler: "jge x,3", + }, + { + instruction: JumpIfX{Cond: JumpBitsSet, SkipTrue: 2, SkipFalse: 3}, + assembler: "jset x,2,3", + }, + { + instruction: JumpIfX{Cond: JumpBitsSet, SkipTrue: 2}, + assembler: "jset x,2", + }, + { + instruction: JumpIfX{Cond: JumpBitsNotSet, SkipTrue: 2, SkipFalse: 3}, + assembler: "jset x,3,2", + }, + { + instruction: JumpIfX{Cond: JumpBitsNotSet, SkipTrue: 2}, + assembler: "jset x,0,2", + }, + { + instruction: JumpIfX{Cond: 0xffff, SkipTrue: 1, SkipFalse: 2}, + assembler: "unknown JumpTest 0xffff", }, { instruction: TAX{}, diff --git 
a/vendor/golang.org/x/net/bpf/testdata/all_instructions.bpf b/vendor/golang.org/x/net/bpf/testdata/all_instructions.bpf index f871440..929eb25 100644 --- a/vendor/golang.org/x/net/bpf/testdata/all_instructions.bpf +++ b/vendor/golang.org/x/net/bpf/testdata/all_instructions.bpf @@ -1 +1 @@ -50,0 0 0 42,1 0 0 42,96 0 0 3,97 0 0 3,48 0 0 42,40 0 0 42,32 0 0 42,80 0 0 42,72 0 0 42,64 0 0 42,177 0 0 42,128 0 0 0,32 0 0 4294963200,32 0 0 4294963204,32 0 0 4294963256,2 0 0 3,3 0 0 3,4 0 0 42,20 0 0 42,36 0 0 42,52 0 0 42,68 0 0 42,84 0 0 42,100 0 0 42,116 0 0 42,148 0 0 42,164 0 0 42,12 0 0 0,28 0 0 0,44 0 0 0,60 0 0 0,76 0 0 0,92 0 0 0,108 0 0 0,124 0 0 0,156 0 0 0,172 0 0 0,132 0 0 0,5 0 0 10,21 8 9 42,21 0 8 42,53 0 7 42,37 0 6 42,37 4 5 42,53 3 4 42,69 2 3 42,7 0 0 0,135 0 0 0,22 0 0 0,6 0 0 0, +57,0 0 0 42,1 0 0 42,96 0 0 3,97 0 0 3,48 0 0 42,40 0 0 42,32 0 0 42,80 0 0 42,72 0 0 42,64 0 0 42,177 0 0 42,128 0 0 0,32 0 0 4294963200,32 0 0 4294963204,32 0 0 4294963256,2 0 0 3,3 0 0 3,4 0 0 42,20 0 0 42,36 0 0 42,52 0 0 42,68 0 0 42,84 0 0 42,100 0 0 42,116 0 0 42,148 0 0 42,164 0 0 42,12 0 0 0,28 0 0 0,44 0 0 0,60 0 0 0,76 0 0 0,92 0 0 0,108 0 0 0,124 0 0 0,156 0 0 0,172 0 0 0,132 0 0 0,5 0 0 17,21 15 16 42,21 0 15 42,53 0 14 42,37 0 13 42,37 11 12 42,53 10 11 42,69 9 10 42,29 8 9 0,29 0 8 0,61 0 7 0,45 0 6 0,45 4 5 0,61 3 4 0,77 2 3 0,7 0 0 0,135 0 0 0,22 0 0 0,6 0 0 42, diff --git a/vendor/golang.org/x/net/bpf/testdata/all_instructions.txt b/vendor/golang.org/x/net/bpf/testdata/all_instructions.txt index 3045501..8e4d758 100644 --- a/vendor/golang.org/x/net/bpf/testdata/all_instructions.txt +++ b/vendor/golang.org/x/net/bpf/testdata/all_instructions.txt @@ -1,6 +1,6 @@ # This filter is compiled to all_instructions.bpf by the `bpf_asm` # tool, which can be found in the linux kernel source tree under -# tools/net. +# tools/bpf. 
# Load immediate ld #42 @@ -60,7 +60,7 @@ xor x # !A neg -# Jumps +# Jump A constant ja end jeq #42,prev,end jne #42,end @@ -70,6 +70,15 @@ jgt #42,prev,end jge #42,prev,end jset #42,prev,end +# Jump A X +jeq x,prev,end +jne x,end +jlt x,end +jle x,end +jgt x,prev,end +jge x,prev,end +jset x,prev,end + # Register transfers tax txa diff --git a/vendor/golang.org/x/net/bpf/vm.go b/vendor/golang.org/x/net/bpf/vm.go index 4c656f1..73f57f1 100644 --- a/vendor/golang.org/x/net/bpf/vm.go +++ b/vendor/golang.org/x/net/bpf/vm.go @@ -35,6 +35,13 @@ func NewVM(filter []Instruction) (*VM, error) { if check <= int(ins.SkipFalse) { return nil, fmt.Errorf("cannot jump %d instructions in false case; jumping past program bounds", ins.SkipFalse) } + case JumpIfX: + if check <= int(ins.SkipTrue) { + return nil, fmt.Errorf("cannot jump %d instructions in true case; jumping past program bounds", ins.SkipTrue) + } + if check <= int(ins.SkipFalse) { + return nil, fmt.Errorf("cannot jump %d instructions in false case; jumping past program bounds", ins.SkipFalse) + } // Check for division or modulus by zero case ALUOpConstant: if ins.Val != 0 { @@ -109,6 +116,9 @@ func (v *VM) Run(in []byte) (int, error) { case JumpIf: jump := jumpIf(ins, regA) i += jump + case JumpIfX: + jump := jumpIfX(ins, regA, regX) + i += jump case LoadAbsolute: regA, ok = loadAbsolute(ins, in) case LoadConstant: diff --git a/vendor/golang.org/x/net/bpf/vm_bpf_test.go b/vendor/golang.org/x/net/bpf/vm_bpf_test.go index 77fa8fe..137eea1 100644 --- a/vendor/golang.org/x/net/bpf/vm_bpf_test.go +++ b/vendor/golang.org/x/net/bpf/vm_bpf_test.go @@ -12,6 +12,8 @@ import ( "golang.org/x/net/bpf" "golang.org/x/net/ipv4" + "golang.org/x/net/ipv6" + "golang.org/x/net/nettest" ) // A virtualMachine is a BPF virtual machine which can process an @@ -137,7 +139,7 @@ type osVirtualMachine struct { // testOSVM creates a virtualMachine which uses the OS's BPF VM by injecting // packets into a UDP listener with a BPF program attached to it. 
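The vm.go hunk above extends NewVM's bounds checking to the new JumpIfX case, so a register-operand jump that would skip past the end of the program is rejected when the VM is constructed rather than at run time. A minimal sketch, assuming the vendored package; the error text matches the new out-of-program tests added below.

    package main

    import (
        "fmt"

        "golang.org/x/net/bpf"
    )

    func main() {
        // SkipTrue: 2 would jump past the final RetA, so NewVM refuses the program.
        _, err := bpf.NewVM([]bpf.Instruction{
            bpf.JumpIfX{Cond: bpf.JumpEqual, SkipTrue: 2},
            bpf.RetA{},
        })
        fmt.Println(err)
        // Output: cannot jump 2 instructions in true case; jumping past program bounds
    }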
func testOSVM(t *testing.T, filter []bpf.Instruction) (virtualMachine, func()) { - l, err := net.ListenPacket("udp4", "127.0.0.1:0") + l, err := nettest.NewLocalPacketListener("udp") if err != nil { t.Fatalf("failed to open OS VM UDP listener: %v", err) } @@ -147,12 +149,17 @@ func testOSVM(t *testing.T, filter []bpf.Instruction) (virtualMachine, func()) { t.Fatalf("failed to compile BPF program: %v", err) } - p := ipv4.NewPacketConn(l) - if err = p.SetBPF(prog); err != nil { + ip := l.LocalAddr().(*net.UDPAddr).IP + if ip.To4() != nil && ip.To16() == nil { + err = ipv4.NewPacketConn(l).SetBPF(prog) + } else { + err = ipv6.NewPacketConn(l).SetBPF(prog) + } + if err != nil { t.Fatalf("failed to attach BPF program to listener: %v", err) } - s, err := net.Dial("udp4", l.LocalAddr().String()) + s, err := net.Dial(l.LocalAddr().Network(), l.LocalAddr().String()) if err != nil { t.Fatalf("failed to dial connection to listener: %v", err) } diff --git a/vendor/golang.org/x/net/bpf/vm_instructions.go b/vendor/golang.org/x/net/bpf/vm_instructions.go index 516f946..f0d2e55 100644 --- a/vendor/golang.org/x/net/bpf/vm_instructions.go +++ b/vendor/golang.org/x/net/bpf/vm_instructions.go @@ -55,34 +55,41 @@ func aluOpCommon(op ALUOp, regA uint32, value uint32) uint32 { } } -func jumpIf(ins JumpIf, value uint32) int { +func jumpIf(ins JumpIf, regA uint32) int { + return jumpIfCommon(ins.Cond, ins.SkipTrue, ins.SkipFalse, regA, ins.Val) +} + +func jumpIfX(ins JumpIfX, regA uint32, regX uint32) int { + return jumpIfCommon(ins.Cond, ins.SkipTrue, ins.SkipFalse, regA, regX) +} + +func jumpIfCommon(cond JumpTest, skipTrue, skipFalse uint8, regA uint32, value uint32) int { var ok bool - inV := uint32(ins.Val) - switch ins.Cond { + switch cond { case JumpEqual: - ok = value == inV + ok = regA == value case JumpNotEqual: - ok = value != inV + ok = regA != value case JumpGreaterThan: - ok = value > inV + ok = regA > value case JumpLessThan: - ok = value < inV + ok = regA < value case JumpGreaterOrEqual: - ok = value >= inV + ok = regA >= value case JumpLessOrEqual: - ok = value <= inV + ok = regA <= value case JumpBitsSet: - ok = (value & inV) != 0 + ok = (regA & value) != 0 case JumpBitsNotSet: - ok = (value & inV) == 0 + ok = (regA & value) == 0 } if ok { - return int(ins.SkipTrue) + return int(skipTrue) } - return int(ins.SkipFalse) + return int(skipFalse) } func loadAbsolute(ins LoadAbsolute, in []byte) (uint32, bool) { diff --git a/vendor/golang.org/x/net/bpf/vm_jump_test.go b/vendor/golang.org/x/net/bpf/vm_jump_test.go index e0a3a98..77a2d47 100644 --- a/vendor/golang.org/x/net/bpf/vm_jump_test.go +++ b/vendor/golang.org/x/net/bpf/vm_jump_test.go @@ -83,6 +83,32 @@ func TestVMJumpIfFalseOutOfProgram(t *testing.T) { } } +func TestVMJumpIfXTrueOutOfProgram(t *testing.T) { + _, _, err := testVM(t, []bpf.Instruction{ + bpf.JumpIfX{ + Cond: bpf.JumpEqual, + SkipTrue: 2, + }, + bpf.RetA{}, + }) + if errStr(err) != "cannot jump 2 instructions in true case; jumping past program bounds" { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestVMJumpIfXFalseOutOfProgram(t *testing.T) { + _, _, err := testVM(t, []bpf.Instruction{ + bpf.JumpIfX{ + Cond: bpf.JumpEqual, + SkipFalse: 3, + }, + bpf.RetA{}, + }) + if errStr(err) != "cannot jump 3 instructions in false case; jumping past program bounds" { + t.Fatalf("unexpected error: %v", err) + } +} + func TestVMJumpIfEqual(t *testing.T) { vm, done, err := testVM(t, []bpf.Instruction{ bpf.LoadAbsolute{ @@ -378,3 +404,323 @@ func TestVMJumpIfBitsNotSet(t *testing.T) { 
want, got) } } + +func TestVMJumpIfXEqual(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 1, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 1, + }, + bpf.JumpIfX{ + Cond: bpf.JumpEqual, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 9, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 1, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 1, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXNotEqual(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 1, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 1, + }, + bpf.JumpIfX{ + Cond: bpf.JumpNotEqual, + SkipFalse: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 9, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 1, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 1, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXGreaterThan(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 4, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0x00010202, + }, + bpf.JumpIfX{ + Cond: bpf.JumpGreaterThan, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 12, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0, 1, 2, 3, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 4, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXLessThan(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 4, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0xff010203, + }, + bpf.JumpIfX{ + Cond: bpf.JumpLessThan, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 12, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0, 1, 2, 3, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 4, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXGreaterOrEqual(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 4, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0x00010203, + }, + bpf.JumpIfX{ + Cond: bpf.JumpGreaterOrEqual, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 12, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0, 1, 2, 3, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", 
err) + } + if want, got := 4, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXLessOrEqual(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 4, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0xff010203, + }, + bpf.JumpIfX{ + Cond: bpf.JumpLessOrEqual, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 12, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0, 1, 2, 3, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 4, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXBitsSet(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 2, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0x1122, + }, + bpf.JumpIfX{ + Cond: bpf.JumpBitsSet, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 10, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0x01, 0x02, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 2, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} + +func TestVMJumpIfXBitsNotSet(t *testing.T) { + vm, done, err := testVM(t, []bpf.Instruction{ + bpf.LoadAbsolute{ + Off: 8, + Size: 2, + }, + bpf.LoadConstant{ + Dst: bpf.RegX, + Val: 0x1221, + }, + bpf.JumpIfX{ + Cond: bpf.JumpBitsNotSet, + SkipTrue: 1, + }, + bpf.RetConstant{ + Val: 0, + }, + bpf.RetConstant{ + Val: 10, + }, + }) + if err != nil { + t.Fatalf("failed to load BPF program: %v", err) + } + defer done() + + out, err := vm.Run([]byte{ + 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + 0x01, 0x02, + }) + if err != nil { + t.Fatalf("unexpected error while running program: %v", err) + } + if want, got := 2, out; want != got { + t.Fatalf("unexpected number of output bytes:\n- want: %d\n- got: %d", + want, got) + } +} diff --git a/vendor/golang.org/x/net/context/context.go b/vendor/golang.org/x/net/context/context.go index d3681ab..a3c021d 100644 --- a/vendor/golang.org/x/net/context/context.go +++ b/vendor/golang.org/x/net/context/context.go @@ -5,6 +5,8 @@ // Package context defines the Context type, which carries deadlines, // cancelation signals, and other request-scoped values across API boundaries // and between processes. +// As of Go 1.7 this package is available in the standard library under the +// name context. https://golang.org/pkg/context. // // Incoming requests to a server should create a Context, and outgoing calls to // servers should accept a Context. The chain of function calls between must diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go index 606cf1f..37dc0cf 100644 --- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go +++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go @@ -2,18 +2,15 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build go1.7 - // Package ctxhttp provides helper functions for performing context-aware HTTP requests. package ctxhttp // import "golang.org/x/net/context/ctxhttp" import ( + "context" "io" "net/http" "net/url" "strings" - - "golang.org/x/net/context" ) // Do sends an HTTP request with the provided http.Client and returns diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_17_test.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_17_test.go deleted file mode 100644 index 72411b1..0000000 --- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_17_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !plan9,go1.7 - -package ctxhttp - -import ( - "io" - "net/http" - "net/http/httptest" - "testing" - - "context" -) - -func TestGo17Context(t *testing.T) { - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - io.WriteString(w, "ok") - })) - defer ts.Close() - ctx := context.Background() - resp, err := Get(ctx, http.DefaultClient, ts.URL) - if resp == nil || err != nil { - t.Fatalf("error received from client: %v %v", err, resp) - } - resp.Body.Close() -} diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17.go deleted file mode 100644 index 926870c..0000000 --- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !go1.7 - -package ctxhttp // import "golang.org/x/net/context/ctxhttp" - -import ( - "io" - "net/http" - "net/url" - "strings" - - "golang.org/x/net/context" -) - -func nop() {} - -var ( - testHookContextDoneBeforeHeaders = nop - testHookDoReturned = nop - testHookDidBodyClose = nop -) - -// Do sends an HTTP request with the provided http.Client and returns an HTTP response. -// If the client is nil, http.DefaultClient is used. -// If the context is canceled or times out, ctx.Err() will be returned. -func Do(ctx context.Context, client *http.Client, req *http.Request) (*http.Response, error) { - if client == nil { - client = http.DefaultClient - } - - // TODO(djd): Respect any existing value of req.Cancel. - cancel := make(chan struct{}) - req.Cancel = cancel - - type responseAndError struct { - resp *http.Response - err error - } - result := make(chan responseAndError, 1) - - // Make local copies of test hooks closed over by goroutines below. - // Prevents data races in tests. - testHookDoReturned := testHookDoReturned - testHookDidBodyClose := testHookDidBodyClose - - go func() { - resp, err := client.Do(req) - testHookDoReturned() - result <- responseAndError{resp, err} - }() - - var resp *http.Response - - select { - case <-ctx.Done(): - testHookContextDoneBeforeHeaders() - close(cancel) - // Clean up after the goroutine calling client.Do: - go func() { - if r := <-result; r.resp != nil { - testHookDidBodyClose() - r.resp.Body.Close() - } - }() - return nil, ctx.Err() - case r := <-result: - var err error - resp, err = r.resp, r.err - if err != nil { - return resp, err - } - } - - c := make(chan struct{}) - go func() { - select { - case <-ctx.Done(): - close(cancel) - case <-c: - // The response's Body is closed. 
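The ctxhttp changes in this area drop the go1.7 build tag and import the standard library context package directly; the hand-rolled cancellation shim for pre-1.7 Go (being deleted here and just below) is no longer needed. A minimal hedged usage sketch of the simplified package, using only ctxhttp.Get and a context deadline (example.com is a placeholder URL):

    package main

    import (
        "context"
        "io/ioutil"
        "log"
        "net/http"
        "time"

        "golang.org/x/net/context/ctxhttp"
    )

    func main() {
        // The request is abandoned if it outlives the two-second deadline.
        ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
        defer cancel()

        resp, err := ctxhttp.Get(ctx, http.DefaultClient, "https://example.com/")
        if err != nil {
            log.Fatal(err) // typically ctx.Err() when the deadline is exceeded
        }
        defer resp.Body.Close()

        body, err := ioutil.ReadAll(resp.Body)
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("read %d bytes", len(body))
    }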
- } - }() - resp.Body = ¬ifyingReader{resp.Body, c} - - return resp, nil -} - -// Get issues a GET request via the Do function. -func Get(ctx context.Context, client *http.Client, url string) (*http.Response, error) { - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return nil, err - } - return Do(ctx, client, req) -} - -// Head issues a HEAD request via the Do function. -func Head(ctx context.Context, client *http.Client, url string) (*http.Response, error) { - req, err := http.NewRequest("HEAD", url, nil) - if err != nil { - return nil, err - } - return Do(ctx, client, req) -} - -// Post issues a POST request via the Do function. -func Post(ctx context.Context, client *http.Client, url string, bodyType string, body io.Reader) (*http.Response, error) { - req, err := http.NewRequest("POST", url, body) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", bodyType) - return Do(ctx, client, req) -} - -// PostForm issues a POST request via the Do function. -func PostForm(ctx context.Context, client *http.Client, url string, data url.Values) (*http.Response, error) { - return Post(ctx, client, url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode())) -} - -// notifyingReader is an io.ReadCloser that closes the notify channel after -// Close is called or a Read fails on the underlying ReadCloser. -type notifyingReader struct { - io.ReadCloser - notify chan<- struct{} -} - -func (r *notifyingReader) Read(p []byte) (int, error) { - n, err := r.ReadCloser.Read(p) - if err != nil && r.notify != nil { - close(r.notify) - r.notify = nil - } - return n, err -} - -func (r *notifyingReader) Close() error { - err := r.ReadCloser.Close() - if r.notify != nil { - close(r.notify) - r.notify = nil - } - return err -} diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17_test.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17_test.go deleted file mode 100644 index 9159cf0..0000000 --- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_pre17_test.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !plan9,!go1.7 - -package ctxhttp - -import ( - "net" - "net/http" - "net/http/httptest" - "sync" - "testing" - "time" - - "golang.org/x/net/context" -) - -// golang.org/issue/14065 -func TestClosesResponseBodyOnCancel(t *testing.T) { - defer func() { testHookContextDoneBeforeHeaders = nop }() - defer func() { testHookDoReturned = nop }() - defer func() { testHookDidBodyClose = nop }() - - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {})) - defer ts.Close() - - ctx, cancel := context.WithCancel(context.Background()) - - // closed when Do enters select case <-ctx.Done() - enteredDonePath := make(chan struct{}) - - testHookContextDoneBeforeHeaders = func() { - close(enteredDonePath) - } - - testHookDoReturned = func() { - // We now have the result (the Flush'd headers) at least, - // so we can cancel the request. - cancel() - - // But block the client.Do goroutine from sending - // until Do enters into the <-ctx.Done() path, since - // otherwise if both channels are readable, select - // picks a random one. 
- <-enteredDonePath - } - - sawBodyClose := make(chan struct{}) - testHookDidBodyClose = func() { close(sawBodyClose) } - - tr := &http.Transport{} - defer tr.CloseIdleConnections() - c := &http.Client{Transport: tr} - req, _ := http.NewRequest("GET", ts.URL, nil) - _, doErr := Do(ctx, c, req) - - select { - case <-sawBodyClose: - case <-time.After(5 * time.Second): - t.Fatal("timeout waiting for body to close") - } - - if doErr != ctx.Err() { - t.Errorf("Do error = %v; want %v", doErr, ctx.Err()) - } -} - -type noteCloseConn struct { - net.Conn - onceClose sync.Once - closefn func() -} - -func (c *noteCloseConn) Close() error { - c.onceClose.Do(c.closefn) - return c.Conn.Close() -} diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go index 1e41551..c4339c4 100644 --- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go +++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go @@ -7,16 +7,28 @@ package ctxhttp import ( + "context" "io" "io/ioutil" "net/http" "net/http/httptest" "testing" "time" - - "golang.org/x/net/context" ) +func TestGo17Context(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + io.WriteString(w, "ok") + })) + defer ts.Close() + ctx := context.Background() + resp, err := Get(ctx, http.DefaultClient, ts.URL) + if resp == nil || err != nil { + t.Fatalf("error received from client: %v %v", err, resp) + } + resp.Body.Close() +} + const ( requestDuration = 100 * time.Millisecond requestBody = "ok" diff --git a/vendor/golang.org/x/net/dns/dnsmessage/example_test.go b/vendor/golang.org/x/net/dns/dnsmessage/example_test.go index 5415c2d..8600a6b 100644 --- a/vendor/golang.org/x/net/dns/dnsmessage/example_test.go +++ b/vendor/golang.org/x/net/dns/dnsmessage/example_test.go @@ -37,20 +37,20 @@ func ExampleParser() { }, Answers: []dnsmessage.Resource{ { - dnsmessage.ResourceHeader{ + Header: dnsmessage.ResourceHeader{ Name: mustNewName("foo.bar.example.com."), Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET, }, - &dnsmessage.AResource{[4]byte{127, 0, 0, 1}}, + Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 1}}, }, { - dnsmessage.ResourceHeader{ + Header: dnsmessage.ResourceHeader{ Name: mustNewName("bar.example.com."), Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET, }, - &dnsmessage.AResource{[4]byte{127, 0, 0, 2}}, + Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 2}}, }, }, } diff --git a/vendor/golang.org/x/net/dns/dnsmessage/message.go b/vendor/golang.org/x/net/dns/dnsmessage/message.go index 19b260d..13fbc08 100644 --- a/vendor/golang.org/x/net/dns/dnsmessage/message.go +++ b/vendor/golang.org/x/net/dns/dnsmessage/message.go @@ -5,6 +5,9 @@ // Package dnsmessage provides a mostly RFC 1035 compliant implementation of // DNS message packing and unpacking. // +// The package also supports messages with Extension Mechanisms for DNS +// (EDNS(0)) as defined in RFC 6891. +// // This implementation is designed to minimize heap allocations and avoid // unnecessary packing and unpacking as much as possible. package dnsmessage @@ -13,21 +16,11 @@ import ( "errors" ) -// Packet formats +// Message formats // A Type is a type of DNS request and response. type Type uint16 -// A Class is a type of network. -type Class uint16 - -// An OpCode is a DNS operation code. -type OpCode uint16 - -// An RCode is a DNS response status code. -type RCode uint16 - -// Wire constants. 
const ( // ResourceHeader.Type and Question.Type TypeA Type = 1 @@ -39,6 +32,7 @@ const ( TypeTXT Type = 16 TypeAAAA Type = 28 TypeSRV Type = 33 + TypeOPT Type = 41 // Question.Type TypeWKS Type = 11 @@ -46,7 +40,46 @@ const ( TypeMINFO Type = 14 TypeAXFR Type = 252 TypeALL Type = 255 +) + +var typeNames = map[Type]string{ + TypeA: "TypeA", + TypeNS: "TypeNS", + TypeCNAME: "TypeCNAME", + TypeSOA: "TypeSOA", + TypePTR: "TypePTR", + TypeMX: "TypeMX", + TypeTXT: "TypeTXT", + TypeAAAA: "TypeAAAA", + TypeSRV: "TypeSRV", + TypeOPT: "TypeOPT", + TypeWKS: "TypeWKS", + TypeHINFO: "TypeHINFO", + TypeMINFO: "TypeMINFO", + TypeAXFR: "TypeAXFR", + TypeALL: "TypeALL", +} + +// String implements fmt.Stringer.String. +func (t Type) String() string { + if n, ok := typeNames[t]; ok { + return n + } + return printUint16(uint16(t)) +} + +// GoString implements fmt.GoStringer.GoString. +func (t Type) GoString() string { + if n, ok := typeNames[t]; ok { + return "dnsmessage." + n + } + return printUint16(uint16(t)) +} +// A Class is a type of network. +type Class uint16 + +const ( // ResourceHeader.Class and Question.Class ClassINET Class = 1 ClassCSNET Class = 2 @@ -55,7 +88,44 @@ const ( // Question.Class ClassANY Class = 255 +) + +var classNames = map[Class]string{ + ClassINET: "ClassINET", + ClassCSNET: "ClassCSNET", + ClassCHAOS: "ClassCHAOS", + ClassHESIOD: "ClassHESIOD", + ClassANY: "ClassANY", +} +// String implements fmt.Stringer.String. +func (c Class) String() string { + if n, ok := classNames[c]; ok { + return n + } + return printUint16(uint16(c)) +} + +// GoString implements fmt.GoStringer.GoString. +func (c Class) GoString() string { + if n, ok := classNames[c]; ok { + return "dnsmessage." + n + } + return printUint16(uint16(c)) +} + +// An OpCode is a DNS operation code. +type OpCode uint16 + +// GoString implements fmt.GoStringer.GoString. +func (o OpCode) GoString() string { + return printUint16(uint16(o)) +} + +// An RCode is a DNS response status code. +type RCode uint16 + +const ( // Message.Rcode RCodeSuccess RCode = 0 RCodeFormatError RCode = 1 @@ -65,6 +135,116 @@ const ( RCodeRefused RCode = 5 ) +var rCodeNames = map[RCode]string{ + RCodeSuccess: "RCodeSuccess", + RCodeFormatError: "RCodeFormatError", + RCodeServerFailure: "RCodeServerFailure", + RCodeNameError: "RCodeNameError", + RCodeNotImplemented: "RCodeNotImplemented", + RCodeRefused: "RCodeRefused", +} + +// String implements fmt.Stringer.String. +func (r RCode) String() string { + if n, ok := rCodeNames[r]; ok { + return n + } + return printUint16(uint16(r)) +} + +// GoString implements fmt.GoStringer.GoString. +func (r RCode) GoString() string { + if n, ok := rCodeNames[r]; ok { + return "dnsmessage." + n + } + return printUint16(uint16(r)) +} + +func printPaddedUint8(i uint8) string { + b := byte(i) + return string([]byte{ + b/100 + '0', + b/10%10 + '0', + b%10 + '0', + }) +} + +func printUint8Bytes(buf []byte, i uint8) []byte { + b := byte(i) + if i >= 100 { + buf = append(buf, b/100+'0') + } + if i >= 10 { + buf = append(buf, b/10%10+'0') + } + return append(buf, b%10+'0') +} + +func printByteSlice(b []byte) string { + if len(b) == 0 { + return "" + } + buf := make([]byte, 0, 5*len(b)) + buf = printUint8Bytes(buf, uint8(b[0])) + for _, n := range b[1:] { + buf = append(buf, ',', ' ') + buf = printUint8Bytes(buf, uint8(n)) + } + return string(buf) +} + +const hexDigits = "0123456789abcdef" + +func printString(str []byte) string { + buf := make([]byte, 0, len(str)) + for i := 0; i < len(str); i++ { + c := str[i] + if c == '.' 
|| c == '-' || c == ' ' || + 'A' <= c && c <= 'Z' || + 'a' <= c && c <= 'z' || + '0' <= c && c <= '9' { + buf = append(buf, c) + continue + } + + upper := c >> 4 + lower := (c << 4) >> 4 + buf = append( + buf, + '\\', + 'x', + hexDigits[upper], + hexDigits[lower], + ) + } + return string(buf) +} + +func printUint16(i uint16) string { + return printUint32(uint32(i)) +} + +func printUint32(i uint32) string { + // Max value is 4294967295. + buf := make([]byte, 10) + for b, d := buf, uint32(1000000000); d > 0; d /= 10 { + b[0] = byte(i/d%10 + '0') + if b[0] == '0' && len(b) == len(buf) && len(buf) > 1 { + buf = buf[1:] + } + b = b[1:] + i %= d + } + return string(buf) +} + +func printBool(b bool) string { + if b { + return "true" + } + return "false" +} + var ( // ErrNotStarted indicates that the prerequisite information isn't // available yet because the previous records haven't been appropriately @@ -90,6 +270,8 @@ var ( errTooManyAuthorities = errors.New("too many Authorities to pack (>65535)") errTooManyAdditionals = errors.New("too many Additionals to pack (>65535)") errNonCanonicalName = errors.New("name is not in canonical format (it must end with a .)") + errStringTooLong = errors.New("character string exceeds maximum length (255)") + errCompressedSRV = errors.New("compressed name in SRV resource data") ) // Internal constants. @@ -159,6 +341,19 @@ func (m *Header) pack() (id uint16, bits uint16) { return } +// GoString implements fmt.GoStringer.GoString. +func (m *Header) GoString() string { + return "dnsmessage.Header{" + + "ID: " + printUint16(m.ID) + ", " + + "Response: " + printBool(m.Response) + ", " + + "OpCode: " + m.OpCode.GoString() + ", " + + "Authoritative: " + printBool(m.Authoritative) + ", " + + "Truncated: " + printBool(m.Truncated) + ", " + + "RecursionDesired: " + printBool(m.RecursionDesired) + ", " + + "RecursionAvailable: " + printBool(m.RecursionAvailable) + ", " + + "RCode: " + m.RCode.GoString() + "}" +} + // Message is a representation of a DNS message. type Message struct { Header @@ -218,6 +413,7 @@ func (h *header) count(sec section) uint16 { return 0 } +// pack appends the wire format of the header to msg. func (h *header) pack(msg []byte) []byte { msg = packUint16(msg, h.id) msg = packUint16(msg, h.bits) @@ -270,32 +466,43 @@ type Resource struct { Body ResourceBody } +func (r *Resource) GoString() string { + return "dnsmessage.Resource{" + + "Header: " + r.Header.GoString() + + ", Body: &" + r.Body.GoString() + + "}" +} + // A ResourceBody is a DNS resource record minus the header. type ResourceBody interface { // pack packs a Resource except for its header. - pack(msg []byte, compression map[string]int) ([]byte, error) + pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) // realType returns the actual type of the Resource. This is used to // fill in the header Type field. realType() Type + + // GoString implements fmt.GoStringer.GoString. + GoString() string } -func (r *Resource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the Resource to msg. 
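The new typeNames/classNames/rCodeNames tables and the print helpers above give Type, Class, and RCode readable String and GoString output, in keeping with the package's goal of minimizing allocations; values without a name fall back to their decimal form. A small sketch, assuming the vendored dnsmessage package:

    package main

    import (
        "fmt"

        "golang.org/x/net/dns/dnsmessage"
    )

    func main() {
        fmt.Println(dnsmessage.TypeAAAA)       // TypeAAAA
        fmt.Println(dnsmessage.Type(99))       // 99 (unknown values print as decimal)
        fmt.Println(dnsmessage.ClassINET)      // ClassINET
        fmt.Println(dnsmessage.RCodeNameError) // RCodeNameError

        // GoString (%#v) produces valid Go syntax, which is handy in test output.
        fmt.Printf("%#v\n", dnsmessage.TypeOPT) // dnsmessage.TypeOPT
    }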
+func (r *Resource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { if r.Body == nil { return msg, errNilResouceBody } oldMsg := msg r.Header.Type = r.Body.realType() - msg, length, err := r.Header.pack(msg, compression) + msg, lenOff, err := r.Header.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"ResourceHeader", err} } preLen := len(msg) - msg, err = r.Body.pack(msg, compression) + msg, err = r.Body.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"content", err} } - if err := r.Header.fixLen(msg, length, preLen); err != nil { + if err := r.Header.fixLen(msg, lenOff, preLen); err != nil { return oldMsg, err } return msg, nil @@ -436,7 +643,13 @@ func (p *Parser) Question() (Question, error) { // AllQuestions parses all Questions. func (p *Parser) AllQuestions() ([]Question, error) { - qs := make([]Question, 0, p.header.questions) + // Multiple questions are valid according to the spec, + // but servers don't actually support them. There will + // be at most one question here. + // + // Do not pre-allocate based on info in p.header, since + // the data is untrusted. + qs := []Question{} for { q, err := p.Question() if err == ErrSectionDone { @@ -492,7 +705,16 @@ func (p *Parser) Answer() (Resource, error) { // AllAnswers parses all Answer Resources. func (p *Parser) AllAnswers() ([]Resource, error) { - as := make([]Resource, 0, p.header.answers) + // The most common query is for A/AAAA, which usually returns + // a handful of IPs. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. + n := int(p.header.answers) + if n > 20 { + n = 20 + } + as := make([]Resource, 0, n) for { a, err := p.Answer() if err == ErrSectionDone { @@ -533,7 +755,16 @@ func (p *Parser) Authority() (Resource, error) { // AllAuthorities parses all Authority Resources. func (p *Parser) AllAuthorities() ([]Resource, error) { - as := make([]Resource, 0, p.header.authorities) + // Authorities contains SOA in case of NXDOMAIN and friends, + // otherwise it is empty. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. + n := int(p.header.authorities) + if n > 10 { + n = 10 + } + as := make([]Resource, 0, n) for { a, err := p.Authority() if err == ErrSectionDone { @@ -574,7 +805,16 @@ func (p *Parser) Additional() (Resource, error) { // AllAdditionals parses all Additional Resources. func (p *Parser) AllAdditionals() ([]Resource, error) { - as := make([]Resource, 0, p.header.additionals) + // Additionals usually contain OPT, and sometimes A/AAAA + // glue records. + // + // Pre-allocate up to a certain limit, since p.header is + // untrusted data. + n := int(p.header.additionals) + if n > 10 { + n = 10 + } + as := make([]Resource, 0, n) for { a, err := p.Additional() if err == ErrSectionDone { @@ -765,6 +1005,24 @@ func (p *Parser) AAAAResource() (AAAAResource, error) { return r, nil } +// OPTResource parses a single OPTResource. +// +// One of the XXXHeader methods must have been called before calling this +// method. +func (p *Parser) OPTResource() (OPTResource, error) { + if !p.resHeaderValid || p.resHeader.Type != TypeOPT { + return OPTResource{}, ErrNotStarted + } + r, err := unpackOPTResource(p.msg, p.off, p.resHeader.Length) + if err != nil { + return OPTResource{}, err + } + p.off += int(p.resHeader.Length) + p.resHeaderValid = false + p.index++ + return r, nil +} + // Unpack parses a full Message. 
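The Parser changes above stop trusting the counts in the message header when pre-allocating: AllQuestions starts from an empty slice, and AllAnswers, AllAuthorities, and AllAdditionals cap their initial capacity at 20, 10, and 10 respectively, since the header counts are untrusted input. The new OPTResource method reads the EDNS(0) pseudo-record. A hedged round-trip sketch of the read side, assuming the vendored package:

    package main

    import (
        "log"

        "golang.org/x/net/dns/dnsmessage"
    )

    func main() {
        // Build a tiny response so there is something to parse.
        msg := dnsmessage.Message{
            Header: dnsmessage.Header{Response: true, RCode: dnsmessage.RCodeSuccess},
            Questions: []dnsmessage.Question{{
                Name:  dnsmessage.MustNewName("example.com."),
                Type:  dnsmessage.TypeA,
                Class: dnsmessage.ClassINET,
            }},
            Answers: []dnsmessage.Resource{{
                Header: dnsmessage.ResourceHeader{
                    Name:  dnsmessage.MustNewName("example.com."),
                    Type:  dnsmessage.TypeA,
                    Class: dnsmessage.ClassINET,
                },
                Body: &dnsmessage.AResource{A: [4]byte{192, 0, 2, 1}},
            }},
        }
        buf, err := msg.Pack()
        if err != nil {
            log.Fatal(err)
        }

        // Parse it back; AllAnswers caps its pre-allocation regardless of the
        // (untrusted) answer count carried in the header.
        var p dnsmessage.Parser
        hdr, err := p.Start(buf)
        if err != nil {
            log.Fatal(err)
        }
        if err := p.SkipAllQuestions(); err != nil {
            log.Fatal(err)
        }
        answers, err := p.AllAnswers()
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("rcode=%v answers=%d", hdr.RCode, len(answers))
    }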
func (m *Message) Unpack(msg []byte) error { var p Parser @@ -789,6 +1047,12 @@ func (m *Message) Unpack(msg []byte) error { // Pack packs a full Message. func (m *Message) Pack() ([]byte, error) { + return m.AppendPack(make([]byte, 0, packStartingCap)) +} + +// AppendPack is like Pack but appends the full Message to b and returns the +// extended buffer. +func (m *Message) AppendPack(b []byte) ([]byte, error) { // Validate the lengths. It is very unlikely that anyone will try to // pack more than 65535 of any particular type, but it is possible and // we should fail gracefully. @@ -813,41 +1077,40 @@ func (m *Message) Pack() ([]byte, error) { h.authorities = uint16(len(m.Authorities)) h.additionals = uint16(len(m.Additionals)) - msg := make([]byte, 0, packStartingCap) - - msg = h.pack(msg) + compressionOff := len(b) + msg := h.pack(b) // RFC 1035 allows (but does not require) compression for packing. RFC // 1035 requires unpacking implementations to support compression, so // unconditionally enabling it is fine. // // DNS lookups are typically done over UDP, and RFC 1035 states that UDP - // DNS packets can be a maximum of 512 bytes long. Without compression, - // many DNS response packets are over this limit, so enabling + // DNS messages can be a maximum of 512 bytes long. Without compression, + // many DNS response messages are over this limit, so enabling // compression will help ensure compliance. compression := map[string]int{} for i := range m.Questions { var err error - if msg, err = m.Questions[i].pack(msg, compression); err != nil { + if msg, err = m.Questions[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Question", err} } } for i := range m.Answers { var err error - if msg, err = m.Answers[i].pack(msg, compression); err != nil { + if msg, err = m.Answers[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Answer", err} } } for i := range m.Authorities { var err error - if msg, err = m.Authorities[i].pack(msg, compression); err != nil { + if msg, err = m.Authorities[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Authority", err} } } for i := range m.Additionals { var err error - if msg, err = m.Additionals[i].pack(msg, compression); err != nil { + if msg, err = m.Additionals[i].pack(msg, compression, compressionOff); err != nil { return nil, &nestedError{"packing Additional", err} } } @@ -855,37 +1118,104 @@ func (m *Message) Pack() ([]byte, error) { return msg, nil } +// GoString implements fmt.GoStringer.GoString. +func (m *Message) GoString() string { + s := "dnsmessage.Message{Header: " + m.Header.GoString() + ", " + + "Questions: []dnsmessage.Question{" + if len(m.Questions) > 0 { + s += m.Questions[0].GoString() + for _, q := range m.Questions[1:] { + s += ", " + q.GoString() + } + } + s += "}, Answers: []dnsmessage.Resource{" + if len(m.Answers) > 0 { + s += m.Answers[0].GoString() + for _, a := range m.Answers[1:] { + s += ", " + a.GoString() + } + } + s += "}, Authorities: []dnsmessage.Resource{" + if len(m.Authorities) > 0 { + s += m.Authorities[0].GoString() + for _, a := range m.Authorities[1:] { + s += ", " + a.GoString() + } + } + s += "}, Additionals: []dnsmessage.Resource{" + if len(m.Additionals) > 0 { + s += m.Additionals[0].GoString() + for _, a := range m.Additionals[1:] { + s += ", " + a.GoString() + } + } + return s + "}}" +} + // A Builder allows incrementally packing a DNS message. 
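Pack now delegates to the new AppendPack, and every pack helper carries a compressionOff so that name-compression pointers are computed relative to the start of the DNS message rather than the start of the caller's buffer. That makes it safe to build a message after an existing prefix, for example the two-byte length used for DNS over TCP. A hedged sketch, assuming the vendored package:

    package main

    import (
        "encoding/binary"
        "log"

        "golang.org/x/net/dns/dnsmessage"
    )

    func main() {
        msg := dnsmessage.Message{
            Header: dnsmessage.Header{ID: 1, RecursionDesired: true},
            Questions: []dnsmessage.Question{{
                Name:  dnsmessage.MustNewName("example.com."),
                Type:  dnsmessage.TypeA,
                Class: dnsmessage.ClassINET,
            }},
        }

        // Reserve two bytes for the TCP length prefix, then append the message.
        buf := make([]byte, 2, 514)
        buf, err := msg.AppendPack(buf)
        if err != nil {
            log.Fatal(err)
        }
        binary.BigEndian.PutUint16(buf[:2], uint16(len(buf)-2))

        log.Printf("wire message is %d bytes plus a 2-byte prefix", len(buf)-2)
    }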
+// +// Example usage: +// buf := make([]byte, 2, 514) +// b := NewBuilder(buf, Header{...}) +// b.EnableCompression() +// // Optionally start a section and add things to that section. +// // Repeat adding sections as necessary. +// buf, err := b.Finish() +// // If err is nil, buf[2:] will contain the built bytes. type Builder struct { - msg []byte - header header - section section + // msg is the storage for the message being built. + msg []byte + + // section keeps track of the current section being built. + section section + + // header keeps track of what should go in the header when Finish is + // called. + header header + + // start is the starting index of the bytes allocated in msg for header. + start int + + // compression is a mapping from name suffixes to their starting index + // in msg. compression map[string]int } -// Start initializes the builder. +// NewBuilder creates a new builder with compression disabled. // -// buf is optional (nil is fine), but if provided, Start takes ownership of buf. -func (b *Builder) Start(buf []byte, h Header) { - b.StartWithoutCompression(buf, h) - b.compression = map[string]int{} +// Note: Most users will want to immediately enable compression with the +// EnableCompression method. See that method's comment for why you may or may +// not want to enable compression. +// +// The DNS message is appended to the provided initial buffer buf (which may be +// nil) as it is built. The final message is returned by the (*Builder).Finish +// method, which may return the same underlying array if there was sufficient +// capacity in the slice. +func NewBuilder(buf []byte, h Header) Builder { + if buf == nil { + buf = make([]byte, 0, packStartingCap) + } + b := Builder{msg: buf, start: len(buf)} + b.header.id, b.header.bits = h.pack() + var hb [headerLen]byte + b.msg = append(b.msg, hb[:]...) + b.section = sectionHeader + return b } -// StartWithoutCompression initializes the builder with compression disabled. +// EnableCompression enables compression in the Builder. // -// This avoids compression related allocations, but can result in larger message -// sizes. Be careful with this mode as it can cause messages to exceed the UDP -// size limit. +// Leaving compression disabled avoids compression related allocations, but can +// result in larger message sizes. Be careful with this mode as it can cause +// messages to exceed the UDP size limit. // -// buf is optional (nil is fine), but if provided, Start takes ownership of buf. -func (b *Builder) StartWithoutCompression(buf []byte, h Header) { - *b = Builder{msg: buf} - b.header.id, b.header.bits = h.pack() - if cap(b.msg) < headerLen { - b.msg = make([]byte, 0, packStartingCap) - } - b.msg = b.msg[:headerLen] - b.section = sectionHeader +// According to RFC 1035, section 4.1.4, the use of compression is optional, but +// all implementations must accept both compressed and uncompressed DNS +// messages. +// +// Compression should be enabled before any sections are added for best results. 
+func (b *Builder) EnableCompression() { + b.compression = map[string]int{} } func (b *Builder) startCheck(s section) error { @@ -966,7 +1296,7 @@ func (b *Builder) Question(q Question) error { if b.section > sectionQuestions { return ErrSectionDone } - msg, err := q.pack(b.msg, b.compression) + msg, err := q.pack(b.msg, b.compression, b.start) if err != nil { return err } @@ -993,15 +1323,15 @@ func (b *Builder) CNAMEResource(h ResourceHeader, r CNAMEResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"CNAMEResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1017,15 +1347,15 @@ func (b *Builder) MXResource(h ResourceHeader, r MXResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"MXResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1041,15 +1371,15 @@ func (b *Builder) NSResource(h ResourceHeader, r NSResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"NSResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1065,15 +1395,15 @@ func (b *Builder) PTRResource(h ResourceHeader, r PTRResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"PTRResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1089,15 +1419,15 @@ func (b *Builder) SOAResource(h ResourceHeader, r SOAResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"SOAResource body", err} } - if err := 
h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1113,15 +1443,15 @@ func (b *Builder) TXTResource(h ResourceHeader, r TXTResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"TXTResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1137,15 +1467,15 @@ func (b *Builder) SRVResource(h ResourceHeader, r SRVResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"SRVResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1161,15 +1491,15 @@ func (b *Builder) AResource(h ResourceHeader, r AResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"AResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1185,15 +1515,15 @@ func (b *Builder) AAAAResource(h ResourceHeader, r AAAAResource) error { return err } h.Type = r.realType() - msg, length, err := h.pack(b.msg, b.compression) + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) if err != nil { return &nestedError{"ResourceHeader", err} } preLen := len(msg) - if msg, err = r.pack(msg, b.compression); err != nil { + if msg, err = r.pack(msg, b.compression, b.start); err != nil { return &nestedError{"AAAAResource body", err} } - if err := h.fixLen(msg, length, preLen); err != nil { + if err := h.fixLen(msg, lenOff, preLen); err != nil { return err } if err := b.incrementSectionCount(); err != nil { @@ -1203,13 +1533,38 @@ func (b *Builder) AAAAResource(h ResourceHeader, r AAAAResource) error { return nil } -// Finish ends message building and generates a binary packet. +// OPTResource adds a single OPTResource. 
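On the builder side, the Start/StartWithoutCompression pair is replaced by NewBuilder plus an explicit EnableCompression call, and the builder records the starting offset of its buffer so it can also append to a prefixed slice. A hedged end-to-end sketch of the new API, assuming the vendored package (the StartQuestions/StartAnswers section methods are unchanged by this diff):

    package main

    import (
        "log"

        "golang.org/x/net/dns/dnsmessage"
    )

    func main() {
        b := dnsmessage.NewBuilder(nil, dnsmessage.Header{Response: true, Authoritative: true})
        b.EnableCompression() // should be enabled before any sections are added

        if err := b.StartQuestions(); err != nil {
            log.Fatal(err)
        }
        q := dnsmessage.Question{
            Name:  dnsmessage.MustNewName("example.com."),
            Type:  dnsmessage.TypeA,
            Class: dnsmessage.ClassINET,
        }
        if err := b.Question(q); err != nil {
            log.Fatal(err)
        }

        if err := b.StartAnswers(); err != nil {
            log.Fatal(err)
        }
        err := b.AResource(
            dnsmessage.ResourceHeader{Name: q.Name, Class: dnsmessage.ClassINET, TTL: 300},
            dnsmessage.AResource{A: [4]byte{192, 0, 2, 1}},
        )
        if err != nil {
            log.Fatal(err)
        }

        buf, err := b.Finish()
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("built %d-byte message", len(buf))
    }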
+func (b *Builder) OPTResource(h ResourceHeader, r OPTResource) error { + if err := b.checkResourceSection(); err != nil { + return err + } + h.Type = r.realType() + msg, lenOff, err := h.pack(b.msg, b.compression, b.start) + if err != nil { + return &nestedError{"ResourceHeader", err} + } + preLen := len(msg) + if msg, err = r.pack(msg, b.compression, b.start); err != nil { + return &nestedError{"OPTResource body", err} + } + if err := h.fixLen(msg, lenOff, preLen); err != nil { + return err + } + if err := b.incrementSectionCount(); err != nil { + return err + } + b.msg = msg + return nil +} + +// Finish ends message building and generates a binary message. func (b *Builder) Finish() ([]byte, error) { if b.section < sectionHeader { return nil, ErrNotStarted } b.section = sectionDone - b.header.pack(b.msg[:0]) + // Space for the header was allocated in NewBuilder. + b.header.pack(b.msg[b.start:b.start]) return b.msg, nil } @@ -1239,20 +1594,30 @@ type ResourceHeader struct { Length uint16 } -// pack packs all of the fields in a ResourceHeader except for the length. The -// length bytes are returned as a slice so they can be filled in after the rest -// of the Resource has been packed. -func (h *ResourceHeader) pack(oldMsg []byte, compression map[string]int) (msg []byte, length []byte, err error) { +// GoString implements fmt.GoStringer.GoString. +func (h *ResourceHeader) GoString() string { + return "dnsmessage.ResourceHeader{" + + "Name: " + h.Name.GoString() + ", " + + "Type: " + h.Type.GoString() + ", " + + "Class: " + h.Class.GoString() + ", " + + "TTL: " + printUint32(h.TTL) + ", " + + "Length: " + printUint16(h.Length) + "}" +} + +// pack appends the wire format of the ResourceHeader to oldMsg. +// +// lenOff is the offset in msg where the Length field was packed. +func (h *ResourceHeader) pack(oldMsg []byte, compression map[string]int, compressionOff int) (msg []byte, lenOff int, err error) { msg = oldMsg - if msg, err = h.Name.pack(msg, compression); err != nil { - return oldMsg, nil, &nestedError{"Name", err} + if msg, err = h.Name.pack(msg, compression, compressionOff); err != nil { + return oldMsg, 0, &nestedError{"Name", err} } msg = packType(msg, h.Type) msg = packClass(msg, h.Class) msg = packUint32(msg, h.TTL) - lenBegin := len(msg) + lenOff = len(msg) msg = packUint16(msg, h.Length) - return msg, msg[lenBegin : lenBegin+uint16Len], nil + return msg, lenOff, nil } func (h *ResourceHeader) unpack(msg []byte, off int) (int, error) { @@ -1276,19 +1641,63 @@ func (h *ResourceHeader) unpack(msg []byte, off int) (int, error) { return newOff, nil } -func (h *ResourceHeader) fixLen(msg []byte, length []byte, preLen int) error { +// fixLen updates a packed ResourceHeader to include the length of the +// ResourceBody. +// +// lenOff is the offset of the ResourceHeader.Length field in msg. +// +// preLen is the length that msg was before the ResourceBody was packed. +func (h *ResourceHeader) fixLen(msg []byte, lenOff int, preLen int) error { conLen := len(msg) - preLen if conLen > int(^uint16(0)) { return errResTooLong } // Fill in the length now that we know how long the content is. - packUint16(length[:0], uint16(conLen)) + packUint16(msg[lenOff:lenOff], uint16(conLen)) h.Length = uint16(conLen) return nil } +// EDNS(0) wire costants. +const ( + edns0Version = 0 + + edns0DNSSECOK = 0x00008000 + ednsVersionMask = 0x00ff0000 + edns0DNSSECOKMask = 0x00ff8000 +) + +// SetEDNS0 configures h for EDNS(0). +// +// The provided extRCode must be an extedned RCode. 
+func (h *ResourceHeader) SetEDNS0(udpPayloadLen int, extRCode RCode, dnssecOK bool) error { + h.Name = Name{Data: [nameLen]byte{'.'}, Length: 1} // RFC 6891 section 6.1.2 + h.Type = TypeOPT + h.Class = Class(udpPayloadLen) + h.TTL = uint32(extRCode) >> 4 << 24 + if dnssecOK { + h.TTL |= edns0DNSSECOK + } + return nil +} + +// DNSSECAllowed reports whether the DNSSEC OK bit is set. +func (h *ResourceHeader) DNSSECAllowed() bool { + return h.TTL&edns0DNSSECOKMask == edns0DNSSECOK // RFC 6891 section 6.1.3 +} + +// ExtendedRCode returns an extended RCode. +// +// The provided rcode must be the RCode in DNS message header. +func (h *ResourceHeader) ExtendedRCode(rcode RCode) RCode { + if h.TTL&ednsVersionMask == edns0Version { // RFC 6891 section 6.1.3 + return RCode(h.TTL>>24<<4) | rcode + } + return rcode +} + func skipResource(msg []byte, off int) (int, error) { newOff, err := skipName(msg, off) if err != nil { @@ -1313,6 +1722,7 @@ func skipResource(msg []byte, off int) (int, error) { return newOff, nil } +// packUint16 appends the wire format of field to msg. func packUint16(msg []byte, field uint16) []byte { return append(msg, byte(field>>8), byte(field)) } @@ -1331,6 +1741,7 @@ func skipUint16(msg []byte, off int) (int, error) { return off + uint16Len, nil } +// packType appends the wire format of field to msg. func packType(msg []byte, field Type) []byte { return packUint16(msg, uint16(field)) } @@ -1344,6 +1755,7 @@ func skipType(msg []byte, off int) (int, error) { return skipUint16(msg, off) } +// packClass appends the wire format of field to msg. func packClass(msg []byte, field Class) []byte { return packUint16(msg, uint16(field)) } @@ -1357,6 +1769,7 @@ func skipClass(msg []byte, off int) (int, error) { return skipUint16(msg, off) } +// packUint32 appends the wire format of field to msg. func packUint32(msg []byte, field uint32) []byte { return append( msg, @@ -1382,17 +1795,16 @@ func skipUint32(msg []byte, off int) (int, error) { return off + uint32Len, nil } -func packText(msg []byte, field string) []byte { - for len(field) > 0 { - l := len(field) - if l > 255 { - l = 255 - } - msg = append(msg, byte(l)) - msg = append(msg, field[:l]...) - field = field[l:] +// packText appends the wire format of field to msg. +func packText(msg []byte, field string) ([]byte, error) { + l := len(field) + if l > 255 { + return nil, errStringTooLong } - return msg + msg = append(msg, byte(l)) + msg = append(msg, field...) + + return msg, nil } func unpackText(msg []byte, off int) (string, int, error) { @@ -1418,6 +1830,7 @@ func skipText(msg []byte, off int) (int, error) { return endOff, nil } +// packBytes appends the wire format of field to msg. func packBytes(msg []byte, field []byte) []byte { return append(msg, field...) } @@ -1458,18 +1871,33 @@ func NewName(name string) (Name, error) { return n, nil } +// MustNewName creates a new Name from a string and panics on error. +func MustNewName(name string) Name { + n, err := NewName(name) + if err != nil { + panic("creating name: " + err.Error()) + } + return n +} + +// String implements fmt.Stringer.String. func (n Name) String() string { return string(n.Data[:n.Length]) } -// pack packs a domain name. +// GoString implements fmt.GoStringer.GoString. +func (n *Name) GoString() string { + return `dnsmessage.MustNewName("` + printString(n.Data[:n.Length]) + `")` +} + +// pack appends the wire format of the Name to msg. // // Domain names are a sequence of counted strings split at the dots. They end // with a zero-length string. 
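SetEDNS0 above encodes the EDNS(0) pseudo-header fields into an OPT resource header: the class carries the advertised UDP payload size, and the TTL packs the upper bits of the extended RCode plus the DNSSEC OK bit, per RFC 6891; ExtendedRCode later recombines those bits with the 4-bit RCode from the message header. A hedged sketch of adding an OPT record with the builder, assuming the vendored package:

    package main

    import (
        "log"

        "golang.org/x/net/dns/dnsmessage"
    )

    func main() {
        b := dnsmessage.NewBuilder(nil, dnsmessage.Header{RecursionDesired: true})
        b.EnableCompression()

        if err := b.StartQuestions(); err != nil {
            log.Fatal(err)
        }
        if err := b.Question(dnsmessage.Question{
            Name:  dnsmessage.MustNewName("example.com."),
            Type:  dnsmessage.TypeAAAA,
            Class: dnsmessage.ClassINET,
        }); err != nil {
            log.Fatal(err)
        }

        // Advertise a 4096-byte UDP payload and set the DNSSEC OK bit.
        if err := b.StartAdditionals(); err != nil {
            log.Fatal(err)
        }
        var opt dnsmessage.ResourceHeader
        if err := opt.SetEDNS0(4096, dnsmessage.RCodeSuccess, true); err != nil {
            log.Fatal(err)
        }
        if err := b.OPTResource(opt, dnsmessage.OPTResource{}); err != nil {
            log.Fatal(err)
        }

        buf, err := b.Finish()
        if err != nil {
            log.Fatal(err)
        }
        log.Printf("query with OPT record: %d bytes", len(buf))
    }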
Compression can be used to reuse domain suffixes. // // The compression map will be updated with new domain suffixes. If compression // is nil, compression will not be used. -func (n *Name) pack(msg []byte, compression map[string]int) ([]byte, error) { +func (n *Name) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg // Add a trailing dot to canonicalize name. @@ -1521,7 +1949,7 @@ func (n *Name) pack(msg []byte, compression map[string]int) ([]byte, error) { // Miss. Add the suffix to the compression table if the // offset can be stored in the available 14 bytes. if len(msg) <= int(^uint16(0)>>2) { - compression[string(n.Data[i:])] = len(msg) + compression[string(n.Data[i:])] = len(msg) - compressionOff } } } @@ -1530,6 +1958,10 @@ func (n *Name) pack(msg []byte, compression map[string]int) ([]byte, error) { // unpack unpacks a domain name. func (n *Name) unpack(msg []byte, off int) (int, error) { + return n.unpackCompressed(msg, off, true /* allowCompression */) +} + +func (n *Name) unpackCompressed(msg []byte, off int, allowCompression bool) (int, error) { // currOff is the current working offset. currOff := off @@ -1565,6 +1997,9 @@ Loop: name = append(name, '.') currOff = endOff case 0xC0: // Pointer + if !allowCompression { + return off, errCompressedSRV + } if currOff >= len(msg) { return off, errInvalidPtr } @@ -1644,8 +2079,9 @@ type Question struct { Class Class } -func (q *Question) pack(msg []byte, compression map[string]int) ([]byte, error) { - msg, err := q.Name.pack(msg, compression) +// pack appends the wire format of the Question to msg. +func (q *Question) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + msg, err := q.Name.pack(msg, compression, compressionOff) if err != nil { return msg, &nestedError{"Name", err} } @@ -1653,6 +2089,14 @@ func (q *Question) pack(msg []byte, compression map[string]int) ([]byte, error) return packClass(msg, q.Class), nil } +// GoString implements fmt.GoStringer.GoString. +func (q *Question) GoString() string { + return "dnsmessage.Question{" + + "Name: " + q.Name.GoString() + ", " + + "Type: " + q.Type.GoString() + ", " + + "Class: " + q.Class.GoString() + "}" +} + func unpackResourceBody(msg []byte, off int, hdr ResourceHeader) (ResourceBody, int, error) { var ( r ResourceBody @@ -1705,6 +2149,11 @@ func unpackResourceBody(msg []byte, off int, hdr ResourceHeader) (ResourceBody, rb, err = unpackSRVResource(msg, off) r = &rb name = "SRV" + case TypeOPT: + var rb OPTResource + rb, err = unpackOPTResource(msg, off, hdr.Length) + r = &rb + name = "OPT" } if err != nil { return nil, off, &nestedError{name + " record", err} @@ -1724,8 +2173,14 @@ func (r *CNAMEResource) realType() Type { return TypeCNAME } -func (r *CNAMEResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return r.CNAME.pack(msg, compression) +// pack appends the wire format of the CNAMEResource to msg. +func (r *CNAMEResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.CNAME.pack(msg, compression, compressionOff) +} + +// GoString implements fmt.GoStringer.GoString. 
+func (r *CNAMEResource) GoString() string { + return "dnsmessage.CNAMEResource{CNAME: " + r.CNAME.GoString() + "}" } func unpackCNAMEResource(msg []byte, off int) (CNAMEResource, error) { @@ -1746,16 +2201,24 @@ func (r *MXResource) realType() Type { return TypeMX } -func (r *MXResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the MXResource to msg. +func (r *MXResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg msg = packUint16(msg, r.Pref) - msg, err := r.MX.pack(msg, compression) + msg, err := r.MX.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"MXResource.MX", err} } return msg, nil } +// GoString implements fmt.GoStringer.GoString. +func (r *MXResource) GoString() string { + return "dnsmessage.MXResource{" + + "Pref: " + printUint16(r.Pref) + ", " + + "MX: " + r.MX.GoString() + "}" +} + func unpackMXResource(msg []byte, off int) (MXResource, error) { pref, off, err := unpackUint16(msg, off) if err != nil { @@ -1777,8 +2240,14 @@ func (r *NSResource) realType() Type { return TypeNS } -func (r *NSResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return r.NS.pack(msg, compression) +// pack appends the wire format of the NSResource to msg. +func (r *NSResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.NS.pack(msg, compression, compressionOff) +} + +// GoString implements fmt.GoStringer.GoString. +func (r *NSResource) GoString() string { + return "dnsmessage.NSResource{NS: " + r.NS.GoString() + "}" } func unpackNSResource(msg []byte, off int) (NSResource, error) { @@ -1798,8 +2267,14 @@ func (r *PTRResource) realType() Type { return TypePTR } -func (r *PTRResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return r.PTR.pack(msg, compression) +// pack appends the wire format of the PTRResource to msg. +func (r *PTRResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + return r.PTR.pack(msg, compression, compressionOff) +} + +// GoString implements fmt.GoStringer.GoString. +func (r *PTRResource) GoString() string { + return "dnsmessage.PTRResource{PTR: " + r.PTR.GoString() + "}" } func unpackPTRResource(msg []byte, off int) (PTRResource, error) { @@ -1829,13 +2304,14 @@ func (r *SOAResource) realType() Type { return TypeSOA } -func (r *SOAResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the SOAResource to msg. +func (r *SOAResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg - msg, err := r.NS.pack(msg, compression) + msg, err := r.NS.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"SOAResource.NS", err} } - msg, err = r.MBox.pack(msg, compression) + msg, err = r.MBox.pack(msg, compression, compressionOff) if err != nil { return oldMsg, &nestedError{"SOAResource.MBox", err} } @@ -1846,6 +2322,18 @@ func (r *SOAResource) pack(msg []byte, compression map[string]int) ([]byte, erro return packUint32(msg, r.MinTTL), nil } +// GoString implements fmt.GoStringer.GoString. 
+func (r *SOAResource) GoString() string { + return "dnsmessage.SOAResource{" + + "NS: " + r.NS.GoString() + ", " + + "MBox: " + r.MBox.GoString() + ", " + + "Serial: " + printUint32(r.Serial) + ", " + + "Refresh: " + printUint32(r.Refresh) + ", " + + "Retry: " + printUint32(r.Retry) + ", " + + "Expire: " + printUint32(r.Expire) + ", " + + "MinTTL: " + printUint32(r.MinTTL) + "}" +} + func unpackSOAResource(msg []byte, off int) (SOAResource, error) { var ns Name off, err := ns.unpack(msg, off) @@ -1881,19 +2369,41 @@ func unpackSOAResource(msg []byte, off int) (SOAResource, error) { // A TXTResource is a TXT Resource record. type TXTResource struct { - Txt string // Not a domain name. + TXT []string } func (r *TXTResource) realType() Type { return TypeTXT } -func (r *TXTResource) pack(msg []byte, compression map[string]int) ([]byte, error) { - return packText(msg, r.Txt), nil +// pack appends the wire format of the TXTResource to msg. +func (r *TXTResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + oldMsg := msg + for _, s := range r.TXT { + var err error + msg, err = packText(msg, s) + if err != nil { + return oldMsg, err + } + } + return msg, nil +} + +// GoString implements fmt.GoStringer.GoString. +func (r *TXTResource) GoString() string { + s := "dnsmessage.TXTResource{TXT: []string{" + if len(r.TXT) == 0 { + return s + "}}" + } + s += `"` + printString([]byte(r.TXT[0])) + for _, t := range r.TXT[1:] { + s += `", "` + printString([]byte(t)) + } + return s + `"}}` } func unpackTXTResource(msg []byte, off int, length uint16) (TXTResource, error) { - var txt string + txts := make([]string, 0, 1) for n := uint16(0); n < length; { var t string var err error @@ -1905,9 +2415,9 @@ func unpackTXTResource(msg []byte, off int, length uint16) (TXTResource, error) return TXTResource{}, errCalcLen } n += uint16(len(t)) + 1 - txt += t + txts = append(txts, t) } - return TXTResource{txt}, nil + return TXTResource{txts}, nil } // An SRVResource is an SRV Resource record. @@ -1922,18 +2432,28 @@ func (r *SRVResource) realType() Type { return TypeSRV } -func (r *SRVResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the SRVResource to msg. +func (r *SRVResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { oldMsg := msg msg = packUint16(msg, r.Priority) msg = packUint16(msg, r.Weight) msg = packUint16(msg, r.Port) - msg, err := r.Target.pack(msg, nil) + msg, err := r.Target.pack(msg, nil, compressionOff) if err != nil { return oldMsg, &nestedError{"SRVResource.Target", err} } return msg, nil } +// GoString implements fmt.GoStringer.GoString. 
+func (r *SRVResource) GoString() string { + return "dnsmessage.SRVResource{" + + "Priority: " + printUint16(r.Priority) + ", " + + "Weight: " + printUint16(r.Weight) + ", " + + "Port: " + printUint16(r.Port) + ", " + + "Target: " + r.Target.GoString() + "}" +} + func unpackSRVResource(msg []byte, off int) (SRVResource, error) { priority, off, err := unpackUint16(msg, off) if err != nil { @@ -1948,7 +2468,7 @@ func unpackSRVResource(msg []byte, off int) (SRVResource, error) { return SRVResource{}, &nestedError{"Port", err} } var target Name - if _, err := target.unpack(msg, off); err != nil { + if _, err := target.unpackCompressed(msg, off, false /* allowCompression */); err != nil { return SRVResource{}, &nestedError{"Target", err} } return SRVResource{priority, weight, port, target}, nil @@ -1963,10 +2483,17 @@ func (r *AResource) realType() Type { return TypeA } -func (r *AResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// pack appends the wire format of the AResource to msg. +func (r *AResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { return packBytes(msg, r.A[:]), nil } +// GoString implements fmt.GoStringer.GoString. +func (r *AResource) GoString() string { + return "dnsmessage.AResource{" + + "A: [4]byte{" + printByteSlice(r.A[:]) + "}}" +} + func unpackAResource(msg []byte, off int) (AResource, error) { var a [4]byte if _, err := unpackBytes(msg, off, a[:]); err != nil { @@ -1984,7 +2511,14 @@ func (r *AAAAResource) realType() Type { return TypeAAAA } -func (r *AAAAResource) pack(msg []byte, compression map[string]int) ([]byte, error) { +// GoString implements fmt.GoStringer.GoString. +func (r *AAAAResource) GoString() string { + return "dnsmessage.AAAAResource{" + + "AAAA: [16]byte{" + printByteSlice(r.AAAA[:]) + "}}" +} + +// pack appends the wire format of the AAAAResource to msg. +func (r *AAAAResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { return packBytes(msg, r.AAAA[:]), nil } @@ -1995,3 +2529,78 @@ func unpackAAAAResource(msg []byte, off int) (AAAAResource, error) { } return AAAAResource{aaaa}, nil } + +// An OPTResource is an OPT pseudo Resource record. +// +// The pseudo resource record is part of the extension mechanisms for DNS +// as defined in RFC 6891. +type OPTResource struct { + Options []Option +} + +// An Option represents a DNS message option within OPTResource. +// +// The message option is part of the extension mechanisms for DNS as +// defined in RFC 6891. +type Option struct { + Code uint16 // option code + Data []byte +} + +// GoString implements fmt.GoStringer.GoString. +func (o *Option) GoString() string { + return "dnsmessage.Option{" + + "Code: " + printUint16(o.Code) + ", " + + "Data: []byte{" + printByteSlice(o.Data) + "}}" +} + +func (r *OPTResource) realType() Type { + return TypeOPT +} + +func (r *OPTResource) pack(msg []byte, compression map[string]int, compressionOff int) ([]byte, error) { + for _, opt := range r.Options { + msg = packUint16(msg, opt.Code) + l := uint16(len(opt.Data)) + msg = packUint16(msg, l) + msg = packBytes(msg, opt.Data) + } + return msg, nil +} + +// GoString implements fmt.GoStringer.GoString. 
+func (r *OPTResource) GoString() string { + s := "dnsmessage.OPTResource{Options: []dnsmessage.Option{" + if len(r.Options) == 0 { + return s + "}}" + } + s += r.Options[0].GoString() + for _, o := range r.Options[1:] { + s += ", " + o.GoString() + } + return s + "}}" +} + +func unpackOPTResource(msg []byte, off int, length uint16) (OPTResource, error) { + var opts []Option + for oldOff := off; off < oldOff+int(length); { + var err error + var o Option + o.Code, off, err = unpackUint16(msg, off) + if err != nil { + return OPTResource{}, &nestedError{"Code", err} + } + var l uint16 + l, off, err = unpackUint16(msg, off) + if err != nil { + return OPTResource{}, &nestedError{"Data", err} + } + o.Data = make([]byte, l) + if copy(o.Data, msg[off:]) != int(l) { + return OPTResource{}, &nestedError{"Data", errCalcLen} + } + off += int(l) + opts = append(opts, o) + } + return OPTResource{opts}, nil +} diff --git a/vendor/golang.org/x/net/dns/dnsmessage/message_test.go b/vendor/golang.org/x/net/dns/dnsmessage/message_test.go index 0f98daa..25ba8f0 100644 --- a/vendor/golang.org/x/net/dns/dnsmessage/message_test.go +++ b/vendor/golang.org/x/net/dns/dnsmessage/message_test.go @@ -8,15 +8,109 @@ import ( "bytes" "fmt" "reflect" + "strings" "testing" ) -func mustNewName(name string) Name { - n, err := NewName(name) - if err != nil { +func TestPrintPaddedUint8(t *testing.T) { + tests := []struct { + num uint8 + want string + }{ + {0, "000"}, + {1, "001"}, + {9, "009"}, + {10, "010"}, + {99, "099"}, + {100, "100"}, + {124, "124"}, + {104, "104"}, + {120, "120"}, + {255, "255"}, + } + + for _, test := range tests { + if got := printPaddedUint8(test.num); got != test.want { + t.Errorf("got printPaddedUint8(%d) = %s, want = %s", test.num, got, test.want) + } + } +} + +func TestPrintUint8Bytes(t *testing.T) { + tests := []uint8{ + 0, + 1, + 9, + 10, + 99, + 100, + 124, + 104, + 120, + 255, + } + + for _, test := range tests { + if got, want := string(printUint8Bytes(nil, test)), fmt.Sprint(test); got != want { + t.Errorf("got printUint8Bytes(%d) = %s, want = %s", test, got, want) + } + } +} + +func TestPrintUint16(t *testing.T) { + tests := []uint16{ + 65535, + 0, + 1, + 10, + 100, + 1000, + 10000, + 324, + 304, + 320, + } + + for _, test := range tests { + if got, want := printUint16(test), fmt.Sprint(test); got != want { + t.Errorf("got printUint16(%d) = %s, want = %s", test, got, want) + } + } +} + +func TestPrintUint32(t *testing.T) { + tests := []uint32{ + 4294967295, + 65535, + 0, + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, + 100000000, + 1000000000, + 324, + 304, + 320, + } + + for _, test := range tests { + if got, want := printUint32(test), fmt.Sprint(test); got != want { + t.Errorf("got printUint32(%d) = %s, want = %s", test, got, want) + } + } +} + +func mustEDNS0ResourceHeader(l int, extrc RCode, do bool) ResourceHeader { + h := ResourceHeader{Class: ClassINET} + if err := h.SetEDNS0(l, extrc, do); err != nil { panic(err) } - return n + return h } func (m *Message) String() string { @@ -50,7 +144,7 @@ func (m *Message) String() string { func TestNameString(t *testing.T) { want := "foo" - name := mustNewName(want) + name := MustNewName(want) if got := fmt.Sprint(name); got != want { t.Errorf("got fmt.Sprint(%#v) = %s, want = %s", name, got, want) } @@ -58,13 +152,13 @@ func TestNameString(t *testing.T) { func TestQuestionPackUnpack(t *testing.T) { want := Question{ - Name: mustNewName("."), + Name: MustNewName("."), Type: TypeA, Class: ClassINET, } - buf, err := 
want.pack(make([]byte, 1, 50), map[string]int{}) + buf, err := want.pack(make([]byte, 1, 50), map[string]int{}, 1) if err != nil { - t.Fatal("Packing failed:", err) + t.Fatal("Question.pack() =", err) } var p Parser p.msg = buf @@ -73,13 +167,13 @@ func TestQuestionPackUnpack(t *testing.T) { p.off = 1 got, err := p.Question() if err != nil { - t.Fatalf("Unpacking failed: %v\n%s", err, string(buf[1:])) + t.Fatalf("Parser{%q}.Question() = %v", string(buf[1:]), err) } if p.off != len(buf) { - t.Errorf("Unpacked different amount than packed: got n = %d, want = %d", p.off, len(buf)) + t.Errorf("unpacked different amount than packed: got = %d, want = %d", p.off, len(buf)) } if !reflect.DeepEqual(got, want) { - t.Errorf("Got = %+v, want = %+v", got, want) + t.Errorf("got from Parser.Question() = %+v, want = %+v", got, want) } } @@ -99,11 +193,11 @@ func TestName(t *testing.T) { for _, test := range tests { n, err := NewName(test) if err != nil { - t.Errorf("Creating name for %q: %v", test, err) + t.Errorf("NewName(%q) = %v", test, err) continue } if ns := n.String(); ns != test { - t.Errorf("Got %#v.String() = %q, want = %q", n, ns, test) + t.Errorf("got %#v.String() = %q, want = %q", n, ns, test) continue } } @@ -127,11 +221,11 @@ func TestNamePackUnpack(t *testing.T) { } for _, test := range tests { - in := mustNewName(test.in) - want := mustNewName(test.want) - buf, err := in.pack(make([]byte, 0, 30), map[string]int{}) + in := MustNewName(test.in) + want := MustNewName(test.want) + buf, err := in.pack(make([]byte, 0, 30), map[string]int{}, 0) if err != test.err { - t.Errorf("Packing of %q: got err = %v, want err = %v", test.in, err, test.err) + t.Errorf("got %q.pack() = %v, want = %v", test.in, err, test.err) continue } if test.err != nil { @@ -140,23 +234,45 @@ func TestNamePackUnpack(t *testing.T) { var got Name n, err := got.unpack(buf, 0) if err != nil { - t.Errorf("Unpacking for %q failed: %v", test.in, err) + t.Errorf("%q.unpack() = %v", test.in, err) continue } if n != len(buf) { t.Errorf( - "Unpacked different amount than packed for %q: got n = %d, want = %d", + "unpacked different amount than packed for %q: got = %d, want = %d", test.in, n, len(buf), ) } if got != want { - t.Errorf("Unpacking packing of %q: got = %#v, want = %#v", test.in, got, want) + t.Errorf("unpacking packing of %q: got = %#v, want = %#v", test.in, got, want) } } } +func TestIncompressibleName(t *testing.T) { + name := MustNewName("example.com.") + compression := map[string]int{} + buf, err := name.pack(make([]byte, 0, 100), compression, 0) + if err != nil { + t.Fatal("first Name.pack() =", err) + } + buf, err = name.pack(buf, compression, 0) + if err != nil { + t.Fatal("second Name.pack() =", err) + } + var n1 Name + off, err := n1.unpackCompressed(buf, 0, false /* allowCompression */) + if err != nil { + t.Fatal("unpacking incompressible name without pointers failed:", err) + } + var n2 Name + if _, err := n2.unpackCompressed(buf, off, false /* allowCompression */); err != errCompressedSRV { + t.Errorf("unpacking compressed incompressible name with pointers: got %v, want = %v", err, errCompressedSRV) + } +} + func checkErrorPrefix(err error, prefix string) bool { e, ok := err.(*nestedError) return ok && e.s == prefix @@ -176,7 +292,7 @@ func TestHeaderUnpackError(t *testing.T) { for _, want := range wants { n, err := h.unpack(buf, 0) if n != 0 || !checkErrorPrefix(err, want) { - t.Errorf("got h.unpack([%d]byte, 0) = %d, %v, want = 0, %s", len(buf), n, err, want) + t.Errorf("got header.unpack([%d]byte, 0) = %d, 
%v, want = 0, %s", len(buf), n, err, want) } buf = append(buf, 0, 0) } @@ -188,7 +304,7 @@ func TestParserStart(t *testing.T) { for i := 0; i <= 1; i++ { _, err := p.Start([]byte{}) if !checkErrorPrefix(err, want) { - t.Errorf("got p.Start(nil) = _, %v, want = _, %s", err, want) + t.Errorf("got Parser.Start(nil) = _, %v, want = _, %s", err, want) } } } @@ -211,7 +327,7 @@ func TestResourceNotStarted(t *testing.T) { for _, test := range tests { if err := test.fn(&Parser{}); err != ErrNotStarted { - t.Errorf("got _, %v = p.%s(), want = _, %v", err, test.name, ErrNotStarted) + t.Errorf("got Parser.%s() = _ , %v, want = _, %v", test.name, err, ErrNotStarted) } } } @@ -221,7 +337,7 @@ func TestDNSPackUnpack(t *testing.T) { { Questions: []Question{ { - Name: mustNewName("."), + Name: MustNewName("."), Type: TypeAAAA, Class: ClassINET, }, @@ -235,15 +351,49 @@ func TestDNSPackUnpack(t *testing.T) { for i, want := range wants { b, err := want.Pack() if err != nil { - t.Fatalf("%d: packing failed: %v", i, err) + t.Fatalf("%d: Message.Pack() = %v", i, err) } var got Message err = got.Unpack(b) if err != nil { - t.Fatalf("%d: unpacking failed: %v", i, err) + t.Fatalf("%d: Message.Unapck() = %v", i, err) } if !reflect.DeepEqual(got, want) { - t.Errorf("%d: got = %+v, want = %+v", i, &got, &want) + t.Errorf("%d: Message.Pack/Unpack() roundtrip: got = %+v, want = %+v", i, &got, &want) + } + } +} + +func TestDNSAppendPackUnpack(t *testing.T) { + wants := []Message{ + { + Questions: []Question{ + { + Name: MustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Answers: []Resource{}, + Authorities: []Resource{}, + Additionals: []Resource{}, + }, + largeTestMsg(), + } + for i, want := range wants { + b := make([]byte, 2, 514) + b, err := want.AppendPack(b) + if err != nil { + t.Fatalf("%d: Message.AppendPack() = %v", i, err) + } + b = b[2:] + var got Message + err = got.Unpack(b) + if err != nil { + t.Fatalf("%d: Message.Unapck() = %v", i, err) + } + if !reflect.DeepEqual(got, want) { + t.Errorf("%d: Message.AppendPack/Unpack() roundtrip: got = %+v, want = %+v", i, &got, &want) } } } @@ -252,11 +402,11 @@ func TestSkipAll(t *testing.T) { msg := largeTestMsg() buf, err := msg.Pack() if err != nil { - t.Fatal("Packing large test message:", err) + t.Fatal("Message.Pack() =", err) } var p Parser if _, err := p.Start(buf); err != nil { - t.Fatal(err) + t.Fatal("Parser.Start(non-nil) =", err) } tests := []struct { @@ -271,12 +421,75 @@ func TestSkipAll(t *testing.T) { for _, test := range tests { for i := 1; i <= 3; i++ { if err := test.f(); err != nil { - t.Errorf("Call #%d to %s(): %v", i, test.name, err) + t.Errorf("%d: Parser.%s() = %v", i, test.name, err) } } } } +func TestSkipEach(t *testing.T) { + msg := smallTestMsg() + + buf, err := msg.Pack() + if err != nil { + t.Fatal("Message.Pack() =", err) + } + var p Parser + if _, err := p.Start(buf); err != nil { + t.Fatal("Parser.Start(non-nil) =", err) + } + + tests := []struct { + name string + f func() error + }{ + {"SkipQuestion", p.SkipQuestion}, + {"SkipAnswer", p.SkipAnswer}, + {"SkipAuthority", p.SkipAuthority}, + {"SkipAdditional", p.SkipAdditional}, + } + for _, test := range tests { + if err := test.f(); err != nil { + t.Errorf("first Parser.%s() = %v, want = nil", test.name, err) + } + if err := test.f(); err != ErrSectionDone { + t.Errorf("second Parser.%s() = %v, want = %v", test.name, err, ErrSectionDone) + } + } +} + +func TestSkipAfterRead(t *testing.T) { + msg := smallTestMsg() + + buf, err := msg.Pack() + if err != nil { + 
t.Fatal("Message.Pack() =", err) + } + var p Parser + if _, err := p.Start(buf); err != nil { + t.Fatal("Parser.Srart(non-nil) =", err) + } + + tests := []struct { + name string + skip func() error + read func() error + }{ + {"Question", p.SkipQuestion, func() error { _, err := p.Question(); return err }}, + {"Answer", p.SkipAnswer, func() error { _, err := p.Answer(); return err }}, + {"Authority", p.SkipAuthority, func() error { _, err := p.Authority(); return err }}, + {"Additional", p.SkipAdditional, func() error { _, err := p.Additional(); return err }}, + } + for _, test := range tests { + if err := test.read(); err != nil { + t.Errorf("got Parser.%s() = _, %v, want = _, nil", test.name, err) + } + if err := test.skip(); err != ErrSectionDone { + t.Errorf("got Parser.Skip%s() = %v, want = %v", test.name, err, ErrSectionDone) + } + } +} + func TestSkipNotStarted(t *testing.T) { var p Parser @@ -291,7 +504,7 @@ func TestSkipNotStarted(t *testing.T) { } for _, test := range tests { if err := test.f(); err != ErrNotStarted { - t.Errorf("Got %s() = %v, want = %v", test.name, err, ErrNotStarted) + t.Errorf("got Parser.%s() = %v, want = %v", test.name, err, ErrNotStarted) } } } @@ -335,7 +548,7 @@ func TestTooManyRecords(t *testing.T) { for _, test := range tests { if _, got := test.msg.Pack(); got != test.want { - t.Errorf("Packing %d %s: got = %v, want = %v", recs, test.name, got, test.want) + t.Errorf("got Message.Pack() for %d %s = %v, want = %v", recs, test.name, got, test.want) } } } @@ -343,31 +556,66 @@ func TestTooManyRecords(t *testing.T) { func TestVeryLongTxt(t *testing.T) { want := Resource{ ResourceHeader{ - Name: mustNewName("foo.bar.example.com."), + Name: MustNewName("foo.bar.example.com."), Type: TypeTXT, Class: ClassINET, }, - &TXTResource{loremIpsum}, + &TXTResource{[]string{ + "", + "", + "foo bar", + "", + "www.example.com", + "www.example.com.", + strings.Repeat(".", 255), + }}, } - buf, err := want.pack(make([]byte, 0, 8000), map[string]int{}) + buf, err := want.pack(make([]byte, 0, 8000), map[string]int{}, 0) if err != nil { - t.Fatal("Packing failed:", err) + t.Fatal("Resource.pack() =", err) } var got Resource off, err := got.Header.unpack(buf, 0) if err != nil { - t.Fatal("Unpacking ResourceHeader failed:", err) + t.Fatal("ResourceHeader.unpack() =", err) } body, n, err := unpackResourceBody(buf, off, got.Header) if err != nil { - t.Fatal("Unpacking failed:", err) + t.Fatal("unpackResourceBody() =", err) } got.Body = body if n != len(buf) { - t.Errorf("Unpacked different amount than packed: got n = %d, want = %d", n, len(buf)) + t.Errorf("unpacked different amount than packed: got = %d, want = %d", n, len(buf)) } if !reflect.DeepEqual(got, want) { - t.Errorf("Got = %#v, want = %#v", got, want) + t.Errorf("Resource.pack/unpack() roundtrip: got = %#v, want = %#v", got, want) + } +} + +func TestTooLongTxt(t *testing.T) { + rb := TXTResource{[]string{strings.Repeat(".", 256)}} + if _, err := rb.pack(make([]byte, 0, 8000), map[string]int{}, 0); err != errStringTooLong { + t.Errorf("packing TXTResource with 256 character string: got err = %v, want = %v", err, errStringTooLong) + } +} + +func TestStartAppends(t *testing.T) { + buf := make([]byte, 2, 514) + wantBuf := []byte{4, 44} + copy(buf, wantBuf) + + b := NewBuilder(buf, Header{}) + b.EnableCompression() + + buf, err := b.Finish() + if err != nil { + t.Fatal("Builder.Finish() =", err) + } + if got, want := len(buf), headerLen+2; got != want { + t.Errorf("got len(buf) = %d, want = %d", got, want) + } + if 
string(buf[:2]) != string(wantBuf) { + t.Errorf("original data not preserved, got = %#v, want = %#v", buf[:2], wantBuf) } } @@ -394,7 +642,7 @@ func TestStartError(t *testing.T) { for _, env := range envs { for _, test := range tests { if got := test.fn(env.fn()); got != env.want { - t.Errorf("got Builder{%s}.Start%s = %v, want = %v", env.name, test.name, got, env.want) + t.Errorf("got Builder{%s}.Start%s() = %v, want = %v", env.name, test.name, got, env.want) } } } @@ -414,6 +662,7 @@ func TestBuilderResourceError(t *testing.T) { {"SRVResource", func(b *Builder) error { return b.SRVResource(ResourceHeader{}, SRVResource{}) }}, {"AResource", func(b *Builder) error { return b.AResource(ResourceHeader{}, AResource{}) }}, {"AAAAResource", func(b *Builder) error { return b.AAAAResource(ResourceHeader{}, AAAAResource{}) }}, + {"OPTResource", func(b *Builder) error { return b.OPTResource(ResourceHeader{}, OPTResource{}) }}, } envs := []struct { @@ -430,7 +679,7 @@ func TestBuilderResourceError(t *testing.T) { for _, env := range envs { for _, test := range tests { if got := test.fn(env.fn()); got != env.want { - t.Errorf("got Builder{%s}.%s = %v, want = %v", env.name, test.name, got, env.want) + t.Errorf("got Builder{%s}.%s() = %v, want = %v", env.name, test.name, got, env.want) } } } @@ -440,7 +689,7 @@ func TestFinishError(t *testing.T) { var b Builder want := ErrNotStarted if _, got := b.Finish(); got != want { - t.Errorf("got Builder{}.Finish() = %v, want = %v", got, want) + t.Errorf("got Builder.Finish() = %v, want = %v", got, want) } } @@ -448,89 +697,96 @@ func TestBuilder(t *testing.T) { msg := largeTestMsg() want, err := msg.Pack() if err != nil { - t.Fatal("Packing without builder:", err) + t.Fatal("Message.Pack() =", err) } - var b Builder - b.Start(nil, msg.Header) + b := NewBuilder(nil, msg.Header) + b.EnableCompression() if err := b.StartQuestions(); err != nil { - t.Fatal("b.StartQuestions():", err) + t.Fatal("Builder.StartQuestions() =", err) } for _, q := range msg.Questions { if err := b.Question(q); err != nil { - t.Fatalf("b.Question(%#v): %v", q, err) + t.Fatalf("Builder.Question(%#v) = %v", q, err) } } if err := b.StartAnswers(); err != nil { - t.Fatal("b.StartAnswers():", err) + t.Fatal("Builder.StartAnswers() =", err) } for _, a := range msg.Answers { switch a.Header.Type { case TypeA: if err := b.AResource(a.Header, *a.Body.(*AResource)); err != nil { - t.Fatalf("b.AResource(%#v): %v", a, err) + t.Fatalf("Builder.AResource(%#v) = %v", a, err) } case TypeNS: if err := b.NSResource(a.Header, *a.Body.(*NSResource)); err != nil { - t.Fatalf("b.NSResource(%#v): %v", a, err) + t.Fatalf("Builder.NSResource(%#v) = %v", a, err) } case TypeCNAME: if err := b.CNAMEResource(a.Header, *a.Body.(*CNAMEResource)); err != nil { - t.Fatalf("b.CNAMEResource(%#v): %v", a, err) + t.Fatalf("Builder.CNAMEResource(%#v) = %v", a, err) } case TypeSOA: if err := b.SOAResource(a.Header, *a.Body.(*SOAResource)); err != nil { - t.Fatalf("b.SOAResource(%#v): %v", a, err) + t.Fatalf("Builder.SOAResource(%#v) = %v", a, err) } case TypePTR: if err := b.PTRResource(a.Header, *a.Body.(*PTRResource)); err != nil { - t.Fatalf("b.PTRResource(%#v): %v", a, err) + t.Fatalf("Builder.PTRResource(%#v) = %v", a, err) } case TypeMX: if err := b.MXResource(a.Header, *a.Body.(*MXResource)); err != nil { - t.Fatalf("b.MXResource(%#v): %v", a, err) + t.Fatalf("Builder.MXResource(%#v) = %v", a, err) } case TypeTXT: if err := b.TXTResource(a.Header, *a.Body.(*TXTResource)); err != nil { - t.Fatalf("b.TXTResource(%#v): 
%v", a, err) + t.Fatalf("Builder.TXTResource(%#v) = %v", a, err) } case TypeAAAA: if err := b.AAAAResource(a.Header, *a.Body.(*AAAAResource)); err != nil { - t.Fatalf("b.AAAAResource(%#v): %v", a, err) + t.Fatalf("Builder.AAAAResource(%#v) = %v", a, err) } case TypeSRV: if err := b.SRVResource(a.Header, *a.Body.(*SRVResource)); err != nil { - t.Fatalf("b.SRVResource(%#v): %v", a, err) + t.Fatalf("Builder.SRVResource(%#v) = %v", a, err) } } } if err := b.StartAuthorities(); err != nil { - t.Fatal("b.StartAuthorities():", err) + t.Fatal("Builder.StartAuthorities() =", err) } for _, a := range msg.Authorities { if err := b.NSResource(a.Header, *a.Body.(*NSResource)); err != nil { - t.Fatalf("b.NSResource(%#v): %v", a, err) + t.Fatalf("Builder.NSResource(%#v) = %v", a, err) } } if err := b.StartAdditionals(); err != nil { - t.Fatal("b.StartAdditionals():", err) + t.Fatal("Builder.StartAdditionals() =", err) } for _, a := range msg.Additionals { - if err := b.TXTResource(a.Header, *a.Body.(*TXTResource)); err != nil { - t.Fatalf("b.TXTResource(%#v): %v", a, err) + switch a.Body.(type) { + case *TXTResource: + if err := b.TXTResource(a.Header, *a.Body.(*TXTResource)); err != nil { + t.Fatalf("Builder.TXTResource(%#v) = %v", a, err) + } + case *OPTResource: + if err := b.OPTResource(a.Header, *a.Body.(*OPTResource)); err != nil { + t.Fatalf("Builder.OPTResource(%#v) = %v", a, err) + } } } got, err := b.Finish() if err != nil { - t.Fatal("b.Finish():", err) + t.Fatal("Builder.Finish() =", err) } if !bytes.Equal(got, want) { - t.Fatalf("Got from Builder: %#v\nwant = %#v", got, want) + t.Fatalf("got from Builder.Finish() = %#v\nwant = %#v", got, want) } } @@ -543,7 +799,7 @@ func TestResourcePack(t *testing.T) { Message{ Questions: []Question{ { - Name: mustNewName("."), + Name: MustNewName("."), Type: TypeAAAA, Class: ClassINET, }, @@ -556,7 +812,7 @@ func TestResourcePack(t *testing.T) { Message{ Questions: []Question{ { - Name: mustNewName("."), + Name: MustNewName("."), Type: TypeAAAA, Class: ClassINET, }, @@ -573,7 +829,7 @@ func TestResourcePack(t *testing.T) { Message{ Questions: []Question{ { - Name: mustNewName("."), + Name: MustNewName("."), Type: TypeA, Class: ClassINET, }, @@ -585,15 +841,202 @@ func TestResourcePack(t *testing.T) { } { _, err := tt.m.Pack() if !reflect.DeepEqual(err, tt.err) { - t.Errorf("got %v for %v; want %v", err, tt.m, tt.err) + t.Errorf("got Message{%v}.Pack() = %v, want %v", tt.m, err, tt.err) } } } -func BenchmarkParsing(b *testing.B) { - b.ReportAllocs() +func TestResourcePackLength(t *testing.T) { + r := Resource{ + ResourceHeader{ + Name: MustNewName("."), + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 2}}, + } - name := mustNewName("foo.bar.example.com.") + hb, _, err := r.Header.pack(nil, nil, 0) + if err != nil { + t.Fatal("ResourceHeader.pack() =", err) + } + buf := make([]byte, 0, len(hb)) + buf, err = r.pack(buf, nil, 0) + if err != nil { + t.Fatal("Resource.pack() =", err) + } + + var hdr ResourceHeader + if _, err := hdr.unpack(buf, 0); err != nil { + t.Fatal("ResourceHeader.unpack() =", err) + } + + if got, want := int(hdr.Length), len(buf)-len(hb); got != want { + t.Errorf("got hdr.Length = %d, want = %d", got, want) + } +} + +func TestOptionPackUnpack(t *testing.T) { + for _, tt := range []struct { + name string + w []byte // wire format of m.Additionals + m Message + dnssecOK bool + extRCode RCode + }{ + { + name: "without EDNS(0) options", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xfe, 0x00, 0x80, + 0x00, 0x00, 0x00, 
+ }, + m: Message{ + Header: Header{RCode: RCodeFormatError}, + Questions: []Question{ + { + Name: MustNewName("."), + Type: TypeA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xfe0|RCodeFormatError, true), + &OPTResource{}, + }, + }, + }, + dnssecOK: true, + extRCode: 0xfe0 | RCodeFormatError, + }, + { + name: "with EDNS(0) options", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, 0x00, + 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x02, 0x00, + 0x00, 0x00, 0x0b, 0x00, 0x02, 0x12, 0x34, + }, + m: Message{ + Header: Header{RCode: RCodeServerFailure}, + Questions: []Question{ + { + Name: MustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeServerFailure, false), + &OPTResource{ + Options: []Option{ + { + Code: 12, // see RFC 7828 + Data: []byte{0x00, 0x00}, + }, + { + Code: 11, // see RFC 7830 + Data: []byte{0x12, 0x34}, + }, + }, + }, + }, + }, + }, + dnssecOK: false, + extRCode: 0xff0 | RCodeServerFailure, + }, + { + // Containing multiple OPT resources in a + // message is invalid, but it's necessary for + // protocol conformance testing. + name: "with multiple OPT resources", + w: []byte{ + 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, 0x00, + 0x00, 0x00, 0x06, 0x00, 0x0b, 0x00, 0x02, 0x12, + 0x34, 0x00, 0x00, 0x29, 0x10, 0x00, 0xff, 0x00, + 0x00, 0x00, 0x00, 0x06, 0x00, 0x0c, 0x00, 0x02, + 0x00, 0x00, + }, + m: Message{ + Header: Header{RCode: RCodeNameError}, + Questions: []Question{ + { + Name: MustNewName("."), + Type: TypeAAAA, + Class: ClassINET, + }, + }, + Additionals: []Resource{ + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeNameError, false), + &OPTResource{ + Options: []Option{ + { + Code: 11, // see RFC 7830 + Data: []byte{0x12, 0x34}, + }, + }, + }, + }, + { + mustEDNS0ResourceHeader(4096, 0xff0|RCodeNameError, false), + &OPTResource{ + Options: []Option{ + { + Code: 12, // see RFC 7828 + Data: []byte{0x00, 0x00}, + }, + }, + }, + }, + }, + }, + }, + } { + w, err := tt.m.Pack() + if err != nil { + t.Errorf("Message.Pack() for %s = %v", tt.name, err) + continue + } + if !bytes.Equal(w[len(w)-len(tt.w):], tt.w) { + t.Errorf("got Message.Pack() for %s = %#v, want %#v", tt.name, w[len(w)-len(tt.w):], tt.w) + continue + } + var m Message + if err := m.Unpack(w); err != nil { + t.Errorf("Message.Unpack() for %s = %v", tt.name, err) + continue + } + if !reflect.DeepEqual(m.Additionals, tt.m.Additionals) { + t.Errorf("got Message.Pack/Unpack() roundtrip for %s = %+v, want %+v", tt.name, m, tt.m) + continue + } + } +} + +// TestGoString tests that Message.GoString produces Go code that compiles to +// reproduce the Message. +// +// This test was produced as follows: +// 1. Run (*Message).GoString on largeTestMsg(). +// 2. Remove "dnsmessage." from the output. +// 3. Paste the result in the test to store it in msg. +// 4. Also put the original output in the test to store in want. 
+func TestGoString(t *testing.T) { + msg := Message{Header: Header{ID: 0, Response: true, OpCode: 0, Authoritative: true, Truncated: false, RecursionDesired: false, RecursionAvailable: false, RCode: RCodeSuccess}, Questions: []Question{{Name: MustNewName("foo.bar.example.com."), Type: TypeA, Class: ClassINET}}, Answers: []Resource{{Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeA, Class: ClassINET, TTL: 0, Length: 0}, Body: &AResource{A: [4]byte{127, 0, 0, 1}}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeA, Class: ClassINET, TTL: 0, Length: 0}, Body: &AResource{A: [4]byte{127, 0, 0, 2}}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeAAAA, Class: ClassINET, TTL: 0, Length: 0}, Body: &AAAAResource{AAAA: [16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeCNAME, Class: ClassINET, TTL: 0, Length: 0}, Body: &CNAMEResource{CNAME: MustNewName("alias.example.com.")}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeSOA, Class: ClassINET, TTL: 0, Length: 0}, Body: &SOAResource{NS: MustNewName("ns1.example.com."), MBox: MustNewName("mb.example.com."), Serial: 1, Refresh: 2, Retry: 3, Expire: 4, MinTTL: 5}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypePTR, Class: ClassINET, TTL: 0, Length: 0}, Body: &PTRResource{PTR: MustNewName("ptr.example.com.")}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeMX, Class: ClassINET, TTL: 0, Length: 0}, Body: &MXResource{Pref: 7, MX: MustNewName("mx.example.com.")}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeSRV, Class: ClassINET, TTL: 0, Length: 0}, Body: &SRVResource{Priority: 8, Weight: 9, Port: 11, Target: MustNewName("srv.example.com.")}}}, Authorities: []Resource{{Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeNS, Class: ClassINET, TTL: 0, Length: 0}, Body: &NSResource{NS: MustNewName("ns1.example.com.")}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeNS, Class: ClassINET, TTL: 0, Length: 0}, Body: &NSResource{NS: MustNewName("ns2.example.com.")}}}, Additionals: []Resource{{Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeTXT, Class: ClassINET, TTL: 0, Length: 0}, Body: &TXTResource{TXT: []string{"So Long\x2c and Thanks for All the Fish"}}}, {Header: ResourceHeader{Name: MustNewName("foo.bar.example.com."), Type: TypeTXT, Class: ClassINET, TTL: 0, Length: 0}, Body: &TXTResource{TXT: []string{"Hamster Huey and the Gooey Kablooie"}}}, {Header: ResourceHeader{Name: MustNewName("."), Type: TypeOPT, Class: 4096, TTL: 4261412864, Length: 0}, Body: &OPTResource{Options: []Option{{Code: 10, Data: []byte{1, 35, 69, 103, 137, 171, 205, 239}}}}}}} + if !reflect.DeepEqual(msg, largeTestMsg()) { + t.Error("Message.GoString lost information or largeTestMsg changed: msg != largeTestMsg()") + } + got := msg.GoString() + want := `dnsmessage.Message{Header: dnsmessage.Header{ID: 0, Response: true, OpCode: 0, Authoritative: true, Truncated: false, RecursionDesired: false, RecursionAvailable: false, RCode: dnsmessage.RCodeSuccess}, Questions: []dnsmessage.Question{dnsmessage.Question{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET}}, Answers: []dnsmessage.Resource{dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: 
dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 1}}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeA, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.AResource{A: [4]byte{127, 0, 0, 2}}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeAAAA, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.AAAAResource{AAAA: [16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeCNAME, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.CNAMEResource{CNAME: dnsmessage.MustNewName("alias.example.com.")}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeSOA, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.SOAResource{NS: dnsmessage.MustNewName("ns1.example.com."), MBox: dnsmessage.MustNewName("mb.example.com."), Serial: 1, Refresh: 2, Retry: 3, Expire: 4, MinTTL: 5}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypePTR, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.PTRResource{PTR: dnsmessage.MustNewName("ptr.example.com.")}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeMX, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.MXResource{Pref: 7, MX: dnsmessage.MustNewName("mx.example.com.")}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeSRV, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.SRVResource{Priority: 8, Weight: 9, Port: 11, Target: dnsmessage.MustNewName("srv.example.com.")}}}, Authorities: []dnsmessage.Resource{dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeNS, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.NSResource{NS: dnsmessage.MustNewName("ns1.example.com.")}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeNS, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.NSResource{NS: dnsmessage.MustNewName("ns2.example.com.")}}}, Additionals: []dnsmessage.Resource{dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeTXT, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.TXTResource{TXT: []string{"So Long\x2c and Thanks for All the Fish"}}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("foo.bar.example.com."), Type: dnsmessage.TypeTXT, Class: dnsmessage.ClassINET, TTL: 0, Length: 0}, Body: &dnsmessage.TXTResource{TXT: []string{"Hamster Huey and the Gooey Kablooie"}}}, dnsmessage.Resource{Header: dnsmessage.ResourceHeader{Name: dnsmessage.MustNewName("."), Type: dnsmessage.TypeOPT, Class: 4096, TTL: 4261412864, Length: 0}, Body: &dnsmessage.OPTResource{Options: []dnsmessage.Option{dnsmessage.Option{Code: 10, 
Data: []byte{1, 35, 69, 103, 137, 171, 205, 239}}}}}}}` + if got != want { + t.Errorf("got msg1.GoString() = %s\nwant = %s", got, want) + } +} + +func benchmarkParsingSetup() ([]byte, error) { + name := MustNewName("foo.bar.example.com.") msg := Message{ Header: Header{Response: true, Authoritative: true}, Questions: []Question{ @@ -637,116 +1080,235 @@ func BenchmarkParsing(b *testing.B) { buf, err := msg.Pack() if err != nil { - b.Fatal("msg.Pack():", err) + return nil, fmt.Errorf("Message.Pack() = %v", err) } + return buf, nil +} - for i := 0; i < b.N; i++ { - var p Parser - if _, err := p.Start(buf); err != nil { - b.Fatal("p.Start(buf):", err) +func benchmarkParsing(tb testing.TB, buf []byte) { + var p Parser + if _, err := p.Start(buf); err != nil { + tb.Fatal("Parser.Start(non-nil) =", err) + } + + for { + _, err := p.Question() + if err == ErrSectionDone { + break } + if err != nil { + tb.Fatal("Parser.Question() =", err) + } + } - for { - _, err := p.Question() - if err == ErrSectionDone { - break - } - if err != nil { - b.Fatal("p.Question():", err) - } + for { + h, err := p.AnswerHeader() + if err == ErrSectionDone { + break + } + if err != nil { + tb.Fatal("Parser.AnswerHeader() =", err) } - for { - h, err := p.AnswerHeader() - if err == ErrSectionDone { - break + switch h.Type { + case TypeA: + if _, err := p.AResource(); err != nil { + tb.Fatal("Parser.AResource() =", err) } - if err != nil { - panic(err) + case TypeAAAA: + if _, err := p.AAAAResource(); err != nil { + tb.Fatal("Parser.AAAAResource() =", err) } - - switch h.Type { - case TypeA: - if _, err := p.AResource(); err != nil { - b.Fatal("p.AResource():", err) - } - case TypeAAAA: - if _, err := p.AAAAResource(); err != nil { - b.Fatal("p.AAAAResource():", err) - } - case TypeCNAME: - if _, err := p.CNAMEResource(); err != nil { - b.Fatal("p.CNAMEResource():", err) - } - case TypeNS: - if _, err := p.NSResource(); err != nil { - b.Fatal("p.NSResource():", err) - } - default: - b.Fatalf("unknown type: %T", h) + case TypeCNAME: + if _, err := p.CNAMEResource(); err != nil { + tb.Fatal("Parser.CNAMEResource() =", err) } + case TypeNS: + if _, err := p.NSResource(); err != nil { + tb.Fatal("Parser.NSResource() =", err) + } + case TypeOPT: + if _, err := p.OPTResource(); err != nil { + tb.Fatal("Parser.OPTResource() =", err) + } + default: + tb.Fatalf("got unknown type: %T", h) } } } -func BenchmarkBuilding(b *testing.B) { +func BenchmarkParsing(b *testing.B) { + buf, err := benchmarkParsingSetup() + if err != nil { + b.Fatal(err) + } + b.ReportAllocs() + for i := 0; i < b.N; i++ { + benchmarkParsing(b, buf) + } +} + +func TestParsingAllocs(t *testing.T) { + buf, err := benchmarkParsingSetup() + if err != nil { + t.Fatal(err) + } - name := mustNewName("foo.bar.example.com.") + if allocs := testing.AllocsPerRun(100, func() { benchmarkParsing(t, buf) }); allocs > 0.5 { + t.Errorf("allocations during parsing: got = %f, want ~0", allocs) + } +} + +func benchmarkBuildingSetup() (Name, []byte) { + name := MustNewName("foo.bar.example.com.") buf := make([]byte, 0, packStartingCap) + return name, buf +} + +func benchmarkBuilding(tb testing.TB, name Name, buf []byte) { + bld := NewBuilder(buf, Header{Response: true, Authoritative: true}) + + if err := bld.StartQuestions(); err != nil { + tb.Fatal("Builder.StartQuestions() =", err) + } + q := Question{ + Name: name, + Type: TypeA, + Class: ClassINET, + } + if err := bld.Question(q); err != nil { + tb.Fatalf("Builder.Question(%+v) = %v", q, err) + } + + hdr := ResourceHeader{ + 
Name: name, + Class: ClassINET, + } + if err := bld.StartAnswers(); err != nil { + tb.Fatal("Builder.StartQuestions() =", err) + } + ar := AResource{[4]byte{}} + if err := bld.AResource(hdr, ar); err != nil { + tb.Fatalf("Builder.AResource(%+v, %+v) = %v", hdr, ar, err) + } + + aaar := AAAAResource{[16]byte{}} + if err := bld.AAAAResource(hdr, aaar); err != nil { + tb.Fatalf("Builder.AAAAResource(%+v, %+v) = %v", hdr, aaar, err) + } + + cnr := CNAMEResource{name} + if err := bld.CNAMEResource(hdr, cnr); err != nil { + tb.Fatalf("Builder.CNAMEResource(%+v, %+v) = %v", hdr, cnr, err) + } + + nsr := NSResource{name} + if err := bld.NSResource(hdr, nsr); err != nil { + tb.Fatalf("Builder.NSResource(%+v, %+v) = %v", hdr, nsr, err) + } + + extrc := 0xfe0 | RCodeNotImplemented + if err := (&hdr).SetEDNS0(4096, extrc, true); err != nil { + tb.Fatalf("ResourceHeader.SetEDNS0(4096, %#x, true) = %v", extrc, err) + } + optr := OPTResource{} + if err := bld.OPTResource(hdr, optr); err != nil { + tb.Fatalf("Builder.OPTResource(%+v, %+v) = %v", hdr, optr, err) + } + + if _, err := bld.Finish(); err != nil { + tb.Fatal("Builder.Finish() =", err) + } +} + +func BenchmarkBuilding(b *testing.B) { + name, buf := benchmarkBuildingSetup() + b.ReportAllocs() for i := 0; i < b.N; i++ { - var bld Builder - bld.StartWithoutCompression(buf, Header{Response: true, Authoritative: true}) + benchmarkBuilding(b, name, buf) + } +} - if err := bld.StartQuestions(); err != nil { - b.Fatal("bld.StartQuestions():", err) - } - q := Question{ - Name: name, - Type: TypeA, - Class: ClassINET, - } - if err := bld.Question(q); err != nil { - b.Fatalf("bld.Question(%+v): %v", q, err) - } +func TestBuildingAllocs(t *testing.T) { + name, buf := benchmarkBuildingSetup() + if allocs := testing.AllocsPerRun(100, func() { benchmarkBuilding(t, name, buf) }); allocs > 0.5 { + t.Errorf("allocations during building: got = %f, want ~0", allocs) + } +} - hdr := ResourceHeader{ - Name: name, - Class: ClassINET, - } - if err := bld.StartAnswers(); err != nil { - b.Fatal("bld.StartQuestions():", err) - } +func smallTestMsg() Message { + name := MustNewName("example.com.") + return Message{ + Header: Header{Response: true, Authoritative: true}, + Questions: []Question{ + { + Name: name, + Type: TypeA, + Class: ClassINET, + }, + }, + Answers: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 1}}, + }, + }, + Authorities: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 1}}, + }, + }, + Additionals: []Resource{ + { + ResourceHeader{ + Name: name, + Type: TypeA, + Class: ClassINET, + }, + &AResource{[4]byte{127, 0, 0, 1}}, + }, + }, + } +} - ar := AResource{[4]byte{}} - if err := bld.AResource(hdr, ar); err != nil { - b.Fatalf("bld.AResource(%+v, %+v): %v", hdr, ar, err) - } +func BenchmarkPack(b *testing.B) { + msg := largeTestMsg() - aaar := AAAAResource{[16]byte{}} - if err := bld.AAAAResource(hdr, aaar); err != nil { - b.Fatalf("bld.AAAAResource(%+v, %+v): %v", hdr, aaar, err) - } + b.ReportAllocs() - cnr := CNAMEResource{name} - if err := bld.CNAMEResource(hdr, cnr); err != nil { - b.Fatalf("bld.CNAMEResource(%+v, %+v): %v", hdr, cnr, err) + for i := 0; i < b.N; i++ { + if _, err := msg.Pack(); err != nil { + b.Fatal("Message.Pack() =", err) } + } +} - nsr := NSResource{name} - if err := bld.NSResource(hdr, nsr); err != nil { - b.Fatalf("bld.NSResource(%+v, %+v): %v", hdr, nsr, err) - } +func 
BenchmarkAppendPack(b *testing.B) { + msg := largeTestMsg() + buf := make([]byte, 0, packStartingCap) - if _, err := bld.Finish(); err != nil { - b.Fatal("bld.Finish():", err) + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + if _, err := msg.AppendPack(buf[:0]); err != nil { + b.Fatal("Message.AppendPack() = ", err) } } } func largeTestMsg() Message { - name := mustNewName("foo.bar.example.com.") + name := MustNewName("foo.bar.example.com.") return Message{ Header: Header{Response: true, Authoritative: true}, Questions: []Question{ @@ -787,7 +1349,7 @@ func largeTestMsg() Message { Type: TypeCNAME, Class: ClassINET, }, - &CNAMEResource{mustNewName("alias.example.com.")}, + &CNAMEResource{MustNewName("alias.example.com.")}, }, { ResourceHeader{ @@ -796,8 +1358,8 @@ func largeTestMsg() Message { Class: ClassINET, }, &SOAResource{ - NS: mustNewName("ns1.example.com."), - MBox: mustNewName("mb.example.com."), + NS: MustNewName("ns1.example.com."), + MBox: MustNewName("mb.example.com."), Serial: 1, Refresh: 2, Retry: 3, @@ -811,7 +1373,7 @@ func largeTestMsg() Message { Type: TypePTR, Class: ClassINET, }, - &PTRResource{mustNewName("ptr.example.com.")}, + &PTRResource{MustNewName("ptr.example.com.")}, }, { ResourceHeader{ @@ -821,7 +1383,7 @@ func largeTestMsg() Message { }, &MXResource{ 7, - mustNewName("mx.example.com."), + MustNewName("mx.example.com."), }, }, { @@ -834,7 +1396,7 @@ func largeTestMsg() Message { 8, 9, 11, - mustNewName("srv.example.com."), + MustNewName("srv.example.com."), }, }, }, @@ -845,7 +1407,7 @@ func largeTestMsg() Message { Type: TypeNS, Class: ClassINET, }, - &NSResource{mustNewName("ns1.example.com.")}, + &NSResource{MustNewName("ns1.example.com.")}, }, { ResourceHeader{ @@ -853,7 +1415,7 @@ func largeTestMsg() Message { Type: TypeNS, Class: ClassINET, }, - &NSResource{mustNewName("ns2.example.com.")}, + &NSResource{MustNewName("ns2.example.com.")}, }, }, Additionals: []Resource{ @@ -863,7 +1425,7 @@ func largeTestMsg() Message { Type: TypeTXT, Class: ClassINET, }, - &TXTResource{"So Long, and Thanks for All the Fish"}, + &TXTResource{[]string{"So Long, and Thanks for All the Fish"}}, }, { ResourceHeader{ @@ -871,139 +1433,19 @@ func largeTestMsg() Message { Type: TypeTXT, Class: ClassINET, }, - &TXTResource{"Hamster Huey and the Gooey Kablooie"}, + &TXTResource{[]string{"Hamster Huey and the Gooey Kablooie"}}, + }, + { + mustEDNS0ResourceHeader(4096, 0xfe0|RCodeSuccess, false), + &OPTResource{ + Options: []Option{ + { + Code: 10, // see RFC 7873 + Data: []byte{0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef}, + }, + }, + }, }, }, } } - -const loremIpsum = ` -Lorem ipsum dolor sit amet, nec enim antiopam id, an ullum choro -nonumes qui, pro eu debet honestatis mediocritatem. No alia enim eos, -magna signiferumque ex vis. Mei no aperiri dissentias, cu vel quas -regione. Malorum quaeque vim ut, eum cu semper aliquid invidunt, ei -nam ipsum assentior. - -Nostrum appellantur usu no, vis ex probatus adipiscing. Cu usu illum -facilis eleifend. Iusto conceptam complectitur vim id. Tale omnesque -no usu, ei oblique sadipscing vim. At nullam voluptua usu, mei laudem -reformidans et. Qui ei eros porro reformidans, ius suas veritus -torquatos ex. Mea te facer alterum consequat. - -Soleat torquatos democritum sed et, no mea congue appareat, facer -aliquam nec in. Has te ipsum tritani. At justo dicta option nec, movet -phaedrum ad nam. Ea detracto verterem liberavisse has, delectus -suscipiantur in mei. Ex nam meliore complectitur. 
Ut nam omnis -honestatis quaerendum, ea mea nihil affert detracto, ad vix rebum -mollis. - -Ut epicurei praesent neglegentur pri, prima fuisset intellegebat ad -vim. An habemus comprehensam usu, at enim dignissim pro. Eam reque -vivendum adipisci ea. Vel ne odio choro minimum. Sea admodum -dissentiet ex. Mundi tamquam evertitur ius cu. Homero postea iisque ut -pro, vel ne saepe senserit consetetur. - -Nulla utamur facilisis ius ea, in viderer diceret pertinax eum. Mei no -enim quodsi facilisi, ex sed aeterno appareat mediocritatem, eum -sententiae deterruisset ut. At suas timeam euismod cum, offendit -appareat interpretaris ne vix. Vel ea civibus albucius, ex vim quidam -accusata intellegebat, noluisse instructior sea id. Nec te nonumes -habemus appellantur, quis dignissim vituperata eu nam. - -At vix apeirian patrioque vituperatoribus, an usu agam assum. Debet -iisque an mea. Per eu dicant ponderum accommodare. Pri alienum -placerat senserit an, ne eum ferri abhorreant vituperatoribus. Ut mea -eligendi disputationi. Ius no tation everti impedit, ei magna quidam -mediocritatem pri. - -Legendos perpetua iracundia ne usu, no ius ullum epicurei intellegam, -ad modus epicuri lucilius eam. In unum quaerendum usu. Ne diam paulo -has, ea veri virtute sed. Alia honestatis conclusionemque mea eu, ut -iudico albucius his. - -Usu essent probatus eu, sed omnis dolor delicatissimi ex. No qui augue -dissentias dissentiet. Laudem recteque no usu, vel an velit noluisse, -an sed utinam eirmod appetere. Ne mea fuisset inimicus ocurreret. At -vis dicant abhorreant, utinam forensibus nec ne, mei te docendi -consequat. Brute inermis persecuti cum id. Ut ipsum munere propriae -usu, dicit graeco disputando id has. - -Eros dolore quaerendum nam ei. Timeam ornatus inciderint pro id. Nec -torquatos sadipscing ei, ancillae molestie per in. Malis principes duo -ea, usu liber postulant ei. - -Graece timeam voluptatibus eu eam. Alia probatus quo no, ea scripta -feugiat duo. Congue option meliore ex qui, noster invenire appellantur -ea vel. Eu exerci legendos vel. Consetetur repudiandae vim ut. Vix an -probo minimum, et nam illud falli tempor. - -Cum dico signiferumque eu. Sed ut regione maiorum, id veritus insolens -tacimates vix. Eu mel sint tamquam lucilius, duo no oporteat -tacimates. Atqui augue concludaturque vix ei, id mel utroque menandri. - -Ad oratio blandit aliquando pro. Vis et dolorum rationibus -philosophia, ad cum nulla molestie. Hinc fuisset adversarium eum et, -ne qui nisl verear saperet, vel te quaestio forensibus. Per odio -option delenit an. Alii placerat has no, in pri nihil platonem -cotidieque. Est ut elit copiosae scaevola, debet tollit maluisset sea -an. - -Te sea hinc debet pericula, liber ridens fabulas cu sed, quem mutat -accusam mea et. Elitr labitur albucius et pri, an labore feugait mel. -Velit zril melius usu ea. Ad stet putent interpretaris qui. Mel no -error volumus scripserit. In pro paulo iudico, quo ei dolorem -verterem, affert fabellas dissentiet ea vix. - -Vis quot deserunt te. Error aliquid detraxit eu usu, vis alia eruditi -salutatus cu. Est nostrud bonorum an, ei usu alii salutatus. Vel at -nisl primis, eum ex aperiri noluisse reformidans. Ad veri velit -utroque vis, ex equidem detraxit temporibus has. - -Inermis appareat usu ne. Eros placerat periculis mea ad, in dictas -pericula pro. Errem postulant at usu, ea nec amet ornatus mentitum. Ad -mazim graeco eum, vel ex percipit volutpat iudicabit, sit ne delicata -interesset. 
Mel sapientem prodesset abhorreant et, oblique suscipit -eam id. - -An maluisset disputando mea, vidit mnesarchum pri et. Malis insolens -inciderint no sea. Ea persius maluisset vix, ne vim appellantur -instructior, consul quidam definiebas pri id. Cum integre feugiat -pericula in, ex sed persius similique, mel ne natum dicit percipitur. - -Primis discere ne pri, errem putent definitionem at vis. Ei mel dolore -neglegentur, mei tincidunt percipitur ei. Pro ad simul integre -rationibus. Eu vel alii honestatis definitiones, mea no nonumy -reprehendunt. - -Dicta appareat legendos est cu. Eu vel congue dicunt omittam, no vix -adhuc minimum constituam, quot noluisse id mel. Eu quot sale mutat -duo, ex nisl munere invenire duo. Ne nec ullum utamur. Pro alterum -debitis nostrum no, ut vel aliquid vivendo. - -Aliquip fierent praesent quo ne, id sit audiam recusabo delicatissimi. -Usu postulant incorrupte cu. At pro dicit tibique intellegam, cibo -dolore impedit id eam, et aeque feugait assentior has. Quando sensibus -nec ex. Possit sensibus pri ad, unum mutat periculis cu vix. - -Mundi tibique vix te, duo simul partiendo qualisque id, est at vidit -sonet tempor. No per solet aeterno deseruisse. Petentium salutandi -definiebas pri cu. Munere vivendum est in. Ei justo congue eligendi -vis, modus offendit omittantur te mel. - -Integre voluptaria in qui, sit habemus tractatos constituam no. Utinam -melius conceptam est ne, quo in minimum apeirian delicata, ut ius -porro recusabo. Dicant expetenda vix no, ludus scripserit sed ex, eu -his modo nostro. Ut etiam sonet his, quodsi inciderint philosophia te -per. Nullam lobortis eu cum, vix an sonet efficiendi repudiandae. Vis -ad idque fabellas intellegebat. - -Eum commodo senserit conclusionemque ex. Sed forensibus sadipscing ut, -mei in facer delicata periculis, sea ne hinc putent cetero. Nec ne -alia corpora invenire, alia prima soleat te cum. Eleifend posidonium -nam at. - -Dolorum indoctum cu quo, ex dolor legendos recteque eam, cu pri zril -discere. Nec civibus officiis dissentiunt ex, est te liber ludus -elaboraret. Cum ea fabellas invenire. Ex vim nostrud eripuit -comprehensam, nam te inermis delectus, saepe inermis senserit. 
-` diff --git a/vendor/golang.org/x/net/go.mod b/vendor/golang.org/x/net/go.mod new file mode 100644 index 0000000..78a1450 --- /dev/null +++ b/vendor/golang.org/x/net/go.mod @@ -0,0 +1,6 @@ +module golang.org/x/net + +require ( + golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 + golang.org/x/text v0.3.0 +) diff --git a/vendor/golang.org/x/net/go.sum b/vendor/golang.org/x/net/go.sum new file mode 100644 index 0000000..0fa675a --- /dev/null +++ b/vendor/golang.org/x/net/go.sum @@ -0,0 +1,6 @@ +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/vendor/golang.org/x/net/html/atom/gen.go b/vendor/golang.org/x/net/html/atom/gen.go index 6bfa866..5d05278 100644 --- a/vendor/golang.org/x/net/html/atom/gen.go +++ b/vendor/golang.org/x/net/html/atom/gen.go @@ -4,17 +4,17 @@ // +build ignore -package main +//go:generate go run gen.go +//go:generate go run gen.go -test -// This program generates table.go and table_test.go. -// Invoke as -// -// go run gen.go |gofmt >table.go -// go run gen.go -test |gofmt >table_test.go +package main import ( + "bytes" "flag" "fmt" + "go/format" + "io/ioutil" "math/rand" "os" "sort" @@ -42,6 +42,18 @@ func identifier(s string) string { var test = flag.Bool("test", false, "generate table_test.go") +func genFile(name string, buf *bytes.Buffer) { + b, err := format.Source(buf.Bytes()) + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + if err := ioutil.WriteFile(name, b, 0644); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} + func main() { flag.Parse() @@ -52,32 +64,31 @@ func main() { all = append(all, extra...) sort.Strings(all) - if *test { - fmt.Printf("// generated by go run gen.go -test; DO NOT EDIT\n\n") - fmt.Printf("package atom\n\n") - fmt.Printf("var testAtomList = []string{\n") - for _, s := range all { - fmt.Printf("\t%q,\n", s) - } - fmt.Printf("}\n") - return - } - // uniq - lists have dups - // compute max len too - maxLen := 0 w := 0 for _, s := range all { if w == 0 || all[w-1] != s { - if maxLen < len(s) { - maxLen = len(s) - } all[w] = s w++ } } all = all[:w] + if *test { + var buf bytes.Buffer + fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n") + fmt.Fprintln(&buf, "//go:generate go run gen.go -test\n") + fmt.Fprintln(&buf, "package atom\n") + fmt.Fprintln(&buf, "var testAtomList = []string{") + for _, s := range all { + fmt.Fprintf(&buf, "\t%q,\n", s) + } + fmt.Fprintln(&buf, "}") + + genFile("table_test.go", &buf) + return + } + // Find hash that minimizes table size. var best *table for i := 0; i < 1000000; i++ { @@ -163,36 +174,46 @@ func main() { atom[s] = uint32(off<<8 | len(s)) } + var buf bytes.Buffer // Generate the Go code. 
- fmt.Printf("// generated by go run gen.go; DO NOT EDIT\n\n") - fmt.Printf("package atom\n\nconst (\n") + fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n") + fmt.Fprintln(&buf, "//go:generate go run gen.go\n") + fmt.Fprintln(&buf, "package atom\n\nconst (") + + // compute max len + maxLen := 0 for _, s := range all { - fmt.Printf("\t%s Atom = %#x\n", identifier(s), atom[s]) + if maxLen < len(s) { + maxLen = len(s) + } + fmt.Fprintf(&buf, "\t%s Atom = %#x\n", identifier(s), atom[s]) } - fmt.Printf(")\n\n") + fmt.Fprintln(&buf, ")\n") - fmt.Printf("const hash0 = %#x\n\n", best.h0) - fmt.Printf("const maxAtomLen = %d\n\n", maxLen) + fmt.Fprintf(&buf, "const hash0 = %#x\n\n", best.h0) + fmt.Fprintf(&buf, "const maxAtomLen = %d\n\n", maxLen) - fmt.Printf("var table = [1<<%d]Atom{\n", best.k) + fmt.Fprintf(&buf, "var table = [1<<%d]Atom{\n", best.k) for i, s := range best.tab { if s == "" { continue } - fmt.Printf("\t%#x: %#x, // %s\n", i, atom[s], s) + fmt.Fprintf(&buf, "\t%#x: %#x, // %s\n", i, atom[s], s) } - fmt.Printf("}\n") + fmt.Fprintf(&buf, "}\n") datasize := (1 << best.k) * 4 - fmt.Printf("const atomText =\n") + fmt.Fprintln(&buf, "const atomText =") textsize := len(text) for len(text) > 60 { - fmt.Printf("\t%q +\n", text[:60]) + fmt.Fprintf(&buf, "\t%q +\n", text[:60]) text = text[60:] } - fmt.Printf("\t%q\n\n", text) + fmt.Fprintf(&buf, "\t%q\n\n", text) + + genFile("table.go", &buf) - fmt.Fprintf(os.Stderr, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize) + fmt.Fprintf(os.Stdout, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize) } type byLen []string @@ -285,8 +306,10 @@ func (t *table) push(i uint32, depth int) bool { // The lists of element names and attribute keys were taken from // https://html.spec.whatwg.org/multipage/indices.html#index -// as of the "HTML Living Standard - Last Updated 21 February 2015" version. +// as of the "HTML Living Standard - Last Updated 16 April 2018" version. +// "command", "keygen" and "menuitem" have been removed from the spec, +// but are kept here for backwards compatibility. var elements = []string{ "a", "abbr", @@ -349,6 +372,7 @@ var elements = []string{ "legend", "li", "link", + "main", "map", "mark", "menu", @@ -364,6 +388,7 @@ var elements = []string{ "output", "p", "param", + "picture", "pre", "progress", "q", @@ -375,6 +400,7 @@ var elements = []string{ "script", "section", "select", + "slot", "small", "source", "span", @@ -403,14 +429,21 @@ var elements = []string{ } // https://html.spec.whatwg.org/multipage/indices.html#attributes-3 - +// +// "challenge", "command", "contextmenu", "dropzone", "icon", "keytype", "mediagroup", +// "radiogroup", "spellcheck", "scoped", "seamless", "sortable" and "sorted" have been removed from the spec, +// but are kept here for backwards compatibility. 
var attributes = []string{ "abbr", "accept", "accept-charset", "accesskey", "action", + "allowfullscreen", + "allowpaymentrequest", + "allowusermedia", "alt", + "as", "async", "autocomplete", "autofocus", @@ -420,6 +453,7 @@ var attributes = []string{ "checked", "cite", "class", + "color", "cols", "colspan", "command", @@ -457,6 +491,8 @@ var attributes = []string{ "icon", "id", "inputmode", + "integrity", + "is", "ismap", "itemid", "itemprop", @@ -481,16 +517,20 @@ var attributes = []string{ "multiple", "muted", "name", + "nomodule", + "nonce", "novalidate", "open", "optimum", "pattern", "ping", "placeholder", + "playsinline", "poster", "preload", "radiogroup", "readonly", + "referrerpolicy", "rel", "required", "reversed", @@ -507,10 +547,13 @@ var attributes = []string{ "sizes", "sortable", "sorted", + "slot", "span", + "spellcheck", "src", "srcdoc", "srclang", + "srcset", "start", "step", "style", @@ -520,16 +563,22 @@ var attributes = []string{ "translate", "type", "typemustmatch", + "updateviacache", "usemap", "value", "width", + "workertype", "wrap", } +// "onautocomplete", "onautocompleteerror", "onmousewheel", +// "onshow" and "onsort" have been removed from the spec, +// but are kept here for backwards compatibility. var eventHandlers = []string{ "onabort", "onautocomplete", "onautocompleteerror", + "onauxclick", "onafterprint", "onbeforeprint", "onbeforeunload", @@ -541,11 +590,14 @@ var eventHandlers = []string{ "onclick", "onclose", "oncontextmenu", + "oncopy", "oncuechange", + "oncut", "ondblclick", "ondrag", "ondragend", "ondragenter", + "ondragexit", "ondragleave", "ondragover", "ondragstart", @@ -565,18 +617,24 @@ var eventHandlers = []string{ "onload", "onloadeddata", "onloadedmetadata", + "onloadend", "onloadstart", "onmessage", + "onmessageerror", "onmousedown", + "onmouseenter", + "onmouseleave", "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onmousewheel", + "onwheel", "onoffline", "ononline", "onpagehide", "onpageshow", + "onpaste", "onpause", "onplay", "onplaying", @@ -585,7 +643,9 @@ var eventHandlers = []string{ "onratechange", "onreset", "onresize", + "onrejectionhandled", "onscroll", + "onsecuritypolicyviolation", "onseeked", "onseeking", "onselect", @@ -597,6 +657,7 @@ var eventHandlers = []string{ "onsuspend", "ontimeupdate", "ontoggle", + "onunhandledrejection", "onunload", "onvolumechange", "onwaiting", @@ -604,6 +665,7 @@ var eventHandlers = []string{ // extra are ad-hoc values not covered by any of the lists above. var extra = []string{ + "acronym", "align", "annotation", "annotation-xml", @@ -639,6 +701,8 @@ var extra = []string{ "plaintext", "prompt", "public", + "rb", + "rtc", "spacer", "strike", "svg", diff --git a/vendor/golang.org/x/net/html/atom/table.go b/vendor/golang.org/x/net/html/atom/table.go index 2605ba3..2a93886 100644 --- a/vendor/golang.org/x/net/html/atom/table.go +++ b/vendor/golang.org/x/net/html/atom/table.go @@ -1,713 +1,783 @@ -// generated by go run gen.go; DO NOT EDIT +// Code generated by go generate gen.go; DO NOT EDIT. 
+ +//go:generate go run gen.go package atom const ( - A Atom = 0x1 - Abbr Atom = 0x4 - Accept Atom = 0x2106 - AcceptCharset Atom = 0x210e - Accesskey Atom = 0x3309 - Action Atom = 0x1f606 - Address Atom = 0x4f307 - Align Atom = 0x1105 - Alt Atom = 0x4503 - Annotation Atom = 0x1670a - AnnotationXml Atom = 0x1670e - Applet Atom = 0x2b306 - Area Atom = 0x2fa04 - Article Atom = 0x38807 - Aside Atom = 0x8305 - Async Atom = 0x7b05 - Audio Atom = 0xa605 - Autocomplete Atom = 0x1fc0c - Autofocus Atom = 0xb309 - Autoplay Atom = 0xce08 - B Atom = 0x101 - Base Atom = 0xd604 - Basefont Atom = 0xd608 - Bdi Atom = 0x1a03 - Bdo Atom = 0xe703 - Bgsound Atom = 0x11807 - Big Atom = 0x12403 - Blink Atom = 0x12705 - Blockquote Atom = 0x12c0a - Body Atom = 0x2f04 - Br Atom = 0x202 - Button Atom = 0x13606 - Canvas Atom = 0x7f06 - Caption Atom = 0x1bb07 - Center Atom = 0x5b506 - Challenge Atom = 0x21f09 - Charset Atom = 0x2807 - Checked Atom = 0x32807 - Cite Atom = 0x3c804 - Class Atom = 0x4de05 - Code Atom = 0x14904 - Col Atom = 0x15003 - Colgroup Atom = 0x15008 - Color Atom = 0x15d05 - Cols Atom = 0x16204 - Colspan Atom = 0x16207 - Command Atom = 0x17507 - Content Atom = 0x42307 - Contenteditable Atom = 0x4230f - Contextmenu Atom = 0x3310b - Controls Atom = 0x18808 - Coords Atom = 0x19406 - Crossorigin Atom = 0x19f0b - Data Atom = 0x44a04 - Datalist Atom = 0x44a08 - Datetime Atom = 0x23c08 - Dd Atom = 0x26702 - Default Atom = 0x8607 - Defer Atom = 0x14b05 - Del Atom = 0x3ef03 - Desc Atom = 0x4db04 - Details Atom = 0x4807 - Dfn Atom = 0x6103 - Dialog Atom = 0x1b06 - Dir Atom = 0x6903 - Dirname Atom = 0x6907 - Disabled Atom = 0x10c08 - Div Atom = 0x11303 - Dl Atom = 0x11e02 - Download Atom = 0x40008 - Draggable Atom = 0x17b09 - Dropzone Atom = 0x39108 - Dt Atom = 0x50902 - Em Atom = 0x6502 - Embed Atom = 0x6505 - Enctype Atom = 0x21107 - Face Atom = 0x5b304 - Fieldset Atom = 0x1b008 - Figcaption Atom = 0x1b80a - Figure Atom = 0x1cc06 - Font Atom = 0xda04 - Footer Atom = 0x8d06 - For Atom = 0x1d803 - ForeignObject Atom = 0x1d80d - Foreignobject Atom = 0x1e50d - Form Atom = 0x1f204 - Formaction Atom = 0x1f20a - Formenctype Atom = 0x20d0b - Formmethod Atom = 0x2280a - Formnovalidate Atom = 0x2320e - Formtarget Atom = 0x2470a - Frame Atom = 0x9a05 - Frameset Atom = 0x9a08 - H1 Atom = 0x26e02 - H2 Atom = 0x29402 - H3 Atom = 0x2a702 - H4 Atom = 0x2e902 - H5 Atom = 0x2f302 - H6 Atom = 0x50b02 - Head Atom = 0x2d504 - Header Atom = 0x2d506 - Headers Atom = 0x2d507 - Height Atom = 0x25106 - Hgroup Atom = 0x25906 - Hidden Atom = 0x26506 - High Atom = 0x26b04 - Hr Atom = 0x27002 - Href Atom = 0x27004 - Hreflang Atom = 0x27008 - Html Atom = 0x25504 - HttpEquiv Atom = 0x2780a - I Atom = 0x601 - Icon Atom = 0x42204 - Id Atom = 0x8502 - Iframe Atom = 0x29606 - Image Atom = 0x29c05 - Img Atom = 0x2a103 - Input Atom = 0x3e805 - Inputmode Atom = 0x3e809 - Ins Atom = 0x1a803 - Isindex Atom = 0x2a907 - Ismap Atom = 0x2b005 - Itemid Atom = 0x33c06 - Itemprop Atom = 0x3c908 - Itemref Atom = 0x5ad07 - Itemscope Atom = 0x2b909 - Itemtype Atom = 0x2c308 - Kbd Atom = 0x1903 - Keygen Atom = 0x3906 - Keytype Atom = 0x53707 - Kind Atom = 0x10904 - Label Atom = 0xf005 - Lang Atom = 0x27404 - Legend Atom = 0x18206 - Li Atom = 0x1202 - Link Atom = 0x12804 - List Atom = 0x44e04 - Listing Atom = 0x44e07 - Loop Atom = 0xf404 - Low Atom = 0x11f03 - Malignmark Atom = 0x100a - Manifest Atom = 0x5f108 - Map Atom = 0x2b203 - Mark Atom = 0x1604 - Marquee Atom = 0x2cb07 - Math Atom = 0x2d204 - Max Atom = 0x2e103 - Maxlength Atom = 0x2e109 - Media Atom = 
0x6e05 - Mediagroup Atom = 0x6e0a - Menu Atom = 0x33804 - Menuitem Atom = 0x33808 - Meta Atom = 0x45d04 - Meter Atom = 0x24205 - Method Atom = 0x22c06 - Mglyph Atom = 0x2a206 - Mi Atom = 0x2eb02 - Min Atom = 0x2eb03 - Minlength Atom = 0x2eb09 - Mn Atom = 0x23502 - Mo Atom = 0x3ed02 - Ms Atom = 0x2bc02 - Mtext Atom = 0x2f505 - Multiple Atom = 0x30308 - Muted Atom = 0x30b05 - Name Atom = 0x6c04 - Nav Atom = 0x3e03 - Nobr Atom = 0x5704 - Noembed Atom = 0x6307 - Noframes Atom = 0x9808 - Noscript Atom = 0x3d208 - Novalidate Atom = 0x2360a - Object Atom = 0x1ec06 - Ol Atom = 0xc902 - Onabort Atom = 0x13a07 - Onafterprint Atom = 0x1c00c - Onautocomplete Atom = 0x1fa0e - Onautocompleteerror Atom = 0x1fa13 - Onbeforeprint Atom = 0x6040d - Onbeforeunload Atom = 0x4e70e - Onblur Atom = 0xaa06 - Oncancel Atom = 0xe908 - Oncanplay Atom = 0x28509 - Oncanplaythrough Atom = 0x28510 - Onchange Atom = 0x3a708 - Onclick Atom = 0x31007 - Onclose Atom = 0x31707 - Oncontextmenu Atom = 0x32f0d - Oncuechange Atom = 0x3420b - Ondblclick Atom = 0x34d0a - Ondrag Atom = 0x35706 - Ondragend Atom = 0x35709 - Ondragenter Atom = 0x3600b - Ondragleave Atom = 0x36b0b - Ondragover Atom = 0x3760a - Ondragstart Atom = 0x3800b - Ondrop Atom = 0x38f06 - Ondurationchange Atom = 0x39f10 - Onemptied Atom = 0x39609 - Onended Atom = 0x3af07 - Onerror Atom = 0x3b607 - Onfocus Atom = 0x3bd07 - Onhashchange Atom = 0x3da0c - Oninput Atom = 0x3e607 - Oninvalid Atom = 0x3f209 - Onkeydown Atom = 0x3fb09 - Onkeypress Atom = 0x4080a - Onkeyup Atom = 0x41807 - Onlanguagechange Atom = 0x43210 - Onload Atom = 0x44206 - Onloadeddata Atom = 0x4420c - Onloadedmetadata Atom = 0x45510 - Onloadstart Atom = 0x46b0b - Onmessage Atom = 0x47609 - Onmousedown Atom = 0x47f0b - Onmousemove Atom = 0x48a0b - Onmouseout Atom = 0x4950a - Onmouseover Atom = 0x4a20b - Onmouseup Atom = 0x4ad09 - Onmousewheel Atom = 0x4b60c - Onoffline Atom = 0x4c209 - Ononline Atom = 0x4cb08 - Onpagehide Atom = 0x4d30a - Onpageshow Atom = 0x4fe0a - Onpause Atom = 0x50d07 - Onplay Atom = 0x51706 - Onplaying Atom = 0x51709 - Onpopstate Atom = 0x5200a - Onprogress Atom = 0x52a0a - Onratechange Atom = 0x53e0c - Onreset Atom = 0x54a07 - Onresize Atom = 0x55108 - Onscroll Atom = 0x55f08 - Onseeked Atom = 0x56708 - Onseeking Atom = 0x56f09 - Onselect Atom = 0x57808 - Onshow Atom = 0x58206 - Onsort Atom = 0x58b06 - Onstalled Atom = 0x59509 - Onstorage Atom = 0x59e09 - Onsubmit Atom = 0x5a708 - Onsuspend Atom = 0x5bb09 - Ontimeupdate Atom = 0xdb0c - Ontoggle Atom = 0x5c408 - Onunload Atom = 0x5cc08 - Onvolumechange Atom = 0x5d40e - Onwaiting Atom = 0x5e209 - Open Atom = 0x3cf04 - Optgroup Atom = 0xf608 - Optimum Atom = 0x5eb07 - Option Atom = 0x60006 - Output Atom = 0x49c06 - P Atom = 0xc01 - Param Atom = 0xc05 - Pattern Atom = 0x5107 - Ping Atom = 0x7704 - Placeholder Atom = 0xc30b - Plaintext Atom = 0xfd09 - Poster Atom = 0x15706 - Pre Atom = 0x25e03 - Preload Atom = 0x25e07 - Progress Atom = 0x52c08 - Prompt Atom = 0x5fa06 - Public Atom = 0x41e06 - Q Atom = 0x13101 - Radiogroup Atom = 0x30a - Readonly Atom = 0x2fb08 - Rel Atom = 0x25f03 - Required Atom = 0x1d008 - Reversed Atom = 0x5a08 - Rows Atom = 0x9204 - Rowspan Atom = 0x9207 - Rp Atom = 0x1c602 - Rt Atom = 0x13f02 - Ruby Atom = 0xaf04 - S Atom = 0x2c01 - Samp Atom = 0x4e04 - Sandbox Atom = 0xbb07 - Scope Atom = 0x2bd05 - Scoped Atom = 0x2bd06 - Script Atom = 0x3d406 - Seamless Atom = 0x31c08 - Section Atom = 0x4e207 - Select Atom = 0x57a06 - Selected Atom = 0x57a08 - Shape Atom = 0x4f905 - Size Atom = 0x55504 - Sizes Atom = 
0x55505 - Small Atom = 0x18f05 - Sortable Atom = 0x58d08 - Sorted Atom = 0x19906 - Source Atom = 0x1aa06 - Spacer Atom = 0x2db06 - Span Atom = 0x9504 - Spellcheck Atom = 0x3230a - Src Atom = 0x3c303 - Srcdoc Atom = 0x3c306 - Srclang Atom = 0x41107 - Start Atom = 0x38605 - Step Atom = 0x5f704 - Strike Atom = 0x53306 - Strong Atom = 0x55906 - Style Atom = 0x61105 - Sub Atom = 0x5a903 - Summary Atom = 0x61607 - Sup Atom = 0x61d03 - Svg Atom = 0x62003 - System Atom = 0x62306 - Tabindex Atom = 0x46308 - Table Atom = 0x42d05 - Target Atom = 0x24b06 - Tbody Atom = 0x2e05 - Td Atom = 0x4702 - Template Atom = 0x62608 - Textarea Atom = 0x2f608 - Tfoot Atom = 0x8c05 - Th Atom = 0x22e02 - Thead Atom = 0x2d405 - Time Atom = 0xdd04 - Title Atom = 0xa105 - Tr Atom = 0x10502 - Track Atom = 0x10505 - Translate Atom = 0x14009 - Tt Atom = 0x5302 - Type Atom = 0x21404 - Typemustmatch Atom = 0x2140d - U Atom = 0xb01 - Ul Atom = 0x8a02 - Usemap Atom = 0x51106 - Value Atom = 0x4005 - Var Atom = 0x11503 - Video Atom = 0x28105 - Wbr Atom = 0x12103 - Width Atom = 0x50705 - Wrap Atom = 0x58704 - Xmp Atom = 0xc103 + A Atom = 0x1 + Abbr Atom = 0x4 + Accept Atom = 0x1a06 + AcceptCharset Atom = 0x1a0e + Accesskey Atom = 0x2c09 + Acronym Atom = 0xaa07 + Action Atom = 0x27206 + Address Atom = 0x6f307 + Align Atom = 0xb105 + Allowfullscreen Atom = 0x2080f + Allowpaymentrequest Atom = 0xc113 + Allowusermedia Atom = 0xdd0e + Alt Atom = 0xf303 + Annotation Atom = 0x1c90a + AnnotationXml Atom = 0x1c90e + Applet Atom = 0x31906 + Area Atom = 0x35604 + Article Atom = 0x3fc07 + As Atom = 0x3c02 + Aside Atom = 0x10705 + Async Atom = 0xff05 + Audio Atom = 0x11505 + Autocomplete Atom = 0x2780c + Autofocus Atom = 0x12109 + Autoplay Atom = 0x13c08 + B Atom = 0x101 + Base Atom = 0x3b04 + Basefont Atom = 0x3b08 + Bdi Atom = 0xba03 + Bdo Atom = 0x14b03 + Bgsound Atom = 0x15e07 + Big Atom = 0x17003 + Blink Atom = 0x17305 + Blockquote Atom = 0x1870a + Body Atom = 0x2804 + Br Atom = 0x202 + Button Atom = 0x19106 + Canvas Atom = 0x10306 + Caption Atom = 0x23107 + Center Atom = 0x22006 + Challenge Atom = 0x29b09 + Charset Atom = 0x2107 + Checked Atom = 0x47907 + Cite Atom = 0x19c04 + Class Atom = 0x56405 + Code Atom = 0x5c504 + Col Atom = 0x1ab03 + Colgroup Atom = 0x1ab08 + Color Atom = 0x1bf05 + Cols Atom = 0x1c404 + Colspan Atom = 0x1c407 + Command Atom = 0x1d707 + Content Atom = 0x58b07 + Contenteditable Atom = 0x58b0f + Contextmenu Atom = 0x3800b + Controls Atom = 0x1de08 + Coords Atom = 0x1ea06 + Crossorigin Atom = 0x1fb0b + Data Atom = 0x4a504 + Datalist Atom = 0x4a508 + Datetime Atom = 0x2b808 + Dd Atom = 0x2d702 + Default Atom = 0x10a07 + Defer Atom = 0x5c705 + Del Atom = 0x45203 + Desc Atom = 0x56104 + Details Atom = 0x7207 + Dfn Atom = 0x8703 + Dialog Atom = 0xbb06 + Dir Atom = 0x9303 + Dirname Atom = 0x9307 + Disabled Atom = 0x16408 + Div Atom = 0x16b03 + Dl Atom = 0x5e602 + Download Atom = 0x46308 + Draggable Atom = 0x17a09 + Dropzone Atom = 0x40508 + Dt Atom = 0x64b02 + Em Atom = 0x6e02 + Embed Atom = 0x6e05 + Enctype Atom = 0x28d07 + Face Atom = 0x21e04 + Fieldset Atom = 0x22608 + Figcaption Atom = 0x22e0a + Figure Atom = 0x24806 + Font Atom = 0x3f04 + Footer Atom = 0xf606 + For Atom = 0x25403 + ForeignObject Atom = 0x2540d + Foreignobject Atom = 0x2610d + Form Atom = 0x26e04 + Formaction Atom = 0x26e0a + Formenctype Atom = 0x2890b + Formmethod Atom = 0x2a40a + Formnovalidate Atom = 0x2ae0e + Formtarget Atom = 0x2c00a + Frame Atom = 0x8b05 + Frameset Atom = 0x8b08 + H1 Atom = 0x15c02 + H2 Atom = 0x2de02 + H3 Atom = 0x30d02 + H4 
Atom = 0x34502 + H5 Atom = 0x34f02 + H6 Atom = 0x64d02 + Head Atom = 0x33104 + Header Atom = 0x33106 + Headers Atom = 0x33107 + Height Atom = 0x5206 + Hgroup Atom = 0x2ca06 + Hidden Atom = 0x2d506 + High Atom = 0x2db04 + Hr Atom = 0x15702 + Href Atom = 0x2e004 + Hreflang Atom = 0x2e008 + Html Atom = 0x5604 + HttpEquiv Atom = 0x2e80a + I Atom = 0x601 + Icon Atom = 0x58a04 + Id Atom = 0x10902 + Iframe Atom = 0x2fc06 + Image Atom = 0x30205 + Img Atom = 0x30703 + Input Atom = 0x44b05 + Inputmode Atom = 0x44b09 + Ins Atom = 0x20403 + Integrity Atom = 0x23f09 + Is Atom = 0x16502 + Isindex Atom = 0x30f07 + Ismap Atom = 0x31605 + Itemid Atom = 0x38b06 + Itemprop Atom = 0x19d08 + Itemref Atom = 0x3cd07 + Itemscope Atom = 0x67109 + Itemtype Atom = 0x31f08 + Kbd Atom = 0xb903 + Keygen Atom = 0x3206 + Keytype Atom = 0xd607 + Kind Atom = 0x17704 + Label Atom = 0x5905 + Lang Atom = 0x2e404 + Legend Atom = 0x18106 + Li Atom = 0xb202 + Link Atom = 0x17404 + List Atom = 0x4a904 + Listing Atom = 0x4a907 + Loop Atom = 0x5d04 + Low Atom = 0xc303 + Main Atom = 0x1004 + Malignmark Atom = 0xb00a + Manifest Atom = 0x6d708 + Map Atom = 0x31803 + Mark Atom = 0xb604 + Marquee Atom = 0x32707 + Math Atom = 0x32e04 + Max Atom = 0x33d03 + Maxlength Atom = 0x33d09 + Media Atom = 0xe605 + Mediagroup Atom = 0xe60a + Menu Atom = 0x38704 + Menuitem Atom = 0x38708 + Meta Atom = 0x4b804 + Meter Atom = 0x9805 + Method Atom = 0x2a806 + Mglyph Atom = 0x30806 + Mi Atom = 0x34702 + Min Atom = 0x34703 + Minlength Atom = 0x34709 + Mn Atom = 0x2b102 + Mo Atom = 0xa402 + Ms Atom = 0x67402 + Mtext Atom = 0x35105 + Multiple Atom = 0x35f08 + Muted Atom = 0x36705 + Name Atom = 0x9604 + Nav Atom = 0x1303 + Nobr Atom = 0x3704 + Noembed Atom = 0x6c07 + Noframes Atom = 0x8908 + Nomodule Atom = 0xa208 + Nonce Atom = 0x1a605 + Noscript Atom = 0x21608 + Novalidate Atom = 0x2b20a + Object Atom = 0x26806 + Ol Atom = 0x13702 + Onabort Atom = 0x19507 + Onafterprint Atom = 0x2360c + Onautocomplete Atom = 0x2760e + Onautocompleteerror Atom = 0x27613 + Onauxclick Atom = 0x61f0a + Onbeforeprint Atom = 0x69e0d + Onbeforeunload Atom = 0x6e70e + Onblur Atom = 0x56d06 + Oncancel Atom = 0x11908 + Oncanplay Atom = 0x14d09 + Oncanplaythrough Atom = 0x14d10 + Onchange Atom = 0x41b08 + Onclick Atom = 0x2f507 + Onclose Atom = 0x36c07 + Oncontextmenu Atom = 0x37e0d + Oncopy Atom = 0x39106 + Oncuechange Atom = 0x3970b + Oncut Atom = 0x3a205 + Ondblclick Atom = 0x3a70a + Ondrag Atom = 0x3b106 + Ondragend Atom = 0x3b109 + Ondragenter Atom = 0x3ba0b + Ondragexit Atom = 0x3c50a + Ondragleave Atom = 0x3df0b + Ondragover Atom = 0x3ea0a + Ondragstart Atom = 0x3f40b + Ondrop Atom = 0x40306 + Ondurationchange Atom = 0x41310 + Onemptied Atom = 0x40a09 + Onended Atom = 0x42307 + Onerror Atom = 0x42a07 + Onfocus Atom = 0x43107 + Onhashchange Atom = 0x43d0c + Oninput Atom = 0x44907 + Oninvalid Atom = 0x45509 + Onkeydown Atom = 0x45e09 + Onkeypress Atom = 0x46b0a + Onkeyup Atom = 0x48007 + Onlanguagechange Atom = 0x48d10 + Onload Atom = 0x49d06 + Onloadeddata Atom = 0x49d0c + Onloadedmetadata Atom = 0x4b010 + Onloadend Atom = 0x4c609 + Onloadstart Atom = 0x4cf0b + Onmessage Atom = 0x4da09 + Onmessageerror Atom = 0x4da0e + Onmousedown Atom = 0x4e80b + Onmouseenter Atom = 0x4f30c + Onmouseleave Atom = 0x4ff0c + Onmousemove Atom = 0x50b0b + Onmouseout Atom = 0x5160a + Onmouseover Atom = 0x5230b + Onmouseup Atom = 0x52e09 + Onmousewheel Atom = 0x53c0c + Onoffline Atom = 0x54809 + Ononline Atom = 0x55108 + Onpagehide Atom = 0x5590a + Onpageshow Atom = 0x5730a + Onpaste Atom = 0x57f07 
+ Onpause Atom = 0x59a07 + Onplay Atom = 0x5a406 + Onplaying Atom = 0x5a409 + Onpopstate Atom = 0x5ad0a + Onprogress Atom = 0x5b70a + Onratechange Atom = 0x5cc0c + Onrejectionhandled Atom = 0x5d812 + Onreset Atom = 0x5ea07 + Onresize Atom = 0x5f108 + Onscroll Atom = 0x60008 + Onsecuritypolicyviolation Atom = 0x60819 + Onseeked Atom = 0x62908 + Onseeking Atom = 0x63109 + Onselect Atom = 0x63a08 + Onshow Atom = 0x64406 + Onsort Atom = 0x64f06 + Onstalled Atom = 0x65909 + Onstorage Atom = 0x66209 + Onsubmit Atom = 0x66b08 + Onsuspend Atom = 0x67b09 + Ontimeupdate Atom = 0x400c + Ontoggle Atom = 0x68408 + Onunhandledrejection Atom = 0x68c14 + Onunload Atom = 0x6ab08 + Onvolumechange Atom = 0x6b30e + Onwaiting Atom = 0x6c109 + Onwheel Atom = 0x6ca07 + Open Atom = 0x1a304 + Optgroup Atom = 0x5f08 + Optimum Atom = 0x6d107 + Option Atom = 0x6e306 + Output Atom = 0x51d06 + P Atom = 0xc01 + Param Atom = 0xc05 + Pattern Atom = 0x6607 + Picture Atom = 0x7b07 + Ping Atom = 0xef04 + Placeholder Atom = 0x1310b + Plaintext Atom = 0x1b209 + Playsinline Atom = 0x1400b + Poster Atom = 0x2cf06 + Pre Atom = 0x47003 + Preload Atom = 0x48607 + Progress Atom = 0x5b908 + Prompt Atom = 0x53606 + Public Atom = 0x58606 + Q Atom = 0xcf01 + Radiogroup Atom = 0x30a + Rb Atom = 0x3a02 + Readonly Atom = 0x35708 + Referrerpolicy Atom = 0x3d10e + Rel Atom = 0x48703 + Required Atom = 0x24c08 + Reversed Atom = 0x8008 + Rows Atom = 0x9c04 + Rowspan Atom = 0x9c07 + Rp Atom = 0x23c02 + Rt Atom = 0x19a02 + Rtc Atom = 0x19a03 + Ruby Atom = 0xfb04 + S Atom = 0x2501 + Samp Atom = 0x7804 + Sandbox Atom = 0x12907 + Scope Atom = 0x67505 + Scoped Atom = 0x67506 + Script Atom = 0x21806 + Seamless Atom = 0x37108 + Section Atom = 0x56807 + Select Atom = 0x63c06 + Selected Atom = 0x63c08 + Shape Atom = 0x1e505 + Size Atom = 0x5f504 + Sizes Atom = 0x5f505 + Slot Atom = 0x1ef04 + Small Atom = 0x20605 + Sortable Atom = 0x65108 + Sorted Atom = 0x33706 + Source Atom = 0x37806 + Spacer Atom = 0x43706 + Span Atom = 0x9f04 + Spellcheck Atom = 0x4740a + Src Atom = 0x5c003 + Srcdoc Atom = 0x5c006 + Srclang Atom = 0x5f907 + Srcset Atom = 0x6f906 + Start Atom = 0x3fa05 + Step Atom = 0x58304 + Strike Atom = 0xd206 + Strong Atom = 0x6dd06 + Style Atom = 0x6ff05 + Sub Atom = 0x66d03 + Summary Atom = 0x70407 + Sup Atom = 0x70b03 + Svg Atom = 0x70e03 + System Atom = 0x71106 + Tabindex Atom = 0x4be08 + Table Atom = 0x59505 + Target Atom = 0x2c406 + Tbody Atom = 0x2705 + Td Atom = 0x9202 + Template Atom = 0x71408 + Textarea Atom = 0x35208 + Tfoot Atom = 0xf505 + Th Atom = 0x15602 + Thead Atom = 0x33005 + Time Atom = 0x4204 + Title Atom = 0x11005 + Tr Atom = 0xcc02 + Track Atom = 0x1ba05 + Translate Atom = 0x1f209 + Tt Atom = 0x6802 + Type Atom = 0xd904 + Typemustmatch Atom = 0x2900d + U Atom = 0xb01 + Ul Atom = 0xa702 + Updateviacache Atom = 0x460e + Usemap Atom = 0x59e06 + Value Atom = 0x1505 + Var Atom = 0x16d03 + Video Atom = 0x2f105 + Wbr Atom = 0x57c03 + Width Atom = 0x64905 + Workertype Atom = 0x71c0a + Wrap Atom = 0x72604 + Xmp Atom = 0x12f03 ) -const hash0 = 0xc17da63e +const hash0 = 0x81cdf10e -const maxAtomLen = 19 +const maxAtomLen = 25 var table = [1 << 9]Atom{ - 0x1: 0x48a0b, // onmousemove - 0x2: 0x5e209, // onwaiting - 0x3: 0x1fa13, // onautocompleteerror - 0x4: 0x5fa06, // prompt - 0x7: 0x5eb07, // optimum - 0x8: 0x1604, // mark - 0xa: 0x5ad07, // itemref - 0xb: 0x4fe0a, // onpageshow - 0xc: 0x57a06, // select - 0xd: 0x17b09, // draggable - 0xe: 0x3e03, // nav - 0xf: 0x17507, // command - 0x11: 0xb01, // u - 0x14: 0x2d507, // headers - 0x15: 
0x44a08, // datalist - 0x17: 0x4e04, // samp - 0x1a: 0x3fb09, // onkeydown - 0x1b: 0x55f08, // onscroll - 0x1c: 0x15003, // col - 0x20: 0x3c908, // itemprop - 0x21: 0x2780a, // http-equiv - 0x22: 0x61d03, // sup - 0x24: 0x1d008, // required - 0x2b: 0x25e07, // preload - 0x2c: 0x6040d, // onbeforeprint - 0x2d: 0x3600b, // ondragenter - 0x2e: 0x50902, // dt - 0x2f: 0x5a708, // onsubmit - 0x30: 0x27002, // hr - 0x31: 0x32f0d, // oncontextmenu - 0x33: 0x29c05, // image - 0x34: 0x50d07, // onpause - 0x35: 0x25906, // hgroup - 0x36: 0x7704, // ping - 0x37: 0x57808, // onselect - 0x3a: 0x11303, // div - 0x3b: 0x1fa0e, // onautocomplete - 0x40: 0x2eb02, // mi - 0x41: 0x31c08, // seamless - 0x42: 0x2807, // charset - 0x43: 0x8502, // id - 0x44: 0x5200a, // onpopstate - 0x45: 0x3ef03, // del - 0x46: 0x2cb07, // marquee - 0x47: 0x3309, // accesskey - 0x49: 0x8d06, // footer - 0x4a: 0x44e04, // list - 0x4b: 0x2b005, // ismap - 0x51: 0x33804, // menu - 0x52: 0x2f04, // body - 0x55: 0x9a08, // frameset - 0x56: 0x54a07, // onreset - 0x57: 0x12705, // blink - 0x58: 0xa105, // title - 0x59: 0x38807, // article - 0x5b: 0x22e02, // th - 0x5d: 0x13101, // q - 0x5e: 0x3cf04, // open - 0x5f: 0x2fa04, // area - 0x61: 0x44206, // onload - 0x62: 0xda04, // font - 0x63: 0xd604, // base - 0x64: 0x16207, // colspan - 0x65: 0x53707, // keytype - 0x66: 0x11e02, // dl - 0x68: 0x1b008, // fieldset - 0x6a: 0x2eb03, // min - 0x6b: 0x11503, // var - 0x6f: 0x2d506, // header - 0x70: 0x13f02, // rt - 0x71: 0x15008, // colgroup - 0x72: 0x23502, // mn - 0x74: 0x13a07, // onabort - 0x75: 0x3906, // keygen - 0x76: 0x4c209, // onoffline - 0x77: 0x21f09, // challenge - 0x78: 0x2b203, // map - 0x7a: 0x2e902, // h4 - 0x7b: 0x3b607, // onerror - 0x7c: 0x2e109, // maxlength - 0x7d: 0x2f505, // mtext - 0x7e: 0xbb07, // sandbox - 0x7f: 0x58b06, // onsort - 0x80: 0x100a, // malignmark - 0x81: 0x45d04, // meta - 0x82: 0x7b05, // async - 0x83: 0x2a702, // h3 - 0x84: 0x26702, // dd - 0x85: 0x27004, // href - 0x86: 0x6e0a, // mediagroup - 0x87: 0x19406, // coords - 0x88: 0x41107, // srclang - 0x89: 0x34d0a, // ondblclick - 0x8a: 0x4005, // value - 0x8c: 0xe908, // oncancel - 0x8e: 0x3230a, // spellcheck - 0x8f: 0x9a05, // frame - 0x91: 0x12403, // big - 0x94: 0x1f606, // action - 0x95: 0x6903, // dir - 0x97: 0x2fb08, // readonly - 0x99: 0x42d05, // table - 0x9a: 0x61607, // summary - 0x9b: 0x12103, // wbr - 0x9c: 0x30a, // radiogroup - 0x9d: 0x6c04, // name - 0x9f: 0x62306, // system - 0xa1: 0x15d05, // color - 0xa2: 0x7f06, // canvas - 0xa3: 0x25504, // html - 0xa5: 0x56f09, // onseeking - 0xac: 0x4f905, // shape - 0xad: 0x25f03, // rel - 0xae: 0x28510, // oncanplaythrough - 0xaf: 0x3760a, // ondragover - 0xb0: 0x62608, // template - 0xb1: 0x1d80d, // foreignObject - 0xb3: 0x9204, // rows - 0xb6: 0x44e07, // listing - 0xb7: 0x49c06, // output - 0xb9: 0x3310b, // contextmenu - 0xbb: 0x11f03, // low - 0xbc: 0x1c602, // rp - 0xbd: 0x5bb09, // onsuspend - 0xbe: 0x13606, // button - 0xbf: 0x4db04, // desc - 0xc1: 0x4e207, // section - 0xc2: 0x52a0a, // onprogress - 0xc3: 0x59e09, // onstorage - 0xc4: 0x2d204, // math - 0xc5: 0x4503, // alt - 0xc7: 0x8a02, // ul - 0xc8: 0x5107, // pattern - 0xc9: 0x4b60c, // onmousewheel - 0xca: 0x35709, // ondragend - 0xcb: 0xaf04, // ruby - 0xcc: 0xc01, // p - 0xcd: 0x31707, // onclose - 0xce: 0x24205, // meter - 0xcf: 0x11807, // bgsound - 0xd2: 0x25106, // height - 0xd4: 0x101, // b - 0xd5: 0x2c308, // itemtype - 0xd8: 0x1bb07, // caption - 0xd9: 0x10c08, // disabled - 0xdb: 0x33808, // menuitem - 0xdc: 
0x62003, // svg - 0xdd: 0x18f05, // small - 0xde: 0x44a04, // data - 0xe0: 0x4cb08, // ononline - 0xe1: 0x2a206, // mglyph - 0xe3: 0x6505, // embed - 0xe4: 0x10502, // tr - 0xe5: 0x46b0b, // onloadstart - 0xe7: 0x3c306, // srcdoc - 0xeb: 0x5c408, // ontoggle - 0xed: 0xe703, // bdo - 0xee: 0x4702, // td - 0xef: 0x8305, // aside - 0xf0: 0x29402, // h2 - 0xf1: 0x52c08, // progress - 0xf2: 0x12c0a, // blockquote - 0xf4: 0xf005, // label - 0xf5: 0x601, // i - 0xf7: 0x9207, // rowspan - 0xfb: 0x51709, // onplaying - 0xfd: 0x2a103, // img - 0xfe: 0xf608, // optgroup - 0xff: 0x42307, // content - 0x101: 0x53e0c, // onratechange - 0x103: 0x3da0c, // onhashchange - 0x104: 0x4807, // details - 0x106: 0x40008, // download - 0x109: 0x14009, // translate - 0x10b: 0x4230f, // contenteditable - 0x10d: 0x36b0b, // ondragleave - 0x10e: 0x2106, // accept - 0x10f: 0x57a08, // selected - 0x112: 0x1f20a, // formaction - 0x113: 0x5b506, // center - 0x115: 0x45510, // onloadedmetadata - 0x116: 0x12804, // link - 0x117: 0xdd04, // time - 0x118: 0x19f0b, // crossorigin - 0x119: 0x3bd07, // onfocus - 0x11a: 0x58704, // wrap - 0x11b: 0x42204, // icon - 0x11d: 0x28105, // video - 0x11e: 0x4de05, // class - 0x121: 0x5d40e, // onvolumechange - 0x122: 0xaa06, // onblur - 0x123: 0x2b909, // itemscope - 0x124: 0x61105, // style - 0x127: 0x41e06, // public - 0x129: 0x2320e, // formnovalidate - 0x12a: 0x58206, // onshow - 0x12c: 0x51706, // onplay - 0x12d: 0x3c804, // cite - 0x12e: 0x2bc02, // ms - 0x12f: 0xdb0c, // ontimeupdate - 0x130: 0x10904, // kind - 0x131: 0x2470a, // formtarget - 0x135: 0x3af07, // onended - 0x136: 0x26506, // hidden - 0x137: 0x2c01, // s - 0x139: 0x2280a, // formmethod - 0x13a: 0x3e805, // input - 0x13c: 0x50b02, // h6 - 0x13d: 0xc902, // ol - 0x13e: 0x3420b, // oncuechange - 0x13f: 0x1e50d, // foreignobject - 0x143: 0x4e70e, // onbeforeunload - 0x144: 0x2bd05, // scope - 0x145: 0x39609, // onemptied - 0x146: 0x14b05, // defer - 0x147: 0xc103, // xmp - 0x148: 0x39f10, // ondurationchange - 0x149: 0x1903, // kbd - 0x14c: 0x47609, // onmessage - 0x14d: 0x60006, // option - 0x14e: 0x2eb09, // minlength - 0x14f: 0x32807, // checked - 0x150: 0xce08, // autoplay - 0x152: 0x202, // br - 0x153: 0x2360a, // novalidate - 0x156: 0x6307, // noembed - 0x159: 0x31007, // onclick - 0x15a: 0x47f0b, // onmousedown - 0x15b: 0x3a708, // onchange - 0x15e: 0x3f209, // oninvalid - 0x15f: 0x2bd06, // scoped - 0x160: 0x18808, // controls - 0x161: 0x30b05, // muted - 0x162: 0x58d08, // sortable - 0x163: 0x51106, // usemap - 0x164: 0x1b80a, // figcaption - 0x165: 0x35706, // ondrag - 0x166: 0x26b04, // high - 0x168: 0x3c303, // src - 0x169: 0x15706, // poster - 0x16b: 0x1670e, // annotation-xml - 0x16c: 0x5f704, // step - 0x16d: 0x4, // abbr - 0x16e: 0x1b06, // dialog - 0x170: 0x1202, // li - 0x172: 0x3ed02, // mo - 0x175: 0x1d803, // for - 0x176: 0x1a803, // ins - 0x178: 0x55504, // size - 0x179: 0x43210, // onlanguagechange - 0x17a: 0x8607, // default - 0x17b: 0x1a03, // bdi - 0x17c: 0x4d30a, // onpagehide - 0x17d: 0x6907, // dirname - 0x17e: 0x21404, // type - 0x17f: 0x1f204, // form - 0x181: 0x28509, // oncanplay - 0x182: 0x6103, // dfn - 0x183: 0x46308, // tabindex - 0x186: 0x6502, // em - 0x187: 0x27404, // lang - 0x189: 0x39108, // dropzone - 0x18a: 0x4080a, // onkeypress - 0x18b: 0x23c08, // datetime - 0x18c: 0x16204, // cols - 0x18d: 0x1, // a - 0x18e: 0x4420c, // onloadeddata - 0x190: 0xa605, // audio - 0x192: 0x2e05, // tbody - 0x193: 0x22c06, // method - 0x195: 0xf404, // loop - 0x196: 0x29606, // iframe - 0x198: 
0x2d504, // head - 0x19e: 0x5f108, // manifest - 0x19f: 0xb309, // autofocus - 0x1a0: 0x14904, // code - 0x1a1: 0x55906, // strong - 0x1a2: 0x30308, // multiple - 0x1a3: 0xc05, // param - 0x1a6: 0x21107, // enctype - 0x1a7: 0x5b304, // face - 0x1a8: 0xfd09, // plaintext - 0x1a9: 0x26e02, // h1 - 0x1aa: 0x59509, // onstalled - 0x1ad: 0x3d406, // script - 0x1ae: 0x2db06, // spacer - 0x1af: 0x55108, // onresize - 0x1b0: 0x4a20b, // onmouseover - 0x1b1: 0x5cc08, // onunload - 0x1b2: 0x56708, // onseeked - 0x1b4: 0x2140d, // typemustmatch - 0x1b5: 0x1cc06, // figure - 0x1b6: 0x4950a, // onmouseout - 0x1b7: 0x25e03, // pre - 0x1b8: 0x50705, // width - 0x1b9: 0x19906, // sorted - 0x1bb: 0x5704, // nobr - 0x1be: 0x5302, // tt - 0x1bf: 0x1105, // align - 0x1c0: 0x3e607, // oninput - 0x1c3: 0x41807, // onkeyup - 0x1c6: 0x1c00c, // onafterprint - 0x1c7: 0x210e, // accept-charset - 0x1c8: 0x33c06, // itemid - 0x1c9: 0x3e809, // inputmode - 0x1cb: 0x53306, // strike - 0x1cc: 0x5a903, // sub - 0x1cd: 0x10505, // track - 0x1ce: 0x38605, // start - 0x1d0: 0xd608, // basefont - 0x1d6: 0x1aa06, // source - 0x1d7: 0x18206, // legend - 0x1d8: 0x2d405, // thead - 0x1da: 0x8c05, // tfoot - 0x1dd: 0x1ec06, // object - 0x1de: 0x6e05, // media - 0x1df: 0x1670a, // annotation - 0x1e0: 0x20d0b, // formenctype - 0x1e2: 0x3d208, // noscript - 0x1e4: 0x55505, // sizes - 0x1e5: 0x1fc0c, // autocomplete - 0x1e6: 0x9504, // span - 0x1e7: 0x9808, // noframes - 0x1e8: 0x24b06, // target - 0x1e9: 0x38f06, // ondrop - 0x1ea: 0x2b306, // applet - 0x1ec: 0x5a08, // reversed - 0x1f0: 0x2a907, // isindex - 0x1f3: 0x27008, // hreflang - 0x1f5: 0x2f302, // h5 - 0x1f6: 0x4f307, // address - 0x1fa: 0x2e103, // max - 0x1fb: 0xc30b, // placeholder - 0x1fc: 0x2f608, // textarea - 0x1fe: 0x4ad09, // onmouseup - 0x1ff: 0x3800b, // ondragstart + 0x1: 0xe60a, // mediagroup + 0x2: 0x2e404, // lang + 0x4: 0x2c09, // accesskey + 0x5: 0x8b08, // frameset + 0x7: 0x63a08, // onselect + 0x8: 0x71106, // system + 0xa: 0x64905, // width + 0xc: 0x2890b, // formenctype + 0xd: 0x13702, // ol + 0xe: 0x3970b, // oncuechange + 0x10: 0x14b03, // bdo + 0x11: 0x11505, // audio + 0x12: 0x17a09, // draggable + 0x14: 0x2f105, // video + 0x15: 0x2b102, // mn + 0x16: 0x38704, // menu + 0x17: 0x2cf06, // poster + 0x19: 0xf606, // footer + 0x1a: 0x2a806, // method + 0x1b: 0x2b808, // datetime + 0x1c: 0x19507, // onabort + 0x1d: 0x460e, // updateviacache + 0x1e: 0xff05, // async + 0x1f: 0x49d06, // onload + 0x21: 0x11908, // oncancel + 0x22: 0x62908, // onseeked + 0x23: 0x30205, // image + 0x24: 0x5d812, // onrejectionhandled + 0x26: 0x17404, // link + 0x27: 0x51d06, // output + 0x28: 0x33104, // head + 0x29: 0x4ff0c, // onmouseleave + 0x2a: 0x57f07, // onpaste + 0x2b: 0x5a409, // onplaying + 0x2c: 0x1c407, // colspan + 0x2f: 0x1bf05, // color + 0x30: 0x5f504, // size + 0x31: 0x2e80a, // http-equiv + 0x33: 0x601, // i + 0x34: 0x5590a, // onpagehide + 0x35: 0x68c14, // onunhandledrejection + 0x37: 0x42a07, // onerror + 0x3a: 0x3b08, // basefont + 0x3f: 0x1303, // nav + 0x40: 0x17704, // kind + 0x41: 0x35708, // readonly + 0x42: 0x30806, // mglyph + 0x44: 0xb202, // li + 0x46: 0x2d506, // hidden + 0x47: 0x70e03, // svg + 0x48: 0x58304, // step + 0x49: 0x23f09, // integrity + 0x4a: 0x58606, // public + 0x4c: 0x1ab03, // col + 0x4d: 0x1870a, // blockquote + 0x4e: 0x34f02, // h5 + 0x50: 0x5b908, // progress + 0x51: 0x5f505, // sizes + 0x52: 0x34502, // h4 + 0x56: 0x33005, // thead + 0x57: 0xd607, // keytype + 0x58: 0x5b70a, // onprogress + 0x59: 0x44b09, // inputmode + 
0x5a: 0x3b109, // ondragend + 0x5d: 0x3a205, // oncut + 0x5e: 0x43706, // spacer + 0x5f: 0x1ab08, // colgroup + 0x62: 0x16502, // is + 0x65: 0x3c02, // as + 0x66: 0x54809, // onoffline + 0x67: 0x33706, // sorted + 0x69: 0x48d10, // onlanguagechange + 0x6c: 0x43d0c, // onhashchange + 0x6d: 0x9604, // name + 0x6e: 0xf505, // tfoot + 0x6f: 0x56104, // desc + 0x70: 0x33d03, // max + 0x72: 0x1ea06, // coords + 0x73: 0x30d02, // h3 + 0x74: 0x6e70e, // onbeforeunload + 0x75: 0x9c04, // rows + 0x76: 0x63c06, // select + 0x77: 0x9805, // meter + 0x78: 0x38b06, // itemid + 0x79: 0x53c0c, // onmousewheel + 0x7a: 0x5c006, // srcdoc + 0x7d: 0x1ba05, // track + 0x7f: 0x31f08, // itemtype + 0x82: 0xa402, // mo + 0x83: 0x41b08, // onchange + 0x84: 0x33107, // headers + 0x85: 0x5cc0c, // onratechange + 0x86: 0x60819, // onsecuritypolicyviolation + 0x88: 0x4a508, // datalist + 0x89: 0x4e80b, // onmousedown + 0x8a: 0x1ef04, // slot + 0x8b: 0x4b010, // onloadedmetadata + 0x8c: 0x1a06, // accept + 0x8d: 0x26806, // object + 0x91: 0x6b30e, // onvolumechange + 0x92: 0x2107, // charset + 0x93: 0x27613, // onautocompleteerror + 0x94: 0xc113, // allowpaymentrequest + 0x95: 0x2804, // body + 0x96: 0x10a07, // default + 0x97: 0x63c08, // selected + 0x98: 0x21e04, // face + 0x99: 0x1e505, // shape + 0x9b: 0x68408, // ontoggle + 0x9e: 0x64b02, // dt + 0x9f: 0xb604, // mark + 0xa1: 0xb01, // u + 0xa4: 0x6ab08, // onunload + 0xa5: 0x5d04, // loop + 0xa6: 0x16408, // disabled + 0xaa: 0x42307, // onended + 0xab: 0xb00a, // malignmark + 0xad: 0x67b09, // onsuspend + 0xae: 0x35105, // mtext + 0xaf: 0x64f06, // onsort + 0xb0: 0x19d08, // itemprop + 0xb3: 0x67109, // itemscope + 0xb4: 0x17305, // blink + 0xb6: 0x3b106, // ondrag + 0xb7: 0xa702, // ul + 0xb8: 0x26e04, // form + 0xb9: 0x12907, // sandbox + 0xba: 0x8b05, // frame + 0xbb: 0x1505, // value + 0xbc: 0x66209, // onstorage + 0xbf: 0xaa07, // acronym + 0xc0: 0x19a02, // rt + 0xc2: 0x202, // br + 0xc3: 0x22608, // fieldset + 0xc4: 0x2900d, // typemustmatch + 0xc5: 0xa208, // nomodule + 0xc6: 0x6c07, // noembed + 0xc7: 0x69e0d, // onbeforeprint + 0xc8: 0x19106, // button + 0xc9: 0x2f507, // onclick + 0xca: 0x70407, // summary + 0xcd: 0xfb04, // ruby + 0xce: 0x56405, // class + 0xcf: 0x3f40b, // ondragstart + 0xd0: 0x23107, // caption + 0xd4: 0xdd0e, // allowusermedia + 0xd5: 0x4cf0b, // onloadstart + 0xd9: 0x16b03, // div + 0xda: 0x4a904, // list + 0xdb: 0x32e04, // math + 0xdc: 0x44b05, // input + 0xdf: 0x3ea0a, // ondragover + 0xe0: 0x2de02, // h2 + 0xe2: 0x1b209, // plaintext + 0xe4: 0x4f30c, // onmouseenter + 0xe7: 0x47907, // checked + 0xe8: 0x47003, // pre + 0xea: 0x35f08, // multiple + 0xeb: 0xba03, // bdi + 0xec: 0x33d09, // maxlength + 0xed: 0xcf01, // q + 0xee: 0x61f0a, // onauxclick + 0xf0: 0x57c03, // wbr + 0xf2: 0x3b04, // base + 0xf3: 0x6e306, // option + 0xf5: 0x41310, // ondurationchange + 0xf7: 0x8908, // noframes + 0xf9: 0x40508, // dropzone + 0xfb: 0x67505, // scope + 0xfc: 0x8008, // reversed + 0xfd: 0x3ba0b, // ondragenter + 0xfe: 0x3fa05, // start + 0xff: 0x12f03, // xmp + 0x100: 0x5f907, // srclang + 0x101: 0x30703, // img + 0x104: 0x101, // b + 0x105: 0x25403, // for + 0x106: 0x10705, // aside + 0x107: 0x44907, // oninput + 0x108: 0x35604, // area + 0x109: 0x2a40a, // formmethod + 0x10a: 0x72604, // wrap + 0x10c: 0x23c02, // rp + 0x10d: 0x46b0a, // onkeypress + 0x10e: 0x6802, // tt + 0x110: 0x34702, // mi + 0x111: 0x36705, // muted + 0x112: 0xf303, // alt + 0x113: 0x5c504, // code + 0x114: 0x6e02, // em + 0x115: 0x3c50a, // ondragexit + 0x117: 
0x9f04, // span + 0x119: 0x6d708, // manifest + 0x11a: 0x38708, // menuitem + 0x11b: 0x58b07, // content + 0x11d: 0x6c109, // onwaiting + 0x11f: 0x4c609, // onloadend + 0x121: 0x37e0d, // oncontextmenu + 0x123: 0x56d06, // onblur + 0x124: 0x3fc07, // article + 0x125: 0x9303, // dir + 0x126: 0xef04, // ping + 0x127: 0x24c08, // required + 0x128: 0x45509, // oninvalid + 0x129: 0xb105, // align + 0x12b: 0x58a04, // icon + 0x12c: 0x64d02, // h6 + 0x12d: 0x1c404, // cols + 0x12e: 0x22e0a, // figcaption + 0x12f: 0x45e09, // onkeydown + 0x130: 0x66b08, // onsubmit + 0x131: 0x14d09, // oncanplay + 0x132: 0x70b03, // sup + 0x133: 0xc01, // p + 0x135: 0x40a09, // onemptied + 0x136: 0x39106, // oncopy + 0x137: 0x19c04, // cite + 0x138: 0x3a70a, // ondblclick + 0x13a: 0x50b0b, // onmousemove + 0x13c: 0x66d03, // sub + 0x13d: 0x48703, // rel + 0x13e: 0x5f08, // optgroup + 0x142: 0x9c07, // rowspan + 0x143: 0x37806, // source + 0x144: 0x21608, // noscript + 0x145: 0x1a304, // open + 0x146: 0x20403, // ins + 0x147: 0x2540d, // foreignObject + 0x148: 0x5ad0a, // onpopstate + 0x14a: 0x28d07, // enctype + 0x14b: 0x2760e, // onautocomplete + 0x14c: 0x35208, // textarea + 0x14e: 0x2780c, // autocomplete + 0x14f: 0x15702, // hr + 0x150: 0x1de08, // controls + 0x151: 0x10902, // id + 0x153: 0x2360c, // onafterprint + 0x155: 0x2610d, // foreignobject + 0x156: 0x32707, // marquee + 0x157: 0x59a07, // onpause + 0x158: 0x5e602, // dl + 0x159: 0x5206, // height + 0x15a: 0x34703, // min + 0x15b: 0x9307, // dirname + 0x15c: 0x1f209, // translate + 0x15d: 0x5604, // html + 0x15e: 0x34709, // minlength + 0x15f: 0x48607, // preload + 0x160: 0x71408, // template + 0x161: 0x3df0b, // ondragleave + 0x162: 0x3a02, // rb + 0x164: 0x5c003, // src + 0x165: 0x6dd06, // strong + 0x167: 0x7804, // samp + 0x168: 0x6f307, // address + 0x169: 0x55108, // ononline + 0x16b: 0x1310b, // placeholder + 0x16c: 0x2c406, // target + 0x16d: 0x20605, // small + 0x16e: 0x6ca07, // onwheel + 0x16f: 0x1c90a, // annotation + 0x170: 0x4740a, // spellcheck + 0x171: 0x7207, // details + 0x172: 0x10306, // canvas + 0x173: 0x12109, // autofocus + 0x174: 0xc05, // param + 0x176: 0x46308, // download + 0x177: 0x45203, // del + 0x178: 0x36c07, // onclose + 0x179: 0xb903, // kbd + 0x17a: 0x31906, // applet + 0x17b: 0x2e004, // href + 0x17c: 0x5f108, // onresize + 0x17e: 0x49d0c, // onloadeddata + 0x180: 0xcc02, // tr + 0x181: 0x2c00a, // formtarget + 0x182: 0x11005, // title + 0x183: 0x6ff05, // style + 0x184: 0xd206, // strike + 0x185: 0x59e06, // usemap + 0x186: 0x2fc06, // iframe + 0x187: 0x1004, // main + 0x189: 0x7b07, // picture + 0x18c: 0x31605, // ismap + 0x18e: 0x4a504, // data + 0x18f: 0x5905, // label + 0x191: 0x3d10e, // referrerpolicy + 0x192: 0x15602, // th + 0x194: 0x53606, // prompt + 0x195: 0x56807, // section + 0x197: 0x6d107, // optimum + 0x198: 0x2db04, // high + 0x199: 0x15c02, // h1 + 0x19a: 0x65909, // onstalled + 0x19b: 0x16d03, // var + 0x19c: 0x4204, // time + 0x19e: 0x67402, // ms + 0x19f: 0x33106, // header + 0x1a0: 0x4da09, // onmessage + 0x1a1: 0x1a605, // nonce + 0x1a2: 0x26e0a, // formaction + 0x1a3: 0x22006, // center + 0x1a4: 0x3704, // nobr + 0x1a5: 0x59505, // table + 0x1a6: 0x4a907, // listing + 0x1a7: 0x18106, // legend + 0x1a9: 0x29b09, // challenge + 0x1aa: 0x24806, // figure + 0x1ab: 0xe605, // media + 0x1ae: 0xd904, // type + 0x1af: 0x3f04, // font + 0x1b0: 0x4da0e, // onmessageerror + 0x1b1: 0x37108, // seamless + 0x1b2: 0x8703, // dfn + 0x1b3: 0x5c705, // defer + 0x1b4: 0xc303, // low + 0x1b5: 0x19a03, // rtc + 
0x1b6: 0x5230b, // onmouseover + 0x1b7: 0x2b20a, // novalidate + 0x1b8: 0x71c0a, // workertype + 0x1ba: 0x3cd07, // itemref + 0x1bd: 0x1, // a + 0x1be: 0x31803, // map + 0x1bf: 0x400c, // ontimeupdate + 0x1c0: 0x15e07, // bgsound + 0x1c1: 0x3206, // keygen + 0x1c2: 0x2705, // tbody + 0x1c5: 0x64406, // onshow + 0x1c7: 0x2501, // s + 0x1c8: 0x6607, // pattern + 0x1cc: 0x14d10, // oncanplaythrough + 0x1ce: 0x2d702, // dd + 0x1cf: 0x6f906, // srcset + 0x1d0: 0x17003, // big + 0x1d2: 0x65108, // sortable + 0x1d3: 0x48007, // onkeyup + 0x1d5: 0x5a406, // onplay + 0x1d7: 0x4b804, // meta + 0x1d8: 0x40306, // ondrop + 0x1da: 0x60008, // onscroll + 0x1db: 0x1fb0b, // crossorigin + 0x1dc: 0x5730a, // onpageshow + 0x1dd: 0x4, // abbr + 0x1de: 0x9202, // td + 0x1df: 0x58b0f, // contenteditable + 0x1e0: 0x27206, // action + 0x1e1: 0x1400b, // playsinline + 0x1e2: 0x43107, // onfocus + 0x1e3: 0x2e008, // hreflang + 0x1e5: 0x5160a, // onmouseout + 0x1e6: 0x5ea07, // onreset + 0x1e7: 0x13c08, // autoplay + 0x1e8: 0x63109, // onseeking + 0x1ea: 0x67506, // scoped + 0x1ec: 0x30a, // radiogroup + 0x1ee: 0x3800b, // contextmenu + 0x1ef: 0x52e09, // onmouseup + 0x1f1: 0x2ca06, // hgroup + 0x1f2: 0x2080f, // allowfullscreen + 0x1f3: 0x4be08, // tabindex + 0x1f6: 0x30f07, // isindex + 0x1f7: 0x1a0e, // accept-charset + 0x1f8: 0x2ae0e, // formnovalidate + 0x1fb: 0x1c90e, // annotation-xml + 0x1fc: 0x6e05, // embed + 0x1fd: 0x21806, // script + 0x1fe: 0xbb06, // dialog + 0x1ff: 0x1d707, // command } -const atomText = "abbradiogrouparamalignmarkbdialogaccept-charsetbodyaccesskey" + - "genavaluealtdetailsampatternobreversedfnoembedirnamediagroup" + - "ingasyncanvasidefaultfooterowspanoframesetitleaudionblurubya" + - "utofocusandboxmplaceholderautoplaybasefontimeupdatebdoncance" + - "labelooptgrouplaintextrackindisabledivarbgsoundlowbrbigblink" + - "blockquotebuttonabortranslatecodefercolgroupostercolorcolspa" + - "nnotation-xmlcommandraggablegendcontrolsmallcoordsortedcross" + - "originsourcefieldsetfigcaptionafterprintfigurequiredforeignO" + - "bjectforeignobjectformactionautocompleteerrorformenctypemust" + - "matchallengeformmethodformnovalidatetimeterformtargetheightm" + - "lhgroupreloadhiddenhigh1hreflanghttp-equivideoncanplaythroug" + - "h2iframeimageimglyph3isindexismappletitemscopeditemtypemarqu" + - "eematheaderspacermaxlength4minlength5mtextareadonlymultiplem" + - "utedonclickoncloseamlesspellcheckedoncontextmenuitemidoncuec" + - "hangeondblclickondragendondragenterondragleaveondragoverondr" + - "agstarticleondropzonemptiedondurationchangeonendedonerroronf" + - "ocusrcdocitempropenoscriptonhashchangeoninputmodeloninvalido" + - "nkeydownloadonkeypressrclangonkeyupublicontenteditableonlang" + - "uagechangeonloadeddatalistingonloadedmetadatabindexonloadsta" + - "rtonmessageonmousedownonmousemoveonmouseoutputonmouseoveronm" + - "ouseuponmousewheelonofflineononlineonpagehidesclassectionbef" + - "oreunloaddresshapeonpageshowidth6onpausemaponplayingonpopsta" + - "teonprogresstrikeytypeonratechangeonresetonresizestrongonscr" + - "ollonseekedonseekingonselectedonshowraponsortableonstalledon" + - "storageonsubmitemrefacenteronsuspendontoggleonunloadonvolume" + - "changeonwaitingoptimumanifestepromptoptionbeforeprintstylesu" + - "mmarysupsvgsystemplate" +const atomText = "abbradiogrouparamainavalueaccept-charsetbodyaccesskeygenobrb" + + "asefontimeupdateviacacheightmlabelooptgroupatternoembedetail" + + "sampictureversedfnoframesetdirnameterowspanomoduleacronymali" + + 
"gnmarkbdialogallowpaymentrequestrikeytypeallowusermediagroup" + + "ingaltfooterubyasyncanvasidefaultitleaudioncancelautofocusan" + + "dboxmplaceholderautoplaysinlinebdoncanplaythrough1bgsoundisa" + + "bledivarbigblinkindraggablegendblockquotebuttonabortcitempro" + + "penoncecolgrouplaintextrackcolorcolspannotation-xmlcommandco" + + "ntrolshapecoordslotranslatecrossoriginsmallowfullscreenoscri" + + "ptfacenterfieldsetfigcaptionafterprintegrityfigurequiredfore" + + "ignObjectforeignobjectformactionautocompleteerrorformenctype" + + "mustmatchallengeformmethodformnovalidatetimeformtargethgroup" + + "osterhiddenhigh2hreflanghttp-equivideonclickiframeimageimgly" + + "ph3isindexismappletitemtypemarqueematheadersortedmaxlength4m" + + "inlength5mtextareadonlymultiplemutedoncloseamlessourceoncont" + + "extmenuitemidoncopyoncuechangeoncutondblclickondragendondrag" + + "enterondragexitemreferrerpolicyondragleaveondragoverondragst" + + "articleondropzonemptiedondurationchangeonendedonerroronfocus" + + "paceronhashchangeoninputmodeloninvalidonkeydownloadonkeypres" + + "spellcheckedonkeyupreloadonlanguagechangeonloadeddatalisting" + + "onloadedmetadatabindexonloadendonloadstartonmessageerroronmo" + + "usedownonmouseenteronmouseleaveonmousemoveonmouseoutputonmou" + + "seoveronmouseupromptonmousewheelonofflineononlineonpagehides" + + "classectionbluronpageshowbronpastepublicontenteditableonpaus" + + "emaponplayingonpopstateonprogressrcdocodeferonratechangeonre" + + "jectionhandledonresetonresizesrclangonscrollonsecuritypolicy" + + "violationauxclickonseekedonseekingonselectedonshowidth6onsor" + + "tableonstalledonstorageonsubmitemscopedonsuspendontoggleonun" + + "handledrejectionbeforeprintonunloadonvolumechangeonwaitingon" + + "wheeloptimumanifestrongoptionbeforeunloaddressrcsetstylesumm" + + "arysupsvgsystemplateworkertypewrap" diff --git a/vendor/golang.org/x/net/html/atom/table_test.go b/vendor/golang.org/x/net/html/atom/table_test.go index 0f2ecce..8a30762 100644 --- a/vendor/golang.org/x/net/html/atom/table_test.go +++ b/vendor/golang.org/x/net/html/atom/table_test.go @@ -1,23 +1,29 @@ -// generated by go run gen.go -test; DO NOT EDIT +// Code generated by go generate gen.go; DO NOT EDIT. 
+ +//go:generate go run gen.go -test package atom var testAtomList = []string{ "a", "abbr", - "abbr", "accept", "accept-charset", "accesskey", + "acronym", "action", "address", "align", + "allowfullscreen", + "allowpaymentrequest", + "allowusermedia", "alt", "annotation", "annotation-xml", "applet", "area", "article", + "as", "aside", "async", "audio", @@ -43,7 +49,6 @@ var testAtomList = []string{ "charset", "checked", "cite", - "cite", "class", "code", "col", @@ -52,7 +57,6 @@ var testAtomList = []string{ "cols", "colspan", "command", - "command", "content", "contenteditable", "contextmenu", @@ -60,7 +64,6 @@ var testAtomList = []string{ "coords", "crossorigin", "data", - "data", "datalist", "datetime", "dd", @@ -93,7 +96,6 @@ var testAtomList = []string{ "foreignObject", "foreignobject", "form", - "form", "formaction", "formenctype", "formmethod", @@ -128,6 +130,8 @@ var testAtomList = []string{ "input", "inputmode", "ins", + "integrity", + "is", "isindex", "ismap", "itemid", @@ -140,7 +144,6 @@ var testAtomList = []string{ "keytype", "kind", "label", - "label", "lang", "legend", "li", @@ -149,6 +152,7 @@ var testAtomList = []string{ "listing", "loop", "low", + "main", "malignmark", "manifest", "map", @@ -179,6 +183,8 @@ var testAtomList = []string{ "nobr", "noembed", "noframes", + "nomodule", + "nonce", "noscript", "novalidate", "object", @@ -187,6 +193,7 @@ var testAtomList = []string{ "onafterprint", "onautocomplete", "onautocompleteerror", + "onauxclick", "onbeforeprint", "onbeforeunload", "onblur", @@ -197,11 +204,14 @@ var testAtomList = []string{ "onclick", "onclose", "oncontextmenu", + "oncopy", "oncuechange", + "oncut", "ondblclick", "ondrag", "ondragend", "ondragenter", + "ondragexit", "ondragleave", "ondragover", "ondragstart", @@ -221,9 +231,13 @@ var testAtomList = []string{ "onload", "onloadeddata", "onloadedmetadata", + "onloadend", "onloadstart", "onmessage", + "onmessageerror", "onmousedown", + "onmouseenter", + "onmouseleave", "onmousemove", "onmouseout", "onmouseover", @@ -233,15 +247,18 @@ var testAtomList = []string{ "ononline", "onpagehide", "onpageshow", + "onpaste", "onpause", "onplay", "onplaying", "onpopstate", "onprogress", "onratechange", + "onrejectionhandled", "onreset", "onresize", "onscroll", + "onsecuritypolicyviolation", "onseeked", "onseeking", "onselect", @@ -253,9 +270,11 @@ var testAtomList = []string{ "onsuspend", "ontimeupdate", "ontoggle", + "onunhandledrejection", "onunload", "onvolumechange", "onwaiting", + "onwheel", "open", "optgroup", "optimum", @@ -264,9 +283,11 @@ var testAtomList = []string{ "p", "param", "pattern", + "picture", "ping", "placeholder", "plaintext", + "playsinline", "poster", "pre", "preload", @@ -275,7 +296,9 @@ var testAtomList = []string{ "public", "q", "radiogroup", + "rb", "readonly", + "referrerpolicy", "rel", "required", "reversed", @@ -283,6 +306,7 @@ var testAtomList = []string{ "rowspan", "rp", "rt", + "rtc", "ruby", "s", "samp", @@ -297,23 +321,23 @@ var testAtomList = []string{ "shape", "size", "sizes", + "slot", "small", "sortable", "sorted", "source", "spacer", "span", - "span", "spellcheck", "src", "srcdoc", "srclang", + "srcset", "start", "step", "strike", "strong", "style", - "style", "sub", "summary", "sup", @@ -331,7 +355,6 @@ var testAtomList = []string{ "thead", "time", "title", - "title", "tr", "track", "translate", @@ -340,12 +363,14 @@ var testAtomList = []string{ "typemustmatch", "u", "ul", + "updateviacache", "usemap", "value", "var", "video", "wbr", "width", + "workertype", "wrap", "xmp", } diff 
--git a/vendor/golang.org/x/net/html/const.go b/vendor/golang.org/x/net/html/const.go index 52f651f..a3a918f 100644 --- a/vendor/golang.org/x/net/html/const.go +++ b/vendor/golang.org/x/net/html/const.go @@ -4,7 +4,7 @@ package html -// Section 12.2.3.2 of the HTML5 specification says "The following elements +// Section 12.2.4.2 of the HTML5 specification says "The following elements // have varying levels of special parsing rules". // https://html.spec.whatwg.org/multipage/syntax.html#the-stack-of-open-elements var isSpecialElementMap = map[string]bool{ @@ -52,10 +52,12 @@ var isSpecialElementMap = map[string]bool{ "iframe": true, "img": true, "input": true, - "isindex": true, + "isindex": true, // The 'isindex' element has been removed, but keep it for backwards compatibility. + "keygen": true, "li": true, "link": true, "listing": true, + "main": true, "marquee": true, "menu": true, "meta": true, @@ -95,8 +97,16 @@ func isSpecialElement(element *Node) bool { switch element.Namespace { case "", "html": return isSpecialElementMap[element.Data] + case "math": + switch element.Data { + case "mi", "mo", "mn", "ms", "mtext", "annotation-xml": + return true + } case "svg": - return element.Data == "foreignObject" + switch element.Data { + case "foreignObject", "desc", "title": + return true + } } return false } diff --git a/vendor/golang.org/x/net/html/doc.go b/vendor/golang.org/x/net/html/doc.go index 94f4968..822ed42 100644 --- a/vendor/golang.org/x/net/html/doc.go +++ b/vendor/golang.org/x/net/html/doc.go @@ -49,18 +49,18 @@ call to Next. For example, to extract an HTML page's anchor text: for { tt := z.Next() switch tt { - case ErrorToken: + case html.ErrorToken: return z.Err() - case TextToken: + case html.TextToken: if depth > 0 { // emitBytes should copy the []byte it receives, // if it doesn't process it immediately. 
emitBytes(z.Text()) } - case StartTagToken, EndTagToken: + case html.StartTagToken, html.EndTagToken: tn, _ := z.TagName() if len(tn) == 1 && tn[0] == 'a' { - if tt == StartTagToken { + if tt == html.StartTagToken { depth++ } else { depth-- diff --git a/vendor/golang.org/x/net/html/entity.go b/vendor/golang.org/x/net/html/entity.go index a50c04c..b628880 100644 --- a/vendor/golang.org/x/net/html/entity.go +++ b/vendor/golang.org/x/net/html/entity.go @@ -75,2083 +75,2083 @@ var entity = map[string]rune{ "Copf;": '\U00002102', "Coproduct;": '\U00002210', "CounterClockwiseContourIntegral;": '\U00002233', - "Cross;": '\U00002A2F', - "Cscr;": '\U0001D49E', - "Cup;": '\U000022D3', - "CupCap;": '\U0000224D', - "DD;": '\U00002145', - "DDotrahd;": '\U00002911', - "DJcy;": '\U00000402', - "DScy;": '\U00000405', - "DZcy;": '\U0000040F', - "Dagger;": '\U00002021', - "Darr;": '\U000021A1', - "Dashv;": '\U00002AE4', - "Dcaron;": '\U0000010E', - "Dcy;": '\U00000414', - "Del;": '\U00002207', - "Delta;": '\U00000394', - "Dfr;": '\U0001D507', - "DiacriticalAcute;": '\U000000B4', - "DiacriticalDot;": '\U000002D9', - "DiacriticalDoubleAcute;": '\U000002DD', - "DiacriticalGrave;": '\U00000060', - "DiacriticalTilde;": '\U000002DC', - "Diamond;": '\U000022C4', - "DifferentialD;": '\U00002146', - "Dopf;": '\U0001D53B', - "Dot;": '\U000000A8', - "DotDot;": '\U000020DC', - "DotEqual;": '\U00002250', - "DoubleContourIntegral;": '\U0000222F', - "DoubleDot;": '\U000000A8', - "DoubleDownArrow;": '\U000021D3', - "DoubleLeftArrow;": '\U000021D0', - "DoubleLeftRightArrow;": '\U000021D4', - "DoubleLeftTee;": '\U00002AE4', - "DoubleLongLeftArrow;": '\U000027F8', - "DoubleLongLeftRightArrow;": '\U000027FA', - "DoubleLongRightArrow;": '\U000027F9', - "DoubleRightArrow;": '\U000021D2', - "DoubleRightTee;": '\U000022A8', - "DoubleUpArrow;": '\U000021D1', - "DoubleUpDownArrow;": '\U000021D5', - "DoubleVerticalBar;": '\U00002225', - "DownArrow;": '\U00002193', - "DownArrowBar;": '\U00002913', - "DownArrowUpArrow;": '\U000021F5', - "DownBreve;": '\U00000311', - "DownLeftRightVector;": '\U00002950', - "DownLeftTeeVector;": '\U0000295E', - "DownLeftVector;": '\U000021BD', - "DownLeftVectorBar;": '\U00002956', - "DownRightTeeVector;": '\U0000295F', - "DownRightVector;": '\U000021C1', - "DownRightVectorBar;": '\U00002957', - "DownTee;": '\U000022A4', - "DownTeeArrow;": '\U000021A7', - "Downarrow;": '\U000021D3', - "Dscr;": '\U0001D49F', - "Dstrok;": '\U00000110', - "ENG;": '\U0000014A', - "ETH;": '\U000000D0', - "Eacute;": '\U000000C9', - "Ecaron;": '\U0000011A', - "Ecirc;": '\U000000CA', - "Ecy;": '\U0000042D', - "Edot;": '\U00000116', - "Efr;": '\U0001D508', - "Egrave;": '\U000000C8', - "Element;": '\U00002208', - "Emacr;": '\U00000112', - "EmptySmallSquare;": '\U000025FB', - "EmptyVerySmallSquare;": '\U000025AB', - "Eogon;": '\U00000118', - "Eopf;": '\U0001D53C', - "Epsilon;": '\U00000395', - "Equal;": '\U00002A75', - "EqualTilde;": '\U00002242', - "Equilibrium;": '\U000021CC', - "Escr;": '\U00002130', - "Esim;": '\U00002A73', - "Eta;": '\U00000397', - "Euml;": '\U000000CB', - "Exists;": '\U00002203', - "ExponentialE;": '\U00002147', - "Fcy;": '\U00000424', - "Ffr;": '\U0001D509', - "FilledSmallSquare;": '\U000025FC', - "FilledVerySmallSquare;": '\U000025AA', - "Fopf;": '\U0001D53D', - "ForAll;": '\U00002200', - "Fouriertrf;": '\U00002131', - "Fscr;": '\U00002131', - "GJcy;": '\U00000403', - "GT;": '\U0000003E', - "Gamma;": '\U00000393', - "Gammad;": '\U000003DC', - "Gbreve;": '\U0000011E', - "Gcedil;": '\U00000122', - "Gcirc;": 
'\U0000011C', - "Gcy;": '\U00000413', - "Gdot;": '\U00000120', - "Gfr;": '\U0001D50A', - "Gg;": '\U000022D9', - "Gopf;": '\U0001D53E', - "GreaterEqual;": '\U00002265', - "GreaterEqualLess;": '\U000022DB', - "GreaterFullEqual;": '\U00002267', - "GreaterGreater;": '\U00002AA2', - "GreaterLess;": '\U00002277', - "GreaterSlantEqual;": '\U00002A7E', - "GreaterTilde;": '\U00002273', - "Gscr;": '\U0001D4A2', - "Gt;": '\U0000226B', - "HARDcy;": '\U0000042A', - "Hacek;": '\U000002C7', - "Hat;": '\U0000005E', - "Hcirc;": '\U00000124', - "Hfr;": '\U0000210C', - "HilbertSpace;": '\U0000210B', - "Hopf;": '\U0000210D', - "HorizontalLine;": '\U00002500', - "Hscr;": '\U0000210B', - "Hstrok;": '\U00000126', - "HumpDownHump;": '\U0000224E', - "HumpEqual;": '\U0000224F', - "IEcy;": '\U00000415', - "IJlig;": '\U00000132', - "IOcy;": '\U00000401', - "Iacute;": '\U000000CD', - "Icirc;": '\U000000CE', - "Icy;": '\U00000418', - "Idot;": '\U00000130', - "Ifr;": '\U00002111', - "Igrave;": '\U000000CC', - "Im;": '\U00002111', - "Imacr;": '\U0000012A', - "ImaginaryI;": '\U00002148', - "Implies;": '\U000021D2', - "Int;": '\U0000222C', - "Integral;": '\U0000222B', - "Intersection;": '\U000022C2', - "InvisibleComma;": '\U00002063', - "InvisibleTimes;": '\U00002062', - "Iogon;": '\U0000012E', - "Iopf;": '\U0001D540', - "Iota;": '\U00000399', - "Iscr;": '\U00002110', - "Itilde;": '\U00000128', - "Iukcy;": '\U00000406', - "Iuml;": '\U000000CF', - "Jcirc;": '\U00000134', - "Jcy;": '\U00000419', - "Jfr;": '\U0001D50D', - "Jopf;": '\U0001D541', - "Jscr;": '\U0001D4A5', - "Jsercy;": '\U00000408', - "Jukcy;": '\U00000404', - "KHcy;": '\U00000425', - "KJcy;": '\U0000040C', - "Kappa;": '\U0000039A', - "Kcedil;": '\U00000136', - "Kcy;": '\U0000041A', - "Kfr;": '\U0001D50E', - "Kopf;": '\U0001D542', - "Kscr;": '\U0001D4A6', - "LJcy;": '\U00000409', - "LT;": '\U0000003C', - "Lacute;": '\U00000139', - "Lambda;": '\U0000039B', - "Lang;": '\U000027EA', - "Laplacetrf;": '\U00002112', - "Larr;": '\U0000219E', - "Lcaron;": '\U0000013D', - "Lcedil;": '\U0000013B', - "Lcy;": '\U0000041B', - "LeftAngleBracket;": '\U000027E8', - "LeftArrow;": '\U00002190', - "LeftArrowBar;": '\U000021E4', - "LeftArrowRightArrow;": '\U000021C6', - "LeftCeiling;": '\U00002308', - "LeftDoubleBracket;": '\U000027E6', - "LeftDownTeeVector;": '\U00002961', - "LeftDownVector;": '\U000021C3', - "LeftDownVectorBar;": '\U00002959', - "LeftFloor;": '\U0000230A', - "LeftRightArrow;": '\U00002194', - "LeftRightVector;": '\U0000294E', - "LeftTee;": '\U000022A3', - "LeftTeeArrow;": '\U000021A4', - "LeftTeeVector;": '\U0000295A', - "LeftTriangle;": '\U000022B2', - "LeftTriangleBar;": '\U000029CF', - "LeftTriangleEqual;": '\U000022B4', - "LeftUpDownVector;": '\U00002951', - "LeftUpTeeVector;": '\U00002960', - "LeftUpVector;": '\U000021BF', - "LeftUpVectorBar;": '\U00002958', - "LeftVector;": '\U000021BC', - "LeftVectorBar;": '\U00002952', - "Leftarrow;": '\U000021D0', - "Leftrightarrow;": '\U000021D4', - "LessEqualGreater;": '\U000022DA', - "LessFullEqual;": '\U00002266', - "LessGreater;": '\U00002276', - "LessLess;": '\U00002AA1', - "LessSlantEqual;": '\U00002A7D', - "LessTilde;": '\U00002272', - "Lfr;": '\U0001D50F', - "Ll;": '\U000022D8', - "Lleftarrow;": '\U000021DA', - "Lmidot;": '\U0000013F', - "LongLeftArrow;": '\U000027F5', - "LongLeftRightArrow;": '\U000027F7', - "LongRightArrow;": '\U000027F6', - "Longleftarrow;": '\U000027F8', - "Longleftrightarrow;": '\U000027FA', - "Longrightarrow;": '\U000027F9', - "Lopf;": '\U0001D543', - "LowerLeftArrow;": '\U00002199', - 
"LowerRightArrow;": '\U00002198', - "Lscr;": '\U00002112', - "Lsh;": '\U000021B0', - "Lstrok;": '\U00000141', - "Lt;": '\U0000226A', - "Map;": '\U00002905', - "Mcy;": '\U0000041C', - "MediumSpace;": '\U0000205F', - "Mellintrf;": '\U00002133', - "Mfr;": '\U0001D510', - "MinusPlus;": '\U00002213', - "Mopf;": '\U0001D544', - "Mscr;": '\U00002133', - "Mu;": '\U0000039C', - "NJcy;": '\U0000040A', - "Nacute;": '\U00000143', - "Ncaron;": '\U00000147', - "Ncedil;": '\U00000145', - "Ncy;": '\U0000041D', - "NegativeMediumSpace;": '\U0000200B', - "NegativeThickSpace;": '\U0000200B', - "NegativeThinSpace;": '\U0000200B', - "NegativeVeryThinSpace;": '\U0000200B', - "NestedGreaterGreater;": '\U0000226B', - "NestedLessLess;": '\U0000226A', - "NewLine;": '\U0000000A', - "Nfr;": '\U0001D511', - "NoBreak;": '\U00002060', - "NonBreakingSpace;": '\U000000A0', - "Nopf;": '\U00002115', - "Not;": '\U00002AEC', - "NotCongruent;": '\U00002262', - "NotCupCap;": '\U0000226D', - "NotDoubleVerticalBar;": '\U00002226', - "NotElement;": '\U00002209', - "NotEqual;": '\U00002260', - "NotExists;": '\U00002204', - "NotGreater;": '\U0000226F', - "NotGreaterEqual;": '\U00002271', - "NotGreaterLess;": '\U00002279', - "NotGreaterTilde;": '\U00002275', - "NotLeftTriangle;": '\U000022EA', - "NotLeftTriangleEqual;": '\U000022EC', - "NotLess;": '\U0000226E', - "NotLessEqual;": '\U00002270', - "NotLessGreater;": '\U00002278', - "NotLessTilde;": '\U00002274', - "NotPrecedes;": '\U00002280', - "NotPrecedesSlantEqual;": '\U000022E0', - "NotReverseElement;": '\U0000220C', - "NotRightTriangle;": '\U000022EB', - "NotRightTriangleEqual;": '\U000022ED', - "NotSquareSubsetEqual;": '\U000022E2', - "NotSquareSupersetEqual;": '\U000022E3', - "NotSubsetEqual;": '\U00002288', - "NotSucceeds;": '\U00002281', - "NotSucceedsSlantEqual;": '\U000022E1', - "NotSupersetEqual;": '\U00002289', - "NotTilde;": '\U00002241', - "NotTildeEqual;": '\U00002244', - "NotTildeFullEqual;": '\U00002247', - "NotTildeTilde;": '\U00002249', - "NotVerticalBar;": '\U00002224', - "Nscr;": '\U0001D4A9', - "Ntilde;": '\U000000D1', - "Nu;": '\U0000039D', - "OElig;": '\U00000152', - "Oacute;": '\U000000D3', - "Ocirc;": '\U000000D4', - "Ocy;": '\U0000041E', - "Odblac;": '\U00000150', - "Ofr;": '\U0001D512', - "Ograve;": '\U000000D2', - "Omacr;": '\U0000014C', - "Omega;": '\U000003A9', - "Omicron;": '\U0000039F', - "Oopf;": '\U0001D546', - "OpenCurlyDoubleQuote;": '\U0000201C', - "OpenCurlyQuote;": '\U00002018', - "Or;": '\U00002A54', - "Oscr;": '\U0001D4AA', - "Oslash;": '\U000000D8', - "Otilde;": '\U000000D5', - "Otimes;": '\U00002A37', - "Ouml;": '\U000000D6', - "OverBar;": '\U0000203E', - "OverBrace;": '\U000023DE', - "OverBracket;": '\U000023B4', - "OverParenthesis;": '\U000023DC', - "PartialD;": '\U00002202', - "Pcy;": '\U0000041F', - "Pfr;": '\U0001D513', - "Phi;": '\U000003A6', - "Pi;": '\U000003A0', - "PlusMinus;": '\U000000B1', - "Poincareplane;": '\U0000210C', - "Popf;": '\U00002119', - "Pr;": '\U00002ABB', - "Precedes;": '\U0000227A', - "PrecedesEqual;": '\U00002AAF', - "PrecedesSlantEqual;": '\U0000227C', - "PrecedesTilde;": '\U0000227E', - "Prime;": '\U00002033', - "Product;": '\U0000220F', - "Proportion;": '\U00002237', - "Proportional;": '\U0000221D', - "Pscr;": '\U0001D4AB', - "Psi;": '\U000003A8', - "QUOT;": '\U00000022', - "Qfr;": '\U0001D514', - "Qopf;": '\U0000211A', - "Qscr;": '\U0001D4AC', - "RBarr;": '\U00002910', - "REG;": '\U000000AE', - "Racute;": '\U00000154', - "Rang;": '\U000027EB', - "Rarr;": '\U000021A0', - "Rarrtl;": '\U00002916', - "Rcaron;": 
'\U00000158', - "Rcedil;": '\U00000156', - "Rcy;": '\U00000420', - "Re;": '\U0000211C', - "ReverseElement;": '\U0000220B', - "ReverseEquilibrium;": '\U000021CB', - "ReverseUpEquilibrium;": '\U0000296F', - "Rfr;": '\U0000211C', - "Rho;": '\U000003A1', - "RightAngleBracket;": '\U000027E9', - "RightArrow;": '\U00002192', - "RightArrowBar;": '\U000021E5', - "RightArrowLeftArrow;": '\U000021C4', - "RightCeiling;": '\U00002309', - "RightDoubleBracket;": '\U000027E7', - "RightDownTeeVector;": '\U0000295D', - "RightDownVector;": '\U000021C2', - "RightDownVectorBar;": '\U00002955', - "RightFloor;": '\U0000230B', - "RightTee;": '\U000022A2', - "RightTeeArrow;": '\U000021A6', - "RightTeeVector;": '\U0000295B', - "RightTriangle;": '\U000022B3', - "RightTriangleBar;": '\U000029D0', - "RightTriangleEqual;": '\U000022B5', - "RightUpDownVector;": '\U0000294F', - "RightUpTeeVector;": '\U0000295C', - "RightUpVector;": '\U000021BE', - "RightUpVectorBar;": '\U00002954', - "RightVector;": '\U000021C0', - "RightVectorBar;": '\U00002953', - "Rightarrow;": '\U000021D2', - "Ropf;": '\U0000211D', - "RoundImplies;": '\U00002970', - "Rrightarrow;": '\U000021DB', - "Rscr;": '\U0000211B', - "Rsh;": '\U000021B1', - "RuleDelayed;": '\U000029F4', - "SHCHcy;": '\U00000429', - "SHcy;": '\U00000428', - "SOFTcy;": '\U0000042C', - "Sacute;": '\U0000015A', - "Sc;": '\U00002ABC', - "Scaron;": '\U00000160', - "Scedil;": '\U0000015E', - "Scirc;": '\U0000015C', - "Scy;": '\U00000421', - "Sfr;": '\U0001D516', - "ShortDownArrow;": '\U00002193', - "ShortLeftArrow;": '\U00002190', - "ShortRightArrow;": '\U00002192', - "ShortUpArrow;": '\U00002191', - "Sigma;": '\U000003A3', - "SmallCircle;": '\U00002218', - "Sopf;": '\U0001D54A', - "Sqrt;": '\U0000221A', - "Square;": '\U000025A1', - "SquareIntersection;": '\U00002293', - "SquareSubset;": '\U0000228F', - "SquareSubsetEqual;": '\U00002291', - "SquareSuperset;": '\U00002290', - "SquareSupersetEqual;": '\U00002292', - "SquareUnion;": '\U00002294', - "Sscr;": '\U0001D4AE', - "Star;": '\U000022C6', - "Sub;": '\U000022D0', - "Subset;": '\U000022D0', - "SubsetEqual;": '\U00002286', - "Succeeds;": '\U0000227B', - "SucceedsEqual;": '\U00002AB0', - "SucceedsSlantEqual;": '\U0000227D', - "SucceedsTilde;": '\U0000227F', - "SuchThat;": '\U0000220B', - "Sum;": '\U00002211', - "Sup;": '\U000022D1', - "Superset;": '\U00002283', - "SupersetEqual;": '\U00002287', - "Supset;": '\U000022D1', - "THORN;": '\U000000DE', - "TRADE;": '\U00002122', - "TSHcy;": '\U0000040B', - "TScy;": '\U00000426', - "Tab;": '\U00000009', - "Tau;": '\U000003A4', - "Tcaron;": '\U00000164', - "Tcedil;": '\U00000162', - "Tcy;": '\U00000422', - "Tfr;": '\U0001D517', - "Therefore;": '\U00002234', - "Theta;": '\U00000398', - "ThinSpace;": '\U00002009', - "Tilde;": '\U0000223C', - "TildeEqual;": '\U00002243', - "TildeFullEqual;": '\U00002245', - "TildeTilde;": '\U00002248', - "Topf;": '\U0001D54B', - "TripleDot;": '\U000020DB', - "Tscr;": '\U0001D4AF', - "Tstrok;": '\U00000166', - "Uacute;": '\U000000DA', - "Uarr;": '\U0000219F', - "Uarrocir;": '\U00002949', - "Ubrcy;": '\U0000040E', - "Ubreve;": '\U0000016C', - "Ucirc;": '\U000000DB', - "Ucy;": '\U00000423', - "Udblac;": '\U00000170', - "Ufr;": '\U0001D518', - "Ugrave;": '\U000000D9', - "Umacr;": '\U0000016A', - "UnderBar;": '\U0000005F', - "UnderBrace;": '\U000023DF', - "UnderBracket;": '\U000023B5', - "UnderParenthesis;": '\U000023DD', - "Union;": '\U000022C3', - "UnionPlus;": '\U0000228E', - "Uogon;": '\U00000172', - "Uopf;": '\U0001D54C', - "UpArrow;": '\U00002191', - 
"UpArrowBar;": '\U00002912', - "UpArrowDownArrow;": '\U000021C5', - "UpDownArrow;": '\U00002195', - "UpEquilibrium;": '\U0000296E', - "UpTee;": '\U000022A5', - "UpTeeArrow;": '\U000021A5', - "Uparrow;": '\U000021D1', - "Updownarrow;": '\U000021D5', - "UpperLeftArrow;": '\U00002196', - "UpperRightArrow;": '\U00002197', - "Upsi;": '\U000003D2', - "Upsilon;": '\U000003A5', - "Uring;": '\U0000016E', - "Uscr;": '\U0001D4B0', - "Utilde;": '\U00000168', - "Uuml;": '\U000000DC', - "VDash;": '\U000022AB', - "Vbar;": '\U00002AEB', - "Vcy;": '\U00000412', - "Vdash;": '\U000022A9', - "Vdashl;": '\U00002AE6', - "Vee;": '\U000022C1', - "Verbar;": '\U00002016', - "Vert;": '\U00002016', - "VerticalBar;": '\U00002223', - "VerticalLine;": '\U0000007C', - "VerticalSeparator;": '\U00002758', - "VerticalTilde;": '\U00002240', - "VeryThinSpace;": '\U0000200A', - "Vfr;": '\U0001D519', - "Vopf;": '\U0001D54D', - "Vscr;": '\U0001D4B1', - "Vvdash;": '\U000022AA', - "Wcirc;": '\U00000174', - "Wedge;": '\U000022C0', - "Wfr;": '\U0001D51A', - "Wopf;": '\U0001D54E', - "Wscr;": '\U0001D4B2', - "Xfr;": '\U0001D51B', - "Xi;": '\U0000039E', - "Xopf;": '\U0001D54F', - "Xscr;": '\U0001D4B3', - "YAcy;": '\U0000042F', - "YIcy;": '\U00000407', - "YUcy;": '\U0000042E', - "Yacute;": '\U000000DD', - "Ycirc;": '\U00000176', - "Ycy;": '\U0000042B', - "Yfr;": '\U0001D51C', - "Yopf;": '\U0001D550', - "Yscr;": '\U0001D4B4', - "Yuml;": '\U00000178', - "ZHcy;": '\U00000416', - "Zacute;": '\U00000179', - "Zcaron;": '\U0000017D', - "Zcy;": '\U00000417', - "Zdot;": '\U0000017B', - "ZeroWidthSpace;": '\U0000200B', - "Zeta;": '\U00000396', - "Zfr;": '\U00002128', - "Zopf;": '\U00002124', - "Zscr;": '\U0001D4B5', - "aacute;": '\U000000E1', - "abreve;": '\U00000103', - "ac;": '\U0000223E', - "acd;": '\U0000223F', - "acirc;": '\U000000E2', - "acute;": '\U000000B4', - "acy;": '\U00000430', - "aelig;": '\U000000E6', - "af;": '\U00002061', - "afr;": '\U0001D51E', - "agrave;": '\U000000E0', - "alefsym;": '\U00002135', - "aleph;": '\U00002135', - "alpha;": '\U000003B1', - "amacr;": '\U00000101', - "amalg;": '\U00002A3F', - "amp;": '\U00000026', - "and;": '\U00002227', - "andand;": '\U00002A55', - "andd;": '\U00002A5C', - "andslope;": '\U00002A58', - "andv;": '\U00002A5A', - "ang;": '\U00002220', - "ange;": '\U000029A4', - "angle;": '\U00002220', - "angmsd;": '\U00002221', - "angmsdaa;": '\U000029A8', - "angmsdab;": '\U000029A9', - "angmsdac;": '\U000029AA', - "angmsdad;": '\U000029AB', - "angmsdae;": '\U000029AC', - "angmsdaf;": '\U000029AD', - "angmsdag;": '\U000029AE', - "angmsdah;": '\U000029AF', - "angrt;": '\U0000221F', - "angrtvb;": '\U000022BE', - "angrtvbd;": '\U0000299D', - "angsph;": '\U00002222', - "angst;": '\U000000C5', - "angzarr;": '\U0000237C', - "aogon;": '\U00000105', - "aopf;": '\U0001D552', - "ap;": '\U00002248', - "apE;": '\U00002A70', - "apacir;": '\U00002A6F', - "ape;": '\U0000224A', - "apid;": '\U0000224B', - "apos;": '\U00000027', - "approx;": '\U00002248', - "approxeq;": '\U0000224A', - "aring;": '\U000000E5', - "ascr;": '\U0001D4B6', - "ast;": '\U0000002A', - "asymp;": '\U00002248', - "asympeq;": '\U0000224D', - "atilde;": '\U000000E3', - "auml;": '\U000000E4', - "awconint;": '\U00002233', - "awint;": '\U00002A11', - "bNot;": '\U00002AED', - "backcong;": '\U0000224C', - "backepsilon;": '\U000003F6', - "backprime;": '\U00002035', - "backsim;": '\U0000223D', - "backsimeq;": '\U000022CD', - "barvee;": '\U000022BD', - "barwed;": '\U00002305', - "barwedge;": '\U00002305', - "bbrk;": '\U000023B5', - "bbrktbrk;": '\U000023B6', - 
"bcong;": '\U0000224C', - "bcy;": '\U00000431', - "bdquo;": '\U0000201E', - "becaus;": '\U00002235', - "because;": '\U00002235', - "bemptyv;": '\U000029B0', - "bepsi;": '\U000003F6', - "bernou;": '\U0000212C', - "beta;": '\U000003B2', - "beth;": '\U00002136', - "between;": '\U0000226C', - "bfr;": '\U0001D51F', - "bigcap;": '\U000022C2', - "bigcirc;": '\U000025EF', - "bigcup;": '\U000022C3', - "bigodot;": '\U00002A00', - "bigoplus;": '\U00002A01', - "bigotimes;": '\U00002A02', - "bigsqcup;": '\U00002A06', - "bigstar;": '\U00002605', - "bigtriangledown;": '\U000025BD', - "bigtriangleup;": '\U000025B3', - "biguplus;": '\U00002A04', - "bigvee;": '\U000022C1', - "bigwedge;": '\U000022C0', - "bkarow;": '\U0000290D', - "blacklozenge;": '\U000029EB', - "blacksquare;": '\U000025AA', - "blacktriangle;": '\U000025B4', - "blacktriangledown;": '\U000025BE', - "blacktriangleleft;": '\U000025C2', - "blacktriangleright;": '\U000025B8', - "blank;": '\U00002423', - "blk12;": '\U00002592', - "blk14;": '\U00002591', - "blk34;": '\U00002593', - "block;": '\U00002588', - "bnot;": '\U00002310', - "bopf;": '\U0001D553', - "bot;": '\U000022A5', - "bottom;": '\U000022A5', - "bowtie;": '\U000022C8', - "boxDL;": '\U00002557', - "boxDR;": '\U00002554', - "boxDl;": '\U00002556', - "boxDr;": '\U00002553', - "boxH;": '\U00002550', - "boxHD;": '\U00002566', - "boxHU;": '\U00002569', - "boxHd;": '\U00002564', - "boxHu;": '\U00002567', - "boxUL;": '\U0000255D', - "boxUR;": '\U0000255A', - "boxUl;": '\U0000255C', - "boxUr;": '\U00002559', - "boxV;": '\U00002551', - "boxVH;": '\U0000256C', - "boxVL;": '\U00002563', - "boxVR;": '\U00002560', - "boxVh;": '\U0000256B', - "boxVl;": '\U00002562', - "boxVr;": '\U0000255F', - "boxbox;": '\U000029C9', - "boxdL;": '\U00002555', - "boxdR;": '\U00002552', - "boxdl;": '\U00002510', - "boxdr;": '\U0000250C', - "boxh;": '\U00002500', - "boxhD;": '\U00002565', - "boxhU;": '\U00002568', - "boxhd;": '\U0000252C', - "boxhu;": '\U00002534', - "boxminus;": '\U0000229F', - "boxplus;": '\U0000229E', - "boxtimes;": '\U000022A0', - "boxuL;": '\U0000255B', - "boxuR;": '\U00002558', - "boxul;": '\U00002518', - "boxur;": '\U00002514', - "boxv;": '\U00002502', - "boxvH;": '\U0000256A', - "boxvL;": '\U00002561', - "boxvR;": '\U0000255E', - "boxvh;": '\U0000253C', - "boxvl;": '\U00002524', - "boxvr;": '\U0000251C', - "bprime;": '\U00002035', - "breve;": '\U000002D8', - "brvbar;": '\U000000A6', - "bscr;": '\U0001D4B7', - "bsemi;": '\U0000204F', - "bsim;": '\U0000223D', - "bsime;": '\U000022CD', - "bsol;": '\U0000005C', - "bsolb;": '\U000029C5', - "bsolhsub;": '\U000027C8', - "bull;": '\U00002022', - "bullet;": '\U00002022', - "bump;": '\U0000224E', - "bumpE;": '\U00002AAE', - "bumpe;": '\U0000224F', - "bumpeq;": '\U0000224F', - "cacute;": '\U00000107', - "cap;": '\U00002229', - "capand;": '\U00002A44', - "capbrcup;": '\U00002A49', - "capcap;": '\U00002A4B', - "capcup;": '\U00002A47', - "capdot;": '\U00002A40', - "caret;": '\U00002041', - "caron;": '\U000002C7', - "ccaps;": '\U00002A4D', - "ccaron;": '\U0000010D', - "ccedil;": '\U000000E7', - "ccirc;": '\U00000109', - "ccups;": '\U00002A4C', - "ccupssm;": '\U00002A50', - "cdot;": '\U0000010B', - "cedil;": '\U000000B8', - "cemptyv;": '\U000029B2', - "cent;": '\U000000A2', - "centerdot;": '\U000000B7', - "cfr;": '\U0001D520', - "chcy;": '\U00000447', - "check;": '\U00002713', - "checkmark;": '\U00002713', - "chi;": '\U000003C7', - "cir;": '\U000025CB', - "cirE;": '\U000029C3', - "circ;": '\U000002C6', - "circeq;": '\U00002257', - "circlearrowleft;": 
'\U000021BA', - "circlearrowright;": '\U000021BB', - "circledR;": '\U000000AE', - "circledS;": '\U000024C8', - "circledast;": '\U0000229B', - "circledcirc;": '\U0000229A', - "circleddash;": '\U0000229D', - "cire;": '\U00002257', - "cirfnint;": '\U00002A10', - "cirmid;": '\U00002AEF', - "cirscir;": '\U000029C2', - "clubs;": '\U00002663', - "clubsuit;": '\U00002663', - "colon;": '\U0000003A', - "colone;": '\U00002254', - "coloneq;": '\U00002254', - "comma;": '\U0000002C', - "commat;": '\U00000040', - "comp;": '\U00002201', - "compfn;": '\U00002218', - "complement;": '\U00002201', - "complexes;": '\U00002102', - "cong;": '\U00002245', - "congdot;": '\U00002A6D', - "conint;": '\U0000222E', - "copf;": '\U0001D554', - "coprod;": '\U00002210', - "copy;": '\U000000A9', - "copysr;": '\U00002117', - "crarr;": '\U000021B5', - "cross;": '\U00002717', - "cscr;": '\U0001D4B8', - "csub;": '\U00002ACF', - "csube;": '\U00002AD1', - "csup;": '\U00002AD0', - "csupe;": '\U00002AD2', - "ctdot;": '\U000022EF', - "cudarrl;": '\U00002938', - "cudarrr;": '\U00002935', - "cuepr;": '\U000022DE', - "cuesc;": '\U000022DF', - "cularr;": '\U000021B6', - "cularrp;": '\U0000293D', - "cup;": '\U0000222A', - "cupbrcap;": '\U00002A48', - "cupcap;": '\U00002A46', - "cupcup;": '\U00002A4A', - "cupdot;": '\U0000228D', - "cupor;": '\U00002A45', - "curarr;": '\U000021B7', - "curarrm;": '\U0000293C', - "curlyeqprec;": '\U000022DE', - "curlyeqsucc;": '\U000022DF', - "curlyvee;": '\U000022CE', - "curlywedge;": '\U000022CF', - "curren;": '\U000000A4', - "curvearrowleft;": '\U000021B6', - "curvearrowright;": '\U000021B7', - "cuvee;": '\U000022CE', - "cuwed;": '\U000022CF', - "cwconint;": '\U00002232', - "cwint;": '\U00002231', - "cylcty;": '\U0000232D', - "dArr;": '\U000021D3', - "dHar;": '\U00002965', - "dagger;": '\U00002020', - "daleth;": '\U00002138', - "darr;": '\U00002193', - "dash;": '\U00002010', - "dashv;": '\U000022A3', - "dbkarow;": '\U0000290F', - "dblac;": '\U000002DD', - "dcaron;": '\U0000010F', - "dcy;": '\U00000434', - "dd;": '\U00002146', - "ddagger;": '\U00002021', - "ddarr;": '\U000021CA', - "ddotseq;": '\U00002A77', - "deg;": '\U000000B0', - "delta;": '\U000003B4', - "demptyv;": '\U000029B1', - "dfisht;": '\U0000297F', - "dfr;": '\U0001D521', - "dharl;": '\U000021C3', - "dharr;": '\U000021C2', - "diam;": '\U000022C4', - "diamond;": '\U000022C4', - "diamondsuit;": '\U00002666', - "diams;": '\U00002666', - "die;": '\U000000A8', - "digamma;": '\U000003DD', - "disin;": '\U000022F2', - "div;": '\U000000F7', - "divide;": '\U000000F7', - "divideontimes;": '\U000022C7', - "divonx;": '\U000022C7', - "djcy;": '\U00000452', - "dlcorn;": '\U0000231E', - "dlcrop;": '\U0000230D', - "dollar;": '\U00000024', - "dopf;": '\U0001D555', - "dot;": '\U000002D9', - "doteq;": '\U00002250', - "doteqdot;": '\U00002251', - "dotminus;": '\U00002238', - "dotplus;": '\U00002214', - "dotsquare;": '\U000022A1', - "doublebarwedge;": '\U00002306', - "downarrow;": '\U00002193', - "downdownarrows;": '\U000021CA', - "downharpoonleft;": '\U000021C3', - "downharpoonright;": '\U000021C2', - "drbkarow;": '\U00002910', - "drcorn;": '\U0000231F', - "drcrop;": '\U0000230C', - "dscr;": '\U0001D4B9', - "dscy;": '\U00000455', - "dsol;": '\U000029F6', - "dstrok;": '\U00000111', - "dtdot;": '\U000022F1', - "dtri;": '\U000025BF', - "dtrif;": '\U000025BE', - "duarr;": '\U000021F5', - "duhar;": '\U0000296F', - "dwangle;": '\U000029A6', - "dzcy;": '\U0000045F', - "dzigrarr;": '\U000027FF', - "eDDot;": '\U00002A77', - "eDot;": '\U00002251', - "eacute;": '\U000000E9', 
- "easter;": '\U00002A6E', - "ecaron;": '\U0000011B', - "ecir;": '\U00002256', - "ecirc;": '\U000000EA', - "ecolon;": '\U00002255', - "ecy;": '\U0000044D', - "edot;": '\U00000117', - "ee;": '\U00002147', - "efDot;": '\U00002252', - "efr;": '\U0001D522', - "eg;": '\U00002A9A', - "egrave;": '\U000000E8', - "egs;": '\U00002A96', - "egsdot;": '\U00002A98', - "el;": '\U00002A99', - "elinters;": '\U000023E7', - "ell;": '\U00002113', - "els;": '\U00002A95', - "elsdot;": '\U00002A97', - "emacr;": '\U00000113', - "empty;": '\U00002205', - "emptyset;": '\U00002205', - "emptyv;": '\U00002205', - "emsp;": '\U00002003', - "emsp13;": '\U00002004', - "emsp14;": '\U00002005', - "eng;": '\U0000014B', - "ensp;": '\U00002002', - "eogon;": '\U00000119', - "eopf;": '\U0001D556', - "epar;": '\U000022D5', - "eparsl;": '\U000029E3', - "eplus;": '\U00002A71', - "epsi;": '\U000003B5', - "epsilon;": '\U000003B5', - "epsiv;": '\U000003F5', - "eqcirc;": '\U00002256', - "eqcolon;": '\U00002255', - "eqsim;": '\U00002242', - "eqslantgtr;": '\U00002A96', - "eqslantless;": '\U00002A95', - "equals;": '\U0000003D', - "equest;": '\U0000225F', - "equiv;": '\U00002261', - "equivDD;": '\U00002A78', - "eqvparsl;": '\U000029E5', - "erDot;": '\U00002253', - "erarr;": '\U00002971', - "escr;": '\U0000212F', - "esdot;": '\U00002250', - "esim;": '\U00002242', - "eta;": '\U000003B7', - "eth;": '\U000000F0', - "euml;": '\U000000EB', - "euro;": '\U000020AC', - "excl;": '\U00000021', - "exist;": '\U00002203', - "expectation;": '\U00002130', - "exponentiale;": '\U00002147', - "fallingdotseq;": '\U00002252', - "fcy;": '\U00000444', - "female;": '\U00002640', - "ffilig;": '\U0000FB03', - "fflig;": '\U0000FB00', - "ffllig;": '\U0000FB04', - "ffr;": '\U0001D523', - "filig;": '\U0000FB01', - "flat;": '\U0000266D', - "fllig;": '\U0000FB02', - "fltns;": '\U000025B1', - "fnof;": '\U00000192', - "fopf;": '\U0001D557', - "forall;": '\U00002200', - "fork;": '\U000022D4', - "forkv;": '\U00002AD9', - "fpartint;": '\U00002A0D', - "frac12;": '\U000000BD', - "frac13;": '\U00002153', - "frac14;": '\U000000BC', - "frac15;": '\U00002155', - "frac16;": '\U00002159', - "frac18;": '\U0000215B', - "frac23;": '\U00002154', - "frac25;": '\U00002156', - "frac34;": '\U000000BE', - "frac35;": '\U00002157', - "frac38;": '\U0000215C', - "frac45;": '\U00002158', - "frac56;": '\U0000215A', - "frac58;": '\U0000215D', - "frac78;": '\U0000215E', - "frasl;": '\U00002044', - "frown;": '\U00002322', - "fscr;": '\U0001D4BB', - "gE;": '\U00002267', - "gEl;": '\U00002A8C', - "gacute;": '\U000001F5', - "gamma;": '\U000003B3', - "gammad;": '\U000003DD', - "gap;": '\U00002A86', - "gbreve;": '\U0000011F', - "gcirc;": '\U0000011D', - "gcy;": '\U00000433', - "gdot;": '\U00000121', - "ge;": '\U00002265', - "gel;": '\U000022DB', - "geq;": '\U00002265', - "geqq;": '\U00002267', - "geqslant;": '\U00002A7E', - "ges;": '\U00002A7E', - "gescc;": '\U00002AA9', - "gesdot;": '\U00002A80', - "gesdoto;": '\U00002A82', - "gesdotol;": '\U00002A84', - "gesles;": '\U00002A94', - "gfr;": '\U0001D524', - "gg;": '\U0000226B', - "ggg;": '\U000022D9', - "gimel;": '\U00002137', - "gjcy;": '\U00000453', - "gl;": '\U00002277', - "glE;": '\U00002A92', - "gla;": '\U00002AA5', - "glj;": '\U00002AA4', - "gnE;": '\U00002269', - "gnap;": '\U00002A8A', - "gnapprox;": '\U00002A8A', - "gne;": '\U00002A88', - "gneq;": '\U00002A88', - "gneqq;": '\U00002269', - "gnsim;": '\U000022E7', - "gopf;": '\U0001D558', - "grave;": '\U00000060', - "gscr;": '\U0000210A', - "gsim;": '\U00002273', - "gsime;": '\U00002A8E', - "gsiml;": 
'\U00002A90', - "gt;": '\U0000003E', - "gtcc;": '\U00002AA7', - "gtcir;": '\U00002A7A', - "gtdot;": '\U000022D7', - "gtlPar;": '\U00002995', - "gtquest;": '\U00002A7C', - "gtrapprox;": '\U00002A86', - "gtrarr;": '\U00002978', - "gtrdot;": '\U000022D7', - "gtreqless;": '\U000022DB', - "gtreqqless;": '\U00002A8C', - "gtrless;": '\U00002277', - "gtrsim;": '\U00002273', - "hArr;": '\U000021D4', - "hairsp;": '\U0000200A', - "half;": '\U000000BD', - "hamilt;": '\U0000210B', - "hardcy;": '\U0000044A', - "harr;": '\U00002194', - "harrcir;": '\U00002948', - "harrw;": '\U000021AD', - "hbar;": '\U0000210F', - "hcirc;": '\U00000125', - "hearts;": '\U00002665', - "heartsuit;": '\U00002665', - "hellip;": '\U00002026', - "hercon;": '\U000022B9', - "hfr;": '\U0001D525', - "hksearow;": '\U00002925', - "hkswarow;": '\U00002926', - "hoarr;": '\U000021FF', - "homtht;": '\U0000223B', - "hookleftarrow;": '\U000021A9', - "hookrightarrow;": '\U000021AA', - "hopf;": '\U0001D559', - "horbar;": '\U00002015', - "hscr;": '\U0001D4BD', - "hslash;": '\U0000210F', - "hstrok;": '\U00000127', - "hybull;": '\U00002043', - "hyphen;": '\U00002010', - "iacute;": '\U000000ED', - "ic;": '\U00002063', - "icirc;": '\U000000EE', - "icy;": '\U00000438', - "iecy;": '\U00000435', - "iexcl;": '\U000000A1', - "iff;": '\U000021D4', - "ifr;": '\U0001D526', - "igrave;": '\U000000EC', - "ii;": '\U00002148', - "iiiint;": '\U00002A0C', - "iiint;": '\U0000222D', - "iinfin;": '\U000029DC', - "iiota;": '\U00002129', - "ijlig;": '\U00000133', - "imacr;": '\U0000012B', - "image;": '\U00002111', - "imagline;": '\U00002110', - "imagpart;": '\U00002111', - "imath;": '\U00000131', - "imof;": '\U000022B7', - "imped;": '\U000001B5', - "in;": '\U00002208', - "incare;": '\U00002105', - "infin;": '\U0000221E', - "infintie;": '\U000029DD', - "inodot;": '\U00000131', - "int;": '\U0000222B', - "intcal;": '\U000022BA', - "integers;": '\U00002124', - "intercal;": '\U000022BA', - "intlarhk;": '\U00002A17', - "intprod;": '\U00002A3C', - "iocy;": '\U00000451', - "iogon;": '\U0000012F', - "iopf;": '\U0001D55A', - "iota;": '\U000003B9', - "iprod;": '\U00002A3C', - "iquest;": '\U000000BF', - "iscr;": '\U0001D4BE', - "isin;": '\U00002208', - "isinE;": '\U000022F9', - "isindot;": '\U000022F5', - "isins;": '\U000022F4', - "isinsv;": '\U000022F3', - "isinv;": '\U00002208', - "it;": '\U00002062', - "itilde;": '\U00000129', - "iukcy;": '\U00000456', - "iuml;": '\U000000EF', - "jcirc;": '\U00000135', - "jcy;": '\U00000439', - "jfr;": '\U0001D527', - "jmath;": '\U00000237', - "jopf;": '\U0001D55B', - "jscr;": '\U0001D4BF', - "jsercy;": '\U00000458', - "jukcy;": '\U00000454', - "kappa;": '\U000003BA', - "kappav;": '\U000003F0', - "kcedil;": '\U00000137', - "kcy;": '\U0000043A', - "kfr;": '\U0001D528', - "kgreen;": '\U00000138', - "khcy;": '\U00000445', - "kjcy;": '\U0000045C', - "kopf;": '\U0001D55C', - "kscr;": '\U0001D4C0', - "lAarr;": '\U000021DA', - "lArr;": '\U000021D0', - "lAtail;": '\U0000291B', - "lBarr;": '\U0000290E', - "lE;": '\U00002266', - "lEg;": '\U00002A8B', - "lHar;": '\U00002962', - "lacute;": '\U0000013A', - "laemptyv;": '\U000029B4', - "lagran;": '\U00002112', - "lambda;": '\U000003BB', - "lang;": '\U000027E8', - "langd;": '\U00002991', - "langle;": '\U000027E8', - "lap;": '\U00002A85', - "laquo;": '\U000000AB', - "larr;": '\U00002190', - "larrb;": '\U000021E4', - "larrbfs;": '\U0000291F', - "larrfs;": '\U0000291D', - "larrhk;": '\U000021A9', - "larrlp;": '\U000021AB', - "larrpl;": '\U00002939', - "larrsim;": '\U00002973', - "larrtl;": '\U000021A2', - 
"lat;": '\U00002AAB', - "latail;": '\U00002919', - "late;": '\U00002AAD', - "lbarr;": '\U0000290C', - "lbbrk;": '\U00002772', - "lbrace;": '\U0000007B', - "lbrack;": '\U0000005B', - "lbrke;": '\U0000298B', - "lbrksld;": '\U0000298F', - "lbrkslu;": '\U0000298D', - "lcaron;": '\U0000013E', - "lcedil;": '\U0000013C', - "lceil;": '\U00002308', - "lcub;": '\U0000007B', - "lcy;": '\U0000043B', - "ldca;": '\U00002936', - "ldquo;": '\U0000201C', - "ldquor;": '\U0000201E', - "ldrdhar;": '\U00002967', - "ldrushar;": '\U0000294B', - "ldsh;": '\U000021B2', - "le;": '\U00002264', - "leftarrow;": '\U00002190', - "leftarrowtail;": '\U000021A2', - "leftharpoondown;": '\U000021BD', - "leftharpoonup;": '\U000021BC', - "leftleftarrows;": '\U000021C7', - "leftrightarrow;": '\U00002194', - "leftrightarrows;": '\U000021C6', - "leftrightharpoons;": '\U000021CB', - "leftrightsquigarrow;": '\U000021AD', - "leftthreetimes;": '\U000022CB', - "leg;": '\U000022DA', - "leq;": '\U00002264', - "leqq;": '\U00002266', - "leqslant;": '\U00002A7D', - "les;": '\U00002A7D', - "lescc;": '\U00002AA8', - "lesdot;": '\U00002A7F', - "lesdoto;": '\U00002A81', - "lesdotor;": '\U00002A83', - "lesges;": '\U00002A93', - "lessapprox;": '\U00002A85', - "lessdot;": '\U000022D6', - "lesseqgtr;": '\U000022DA', - "lesseqqgtr;": '\U00002A8B', - "lessgtr;": '\U00002276', - "lesssim;": '\U00002272', - "lfisht;": '\U0000297C', - "lfloor;": '\U0000230A', - "lfr;": '\U0001D529', - "lg;": '\U00002276', - "lgE;": '\U00002A91', - "lhard;": '\U000021BD', - "lharu;": '\U000021BC', - "lharul;": '\U0000296A', - "lhblk;": '\U00002584', - "ljcy;": '\U00000459', - "ll;": '\U0000226A', - "llarr;": '\U000021C7', - "llcorner;": '\U0000231E', - "llhard;": '\U0000296B', - "lltri;": '\U000025FA', - "lmidot;": '\U00000140', - "lmoust;": '\U000023B0', - "lmoustache;": '\U000023B0', - "lnE;": '\U00002268', - "lnap;": '\U00002A89', - "lnapprox;": '\U00002A89', - "lne;": '\U00002A87', - "lneq;": '\U00002A87', - "lneqq;": '\U00002268', - "lnsim;": '\U000022E6', - "loang;": '\U000027EC', - "loarr;": '\U000021FD', - "lobrk;": '\U000027E6', - "longleftarrow;": '\U000027F5', - "longleftrightarrow;": '\U000027F7', - "longmapsto;": '\U000027FC', - "longrightarrow;": '\U000027F6', - "looparrowleft;": '\U000021AB', - "looparrowright;": '\U000021AC', - "lopar;": '\U00002985', - "lopf;": '\U0001D55D', - "loplus;": '\U00002A2D', - "lotimes;": '\U00002A34', - "lowast;": '\U00002217', - "lowbar;": '\U0000005F', - "loz;": '\U000025CA', - "lozenge;": '\U000025CA', - "lozf;": '\U000029EB', - "lpar;": '\U00000028', - "lparlt;": '\U00002993', - "lrarr;": '\U000021C6', - "lrcorner;": '\U0000231F', - "lrhar;": '\U000021CB', - "lrhard;": '\U0000296D', - "lrm;": '\U0000200E', - "lrtri;": '\U000022BF', - "lsaquo;": '\U00002039', - "lscr;": '\U0001D4C1', - "lsh;": '\U000021B0', - "lsim;": '\U00002272', - "lsime;": '\U00002A8D', - "lsimg;": '\U00002A8F', - "lsqb;": '\U0000005B', - "lsquo;": '\U00002018', - "lsquor;": '\U0000201A', - "lstrok;": '\U00000142', - "lt;": '\U0000003C', - "ltcc;": '\U00002AA6', - "ltcir;": '\U00002A79', - "ltdot;": '\U000022D6', - "lthree;": '\U000022CB', - "ltimes;": '\U000022C9', - "ltlarr;": '\U00002976', - "ltquest;": '\U00002A7B', - "ltrPar;": '\U00002996', - "ltri;": '\U000025C3', - "ltrie;": '\U000022B4', - "ltrif;": '\U000025C2', - "lurdshar;": '\U0000294A', - "luruhar;": '\U00002966', - "mDDot;": '\U0000223A', - "macr;": '\U000000AF', - "male;": '\U00002642', - "malt;": '\U00002720', - "maltese;": '\U00002720', - "map;": '\U000021A6', - "mapsto;": 
'\U000021A6', - "mapstodown;": '\U000021A7', - "mapstoleft;": '\U000021A4', - "mapstoup;": '\U000021A5', - "marker;": '\U000025AE', - "mcomma;": '\U00002A29', - "mcy;": '\U0000043C', - "mdash;": '\U00002014', - "measuredangle;": '\U00002221', - "mfr;": '\U0001D52A', - "mho;": '\U00002127', - "micro;": '\U000000B5', - "mid;": '\U00002223', - "midast;": '\U0000002A', - "midcir;": '\U00002AF0', - "middot;": '\U000000B7', - "minus;": '\U00002212', - "minusb;": '\U0000229F', - "minusd;": '\U00002238', - "minusdu;": '\U00002A2A', - "mlcp;": '\U00002ADB', - "mldr;": '\U00002026', - "mnplus;": '\U00002213', - "models;": '\U000022A7', - "mopf;": '\U0001D55E', - "mp;": '\U00002213', - "mscr;": '\U0001D4C2', - "mstpos;": '\U0000223E', - "mu;": '\U000003BC', - "multimap;": '\U000022B8', - "mumap;": '\U000022B8', - "nLeftarrow;": '\U000021CD', - "nLeftrightarrow;": '\U000021CE', - "nRightarrow;": '\U000021CF', - "nVDash;": '\U000022AF', - "nVdash;": '\U000022AE', - "nabla;": '\U00002207', - "nacute;": '\U00000144', - "nap;": '\U00002249', - "napos;": '\U00000149', - "napprox;": '\U00002249', - "natur;": '\U0000266E', - "natural;": '\U0000266E', - "naturals;": '\U00002115', - "nbsp;": '\U000000A0', - "ncap;": '\U00002A43', - "ncaron;": '\U00000148', - "ncedil;": '\U00000146', - "ncong;": '\U00002247', - "ncup;": '\U00002A42', - "ncy;": '\U0000043D', - "ndash;": '\U00002013', - "ne;": '\U00002260', - "neArr;": '\U000021D7', - "nearhk;": '\U00002924', - "nearr;": '\U00002197', - "nearrow;": '\U00002197', - "nequiv;": '\U00002262', - "nesear;": '\U00002928', - "nexist;": '\U00002204', - "nexists;": '\U00002204', - "nfr;": '\U0001D52B', - "nge;": '\U00002271', - "ngeq;": '\U00002271', - "ngsim;": '\U00002275', - "ngt;": '\U0000226F', - "ngtr;": '\U0000226F', - "nhArr;": '\U000021CE', - "nharr;": '\U000021AE', - "nhpar;": '\U00002AF2', - "ni;": '\U0000220B', - "nis;": '\U000022FC', - "nisd;": '\U000022FA', - "niv;": '\U0000220B', - "njcy;": '\U0000045A', - "nlArr;": '\U000021CD', - "nlarr;": '\U0000219A', - "nldr;": '\U00002025', - "nle;": '\U00002270', - "nleftarrow;": '\U0000219A', - "nleftrightarrow;": '\U000021AE', - "nleq;": '\U00002270', - "nless;": '\U0000226E', - "nlsim;": '\U00002274', - "nlt;": '\U0000226E', - "nltri;": '\U000022EA', - "nltrie;": '\U000022EC', - "nmid;": '\U00002224', - "nopf;": '\U0001D55F', - "not;": '\U000000AC', - "notin;": '\U00002209', - "notinva;": '\U00002209', - "notinvb;": '\U000022F7', - "notinvc;": '\U000022F6', - "notni;": '\U0000220C', - "notniva;": '\U0000220C', - "notnivb;": '\U000022FE', - "notnivc;": '\U000022FD', - "npar;": '\U00002226', - "nparallel;": '\U00002226', - "npolint;": '\U00002A14', - "npr;": '\U00002280', - "nprcue;": '\U000022E0', - "nprec;": '\U00002280', - "nrArr;": '\U000021CF', - "nrarr;": '\U0000219B', - "nrightarrow;": '\U0000219B', - "nrtri;": '\U000022EB', - "nrtrie;": '\U000022ED', - "nsc;": '\U00002281', - "nsccue;": '\U000022E1', - "nscr;": '\U0001D4C3', - "nshortmid;": '\U00002224', - "nshortparallel;": '\U00002226', - "nsim;": '\U00002241', - "nsime;": '\U00002244', - "nsimeq;": '\U00002244', - "nsmid;": '\U00002224', - "nspar;": '\U00002226', - "nsqsube;": '\U000022E2', - "nsqsupe;": '\U000022E3', - "nsub;": '\U00002284', - "nsube;": '\U00002288', - "nsubseteq;": '\U00002288', - "nsucc;": '\U00002281', - "nsup;": '\U00002285', - "nsupe;": '\U00002289', - "nsupseteq;": '\U00002289', - "ntgl;": '\U00002279', - "ntilde;": '\U000000F1', - "ntlg;": '\U00002278', - "ntriangleleft;": '\U000022EA', - "ntrianglelefteq;": '\U000022EC', - 
"ntriangleright;": '\U000022EB', - "ntrianglerighteq;": '\U000022ED', - "nu;": '\U000003BD', - "num;": '\U00000023', - "numero;": '\U00002116', - "numsp;": '\U00002007', - "nvDash;": '\U000022AD', - "nvHarr;": '\U00002904', - "nvdash;": '\U000022AC', - "nvinfin;": '\U000029DE', - "nvlArr;": '\U00002902', - "nvrArr;": '\U00002903', - "nwArr;": '\U000021D6', - "nwarhk;": '\U00002923', - "nwarr;": '\U00002196', - "nwarrow;": '\U00002196', - "nwnear;": '\U00002927', - "oS;": '\U000024C8', - "oacute;": '\U000000F3', - "oast;": '\U0000229B', - "ocir;": '\U0000229A', - "ocirc;": '\U000000F4', - "ocy;": '\U0000043E', - "odash;": '\U0000229D', - "odblac;": '\U00000151', - "odiv;": '\U00002A38', - "odot;": '\U00002299', - "odsold;": '\U000029BC', - "oelig;": '\U00000153', - "ofcir;": '\U000029BF', - "ofr;": '\U0001D52C', - "ogon;": '\U000002DB', - "ograve;": '\U000000F2', - "ogt;": '\U000029C1', - "ohbar;": '\U000029B5', - "ohm;": '\U000003A9', - "oint;": '\U0000222E', - "olarr;": '\U000021BA', - "olcir;": '\U000029BE', - "olcross;": '\U000029BB', - "oline;": '\U0000203E', - "olt;": '\U000029C0', - "omacr;": '\U0000014D', - "omega;": '\U000003C9', - "omicron;": '\U000003BF', - "omid;": '\U000029B6', - "ominus;": '\U00002296', - "oopf;": '\U0001D560', - "opar;": '\U000029B7', - "operp;": '\U000029B9', - "oplus;": '\U00002295', - "or;": '\U00002228', - "orarr;": '\U000021BB', - "ord;": '\U00002A5D', - "order;": '\U00002134', - "orderof;": '\U00002134', - "ordf;": '\U000000AA', - "ordm;": '\U000000BA', - "origof;": '\U000022B6', - "oror;": '\U00002A56', - "orslope;": '\U00002A57', - "orv;": '\U00002A5B', - "oscr;": '\U00002134', - "oslash;": '\U000000F8', - "osol;": '\U00002298', - "otilde;": '\U000000F5', - "otimes;": '\U00002297', - "otimesas;": '\U00002A36', - "ouml;": '\U000000F6', - "ovbar;": '\U0000233D', - "par;": '\U00002225', - "para;": '\U000000B6', - "parallel;": '\U00002225', - "parsim;": '\U00002AF3', - "parsl;": '\U00002AFD', - "part;": '\U00002202', - "pcy;": '\U0000043F', - "percnt;": '\U00000025', - "period;": '\U0000002E', - "permil;": '\U00002030', - "perp;": '\U000022A5', - "pertenk;": '\U00002031', - "pfr;": '\U0001D52D', - "phi;": '\U000003C6', - "phiv;": '\U000003D5', - "phmmat;": '\U00002133', - "phone;": '\U0000260E', - "pi;": '\U000003C0', - "pitchfork;": '\U000022D4', - "piv;": '\U000003D6', - "planck;": '\U0000210F', - "planckh;": '\U0000210E', - "plankv;": '\U0000210F', - "plus;": '\U0000002B', - "plusacir;": '\U00002A23', - "plusb;": '\U0000229E', - "pluscir;": '\U00002A22', - "plusdo;": '\U00002214', - "plusdu;": '\U00002A25', - "pluse;": '\U00002A72', - "plusmn;": '\U000000B1', - "plussim;": '\U00002A26', - "plustwo;": '\U00002A27', - "pm;": '\U000000B1', - "pointint;": '\U00002A15', - "popf;": '\U0001D561', - "pound;": '\U000000A3', - "pr;": '\U0000227A', - "prE;": '\U00002AB3', - "prap;": '\U00002AB7', - "prcue;": '\U0000227C', - "pre;": '\U00002AAF', - "prec;": '\U0000227A', - "precapprox;": '\U00002AB7', - "preccurlyeq;": '\U0000227C', - "preceq;": '\U00002AAF', - "precnapprox;": '\U00002AB9', - "precneqq;": '\U00002AB5', - "precnsim;": '\U000022E8', - "precsim;": '\U0000227E', - "prime;": '\U00002032', - "primes;": '\U00002119', - "prnE;": '\U00002AB5', - "prnap;": '\U00002AB9', - "prnsim;": '\U000022E8', - "prod;": '\U0000220F', - "profalar;": '\U0000232E', - "profline;": '\U00002312', - "profsurf;": '\U00002313', - "prop;": '\U0000221D', - "propto;": '\U0000221D', - "prsim;": '\U0000227E', - "prurel;": '\U000022B0', - "pscr;": '\U0001D4C5', - "psi;": '\U000003C8', 
- "puncsp;": '\U00002008', - "qfr;": '\U0001D52E', - "qint;": '\U00002A0C', - "qopf;": '\U0001D562', - "qprime;": '\U00002057', - "qscr;": '\U0001D4C6', - "quaternions;": '\U0000210D', - "quatint;": '\U00002A16', - "quest;": '\U0000003F', - "questeq;": '\U0000225F', - "quot;": '\U00000022', - "rAarr;": '\U000021DB', - "rArr;": '\U000021D2', - "rAtail;": '\U0000291C', - "rBarr;": '\U0000290F', - "rHar;": '\U00002964', - "racute;": '\U00000155', - "radic;": '\U0000221A', - "raemptyv;": '\U000029B3', - "rang;": '\U000027E9', - "rangd;": '\U00002992', - "range;": '\U000029A5', - "rangle;": '\U000027E9', - "raquo;": '\U000000BB', - "rarr;": '\U00002192', - "rarrap;": '\U00002975', - "rarrb;": '\U000021E5', - "rarrbfs;": '\U00002920', - "rarrc;": '\U00002933', - "rarrfs;": '\U0000291E', - "rarrhk;": '\U000021AA', - "rarrlp;": '\U000021AC', - "rarrpl;": '\U00002945', - "rarrsim;": '\U00002974', - "rarrtl;": '\U000021A3', - "rarrw;": '\U0000219D', - "ratail;": '\U0000291A', - "ratio;": '\U00002236', - "rationals;": '\U0000211A', - "rbarr;": '\U0000290D', - "rbbrk;": '\U00002773', - "rbrace;": '\U0000007D', - "rbrack;": '\U0000005D', - "rbrke;": '\U0000298C', - "rbrksld;": '\U0000298E', - "rbrkslu;": '\U00002990', - "rcaron;": '\U00000159', - "rcedil;": '\U00000157', - "rceil;": '\U00002309', - "rcub;": '\U0000007D', - "rcy;": '\U00000440', - "rdca;": '\U00002937', - "rdldhar;": '\U00002969', - "rdquo;": '\U0000201D', - "rdquor;": '\U0000201D', - "rdsh;": '\U000021B3', - "real;": '\U0000211C', - "realine;": '\U0000211B', - "realpart;": '\U0000211C', - "reals;": '\U0000211D', - "rect;": '\U000025AD', - "reg;": '\U000000AE', - "rfisht;": '\U0000297D', - "rfloor;": '\U0000230B', - "rfr;": '\U0001D52F', - "rhard;": '\U000021C1', - "rharu;": '\U000021C0', - "rharul;": '\U0000296C', - "rho;": '\U000003C1', - "rhov;": '\U000003F1', - "rightarrow;": '\U00002192', - "rightarrowtail;": '\U000021A3', - "rightharpoondown;": '\U000021C1', - "rightharpoonup;": '\U000021C0', - "rightleftarrows;": '\U000021C4', - "rightleftharpoons;": '\U000021CC', - "rightrightarrows;": '\U000021C9', - "rightsquigarrow;": '\U0000219D', - "rightthreetimes;": '\U000022CC', - "ring;": '\U000002DA', - "risingdotseq;": '\U00002253', - "rlarr;": '\U000021C4', - "rlhar;": '\U000021CC', - "rlm;": '\U0000200F', - "rmoust;": '\U000023B1', - "rmoustache;": '\U000023B1', - "rnmid;": '\U00002AEE', - "roang;": '\U000027ED', - "roarr;": '\U000021FE', - "robrk;": '\U000027E7', - "ropar;": '\U00002986', - "ropf;": '\U0001D563', - "roplus;": '\U00002A2E', - "rotimes;": '\U00002A35', - "rpar;": '\U00000029', - "rpargt;": '\U00002994', - "rppolint;": '\U00002A12', - "rrarr;": '\U000021C9', - "rsaquo;": '\U0000203A', - "rscr;": '\U0001D4C7', - "rsh;": '\U000021B1', - "rsqb;": '\U0000005D', - "rsquo;": '\U00002019', - "rsquor;": '\U00002019', - "rthree;": '\U000022CC', - "rtimes;": '\U000022CA', - "rtri;": '\U000025B9', - "rtrie;": '\U000022B5', - "rtrif;": '\U000025B8', - "rtriltri;": '\U000029CE', - "ruluhar;": '\U00002968', - "rx;": '\U0000211E', - "sacute;": '\U0000015B', - "sbquo;": '\U0000201A', - "sc;": '\U0000227B', - "scE;": '\U00002AB4', - "scap;": '\U00002AB8', - "scaron;": '\U00000161', - "sccue;": '\U0000227D', - "sce;": '\U00002AB0', - "scedil;": '\U0000015F', - "scirc;": '\U0000015D', - "scnE;": '\U00002AB6', - "scnap;": '\U00002ABA', - "scnsim;": '\U000022E9', - "scpolint;": '\U00002A13', - "scsim;": '\U0000227F', - "scy;": '\U00000441', - "sdot;": '\U000022C5', - "sdotb;": '\U000022A1', - "sdote;": '\U00002A66', - "seArr;": 
'\U000021D8', - "searhk;": '\U00002925', - "searr;": '\U00002198', - "searrow;": '\U00002198', - "sect;": '\U000000A7', - "semi;": '\U0000003B', - "seswar;": '\U00002929', - "setminus;": '\U00002216', - "setmn;": '\U00002216', - "sext;": '\U00002736', - "sfr;": '\U0001D530', - "sfrown;": '\U00002322', - "sharp;": '\U0000266F', - "shchcy;": '\U00000449', - "shcy;": '\U00000448', - "shortmid;": '\U00002223', - "shortparallel;": '\U00002225', - "shy;": '\U000000AD', - "sigma;": '\U000003C3', - "sigmaf;": '\U000003C2', - "sigmav;": '\U000003C2', - "sim;": '\U0000223C', - "simdot;": '\U00002A6A', - "sime;": '\U00002243', - "simeq;": '\U00002243', - "simg;": '\U00002A9E', - "simgE;": '\U00002AA0', - "siml;": '\U00002A9D', - "simlE;": '\U00002A9F', - "simne;": '\U00002246', - "simplus;": '\U00002A24', - "simrarr;": '\U00002972', - "slarr;": '\U00002190', - "smallsetminus;": '\U00002216', - "smashp;": '\U00002A33', - "smeparsl;": '\U000029E4', - "smid;": '\U00002223', - "smile;": '\U00002323', - "smt;": '\U00002AAA', - "smte;": '\U00002AAC', - "softcy;": '\U0000044C', - "sol;": '\U0000002F', - "solb;": '\U000029C4', - "solbar;": '\U0000233F', - "sopf;": '\U0001D564', - "spades;": '\U00002660', - "spadesuit;": '\U00002660', - "spar;": '\U00002225', - "sqcap;": '\U00002293', - "sqcup;": '\U00002294', - "sqsub;": '\U0000228F', - "sqsube;": '\U00002291', - "sqsubset;": '\U0000228F', - "sqsubseteq;": '\U00002291', - "sqsup;": '\U00002290', - "sqsupe;": '\U00002292', - "sqsupset;": '\U00002290', - "sqsupseteq;": '\U00002292', - "squ;": '\U000025A1', - "square;": '\U000025A1', - "squarf;": '\U000025AA', - "squf;": '\U000025AA', - "srarr;": '\U00002192', - "sscr;": '\U0001D4C8', - "ssetmn;": '\U00002216', - "ssmile;": '\U00002323', - "sstarf;": '\U000022C6', - "star;": '\U00002606', - "starf;": '\U00002605', - "straightepsilon;": '\U000003F5', - "straightphi;": '\U000003D5', - "strns;": '\U000000AF', - "sub;": '\U00002282', - "subE;": '\U00002AC5', - "subdot;": '\U00002ABD', - "sube;": '\U00002286', - "subedot;": '\U00002AC3', - "submult;": '\U00002AC1', - "subnE;": '\U00002ACB', - "subne;": '\U0000228A', - "subplus;": '\U00002ABF', - "subrarr;": '\U00002979', - "subset;": '\U00002282', - "subseteq;": '\U00002286', - "subseteqq;": '\U00002AC5', - "subsetneq;": '\U0000228A', - "subsetneqq;": '\U00002ACB', - "subsim;": '\U00002AC7', - "subsub;": '\U00002AD5', - "subsup;": '\U00002AD3', - "succ;": '\U0000227B', - "succapprox;": '\U00002AB8', - "succcurlyeq;": '\U0000227D', - "succeq;": '\U00002AB0', - "succnapprox;": '\U00002ABA', - "succneqq;": '\U00002AB6', - "succnsim;": '\U000022E9', - "succsim;": '\U0000227F', - "sum;": '\U00002211', - "sung;": '\U0000266A', - "sup;": '\U00002283', - "sup1;": '\U000000B9', - "sup2;": '\U000000B2', - "sup3;": '\U000000B3', - "supE;": '\U00002AC6', - "supdot;": '\U00002ABE', - "supdsub;": '\U00002AD8', - "supe;": '\U00002287', - "supedot;": '\U00002AC4', - "suphsol;": '\U000027C9', - "suphsub;": '\U00002AD7', - "suplarr;": '\U0000297B', - "supmult;": '\U00002AC2', - "supnE;": '\U00002ACC', - "supne;": '\U0000228B', - "supplus;": '\U00002AC0', - "supset;": '\U00002283', - "supseteq;": '\U00002287', - "supseteqq;": '\U00002AC6', - "supsetneq;": '\U0000228B', - "supsetneqq;": '\U00002ACC', - "supsim;": '\U00002AC8', - "supsub;": '\U00002AD4', - "supsup;": '\U00002AD6', - "swArr;": '\U000021D9', - "swarhk;": '\U00002926', - "swarr;": '\U00002199', - "swarrow;": '\U00002199', - "swnwar;": '\U0000292A', - "szlig;": '\U000000DF', - "target;": '\U00002316', - "tau;": 
'\U000003C4', - "tbrk;": '\U000023B4', - "tcaron;": '\U00000165', - "tcedil;": '\U00000163', - "tcy;": '\U00000442', - "tdot;": '\U000020DB', - "telrec;": '\U00002315', - "tfr;": '\U0001D531', - "there4;": '\U00002234', - "therefore;": '\U00002234', - "theta;": '\U000003B8', - "thetasym;": '\U000003D1', - "thetav;": '\U000003D1', - "thickapprox;": '\U00002248', - "thicksim;": '\U0000223C', - "thinsp;": '\U00002009', - "thkap;": '\U00002248', - "thksim;": '\U0000223C', - "thorn;": '\U000000FE', - "tilde;": '\U000002DC', - "times;": '\U000000D7', - "timesb;": '\U000022A0', - "timesbar;": '\U00002A31', - "timesd;": '\U00002A30', - "tint;": '\U0000222D', - "toea;": '\U00002928', - "top;": '\U000022A4', - "topbot;": '\U00002336', - "topcir;": '\U00002AF1', - "topf;": '\U0001D565', - "topfork;": '\U00002ADA', - "tosa;": '\U00002929', - "tprime;": '\U00002034', - "trade;": '\U00002122', - "triangle;": '\U000025B5', - "triangledown;": '\U000025BF', - "triangleleft;": '\U000025C3', - "trianglelefteq;": '\U000022B4', - "triangleq;": '\U0000225C', - "triangleright;": '\U000025B9', - "trianglerighteq;": '\U000022B5', - "tridot;": '\U000025EC', - "trie;": '\U0000225C', - "triminus;": '\U00002A3A', - "triplus;": '\U00002A39', - "trisb;": '\U000029CD', - "tritime;": '\U00002A3B', - "trpezium;": '\U000023E2', - "tscr;": '\U0001D4C9', - "tscy;": '\U00000446', - "tshcy;": '\U0000045B', - "tstrok;": '\U00000167', - "twixt;": '\U0000226C', - "twoheadleftarrow;": '\U0000219E', - "twoheadrightarrow;": '\U000021A0', - "uArr;": '\U000021D1', - "uHar;": '\U00002963', - "uacute;": '\U000000FA', - "uarr;": '\U00002191', - "ubrcy;": '\U0000045E', - "ubreve;": '\U0000016D', - "ucirc;": '\U000000FB', - "ucy;": '\U00000443', - "udarr;": '\U000021C5', - "udblac;": '\U00000171', - "udhar;": '\U0000296E', - "ufisht;": '\U0000297E', - "ufr;": '\U0001D532', - "ugrave;": '\U000000F9', - "uharl;": '\U000021BF', - "uharr;": '\U000021BE', - "uhblk;": '\U00002580', - "ulcorn;": '\U0000231C', - "ulcorner;": '\U0000231C', - "ulcrop;": '\U0000230F', - "ultri;": '\U000025F8', - "umacr;": '\U0000016B', - "uml;": '\U000000A8', - "uogon;": '\U00000173', - "uopf;": '\U0001D566', - "uparrow;": '\U00002191', - "updownarrow;": '\U00002195', - "upharpoonleft;": '\U000021BF', - "upharpoonright;": '\U000021BE', - "uplus;": '\U0000228E', - "upsi;": '\U000003C5', - "upsih;": '\U000003D2', - "upsilon;": '\U000003C5', - "upuparrows;": '\U000021C8', - "urcorn;": '\U0000231D', - "urcorner;": '\U0000231D', - "urcrop;": '\U0000230E', - "uring;": '\U0000016F', - "urtri;": '\U000025F9', - "uscr;": '\U0001D4CA', - "utdot;": '\U000022F0', - "utilde;": '\U00000169', - "utri;": '\U000025B5', - "utrif;": '\U000025B4', - "uuarr;": '\U000021C8', - "uuml;": '\U000000FC', - "uwangle;": '\U000029A7', - "vArr;": '\U000021D5', - "vBar;": '\U00002AE8', - "vBarv;": '\U00002AE9', - "vDash;": '\U000022A8', - "vangrt;": '\U0000299C', - "varepsilon;": '\U000003F5', - "varkappa;": '\U000003F0', - "varnothing;": '\U00002205', - "varphi;": '\U000003D5', - "varpi;": '\U000003D6', - "varpropto;": '\U0000221D', - "varr;": '\U00002195', - "varrho;": '\U000003F1', - "varsigma;": '\U000003C2', - "vartheta;": '\U000003D1', - "vartriangleleft;": '\U000022B2', - "vartriangleright;": '\U000022B3', - "vcy;": '\U00000432', - "vdash;": '\U000022A2', - "vee;": '\U00002228', - "veebar;": '\U000022BB', - "veeeq;": '\U0000225A', - "vellip;": '\U000022EE', - "verbar;": '\U0000007C', - "vert;": '\U0000007C', - "vfr;": '\U0001D533', - "vltri;": '\U000022B2', - "vopf;": '\U0001D567', - 
"vprop;": '\U0000221D', - "vrtri;": '\U000022B3', - "vscr;": '\U0001D4CB', - "vzigzag;": '\U0000299A', - "wcirc;": '\U00000175', - "wedbar;": '\U00002A5F', - "wedge;": '\U00002227', - "wedgeq;": '\U00002259', - "weierp;": '\U00002118', - "wfr;": '\U0001D534', - "wopf;": '\U0001D568', - "wp;": '\U00002118', - "wr;": '\U00002240', - "wreath;": '\U00002240', - "wscr;": '\U0001D4CC', - "xcap;": '\U000022C2', - "xcirc;": '\U000025EF', - "xcup;": '\U000022C3', - "xdtri;": '\U000025BD', - "xfr;": '\U0001D535', - "xhArr;": '\U000027FA', - "xharr;": '\U000027F7', - "xi;": '\U000003BE', - "xlArr;": '\U000027F8', - "xlarr;": '\U000027F5', - "xmap;": '\U000027FC', - "xnis;": '\U000022FB', - "xodot;": '\U00002A00', - "xopf;": '\U0001D569', - "xoplus;": '\U00002A01', - "xotime;": '\U00002A02', - "xrArr;": '\U000027F9', - "xrarr;": '\U000027F6', - "xscr;": '\U0001D4CD', - "xsqcup;": '\U00002A06', - "xuplus;": '\U00002A04', - "xutri;": '\U000025B3', - "xvee;": '\U000022C1', - "xwedge;": '\U000022C0', - "yacute;": '\U000000FD', - "yacy;": '\U0000044F', - "ycirc;": '\U00000177', - "ycy;": '\U0000044B', - "yen;": '\U000000A5', - "yfr;": '\U0001D536', - "yicy;": '\U00000457', - "yopf;": '\U0001D56A', - "yscr;": '\U0001D4CE', - "yucy;": '\U0000044E', - "yuml;": '\U000000FF', - "zacute;": '\U0000017A', - "zcaron;": '\U0000017E', - "zcy;": '\U00000437', - "zdot;": '\U0000017C', - "zeetrf;": '\U00002128', - "zeta;": '\U000003B6', - "zfr;": '\U0001D537', - "zhcy;": '\U00000436', - "zigrarr;": '\U000021DD', - "zopf;": '\U0001D56B', - "zscr;": '\U0001D4CF', - "zwj;": '\U0000200D', - "zwnj;": '\U0000200C', - "AElig": '\U000000C6', - "AMP": '\U00000026', - "Aacute": '\U000000C1', - "Acirc": '\U000000C2', - "Agrave": '\U000000C0', - "Aring": '\U000000C5', - "Atilde": '\U000000C3', - "Auml": '\U000000C4', - "COPY": '\U000000A9', - "Ccedil": '\U000000C7', - "ETH": '\U000000D0', - "Eacute": '\U000000C9', - "Ecirc": '\U000000CA', - "Egrave": '\U000000C8', - "Euml": '\U000000CB', - "GT": '\U0000003E', - "Iacute": '\U000000CD', - "Icirc": '\U000000CE', - "Igrave": '\U000000CC', - "Iuml": '\U000000CF', - "LT": '\U0000003C', - "Ntilde": '\U000000D1', - "Oacute": '\U000000D3', - "Ocirc": '\U000000D4', - "Ograve": '\U000000D2', - "Oslash": '\U000000D8', - "Otilde": '\U000000D5', - "Ouml": '\U000000D6', - "QUOT": '\U00000022', - "REG": '\U000000AE', - "THORN": '\U000000DE', - "Uacute": '\U000000DA', - "Ucirc": '\U000000DB', - "Ugrave": '\U000000D9', - "Uuml": '\U000000DC', - "Yacute": '\U000000DD', - "aacute": '\U000000E1', - "acirc": '\U000000E2', - "acute": '\U000000B4', - "aelig": '\U000000E6', - "agrave": '\U000000E0', - "amp": '\U00000026', - "aring": '\U000000E5', - "atilde": '\U000000E3', - "auml": '\U000000E4', - "brvbar": '\U000000A6', - "ccedil": '\U000000E7', - "cedil": '\U000000B8', - "cent": '\U000000A2', - "copy": '\U000000A9', - "curren": '\U000000A4', - "deg": '\U000000B0', - "divide": '\U000000F7', - "eacute": '\U000000E9', - "ecirc": '\U000000EA', - "egrave": '\U000000E8', - "eth": '\U000000F0', - "euml": '\U000000EB', - "frac12": '\U000000BD', - "frac14": '\U000000BC', - "frac34": '\U000000BE', - "gt": '\U0000003E', - "iacute": '\U000000ED', - "icirc": '\U000000EE', - "iexcl": '\U000000A1', - "igrave": '\U000000EC', - "iquest": '\U000000BF', - "iuml": '\U000000EF', - "laquo": '\U000000AB', - "lt": '\U0000003C', - "macr": '\U000000AF', - "micro": '\U000000B5', - "middot": '\U000000B7', - "nbsp": '\U000000A0', - "not": '\U000000AC', - "ntilde": '\U000000F1', - "oacute": '\U000000F3', - "ocirc": '\U000000F4', - 
"ograve": '\U000000F2', - "ordf": '\U000000AA', - "ordm": '\U000000BA', - "oslash": '\U000000F8', - "otilde": '\U000000F5', - "ouml": '\U000000F6', - "para": '\U000000B6', - "plusmn": '\U000000B1', - "pound": '\U000000A3', - "quot": '\U00000022', - "raquo": '\U000000BB', - "reg": '\U000000AE', - "sect": '\U000000A7', - "shy": '\U000000AD', - "sup1": '\U000000B9', - "sup2": '\U000000B2', - "sup3": '\U000000B3', - "szlig": '\U000000DF', - "thorn": '\U000000FE', - "times": '\U000000D7', - "uacute": '\U000000FA', - "ucirc": '\U000000FB', - "ugrave": '\U000000F9', - "uml": '\U000000A8', - "uuml": '\U000000FC', - "yacute": '\U000000FD', - "yen": '\U000000A5', - "yuml": '\U000000FF', + "Cross;": '\U00002A2F', + "Cscr;": '\U0001D49E', + "Cup;": '\U000022D3', + "CupCap;": '\U0000224D', + "DD;": '\U00002145', + "DDotrahd;": '\U00002911', + "DJcy;": '\U00000402', + "DScy;": '\U00000405', + "DZcy;": '\U0000040F', + "Dagger;": '\U00002021', + "Darr;": '\U000021A1', + "Dashv;": '\U00002AE4', + "Dcaron;": '\U0000010E', + "Dcy;": '\U00000414', + "Del;": '\U00002207', + "Delta;": '\U00000394', + "Dfr;": '\U0001D507', + "DiacriticalAcute;": '\U000000B4', + "DiacriticalDot;": '\U000002D9', + "DiacriticalDoubleAcute;": '\U000002DD', + "DiacriticalGrave;": '\U00000060', + "DiacriticalTilde;": '\U000002DC', + "Diamond;": '\U000022C4', + "DifferentialD;": '\U00002146', + "Dopf;": '\U0001D53B', + "Dot;": '\U000000A8', + "DotDot;": '\U000020DC', + "DotEqual;": '\U00002250', + "DoubleContourIntegral;": '\U0000222F', + "DoubleDot;": '\U000000A8', + "DoubleDownArrow;": '\U000021D3', + "DoubleLeftArrow;": '\U000021D0', + "DoubleLeftRightArrow;": '\U000021D4', + "DoubleLeftTee;": '\U00002AE4', + "DoubleLongLeftArrow;": '\U000027F8', + "DoubleLongLeftRightArrow;": '\U000027FA', + "DoubleLongRightArrow;": '\U000027F9', + "DoubleRightArrow;": '\U000021D2', + "DoubleRightTee;": '\U000022A8', + "DoubleUpArrow;": '\U000021D1', + "DoubleUpDownArrow;": '\U000021D5', + "DoubleVerticalBar;": '\U00002225', + "DownArrow;": '\U00002193', + "DownArrowBar;": '\U00002913', + "DownArrowUpArrow;": '\U000021F5', + "DownBreve;": '\U00000311', + "DownLeftRightVector;": '\U00002950', + "DownLeftTeeVector;": '\U0000295E', + "DownLeftVector;": '\U000021BD', + "DownLeftVectorBar;": '\U00002956', + "DownRightTeeVector;": '\U0000295F', + "DownRightVector;": '\U000021C1', + "DownRightVectorBar;": '\U00002957', + "DownTee;": '\U000022A4', + "DownTeeArrow;": '\U000021A7', + "Downarrow;": '\U000021D3', + "Dscr;": '\U0001D49F', + "Dstrok;": '\U00000110', + "ENG;": '\U0000014A', + "ETH;": '\U000000D0', + "Eacute;": '\U000000C9', + "Ecaron;": '\U0000011A', + "Ecirc;": '\U000000CA', + "Ecy;": '\U0000042D', + "Edot;": '\U00000116', + "Efr;": '\U0001D508', + "Egrave;": '\U000000C8', + "Element;": '\U00002208', + "Emacr;": '\U00000112', + "EmptySmallSquare;": '\U000025FB', + "EmptyVerySmallSquare;": '\U000025AB', + "Eogon;": '\U00000118', + "Eopf;": '\U0001D53C', + "Epsilon;": '\U00000395', + "Equal;": '\U00002A75', + "EqualTilde;": '\U00002242', + "Equilibrium;": '\U000021CC', + "Escr;": '\U00002130', + "Esim;": '\U00002A73', + "Eta;": '\U00000397', + "Euml;": '\U000000CB', + "Exists;": '\U00002203', + "ExponentialE;": '\U00002147', + "Fcy;": '\U00000424', + "Ffr;": '\U0001D509', + "FilledSmallSquare;": '\U000025FC', + "FilledVerySmallSquare;": '\U000025AA', + "Fopf;": '\U0001D53D', + "ForAll;": '\U00002200', + "Fouriertrf;": '\U00002131', + "Fscr;": '\U00002131', + "GJcy;": '\U00000403', + "GT;": '\U0000003E', + "Gamma;": '\U00000393', + "Gammad;": 
'\U000003DC', + "Gbreve;": '\U0000011E', + "Gcedil;": '\U00000122', + "Gcirc;": '\U0000011C', + "Gcy;": '\U00000413', + "Gdot;": '\U00000120', + "Gfr;": '\U0001D50A', + "Gg;": '\U000022D9', + "Gopf;": '\U0001D53E', + "GreaterEqual;": '\U00002265', + "GreaterEqualLess;": '\U000022DB', + "GreaterFullEqual;": '\U00002267', + "GreaterGreater;": '\U00002AA2', + "GreaterLess;": '\U00002277', + "GreaterSlantEqual;": '\U00002A7E', + "GreaterTilde;": '\U00002273', + "Gscr;": '\U0001D4A2', + "Gt;": '\U0000226B', + "HARDcy;": '\U0000042A', + "Hacek;": '\U000002C7', + "Hat;": '\U0000005E', + "Hcirc;": '\U00000124', + "Hfr;": '\U0000210C', + "HilbertSpace;": '\U0000210B', + "Hopf;": '\U0000210D', + "HorizontalLine;": '\U00002500', + "Hscr;": '\U0000210B', + "Hstrok;": '\U00000126', + "HumpDownHump;": '\U0000224E', + "HumpEqual;": '\U0000224F', + "IEcy;": '\U00000415', + "IJlig;": '\U00000132', + "IOcy;": '\U00000401', + "Iacute;": '\U000000CD', + "Icirc;": '\U000000CE', + "Icy;": '\U00000418', + "Idot;": '\U00000130', + "Ifr;": '\U00002111', + "Igrave;": '\U000000CC', + "Im;": '\U00002111', + "Imacr;": '\U0000012A', + "ImaginaryI;": '\U00002148', + "Implies;": '\U000021D2', + "Int;": '\U0000222C', + "Integral;": '\U0000222B', + "Intersection;": '\U000022C2', + "InvisibleComma;": '\U00002063', + "InvisibleTimes;": '\U00002062', + "Iogon;": '\U0000012E', + "Iopf;": '\U0001D540', + "Iota;": '\U00000399', + "Iscr;": '\U00002110', + "Itilde;": '\U00000128', + "Iukcy;": '\U00000406', + "Iuml;": '\U000000CF', + "Jcirc;": '\U00000134', + "Jcy;": '\U00000419', + "Jfr;": '\U0001D50D', + "Jopf;": '\U0001D541', + "Jscr;": '\U0001D4A5', + "Jsercy;": '\U00000408', + "Jukcy;": '\U00000404', + "KHcy;": '\U00000425', + "KJcy;": '\U0000040C', + "Kappa;": '\U0000039A', + "Kcedil;": '\U00000136', + "Kcy;": '\U0000041A', + "Kfr;": '\U0001D50E', + "Kopf;": '\U0001D542', + "Kscr;": '\U0001D4A6', + "LJcy;": '\U00000409', + "LT;": '\U0000003C', + "Lacute;": '\U00000139', + "Lambda;": '\U0000039B', + "Lang;": '\U000027EA', + "Laplacetrf;": '\U00002112', + "Larr;": '\U0000219E', + "Lcaron;": '\U0000013D', + "Lcedil;": '\U0000013B', + "Lcy;": '\U0000041B', + "LeftAngleBracket;": '\U000027E8', + "LeftArrow;": '\U00002190', + "LeftArrowBar;": '\U000021E4', + "LeftArrowRightArrow;": '\U000021C6', + "LeftCeiling;": '\U00002308', + "LeftDoubleBracket;": '\U000027E6', + "LeftDownTeeVector;": '\U00002961', + "LeftDownVector;": '\U000021C3', + "LeftDownVectorBar;": '\U00002959', + "LeftFloor;": '\U0000230A', + "LeftRightArrow;": '\U00002194', + "LeftRightVector;": '\U0000294E', + "LeftTee;": '\U000022A3', + "LeftTeeArrow;": '\U000021A4', + "LeftTeeVector;": '\U0000295A', + "LeftTriangle;": '\U000022B2', + "LeftTriangleBar;": '\U000029CF', + "LeftTriangleEqual;": '\U000022B4', + "LeftUpDownVector;": '\U00002951', + "LeftUpTeeVector;": '\U00002960', + "LeftUpVector;": '\U000021BF', + "LeftUpVectorBar;": '\U00002958', + "LeftVector;": '\U000021BC', + "LeftVectorBar;": '\U00002952', + "Leftarrow;": '\U000021D0', + "Leftrightarrow;": '\U000021D4', + "LessEqualGreater;": '\U000022DA', + "LessFullEqual;": '\U00002266', + "LessGreater;": '\U00002276', + "LessLess;": '\U00002AA1', + "LessSlantEqual;": '\U00002A7D', + "LessTilde;": '\U00002272', + "Lfr;": '\U0001D50F', + "Ll;": '\U000022D8', + "Lleftarrow;": '\U000021DA', + "Lmidot;": '\U0000013F', + "LongLeftArrow;": '\U000027F5', + "LongLeftRightArrow;": '\U000027F7', + "LongRightArrow;": '\U000027F6', + "Longleftarrow;": '\U000027F8', + "Longleftrightarrow;": '\U000027FA', + "Longrightarrow;": 
'\U000027F9', + "Lopf;": '\U0001D543', + "LowerLeftArrow;": '\U00002199', + "LowerRightArrow;": '\U00002198', + "Lscr;": '\U00002112', + "Lsh;": '\U000021B0', + "Lstrok;": '\U00000141', + "Lt;": '\U0000226A', + "Map;": '\U00002905', + "Mcy;": '\U0000041C', + "MediumSpace;": '\U0000205F', + "Mellintrf;": '\U00002133', + "Mfr;": '\U0001D510', + "MinusPlus;": '\U00002213', + "Mopf;": '\U0001D544', + "Mscr;": '\U00002133', + "Mu;": '\U0000039C', + "NJcy;": '\U0000040A', + "Nacute;": '\U00000143', + "Ncaron;": '\U00000147', + "Ncedil;": '\U00000145', + "Ncy;": '\U0000041D', + "NegativeMediumSpace;": '\U0000200B', + "NegativeThickSpace;": '\U0000200B', + "NegativeThinSpace;": '\U0000200B', + "NegativeVeryThinSpace;": '\U0000200B', + "NestedGreaterGreater;": '\U0000226B', + "NestedLessLess;": '\U0000226A', + "NewLine;": '\U0000000A', + "Nfr;": '\U0001D511', + "NoBreak;": '\U00002060', + "NonBreakingSpace;": '\U000000A0', + "Nopf;": '\U00002115', + "Not;": '\U00002AEC', + "NotCongruent;": '\U00002262', + "NotCupCap;": '\U0000226D', + "NotDoubleVerticalBar;": '\U00002226', + "NotElement;": '\U00002209', + "NotEqual;": '\U00002260', + "NotExists;": '\U00002204', + "NotGreater;": '\U0000226F', + "NotGreaterEqual;": '\U00002271', + "NotGreaterLess;": '\U00002279', + "NotGreaterTilde;": '\U00002275', + "NotLeftTriangle;": '\U000022EA', + "NotLeftTriangleEqual;": '\U000022EC', + "NotLess;": '\U0000226E', + "NotLessEqual;": '\U00002270', + "NotLessGreater;": '\U00002278', + "NotLessTilde;": '\U00002274', + "NotPrecedes;": '\U00002280', + "NotPrecedesSlantEqual;": '\U000022E0', + "NotReverseElement;": '\U0000220C', + "NotRightTriangle;": '\U000022EB', + "NotRightTriangleEqual;": '\U000022ED', + "NotSquareSubsetEqual;": '\U000022E2', + "NotSquareSupersetEqual;": '\U000022E3', + "NotSubsetEqual;": '\U00002288', + "NotSucceeds;": '\U00002281', + "NotSucceedsSlantEqual;": '\U000022E1', + "NotSupersetEqual;": '\U00002289', + "NotTilde;": '\U00002241', + "NotTildeEqual;": '\U00002244', + "NotTildeFullEqual;": '\U00002247', + "NotTildeTilde;": '\U00002249', + "NotVerticalBar;": '\U00002224', + "Nscr;": '\U0001D4A9', + "Ntilde;": '\U000000D1', + "Nu;": '\U0000039D', + "OElig;": '\U00000152', + "Oacute;": '\U000000D3', + "Ocirc;": '\U000000D4', + "Ocy;": '\U0000041E', + "Odblac;": '\U00000150', + "Ofr;": '\U0001D512', + "Ograve;": '\U000000D2', + "Omacr;": '\U0000014C', + "Omega;": '\U000003A9', + "Omicron;": '\U0000039F', + "Oopf;": '\U0001D546', + "OpenCurlyDoubleQuote;": '\U0000201C', + "OpenCurlyQuote;": '\U00002018', + "Or;": '\U00002A54', + "Oscr;": '\U0001D4AA', + "Oslash;": '\U000000D8', + "Otilde;": '\U000000D5', + "Otimes;": '\U00002A37', + "Ouml;": '\U000000D6', + "OverBar;": '\U0000203E', + "OverBrace;": '\U000023DE', + "OverBracket;": '\U000023B4', + "OverParenthesis;": '\U000023DC', + "PartialD;": '\U00002202', + "Pcy;": '\U0000041F', + "Pfr;": '\U0001D513', + "Phi;": '\U000003A6', + "Pi;": '\U000003A0', + "PlusMinus;": '\U000000B1', + "Poincareplane;": '\U0000210C', + "Popf;": '\U00002119', + "Pr;": '\U00002ABB', + "Precedes;": '\U0000227A', + "PrecedesEqual;": '\U00002AAF', + "PrecedesSlantEqual;": '\U0000227C', + "PrecedesTilde;": '\U0000227E', + "Prime;": '\U00002033', + "Product;": '\U0000220F', + "Proportion;": '\U00002237', + "Proportional;": '\U0000221D', + "Pscr;": '\U0001D4AB', + "Psi;": '\U000003A8', + "QUOT;": '\U00000022', + "Qfr;": '\U0001D514', + "Qopf;": '\U0000211A', + "Qscr;": '\U0001D4AC', + "RBarr;": '\U00002910', + "REG;": '\U000000AE', + "Racute;": '\U00000154', + "Rang;": 
'\U000027EB', + "Rarr;": '\U000021A0', + "Rarrtl;": '\U00002916', + "Rcaron;": '\U00000158', + "Rcedil;": '\U00000156', + "Rcy;": '\U00000420', + "Re;": '\U0000211C', + "ReverseElement;": '\U0000220B', + "ReverseEquilibrium;": '\U000021CB', + "ReverseUpEquilibrium;": '\U0000296F', + "Rfr;": '\U0000211C', + "Rho;": '\U000003A1', + "RightAngleBracket;": '\U000027E9', + "RightArrow;": '\U00002192', + "RightArrowBar;": '\U000021E5', + "RightArrowLeftArrow;": '\U000021C4', + "RightCeiling;": '\U00002309', + "RightDoubleBracket;": '\U000027E7', + "RightDownTeeVector;": '\U0000295D', + "RightDownVector;": '\U000021C2', + "RightDownVectorBar;": '\U00002955', + "RightFloor;": '\U0000230B', + "RightTee;": '\U000022A2', + "RightTeeArrow;": '\U000021A6', + "RightTeeVector;": '\U0000295B', + "RightTriangle;": '\U000022B3', + "RightTriangleBar;": '\U000029D0', + "RightTriangleEqual;": '\U000022B5', + "RightUpDownVector;": '\U0000294F', + "RightUpTeeVector;": '\U0000295C', + "RightUpVector;": '\U000021BE', + "RightUpVectorBar;": '\U00002954', + "RightVector;": '\U000021C0', + "RightVectorBar;": '\U00002953', + "Rightarrow;": '\U000021D2', + "Ropf;": '\U0000211D', + "RoundImplies;": '\U00002970', + "Rrightarrow;": '\U000021DB', + "Rscr;": '\U0000211B', + "Rsh;": '\U000021B1', + "RuleDelayed;": '\U000029F4', + "SHCHcy;": '\U00000429', + "SHcy;": '\U00000428', + "SOFTcy;": '\U0000042C', + "Sacute;": '\U0000015A', + "Sc;": '\U00002ABC', + "Scaron;": '\U00000160', + "Scedil;": '\U0000015E', + "Scirc;": '\U0000015C', + "Scy;": '\U00000421', + "Sfr;": '\U0001D516', + "ShortDownArrow;": '\U00002193', + "ShortLeftArrow;": '\U00002190', + "ShortRightArrow;": '\U00002192', + "ShortUpArrow;": '\U00002191', + "Sigma;": '\U000003A3', + "SmallCircle;": '\U00002218', + "Sopf;": '\U0001D54A', + "Sqrt;": '\U0000221A', + "Square;": '\U000025A1', + "SquareIntersection;": '\U00002293', + "SquareSubset;": '\U0000228F', + "SquareSubsetEqual;": '\U00002291', + "SquareSuperset;": '\U00002290', + "SquareSupersetEqual;": '\U00002292', + "SquareUnion;": '\U00002294', + "Sscr;": '\U0001D4AE', + "Star;": '\U000022C6', + "Sub;": '\U000022D0', + "Subset;": '\U000022D0', + "SubsetEqual;": '\U00002286', + "Succeeds;": '\U0000227B', + "SucceedsEqual;": '\U00002AB0', + "SucceedsSlantEqual;": '\U0000227D', + "SucceedsTilde;": '\U0000227F', + "SuchThat;": '\U0000220B', + "Sum;": '\U00002211', + "Sup;": '\U000022D1', + "Superset;": '\U00002283', + "SupersetEqual;": '\U00002287', + "Supset;": '\U000022D1', + "THORN;": '\U000000DE', + "TRADE;": '\U00002122', + "TSHcy;": '\U0000040B', + "TScy;": '\U00000426', + "Tab;": '\U00000009', + "Tau;": '\U000003A4', + "Tcaron;": '\U00000164', + "Tcedil;": '\U00000162', + "Tcy;": '\U00000422', + "Tfr;": '\U0001D517', + "Therefore;": '\U00002234', + "Theta;": '\U00000398', + "ThinSpace;": '\U00002009', + "Tilde;": '\U0000223C', + "TildeEqual;": '\U00002243', + "TildeFullEqual;": '\U00002245', + "TildeTilde;": '\U00002248', + "Topf;": '\U0001D54B', + "TripleDot;": '\U000020DB', + "Tscr;": '\U0001D4AF', + "Tstrok;": '\U00000166', + "Uacute;": '\U000000DA', + "Uarr;": '\U0000219F', + "Uarrocir;": '\U00002949', + "Ubrcy;": '\U0000040E', + "Ubreve;": '\U0000016C', + "Ucirc;": '\U000000DB', + "Ucy;": '\U00000423', + "Udblac;": '\U00000170', + "Ufr;": '\U0001D518', + "Ugrave;": '\U000000D9', + "Umacr;": '\U0000016A', + "UnderBar;": '\U0000005F', + "UnderBrace;": '\U000023DF', + "UnderBracket;": '\U000023B5', + "UnderParenthesis;": '\U000023DD', + "Union;": '\U000022C3', + "UnionPlus;": '\U0000228E', + "Uogon;": 
'\U00000172', + "Uopf;": '\U0001D54C', + "UpArrow;": '\U00002191', + "UpArrowBar;": '\U00002912', + "UpArrowDownArrow;": '\U000021C5', + "UpDownArrow;": '\U00002195', + "UpEquilibrium;": '\U0000296E', + "UpTee;": '\U000022A5', + "UpTeeArrow;": '\U000021A5', + "Uparrow;": '\U000021D1', + "Updownarrow;": '\U000021D5', + "UpperLeftArrow;": '\U00002196', + "UpperRightArrow;": '\U00002197', + "Upsi;": '\U000003D2', + "Upsilon;": '\U000003A5', + "Uring;": '\U0000016E', + "Uscr;": '\U0001D4B0', + "Utilde;": '\U00000168', + "Uuml;": '\U000000DC', + "VDash;": '\U000022AB', + "Vbar;": '\U00002AEB', + "Vcy;": '\U00000412', + "Vdash;": '\U000022A9', + "Vdashl;": '\U00002AE6', + "Vee;": '\U000022C1', + "Verbar;": '\U00002016', + "Vert;": '\U00002016', + "VerticalBar;": '\U00002223', + "VerticalLine;": '\U0000007C', + "VerticalSeparator;": '\U00002758', + "VerticalTilde;": '\U00002240', + "VeryThinSpace;": '\U0000200A', + "Vfr;": '\U0001D519', + "Vopf;": '\U0001D54D', + "Vscr;": '\U0001D4B1', + "Vvdash;": '\U000022AA', + "Wcirc;": '\U00000174', + "Wedge;": '\U000022C0', + "Wfr;": '\U0001D51A', + "Wopf;": '\U0001D54E', + "Wscr;": '\U0001D4B2', + "Xfr;": '\U0001D51B', + "Xi;": '\U0000039E', + "Xopf;": '\U0001D54F', + "Xscr;": '\U0001D4B3', + "YAcy;": '\U0000042F', + "YIcy;": '\U00000407', + "YUcy;": '\U0000042E', + "Yacute;": '\U000000DD', + "Ycirc;": '\U00000176', + "Ycy;": '\U0000042B', + "Yfr;": '\U0001D51C', + "Yopf;": '\U0001D550', + "Yscr;": '\U0001D4B4', + "Yuml;": '\U00000178', + "ZHcy;": '\U00000416', + "Zacute;": '\U00000179', + "Zcaron;": '\U0000017D', + "Zcy;": '\U00000417', + "Zdot;": '\U0000017B', + "ZeroWidthSpace;": '\U0000200B', + "Zeta;": '\U00000396', + "Zfr;": '\U00002128', + "Zopf;": '\U00002124', + "Zscr;": '\U0001D4B5', + "aacute;": '\U000000E1', + "abreve;": '\U00000103', + "ac;": '\U0000223E', + "acd;": '\U0000223F', + "acirc;": '\U000000E2', + "acute;": '\U000000B4', + "acy;": '\U00000430', + "aelig;": '\U000000E6', + "af;": '\U00002061', + "afr;": '\U0001D51E', + "agrave;": '\U000000E0', + "alefsym;": '\U00002135', + "aleph;": '\U00002135', + "alpha;": '\U000003B1', + "amacr;": '\U00000101', + "amalg;": '\U00002A3F', + "amp;": '\U00000026', + "and;": '\U00002227', + "andand;": '\U00002A55', + "andd;": '\U00002A5C', + "andslope;": '\U00002A58', + "andv;": '\U00002A5A', + "ang;": '\U00002220', + "ange;": '\U000029A4', + "angle;": '\U00002220', + "angmsd;": '\U00002221', + "angmsdaa;": '\U000029A8', + "angmsdab;": '\U000029A9', + "angmsdac;": '\U000029AA', + "angmsdad;": '\U000029AB', + "angmsdae;": '\U000029AC', + "angmsdaf;": '\U000029AD', + "angmsdag;": '\U000029AE', + "angmsdah;": '\U000029AF', + "angrt;": '\U0000221F', + "angrtvb;": '\U000022BE', + "angrtvbd;": '\U0000299D', + "angsph;": '\U00002222', + "angst;": '\U000000C5', + "angzarr;": '\U0000237C', + "aogon;": '\U00000105', + "aopf;": '\U0001D552', + "ap;": '\U00002248', + "apE;": '\U00002A70', + "apacir;": '\U00002A6F', + "ape;": '\U0000224A', + "apid;": '\U0000224B', + "apos;": '\U00000027', + "approx;": '\U00002248', + "approxeq;": '\U0000224A', + "aring;": '\U000000E5', + "ascr;": '\U0001D4B6', + "ast;": '\U0000002A', + "asymp;": '\U00002248', + "asympeq;": '\U0000224D', + "atilde;": '\U000000E3', + "auml;": '\U000000E4', + "awconint;": '\U00002233', + "awint;": '\U00002A11', + "bNot;": '\U00002AED', + "backcong;": '\U0000224C', + "backepsilon;": '\U000003F6', + "backprime;": '\U00002035', + "backsim;": '\U0000223D', + "backsimeq;": '\U000022CD', + "barvee;": '\U000022BD', + "barwed;": '\U00002305', + "barwedge;": 
'\U00002305', + "bbrk;": '\U000023B5', + "bbrktbrk;": '\U000023B6', + "bcong;": '\U0000224C', + "bcy;": '\U00000431', + "bdquo;": '\U0000201E', + "becaus;": '\U00002235', + "because;": '\U00002235', + "bemptyv;": '\U000029B0', + "bepsi;": '\U000003F6', + "bernou;": '\U0000212C', + "beta;": '\U000003B2', + "beth;": '\U00002136', + "between;": '\U0000226C', + "bfr;": '\U0001D51F', + "bigcap;": '\U000022C2', + "bigcirc;": '\U000025EF', + "bigcup;": '\U000022C3', + "bigodot;": '\U00002A00', + "bigoplus;": '\U00002A01', + "bigotimes;": '\U00002A02', + "bigsqcup;": '\U00002A06', + "bigstar;": '\U00002605', + "bigtriangledown;": '\U000025BD', + "bigtriangleup;": '\U000025B3', + "biguplus;": '\U00002A04', + "bigvee;": '\U000022C1', + "bigwedge;": '\U000022C0', + "bkarow;": '\U0000290D', + "blacklozenge;": '\U000029EB', + "blacksquare;": '\U000025AA', + "blacktriangle;": '\U000025B4', + "blacktriangledown;": '\U000025BE', + "blacktriangleleft;": '\U000025C2', + "blacktriangleright;": '\U000025B8', + "blank;": '\U00002423', + "blk12;": '\U00002592', + "blk14;": '\U00002591', + "blk34;": '\U00002593', + "block;": '\U00002588', + "bnot;": '\U00002310', + "bopf;": '\U0001D553', + "bot;": '\U000022A5', + "bottom;": '\U000022A5', + "bowtie;": '\U000022C8', + "boxDL;": '\U00002557', + "boxDR;": '\U00002554', + "boxDl;": '\U00002556', + "boxDr;": '\U00002553', + "boxH;": '\U00002550', + "boxHD;": '\U00002566', + "boxHU;": '\U00002569', + "boxHd;": '\U00002564', + "boxHu;": '\U00002567', + "boxUL;": '\U0000255D', + "boxUR;": '\U0000255A', + "boxUl;": '\U0000255C', + "boxUr;": '\U00002559', + "boxV;": '\U00002551', + "boxVH;": '\U0000256C', + "boxVL;": '\U00002563', + "boxVR;": '\U00002560', + "boxVh;": '\U0000256B', + "boxVl;": '\U00002562', + "boxVr;": '\U0000255F', + "boxbox;": '\U000029C9', + "boxdL;": '\U00002555', + "boxdR;": '\U00002552', + "boxdl;": '\U00002510', + "boxdr;": '\U0000250C', + "boxh;": '\U00002500', + "boxhD;": '\U00002565', + "boxhU;": '\U00002568', + "boxhd;": '\U0000252C', + "boxhu;": '\U00002534', + "boxminus;": '\U0000229F', + "boxplus;": '\U0000229E', + "boxtimes;": '\U000022A0', + "boxuL;": '\U0000255B', + "boxuR;": '\U00002558', + "boxul;": '\U00002518', + "boxur;": '\U00002514', + "boxv;": '\U00002502', + "boxvH;": '\U0000256A', + "boxvL;": '\U00002561', + "boxvR;": '\U0000255E', + "boxvh;": '\U0000253C', + "boxvl;": '\U00002524', + "boxvr;": '\U0000251C', + "bprime;": '\U00002035', + "breve;": '\U000002D8', + "brvbar;": '\U000000A6', + "bscr;": '\U0001D4B7', + "bsemi;": '\U0000204F', + "bsim;": '\U0000223D', + "bsime;": '\U000022CD', + "bsol;": '\U0000005C', + "bsolb;": '\U000029C5', + "bsolhsub;": '\U000027C8', + "bull;": '\U00002022', + "bullet;": '\U00002022', + "bump;": '\U0000224E', + "bumpE;": '\U00002AAE', + "bumpe;": '\U0000224F', + "bumpeq;": '\U0000224F', + "cacute;": '\U00000107', + "cap;": '\U00002229', + "capand;": '\U00002A44', + "capbrcup;": '\U00002A49', + "capcap;": '\U00002A4B', + "capcup;": '\U00002A47', + "capdot;": '\U00002A40', + "caret;": '\U00002041', + "caron;": '\U000002C7', + "ccaps;": '\U00002A4D', + "ccaron;": '\U0000010D', + "ccedil;": '\U000000E7', + "ccirc;": '\U00000109', + "ccups;": '\U00002A4C', + "ccupssm;": '\U00002A50', + "cdot;": '\U0000010B', + "cedil;": '\U000000B8', + "cemptyv;": '\U000029B2', + "cent;": '\U000000A2', + "centerdot;": '\U000000B7', + "cfr;": '\U0001D520', + "chcy;": '\U00000447', + "check;": '\U00002713', + "checkmark;": '\U00002713', + "chi;": '\U000003C7', + "cir;": '\U000025CB', + "cirE;": '\U000029C3', + "circ;": 
'\U000002C6', + "circeq;": '\U00002257', + "circlearrowleft;": '\U000021BA', + "circlearrowright;": '\U000021BB', + "circledR;": '\U000000AE', + "circledS;": '\U000024C8', + "circledast;": '\U0000229B', + "circledcirc;": '\U0000229A', + "circleddash;": '\U0000229D', + "cire;": '\U00002257', + "cirfnint;": '\U00002A10', + "cirmid;": '\U00002AEF', + "cirscir;": '\U000029C2', + "clubs;": '\U00002663', + "clubsuit;": '\U00002663', + "colon;": '\U0000003A', + "colone;": '\U00002254', + "coloneq;": '\U00002254', + "comma;": '\U0000002C', + "commat;": '\U00000040', + "comp;": '\U00002201', + "compfn;": '\U00002218', + "complement;": '\U00002201', + "complexes;": '\U00002102', + "cong;": '\U00002245', + "congdot;": '\U00002A6D', + "conint;": '\U0000222E', + "copf;": '\U0001D554', + "coprod;": '\U00002210', + "copy;": '\U000000A9', + "copysr;": '\U00002117', + "crarr;": '\U000021B5', + "cross;": '\U00002717', + "cscr;": '\U0001D4B8', + "csub;": '\U00002ACF', + "csube;": '\U00002AD1', + "csup;": '\U00002AD0', + "csupe;": '\U00002AD2', + "ctdot;": '\U000022EF', + "cudarrl;": '\U00002938', + "cudarrr;": '\U00002935', + "cuepr;": '\U000022DE', + "cuesc;": '\U000022DF', + "cularr;": '\U000021B6', + "cularrp;": '\U0000293D', + "cup;": '\U0000222A', + "cupbrcap;": '\U00002A48', + "cupcap;": '\U00002A46', + "cupcup;": '\U00002A4A', + "cupdot;": '\U0000228D', + "cupor;": '\U00002A45', + "curarr;": '\U000021B7', + "curarrm;": '\U0000293C', + "curlyeqprec;": '\U000022DE', + "curlyeqsucc;": '\U000022DF', + "curlyvee;": '\U000022CE', + "curlywedge;": '\U000022CF', + "curren;": '\U000000A4', + "curvearrowleft;": '\U000021B6', + "curvearrowright;": '\U000021B7', + "cuvee;": '\U000022CE', + "cuwed;": '\U000022CF', + "cwconint;": '\U00002232', + "cwint;": '\U00002231', + "cylcty;": '\U0000232D', + "dArr;": '\U000021D3', + "dHar;": '\U00002965', + "dagger;": '\U00002020', + "daleth;": '\U00002138', + "darr;": '\U00002193', + "dash;": '\U00002010', + "dashv;": '\U000022A3', + "dbkarow;": '\U0000290F', + "dblac;": '\U000002DD', + "dcaron;": '\U0000010F', + "dcy;": '\U00000434', + "dd;": '\U00002146', + "ddagger;": '\U00002021', + "ddarr;": '\U000021CA', + "ddotseq;": '\U00002A77', + "deg;": '\U000000B0', + "delta;": '\U000003B4', + "demptyv;": '\U000029B1', + "dfisht;": '\U0000297F', + "dfr;": '\U0001D521', + "dharl;": '\U000021C3', + "dharr;": '\U000021C2', + "diam;": '\U000022C4', + "diamond;": '\U000022C4', + "diamondsuit;": '\U00002666', + "diams;": '\U00002666', + "die;": '\U000000A8', + "digamma;": '\U000003DD', + "disin;": '\U000022F2', + "div;": '\U000000F7', + "divide;": '\U000000F7', + "divideontimes;": '\U000022C7', + "divonx;": '\U000022C7', + "djcy;": '\U00000452', + "dlcorn;": '\U0000231E', + "dlcrop;": '\U0000230D', + "dollar;": '\U00000024', + "dopf;": '\U0001D555', + "dot;": '\U000002D9', + "doteq;": '\U00002250', + "doteqdot;": '\U00002251', + "dotminus;": '\U00002238', + "dotplus;": '\U00002214', + "dotsquare;": '\U000022A1', + "doublebarwedge;": '\U00002306', + "downarrow;": '\U00002193', + "downdownarrows;": '\U000021CA', + "downharpoonleft;": '\U000021C3', + "downharpoonright;": '\U000021C2', + "drbkarow;": '\U00002910', + "drcorn;": '\U0000231F', + "drcrop;": '\U0000230C', + "dscr;": '\U0001D4B9', + "dscy;": '\U00000455', + "dsol;": '\U000029F6', + "dstrok;": '\U00000111', + "dtdot;": '\U000022F1', + "dtri;": '\U000025BF', + "dtrif;": '\U000025BE', + "duarr;": '\U000021F5', + "duhar;": '\U0000296F', + "dwangle;": '\U000029A6', + "dzcy;": '\U0000045F', + "dzigrarr;": '\U000027FF', + "eDDot;": 
'\U00002A77', + "eDot;": '\U00002251', + "eacute;": '\U000000E9', + "easter;": '\U00002A6E', + "ecaron;": '\U0000011B', + "ecir;": '\U00002256', + "ecirc;": '\U000000EA', + "ecolon;": '\U00002255', + "ecy;": '\U0000044D', + "edot;": '\U00000117', + "ee;": '\U00002147', + "efDot;": '\U00002252', + "efr;": '\U0001D522', + "eg;": '\U00002A9A', + "egrave;": '\U000000E8', + "egs;": '\U00002A96', + "egsdot;": '\U00002A98', + "el;": '\U00002A99', + "elinters;": '\U000023E7', + "ell;": '\U00002113', + "els;": '\U00002A95', + "elsdot;": '\U00002A97', + "emacr;": '\U00000113', + "empty;": '\U00002205', + "emptyset;": '\U00002205', + "emptyv;": '\U00002205', + "emsp;": '\U00002003', + "emsp13;": '\U00002004', + "emsp14;": '\U00002005', + "eng;": '\U0000014B', + "ensp;": '\U00002002', + "eogon;": '\U00000119', + "eopf;": '\U0001D556', + "epar;": '\U000022D5', + "eparsl;": '\U000029E3', + "eplus;": '\U00002A71', + "epsi;": '\U000003B5', + "epsilon;": '\U000003B5', + "epsiv;": '\U000003F5', + "eqcirc;": '\U00002256', + "eqcolon;": '\U00002255', + "eqsim;": '\U00002242', + "eqslantgtr;": '\U00002A96', + "eqslantless;": '\U00002A95', + "equals;": '\U0000003D', + "equest;": '\U0000225F', + "equiv;": '\U00002261', + "equivDD;": '\U00002A78', + "eqvparsl;": '\U000029E5', + "erDot;": '\U00002253', + "erarr;": '\U00002971', + "escr;": '\U0000212F', + "esdot;": '\U00002250', + "esim;": '\U00002242', + "eta;": '\U000003B7', + "eth;": '\U000000F0', + "euml;": '\U000000EB', + "euro;": '\U000020AC', + "excl;": '\U00000021', + "exist;": '\U00002203', + "expectation;": '\U00002130', + "exponentiale;": '\U00002147', + "fallingdotseq;": '\U00002252', + "fcy;": '\U00000444', + "female;": '\U00002640', + "ffilig;": '\U0000FB03', + "fflig;": '\U0000FB00', + "ffllig;": '\U0000FB04', + "ffr;": '\U0001D523', + "filig;": '\U0000FB01', + "flat;": '\U0000266D', + "fllig;": '\U0000FB02', + "fltns;": '\U000025B1', + "fnof;": '\U00000192', + "fopf;": '\U0001D557', + "forall;": '\U00002200', + "fork;": '\U000022D4', + "forkv;": '\U00002AD9', + "fpartint;": '\U00002A0D', + "frac12;": '\U000000BD', + "frac13;": '\U00002153', + "frac14;": '\U000000BC', + "frac15;": '\U00002155', + "frac16;": '\U00002159', + "frac18;": '\U0000215B', + "frac23;": '\U00002154', + "frac25;": '\U00002156', + "frac34;": '\U000000BE', + "frac35;": '\U00002157', + "frac38;": '\U0000215C', + "frac45;": '\U00002158', + "frac56;": '\U0000215A', + "frac58;": '\U0000215D', + "frac78;": '\U0000215E', + "frasl;": '\U00002044', + "frown;": '\U00002322', + "fscr;": '\U0001D4BB', + "gE;": '\U00002267', + "gEl;": '\U00002A8C', + "gacute;": '\U000001F5', + "gamma;": '\U000003B3', + "gammad;": '\U000003DD', + "gap;": '\U00002A86', + "gbreve;": '\U0000011F', + "gcirc;": '\U0000011D', + "gcy;": '\U00000433', + "gdot;": '\U00000121', + "ge;": '\U00002265', + "gel;": '\U000022DB', + "geq;": '\U00002265', + "geqq;": '\U00002267', + "geqslant;": '\U00002A7E', + "ges;": '\U00002A7E', + "gescc;": '\U00002AA9', + "gesdot;": '\U00002A80', + "gesdoto;": '\U00002A82', + "gesdotol;": '\U00002A84', + "gesles;": '\U00002A94', + "gfr;": '\U0001D524', + "gg;": '\U0000226B', + "ggg;": '\U000022D9', + "gimel;": '\U00002137', + "gjcy;": '\U00000453', + "gl;": '\U00002277', + "glE;": '\U00002A92', + "gla;": '\U00002AA5', + "glj;": '\U00002AA4', + "gnE;": '\U00002269', + "gnap;": '\U00002A8A', + "gnapprox;": '\U00002A8A', + "gne;": '\U00002A88', + "gneq;": '\U00002A88', + "gneqq;": '\U00002269', + "gnsim;": '\U000022E7', + "gopf;": '\U0001D558', + "grave;": '\U00000060', + "gscr;": 
'\U0000210A', + "gsim;": '\U00002273', + "gsime;": '\U00002A8E', + "gsiml;": '\U00002A90', + "gt;": '\U0000003E', + "gtcc;": '\U00002AA7', + "gtcir;": '\U00002A7A', + "gtdot;": '\U000022D7', + "gtlPar;": '\U00002995', + "gtquest;": '\U00002A7C', + "gtrapprox;": '\U00002A86', + "gtrarr;": '\U00002978', + "gtrdot;": '\U000022D7', + "gtreqless;": '\U000022DB', + "gtreqqless;": '\U00002A8C', + "gtrless;": '\U00002277', + "gtrsim;": '\U00002273', + "hArr;": '\U000021D4', + "hairsp;": '\U0000200A', + "half;": '\U000000BD', + "hamilt;": '\U0000210B', + "hardcy;": '\U0000044A', + "harr;": '\U00002194', + "harrcir;": '\U00002948', + "harrw;": '\U000021AD', + "hbar;": '\U0000210F', + "hcirc;": '\U00000125', + "hearts;": '\U00002665', + "heartsuit;": '\U00002665', + "hellip;": '\U00002026', + "hercon;": '\U000022B9', + "hfr;": '\U0001D525', + "hksearow;": '\U00002925', + "hkswarow;": '\U00002926', + "hoarr;": '\U000021FF', + "homtht;": '\U0000223B', + "hookleftarrow;": '\U000021A9', + "hookrightarrow;": '\U000021AA', + "hopf;": '\U0001D559', + "horbar;": '\U00002015', + "hscr;": '\U0001D4BD', + "hslash;": '\U0000210F', + "hstrok;": '\U00000127', + "hybull;": '\U00002043', + "hyphen;": '\U00002010', + "iacute;": '\U000000ED', + "ic;": '\U00002063', + "icirc;": '\U000000EE', + "icy;": '\U00000438', + "iecy;": '\U00000435', + "iexcl;": '\U000000A1', + "iff;": '\U000021D4', + "ifr;": '\U0001D526', + "igrave;": '\U000000EC', + "ii;": '\U00002148', + "iiiint;": '\U00002A0C', + "iiint;": '\U0000222D', + "iinfin;": '\U000029DC', + "iiota;": '\U00002129', + "ijlig;": '\U00000133', + "imacr;": '\U0000012B', + "image;": '\U00002111', + "imagline;": '\U00002110', + "imagpart;": '\U00002111', + "imath;": '\U00000131', + "imof;": '\U000022B7', + "imped;": '\U000001B5', + "in;": '\U00002208', + "incare;": '\U00002105', + "infin;": '\U0000221E', + "infintie;": '\U000029DD', + "inodot;": '\U00000131', + "int;": '\U0000222B', + "intcal;": '\U000022BA', + "integers;": '\U00002124', + "intercal;": '\U000022BA', + "intlarhk;": '\U00002A17', + "intprod;": '\U00002A3C', + "iocy;": '\U00000451', + "iogon;": '\U0000012F', + "iopf;": '\U0001D55A', + "iota;": '\U000003B9', + "iprod;": '\U00002A3C', + "iquest;": '\U000000BF', + "iscr;": '\U0001D4BE', + "isin;": '\U00002208', + "isinE;": '\U000022F9', + "isindot;": '\U000022F5', + "isins;": '\U000022F4', + "isinsv;": '\U000022F3', + "isinv;": '\U00002208', + "it;": '\U00002062', + "itilde;": '\U00000129', + "iukcy;": '\U00000456', + "iuml;": '\U000000EF', + "jcirc;": '\U00000135', + "jcy;": '\U00000439', + "jfr;": '\U0001D527', + "jmath;": '\U00000237', + "jopf;": '\U0001D55B', + "jscr;": '\U0001D4BF', + "jsercy;": '\U00000458', + "jukcy;": '\U00000454', + "kappa;": '\U000003BA', + "kappav;": '\U000003F0', + "kcedil;": '\U00000137', + "kcy;": '\U0000043A', + "kfr;": '\U0001D528', + "kgreen;": '\U00000138', + "khcy;": '\U00000445', + "kjcy;": '\U0000045C', + "kopf;": '\U0001D55C', + "kscr;": '\U0001D4C0', + "lAarr;": '\U000021DA', + "lArr;": '\U000021D0', + "lAtail;": '\U0000291B', + "lBarr;": '\U0000290E', + "lE;": '\U00002266', + "lEg;": '\U00002A8B', + "lHar;": '\U00002962', + "lacute;": '\U0000013A', + "laemptyv;": '\U000029B4', + "lagran;": '\U00002112', + "lambda;": '\U000003BB', + "lang;": '\U000027E8', + "langd;": '\U00002991', + "langle;": '\U000027E8', + "lap;": '\U00002A85', + "laquo;": '\U000000AB', + "larr;": '\U00002190', + "larrb;": '\U000021E4', + "larrbfs;": '\U0000291F', + "larrfs;": '\U0000291D', + "larrhk;": '\U000021A9', + "larrlp;": '\U000021AB', + 
"larrpl;": '\U00002939', + "larrsim;": '\U00002973', + "larrtl;": '\U000021A2', + "lat;": '\U00002AAB', + "latail;": '\U00002919', + "late;": '\U00002AAD', + "lbarr;": '\U0000290C', + "lbbrk;": '\U00002772', + "lbrace;": '\U0000007B', + "lbrack;": '\U0000005B', + "lbrke;": '\U0000298B', + "lbrksld;": '\U0000298F', + "lbrkslu;": '\U0000298D', + "lcaron;": '\U0000013E', + "lcedil;": '\U0000013C', + "lceil;": '\U00002308', + "lcub;": '\U0000007B', + "lcy;": '\U0000043B', + "ldca;": '\U00002936', + "ldquo;": '\U0000201C', + "ldquor;": '\U0000201E', + "ldrdhar;": '\U00002967', + "ldrushar;": '\U0000294B', + "ldsh;": '\U000021B2', + "le;": '\U00002264', + "leftarrow;": '\U00002190', + "leftarrowtail;": '\U000021A2', + "leftharpoondown;": '\U000021BD', + "leftharpoonup;": '\U000021BC', + "leftleftarrows;": '\U000021C7', + "leftrightarrow;": '\U00002194', + "leftrightarrows;": '\U000021C6', + "leftrightharpoons;": '\U000021CB', + "leftrightsquigarrow;": '\U000021AD', + "leftthreetimes;": '\U000022CB', + "leg;": '\U000022DA', + "leq;": '\U00002264', + "leqq;": '\U00002266', + "leqslant;": '\U00002A7D', + "les;": '\U00002A7D', + "lescc;": '\U00002AA8', + "lesdot;": '\U00002A7F', + "lesdoto;": '\U00002A81', + "lesdotor;": '\U00002A83', + "lesges;": '\U00002A93', + "lessapprox;": '\U00002A85', + "lessdot;": '\U000022D6', + "lesseqgtr;": '\U000022DA', + "lesseqqgtr;": '\U00002A8B', + "lessgtr;": '\U00002276', + "lesssim;": '\U00002272', + "lfisht;": '\U0000297C', + "lfloor;": '\U0000230A', + "lfr;": '\U0001D529', + "lg;": '\U00002276', + "lgE;": '\U00002A91', + "lhard;": '\U000021BD', + "lharu;": '\U000021BC', + "lharul;": '\U0000296A', + "lhblk;": '\U00002584', + "ljcy;": '\U00000459', + "ll;": '\U0000226A', + "llarr;": '\U000021C7', + "llcorner;": '\U0000231E', + "llhard;": '\U0000296B', + "lltri;": '\U000025FA', + "lmidot;": '\U00000140', + "lmoust;": '\U000023B0', + "lmoustache;": '\U000023B0', + "lnE;": '\U00002268', + "lnap;": '\U00002A89', + "lnapprox;": '\U00002A89', + "lne;": '\U00002A87', + "lneq;": '\U00002A87', + "lneqq;": '\U00002268', + "lnsim;": '\U000022E6', + "loang;": '\U000027EC', + "loarr;": '\U000021FD', + "lobrk;": '\U000027E6', + "longleftarrow;": '\U000027F5', + "longleftrightarrow;": '\U000027F7', + "longmapsto;": '\U000027FC', + "longrightarrow;": '\U000027F6', + "looparrowleft;": '\U000021AB', + "looparrowright;": '\U000021AC', + "lopar;": '\U00002985', + "lopf;": '\U0001D55D', + "loplus;": '\U00002A2D', + "lotimes;": '\U00002A34', + "lowast;": '\U00002217', + "lowbar;": '\U0000005F', + "loz;": '\U000025CA', + "lozenge;": '\U000025CA', + "lozf;": '\U000029EB', + "lpar;": '\U00000028', + "lparlt;": '\U00002993', + "lrarr;": '\U000021C6', + "lrcorner;": '\U0000231F', + "lrhar;": '\U000021CB', + "lrhard;": '\U0000296D', + "lrm;": '\U0000200E', + "lrtri;": '\U000022BF', + "lsaquo;": '\U00002039', + "lscr;": '\U0001D4C1', + "lsh;": '\U000021B0', + "lsim;": '\U00002272', + "lsime;": '\U00002A8D', + "lsimg;": '\U00002A8F', + "lsqb;": '\U0000005B', + "lsquo;": '\U00002018', + "lsquor;": '\U0000201A', + "lstrok;": '\U00000142', + "lt;": '\U0000003C', + "ltcc;": '\U00002AA6', + "ltcir;": '\U00002A79', + "ltdot;": '\U000022D6', + "lthree;": '\U000022CB', + "ltimes;": '\U000022C9', + "ltlarr;": '\U00002976', + "ltquest;": '\U00002A7B', + "ltrPar;": '\U00002996', + "ltri;": '\U000025C3', + "ltrie;": '\U000022B4', + "ltrif;": '\U000025C2', + "lurdshar;": '\U0000294A', + "luruhar;": '\U00002966', + "mDDot;": '\U0000223A', + "macr;": '\U000000AF', + "male;": '\U00002642', + "malt;": 
'\U00002720', + "maltese;": '\U00002720', + "map;": '\U000021A6', + "mapsto;": '\U000021A6', + "mapstodown;": '\U000021A7', + "mapstoleft;": '\U000021A4', + "mapstoup;": '\U000021A5', + "marker;": '\U000025AE', + "mcomma;": '\U00002A29', + "mcy;": '\U0000043C', + "mdash;": '\U00002014', + "measuredangle;": '\U00002221', + "mfr;": '\U0001D52A', + "mho;": '\U00002127', + "micro;": '\U000000B5', + "mid;": '\U00002223', + "midast;": '\U0000002A', + "midcir;": '\U00002AF0', + "middot;": '\U000000B7', + "minus;": '\U00002212', + "minusb;": '\U0000229F', + "minusd;": '\U00002238', + "minusdu;": '\U00002A2A', + "mlcp;": '\U00002ADB', + "mldr;": '\U00002026', + "mnplus;": '\U00002213', + "models;": '\U000022A7', + "mopf;": '\U0001D55E', + "mp;": '\U00002213', + "mscr;": '\U0001D4C2', + "mstpos;": '\U0000223E', + "mu;": '\U000003BC', + "multimap;": '\U000022B8', + "mumap;": '\U000022B8', + "nLeftarrow;": '\U000021CD', + "nLeftrightarrow;": '\U000021CE', + "nRightarrow;": '\U000021CF', + "nVDash;": '\U000022AF', + "nVdash;": '\U000022AE', + "nabla;": '\U00002207', + "nacute;": '\U00000144', + "nap;": '\U00002249', + "napos;": '\U00000149', + "napprox;": '\U00002249', + "natur;": '\U0000266E', + "natural;": '\U0000266E', + "naturals;": '\U00002115', + "nbsp;": '\U000000A0', + "ncap;": '\U00002A43', + "ncaron;": '\U00000148', + "ncedil;": '\U00000146', + "ncong;": '\U00002247', + "ncup;": '\U00002A42', + "ncy;": '\U0000043D', + "ndash;": '\U00002013', + "ne;": '\U00002260', + "neArr;": '\U000021D7', + "nearhk;": '\U00002924', + "nearr;": '\U00002197', + "nearrow;": '\U00002197', + "nequiv;": '\U00002262', + "nesear;": '\U00002928', + "nexist;": '\U00002204', + "nexists;": '\U00002204', + "nfr;": '\U0001D52B', + "nge;": '\U00002271', + "ngeq;": '\U00002271', + "ngsim;": '\U00002275', + "ngt;": '\U0000226F', + "ngtr;": '\U0000226F', + "nhArr;": '\U000021CE', + "nharr;": '\U000021AE', + "nhpar;": '\U00002AF2', + "ni;": '\U0000220B', + "nis;": '\U000022FC', + "nisd;": '\U000022FA', + "niv;": '\U0000220B', + "njcy;": '\U0000045A', + "nlArr;": '\U000021CD', + "nlarr;": '\U0000219A', + "nldr;": '\U00002025', + "nle;": '\U00002270', + "nleftarrow;": '\U0000219A', + "nleftrightarrow;": '\U000021AE', + "nleq;": '\U00002270', + "nless;": '\U0000226E', + "nlsim;": '\U00002274', + "nlt;": '\U0000226E', + "nltri;": '\U000022EA', + "nltrie;": '\U000022EC', + "nmid;": '\U00002224', + "nopf;": '\U0001D55F', + "not;": '\U000000AC', + "notin;": '\U00002209', + "notinva;": '\U00002209', + "notinvb;": '\U000022F7', + "notinvc;": '\U000022F6', + "notni;": '\U0000220C', + "notniva;": '\U0000220C', + "notnivb;": '\U000022FE', + "notnivc;": '\U000022FD', + "npar;": '\U00002226', + "nparallel;": '\U00002226', + "npolint;": '\U00002A14', + "npr;": '\U00002280', + "nprcue;": '\U000022E0', + "nprec;": '\U00002280', + "nrArr;": '\U000021CF', + "nrarr;": '\U0000219B', + "nrightarrow;": '\U0000219B', + "nrtri;": '\U000022EB', + "nrtrie;": '\U000022ED', + "nsc;": '\U00002281', + "nsccue;": '\U000022E1', + "nscr;": '\U0001D4C3', + "nshortmid;": '\U00002224', + "nshortparallel;": '\U00002226', + "nsim;": '\U00002241', + "nsime;": '\U00002244', + "nsimeq;": '\U00002244', + "nsmid;": '\U00002224', + "nspar;": '\U00002226', + "nsqsube;": '\U000022E2', + "nsqsupe;": '\U000022E3', + "nsub;": '\U00002284', + "nsube;": '\U00002288', + "nsubseteq;": '\U00002288', + "nsucc;": '\U00002281', + "nsup;": '\U00002285', + "nsupe;": '\U00002289', + "nsupseteq;": '\U00002289', + "ntgl;": '\U00002279', + "ntilde;": '\U000000F1', + "ntlg;": '\U00002278', 
+ "ntriangleleft;": '\U000022EA', + "ntrianglelefteq;": '\U000022EC', + "ntriangleright;": '\U000022EB', + "ntrianglerighteq;": '\U000022ED', + "nu;": '\U000003BD', + "num;": '\U00000023', + "numero;": '\U00002116', + "numsp;": '\U00002007', + "nvDash;": '\U000022AD', + "nvHarr;": '\U00002904', + "nvdash;": '\U000022AC', + "nvinfin;": '\U000029DE', + "nvlArr;": '\U00002902', + "nvrArr;": '\U00002903', + "nwArr;": '\U000021D6', + "nwarhk;": '\U00002923', + "nwarr;": '\U00002196', + "nwarrow;": '\U00002196', + "nwnear;": '\U00002927', + "oS;": '\U000024C8', + "oacute;": '\U000000F3', + "oast;": '\U0000229B', + "ocir;": '\U0000229A', + "ocirc;": '\U000000F4', + "ocy;": '\U0000043E', + "odash;": '\U0000229D', + "odblac;": '\U00000151', + "odiv;": '\U00002A38', + "odot;": '\U00002299', + "odsold;": '\U000029BC', + "oelig;": '\U00000153', + "ofcir;": '\U000029BF', + "ofr;": '\U0001D52C', + "ogon;": '\U000002DB', + "ograve;": '\U000000F2', + "ogt;": '\U000029C1', + "ohbar;": '\U000029B5', + "ohm;": '\U000003A9', + "oint;": '\U0000222E', + "olarr;": '\U000021BA', + "olcir;": '\U000029BE', + "olcross;": '\U000029BB', + "oline;": '\U0000203E', + "olt;": '\U000029C0', + "omacr;": '\U0000014D', + "omega;": '\U000003C9', + "omicron;": '\U000003BF', + "omid;": '\U000029B6', + "ominus;": '\U00002296', + "oopf;": '\U0001D560', + "opar;": '\U000029B7', + "operp;": '\U000029B9', + "oplus;": '\U00002295', + "or;": '\U00002228', + "orarr;": '\U000021BB', + "ord;": '\U00002A5D', + "order;": '\U00002134', + "orderof;": '\U00002134', + "ordf;": '\U000000AA', + "ordm;": '\U000000BA', + "origof;": '\U000022B6', + "oror;": '\U00002A56', + "orslope;": '\U00002A57', + "orv;": '\U00002A5B', + "oscr;": '\U00002134', + "oslash;": '\U000000F8', + "osol;": '\U00002298', + "otilde;": '\U000000F5', + "otimes;": '\U00002297', + "otimesas;": '\U00002A36', + "ouml;": '\U000000F6', + "ovbar;": '\U0000233D', + "par;": '\U00002225', + "para;": '\U000000B6', + "parallel;": '\U00002225', + "parsim;": '\U00002AF3', + "parsl;": '\U00002AFD', + "part;": '\U00002202', + "pcy;": '\U0000043F', + "percnt;": '\U00000025', + "period;": '\U0000002E', + "permil;": '\U00002030', + "perp;": '\U000022A5', + "pertenk;": '\U00002031', + "pfr;": '\U0001D52D', + "phi;": '\U000003C6', + "phiv;": '\U000003D5', + "phmmat;": '\U00002133', + "phone;": '\U0000260E', + "pi;": '\U000003C0', + "pitchfork;": '\U000022D4', + "piv;": '\U000003D6', + "planck;": '\U0000210F', + "planckh;": '\U0000210E', + "plankv;": '\U0000210F', + "plus;": '\U0000002B', + "plusacir;": '\U00002A23', + "plusb;": '\U0000229E', + "pluscir;": '\U00002A22', + "plusdo;": '\U00002214', + "plusdu;": '\U00002A25', + "pluse;": '\U00002A72', + "plusmn;": '\U000000B1', + "plussim;": '\U00002A26', + "plustwo;": '\U00002A27', + "pm;": '\U000000B1', + "pointint;": '\U00002A15', + "popf;": '\U0001D561', + "pound;": '\U000000A3', + "pr;": '\U0000227A', + "prE;": '\U00002AB3', + "prap;": '\U00002AB7', + "prcue;": '\U0000227C', + "pre;": '\U00002AAF', + "prec;": '\U0000227A', + "precapprox;": '\U00002AB7', + "preccurlyeq;": '\U0000227C', + "preceq;": '\U00002AAF', + "precnapprox;": '\U00002AB9', + "precneqq;": '\U00002AB5', + "precnsim;": '\U000022E8', + "precsim;": '\U0000227E', + "prime;": '\U00002032', + "primes;": '\U00002119', + "prnE;": '\U00002AB5', + "prnap;": '\U00002AB9', + "prnsim;": '\U000022E8', + "prod;": '\U0000220F', + "profalar;": '\U0000232E', + "profline;": '\U00002312', + "profsurf;": '\U00002313', + "prop;": '\U0000221D', + "propto;": '\U0000221D', + "prsim;": '\U0000227E', + 
"prurel;": '\U000022B0', + "pscr;": '\U0001D4C5', + "psi;": '\U000003C8', + "puncsp;": '\U00002008', + "qfr;": '\U0001D52E', + "qint;": '\U00002A0C', + "qopf;": '\U0001D562', + "qprime;": '\U00002057', + "qscr;": '\U0001D4C6', + "quaternions;": '\U0000210D', + "quatint;": '\U00002A16', + "quest;": '\U0000003F', + "questeq;": '\U0000225F', + "quot;": '\U00000022', + "rAarr;": '\U000021DB', + "rArr;": '\U000021D2', + "rAtail;": '\U0000291C', + "rBarr;": '\U0000290F', + "rHar;": '\U00002964', + "racute;": '\U00000155', + "radic;": '\U0000221A', + "raemptyv;": '\U000029B3', + "rang;": '\U000027E9', + "rangd;": '\U00002992', + "range;": '\U000029A5', + "rangle;": '\U000027E9', + "raquo;": '\U000000BB', + "rarr;": '\U00002192', + "rarrap;": '\U00002975', + "rarrb;": '\U000021E5', + "rarrbfs;": '\U00002920', + "rarrc;": '\U00002933', + "rarrfs;": '\U0000291E', + "rarrhk;": '\U000021AA', + "rarrlp;": '\U000021AC', + "rarrpl;": '\U00002945', + "rarrsim;": '\U00002974', + "rarrtl;": '\U000021A3', + "rarrw;": '\U0000219D', + "ratail;": '\U0000291A', + "ratio;": '\U00002236', + "rationals;": '\U0000211A', + "rbarr;": '\U0000290D', + "rbbrk;": '\U00002773', + "rbrace;": '\U0000007D', + "rbrack;": '\U0000005D', + "rbrke;": '\U0000298C', + "rbrksld;": '\U0000298E', + "rbrkslu;": '\U00002990', + "rcaron;": '\U00000159', + "rcedil;": '\U00000157', + "rceil;": '\U00002309', + "rcub;": '\U0000007D', + "rcy;": '\U00000440', + "rdca;": '\U00002937', + "rdldhar;": '\U00002969', + "rdquo;": '\U0000201D', + "rdquor;": '\U0000201D', + "rdsh;": '\U000021B3', + "real;": '\U0000211C', + "realine;": '\U0000211B', + "realpart;": '\U0000211C', + "reals;": '\U0000211D', + "rect;": '\U000025AD', + "reg;": '\U000000AE', + "rfisht;": '\U0000297D', + "rfloor;": '\U0000230B', + "rfr;": '\U0001D52F', + "rhard;": '\U000021C1', + "rharu;": '\U000021C0', + "rharul;": '\U0000296C', + "rho;": '\U000003C1', + "rhov;": '\U000003F1', + "rightarrow;": '\U00002192', + "rightarrowtail;": '\U000021A3', + "rightharpoondown;": '\U000021C1', + "rightharpoonup;": '\U000021C0', + "rightleftarrows;": '\U000021C4', + "rightleftharpoons;": '\U000021CC', + "rightrightarrows;": '\U000021C9', + "rightsquigarrow;": '\U0000219D', + "rightthreetimes;": '\U000022CC', + "ring;": '\U000002DA', + "risingdotseq;": '\U00002253', + "rlarr;": '\U000021C4', + "rlhar;": '\U000021CC', + "rlm;": '\U0000200F', + "rmoust;": '\U000023B1', + "rmoustache;": '\U000023B1', + "rnmid;": '\U00002AEE', + "roang;": '\U000027ED', + "roarr;": '\U000021FE', + "robrk;": '\U000027E7', + "ropar;": '\U00002986', + "ropf;": '\U0001D563', + "roplus;": '\U00002A2E', + "rotimes;": '\U00002A35', + "rpar;": '\U00000029', + "rpargt;": '\U00002994', + "rppolint;": '\U00002A12', + "rrarr;": '\U000021C9', + "rsaquo;": '\U0000203A', + "rscr;": '\U0001D4C7', + "rsh;": '\U000021B1', + "rsqb;": '\U0000005D', + "rsquo;": '\U00002019', + "rsquor;": '\U00002019', + "rthree;": '\U000022CC', + "rtimes;": '\U000022CA', + "rtri;": '\U000025B9', + "rtrie;": '\U000022B5', + "rtrif;": '\U000025B8', + "rtriltri;": '\U000029CE', + "ruluhar;": '\U00002968', + "rx;": '\U0000211E', + "sacute;": '\U0000015B', + "sbquo;": '\U0000201A', + "sc;": '\U0000227B', + "scE;": '\U00002AB4', + "scap;": '\U00002AB8', + "scaron;": '\U00000161', + "sccue;": '\U0000227D', + "sce;": '\U00002AB0', + "scedil;": '\U0000015F', + "scirc;": '\U0000015D', + "scnE;": '\U00002AB6', + "scnap;": '\U00002ABA', + "scnsim;": '\U000022E9', + "scpolint;": '\U00002A13', + "scsim;": '\U0000227F', + "scy;": '\U00000441', + "sdot;": '\U000022C5', 
+ "sdotb;": '\U000022A1', + "sdote;": '\U00002A66', + "seArr;": '\U000021D8', + "searhk;": '\U00002925', + "searr;": '\U00002198', + "searrow;": '\U00002198', + "sect;": '\U000000A7', + "semi;": '\U0000003B', + "seswar;": '\U00002929', + "setminus;": '\U00002216', + "setmn;": '\U00002216', + "sext;": '\U00002736', + "sfr;": '\U0001D530', + "sfrown;": '\U00002322', + "sharp;": '\U0000266F', + "shchcy;": '\U00000449', + "shcy;": '\U00000448', + "shortmid;": '\U00002223', + "shortparallel;": '\U00002225', + "shy;": '\U000000AD', + "sigma;": '\U000003C3', + "sigmaf;": '\U000003C2', + "sigmav;": '\U000003C2', + "sim;": '\U0000223C', + "simdot;": '\U00002A6A', + "sime;": '\U00002243', + "simeq;": '\U00002243', + "simg;": '\U00002A9E', + "simgE;": '\U00002AA0', + "siml;": '\U00002A9D', + "simlE;": '\U00002A9F', + "simne;": '\U00002246', + "simplus;": '\U00002A24', + "simrarr;": '\U00002972', + "slarr;": '\U00002190', + "smallsetminus;": '\U00002216', + "smashp;": '\U00002A33', + "smeparsl;": '\U000029E4', + "smid;": '\U00002223', + "smile;": '\U00002323', + "smt;": '\U00002AAA', + "smte;": '\U00002AAC', + "softcy;": '\U0000044C', + "sol;": '\U0000002F', + "solb;": '\U000029C4', + "solbar;": '\U0000233F', + "sopf;": '\U0001D564', + "spades;": '\U00002660', + "spadesuit;": '\U00002660', + "spar;": '\U00002225', + "sqcap;": '\U00002293', + "sqcup;": '\U00002294', + "sqsub;": '\U0000228F', + "sqsube;": '\U00002291', + "sqsubset;": '\U0000228F', + "sqsubseteq;": '\U00002291', + "sqsup;": '\U00002290', + "sqsupe;": '\U00002292', + "sqsupset;": '\U00002290', + "sqsupseteq;": '\U00002292', + "squ;": '\U000025A1', + "square;": '\U000025A1', + "squarf;": '\U000025AA', + "squf;": '\U000025AA', + "srarr;": '\U00002192', + "sscr;": '\U0001D4C8', + "ssetmn;": '\U00002216', + "ssmile;": '\U00002323', + "sstarf;": '\U000022C6', + "star;": '\U00002606', + "starf;": '\U00002605', + "straightepsilon;": '\U000003F5', + "straightphi;": '\U000003D5', + "strns;": '\U000000AF', + "sub;": '\U00002282', + "subE;": '\U00002AC5', + "subdot;": '\U00002ABD', + "sube;": '\U00002286', + "subedot;": '\U00002AC3', + "submult;": '\U00002AC1', + "subnE;": '\U00002ACB', + "subne;": '\U0000228A', + "subplus;": '\U00002ABF', + "subrarr;": '\U00002979', + "subset;": '\U00002282', + "subseteq;": '\U00002286', + "subseteqq;": '\U00002AC5', + "subsetneq;": '\U0000228A', + "subsetneqq;": '\U00002ACB', + "subsim;": '\U00002AC7', + "subsub;": '\U00002AD5', + "subsup;": '\U00002AD3', + "succ;": '\U0000227B', + "succapprox;": '\U00002AB8', + "succcurlyeq;": '\U0000227D', + "succeq;": '\U00002AB0', + "succnapprox;": '\U00002ABA', + "succneqq;": '\U00002AB6', + "succnsim;": '\U000022E9', + "succsim;": '\U0000227F', + "sum;": '\U00002211', + "sung;": '\U0000266A', + "sup;": '\U00002283', + "sup1;": '\U000000B9', + "sup2;": '\U000000B2', + "sup3;": '\U000000B3', + "supE;": '\U00002AC6', + "supdot;": '\U00002ABE', + "supdsub;": '\U00002AD8', + "supe;": '\U00002287', + "supedot;": '\U00002AC4', + "suphsol;": '\U000027C9', + "suphsub;": '\U00002AD7', + "suplarr;": '\U0000297B', + "supmult;": '\U00002AC2', + "supnE;": '\U00002ACC', + "supne;": '\U0000228B', + "supplus;": '\U00002AC0', + "supset;": '\U00002283', + "supseteq;": '\U00002287', + "supseteqq;": '\U00002AC6', + "supsetneq;": '\U0000228B', + "supsetneqq;": '\U00002ACC', + "supsim;": '\U00002AC8', + "supsub;": '\U00002AD4', + "supsup;": '\U00002AD6', + "swArr;": '\U000021D9', + "swarhk;": '\U00002926', + "swarr;": '\U00002199', + "swarrow;": '\U00002199', + "swnwar;": '\U0000292A', + "szlig;": 
'\U000000DF', + "target;": '\U00002316', + "tau;": '\U000003C4', + "tbrk;": '\U000023B4', + "tcaron;": '\U00000165', + "tcedil;": '\U00000163', + "tcy;": '\U00000442', + "tdot;": '\U000020DB', + "telrec;": '\U00002315', + "tfr;": '\U0001D531', + "there4;": '\U00002234', + "therefore;": '\U00002234', + "theta;": '\U000003B8', + "thetasym;": '\U000003D1', + "thetav;": '\U000003D1', + "thickapprox;": '\U00002248', + "thicksim;": '\U0000223C', + "thinsp;": '\U00002009', + "thkap;": '\U00002248', + "thksim;": '\U0000223C', + "thorn;": '\U000000FE', + "tilde;": '\U000002DC', + "times;": '\U000000D7', + "timesb;": '\U000022A0', + "timesbar;": '\U00002A31', + "timesd;": '\U00002A30', + "tint;": '\U0000222D', + "toea;": '\U00002928', + "top;": '\U000022A4', + "topbot;": '\U00002336', + "topcir;": '\U00002AF1', + "topf;": '\U0001D565', + "topfork;": '\U00002ADA', + "tosa;": '\U00002929', + "tprime;": '\U00002034', + "trade;": '\U00002122', + "triangle;": '\U000025B5', + "triangledown;": '\U000025BF', + "triangleleft;": '\U000025C3', + "trianglelefteq;": '\U000022B4', + "triangleq;": '\U0000225C', + "triangleright;": '\U000025B9', + "trianglerighteq;": '\U000022B5', + "tridot;": '\U000025EC', + "trie;": '\U0000225C', + "triminus;": '\U00002A3A', + "triplus;": '\U00002A39', + "trisb;": '\U000029CD', + "tritime;": '\U00002A3B', + "trpezium;": '\U000023E2', + "tscr;": '\U0001D4C9', + "tscy;": '\U00000446', + "tshcy;": '\U0000045B', + "tstrok;": '\U00000167', + "twixt;": '\U0000226C', + "twoheadleftarrow;": '\U0000219E', + "twoheadrightarrow;": '\U000021A0', + "uArr;": '\U000021D1', + "uHar;": '\U00002963', + "uacute;": '\U000000FA', + "uarr;": '\U00002191', + "ubrcy;": '\U0000045E', + "ubreve;": '\U0000016D', + "ucirc;": '\U000000FB', + "ucy;": '\U00000443', + "udarr;": '\U000021C5', + "udblac;": '\U00000171', + "udhar;": '\U0000296E', + "ufisht;": '\U0000297E', + "ufr;": '\U0001D532', + "ugrave;": '\U000000F9', + "uharl;": '\U000021BF', + "uharr;": '\U000021BE', + "uhblk;": '\U00002580', + "ulcorn;": '\U0000231C', + "ulcorner;": '\U0000231C', + "ulcrop;": '\U0000230F', + "ultri;": '\U000025F8', + "umacr;": '\U0000016B', + "uml;": '\U000000A8', + "uogon;": '\U00000173', + "uopf;": '\U0001D566', + "uparrow;": '\U00002191', + "updownarrow;": '\U00002195', + "upharpoonleft;": '\U000021BF', + "upharpoonright;": '\U000021BE', + "uplus;": '\U0000228E', + "upsi;": '\U000003C5', + "upsih;": '\U000003D2', + "upsilon;": '\U000003C5', + "upuparrows;": '\U000021C8', + "urcorn;": '\U0000231D', + "urcorner;": '\U0000231D', + "urcrop;": '\U0000230E', + "uring;": '\U0000016F', + "urtri;": '\U000025F9', + "uscr;": '\U0001D4CA', + "utdot;": '\U000022F0', + "utilde;": '\U00000169', + "utri;": '\U000025B5', + "utrif;": '\U000025B4', + "uuarr;": '\U000021C8', + "uuml;": '\U000000FC', + "uwangle;": '\U000029A7', + "vArr;": '\U000021D5', + "vBar;": '\U00002AE8', + "vBarv;": '\U00002AE9', + "vDash;": '\U000022A8', + "vangrt;": '\U0000299C', + "varepsilon;": '\U000003F5', + "varkappa;": '\U000003F0', + "varnothing;": '\U00002205', + "varphi;": '\U000003D5', + "varpi;": '\U000003D6', + "varpropto;": '\U0000221D', + "varr;": '\U00002195', + "varrho;": '\U000003F1', + "varsigma;": '\U000003C2', + "vartheta;": '\U000003D1', + "vartriangleleft;": '\U000022B2', + "vartriangleright;": '\U000022B3', + "vcy;": '\U00000432', + "vdash;": '\U000022A2', + "vee;": '\U00002228', + "veebar;": '\U000022BB', + "veeeq;": '\U0000225A', + "vellip;": '\U000022EE', + "verbar;": '\U0000007C', + "vert;": '\U0000007C', + "vfr;": '\U0001D533', + 
"vltri;": '\U000022B2', + "vopf;": '\U0001D567', + "vprop;": '\U0000221D', + "vrtri;": '\U000022B3', + "vscr;": '\U0001D4CB', + "vzigzag;": '\U0000299A', + "wcirc;": '\U00000175', + "wedbar;": '\U00002A5F', + "wedge;": '\U00002227', + "wedgeq;": '\U00002259', + "weierp;": '\U00002118', + "wfr;": '\U0001D534', + "wopf;": '\U0001D568', + "wp;": '\U00002118', + "wr;": '\U00002240', + "wreath;": '\U00002240', + "wscr;": '\U0001D4CC', + "xcap;": '\U000022C2', + "xcirc;": '\U000025EF', + "xcup;": '\U000022C3', + "xdtri;": '\U000025BD', + "xfr;": '\U0001D535', + "xhArr;": '\U000027FA', + "xharr;": '\U000027F7', + "xi;": '\U000003BE', + "xlArr;": '\U000027F8', + "xlarr;": '\U000027F5', + "xmap;": '\U000027FC', + "xnis;": '\U000022FB', + "xodot;": '\U00002A00', + "xopf;": '\U0001D569', + "xoplus;": '\U00002A01', + "xotime;": '\U00002A02', + "xrArr;": '\U000027F9', + "xrarr;": '\U000027F6', + "xscr;": '\U0001D4CD', + "xsqcup;": '\U00002A06', + "xuplus;": '\U00002A04', + "xutri;": '\U000025B3', + "xvee;": '\U000022C1', + "xwedge;": '\U000022C0', + "yacute;": '\U000000FD', + "yacy;": '\U0000044F', + "ycirc;": '\U00000177', + "ycy;": '\U0000044B', + "yen;": '\U000000A5', + "yfr;": '\U0001D536', + "yicy;": '\U00000457', + "yopf;": '\U0001D56A', + "yscr;": '\U0001D4CE', + "yucy;": '\U0000044E', + "yuml;": '\U000000FF', + "zacute;": '\U0000017A', + "zcaron;": '\U0000017E', + "zcy;": '\U00000437', + "zdot;": '\U0000017C', + "zeetrf;": '\U00002128', + "zeta;": '\U000003B6', + "zfr;": '\U0001D537', + "zhcy;": '\U00000436', + "zigrarr;": '\U000021DD', + "zopf;": '\U0001D56B', + "zscr;": '\U0001D4CF', + "zwj;": '\U0000200D', + "zwnj;": '\U0000200C', + "AElig": '\U000000C6', + "AMP": '\U00000026', + "Aacute": '\U000000C1', + "Acirc": '\U000000C2', + "Agrave": '\U000000C0', + "Aring": '\U000000C5', + "Atilde": '\U000000C3', + "Auml": '\U000000C4', + "COPY": '\U000000A9', + "Ccedil": '\U000000C7', + "ETH": '\U000000D0', + "Eacute": '\U000000C9', + "Ecirc": '\U000000CA', + "Egrave": '\U000000C8', + "Euml": '\U000000CB', + "GT": '\U0000003E', + "Iacute": '\U000000CD', + "Icirc": '\U000000CE', + "Igrave": '\U000000CC', + "Iuml": '\U000000CF', + "LT": '\U0000003C', + "Ntilde": '\U000000D1', + "Oacute": '\U000000D3', + "Ocirc": '\U000000D4', + "Ograve": '\U000000D2', + "Oslash": '\U000000D8', + "Otilde": '\U000000D5', + "Ouml": '\U000000D6', + "QUOT": '\U00000022', + "REG": '\U000000AE', + "THORN": '\U000000DE', + "Uacute": '\U000000DA', + "Ucirc": '\U000000DB', + "Ugrave": '\U000000D9', + "Uuml": '\U000000DC', + "Yacute": '\U000000DD', + "aacute": '\U000000E1', + "acirc": '\U000000E2', + "acute": '\U000000B4', + "aelig": '\U000000E6', + "agrave": '\U000000E0', + "amp": '\U00000026', + "aring": '\U000000E5', + "atilde": '\U000000E3', + "auml": '\U000000E4', + "brvbar": '\U000000A6', + "ccedil": '\U000000E7', + "cedil": '\U000000B8', + "cent": '\U000000A2', + "copy": '\U000000A9', + "curren": '\U000000A4', + "deg": '\U000000B0', + "divide": '\U000000F7', + "eacute": '\U000000E9', + "ecirc": '\U000000EA', + "egrave": '\U000000E8', + "eth": '\U000000F0', + "euml": '\U000000EB', + "frac12": '\U000000BD', + "frac14": '\U000000BC', + "frac34": '\U000000BE', + "gt": '\U0000003E', + "iacute": '\U000000ED', + "icirc": '\U000000EE', + "iexcl": '\U000000A1', + "igrave": '\U000000EC', + "iquest": '\U000000BF', + "iuml": '\U000000EF', + "laquo": '\U000000AB', + "lt": '\U0000003C', + "macr": '\U000000AF', + "micro": '\U000000B5', + "middot": '\U000000B7', + "nbsp": '\U000000A0', + "not": '\U000000AC', + "ntilde": '\U000000F1', + 
"oacute": '\U000000F3', + "ocirc": '\U000000F4', + "ograve": '\U000000F2', + "ordf": '\U000000AA', + "ordm": '\U000000BA', + "oslash": '\U000000F8', + "otilde": '\U000000F5', + "ouml": '\U000000F6', + "para": '\U000000B6', + "plusmn": '\U000000B1', + "pound": '\U000000A3', + "quot": '\U00000022', + "raquo": '\U000000BB', + "reg": '\U000000AE', + "sect": '\U000000A7', + "shy": '\U000000AD', + "sup1": '\U000000B9', + "sup2": '\U000000B2', + "sup3": '\U000000B3', + "szlig": '\U000000DF', + "thorn": '\U000000FE', + "times": '\U000000D7', + "uacute": '\U000000FA', + "ucirc": '\U000000FB', + "ugrave": '\U000000F9', + "uml": '\U000000A8', + "uuml": '\U000000FC', + "yacute": '\U000000FD', + "yen": '\U000000A5', + "yuml": '\U000000FF', } // HTML entities that are two unicode codepoints. diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go index d3b3844..01477a9 100644 --- a/vendor/golang.org/x/net/html/foreign.go +++ b/vendor/golang.org/x/net/html/foreign.go @@ -67,7 +67,7 @@ func mathMLTextIntegrationPoint(n *Node) bool { return false } -// Section 12.2.5.5. +// Section 12.2.6.5. var breakout = map[string]bool{ "b": true, "big": true, @@ -115,7 +115,7 @@ var breakout = map[string]bool{ "var": true, } -// Section 12.2.5.5. +// Section 12.2.6.5. var svgTagNameAdjustments = map[string]string{ "altglyph": "altGlyph", "altglyphdef": "altGlyphDef", @@ -155,7 +155,7 @@ var svgTagNameAdjustments = map[string]string{ "textpath": "textPath", } -// Section 12.2.5.1 +// Section 12.2.6.1 var mathMLAttributeAdjustments = map[string]string{ "definitionurl": "definitionURL", } diff --git a/vendor/golang.org/x/net/html/node.go b/vendor/golang.org/x/net/html/node.go index 26b657a..633ee15 100644 --- a/vendor/golang.org/x/net/html/node.go +++ b/vendor/golang.org/x/net/html/node.go @@ -21,9 +21,10 @@ const ( scopeMarkerNode ) -// Section 12.2.3.3 says "scope markers are inserted when entering applet -// elements, buttons, object elements, marquees, table cells, and table -// captions, and are used to prevent formatting from 'leaking'". +// Section 12.2.4.3 says "The markers are inserted when entering applet, +// object, marquee, template, td, th, and caption elements, and are used +// to prevent formatting from "leaking" into applet, object, marquee, +// template, td, th, and caption elements". var scopeMarker = Node{Type: scopeMarkerNode} // A Node consists of a NodeType and some Data (tag name for element nodes, @@ -173,6 +174,16 @@ func (s *nodeStack) index(n *Node) int { return -1 } +// contains returns whether a is within s. +func (s *nodeStack) contains(a atom.Atom) bool { + for _, n := range *s { + if n.DataAtom == a && n.Namespace == "" { + return true + } + } + return false +} + // insert inserts a node at the given index. func (s *nodeStack) insert(i int, n *Node) { (*s) = append(*s, nil) @@ -191,3 +202,19 @@ func (s *nodeStack) remove(n *Node) { (*s)[j] = nil *s = (*s)[:j] } + +type insertionModeStack []insertionMode + +func (s *insertionModeStack) pop() (im insertionMode) { + i := len(*s) + im = (*s)[i-1] + *s = (*s)[:i-1] + return im +} + +func (s *insertionModeStack) top() insertionMode { + if i := len(*s); i > 0 { + return (*s)[i-1] + } + return nil +} diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go index be4b2bf..1d3c198 100644 --- a/vendor/golang.org/x/net/html/parse.go +++ b/vendor/golang.org/x/net/html/parse.go @@ -25,20 +25,22 @@ type parser struct { hasSelfClosingToken bool // doc is the document root element. 
doc *Node - // The stack of open elements (section 12.2.3.2) and active formatting - // elements (section 12.2.3.3). + // The stack of open elements (section 12.2.4.2) and active formatting + // elements (section 12.2.4.3). oe, afe nodeStack - // Element pointers (section 12.2.3.4). + // Element pointers (section 12.2.4.4). head, form *Node - // Other parsing state flags (section 12.2.3.5). + // Other parsing state flags (section 12.2.4.5). scripting, framesetOK bool + // The stack of template insertion modes + templateStack insertionModeStack // im is the current insertion mode. im insertionMode // originalIM is the insertion mode to go back to after completing a text // or inTableText insertion mode. originalIM insertionMode // fosterParenting is whether new elements should be inserted according to - // the foster parenting rules (section 12.2.5.3). + // the foster parenting rules (section 12.2.6.1). fosterParenting bool // quirks is whether the parser is operating in "quirks mode." quirks bool @@ -56,7 +58,7 @@ func (p *parser) top() *Node { return p.doc } -// Stop tags for use in popUntil. These come from section 12.2.3.2. +// Stop tags for use in popUntil. These come from section 12.2.4.2. var ( defaultScopeStopTags = map[string][]a.Atom{ "": {a.Applet, a.Caption, a.Html, a.Table, a.Td, a.Th, a.Marquee, a.Object, a.Template}, @@ -79,7 +81,7 @@ const ( // popUntil pops the stack of open elements at the highest element whose tag // is in matchTags, provided there is no higher element in the scope's stop -// tags (as defined in section 12.2.3.2). It returns whether or not there was +// tags (as defined in section 12.2.4.2). It returns whether or not there was // such an element. If there was not, popUntil leaves the stack unchanged. // // For example, the set of stop tags for table scope is: "html", "table". If @@ -126,7 +128,7 @@ func (p *parser) indexOfElementInScope(s scope, matchTags ...a.Atom) int { return -1 } case tableScope: - if tagAtom == a.Html || tagAtom == a.Table { + if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template { return -1 } case selectScope: @@ -162,17 +164,17 @@ func (p *parser) clearStackToContext(s scope) { tagAtom := p.oe[i].DataAtom switch s { case tableScope: - if tagAtom == a.Html || tagAtom == a.Table { + if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template { p.oe = p.oe[:i+1] return } case tableRowScope: - if tagAtom == a.Html || tagAtom == a.Tr { + if tagAtom == a.Html || tagAtom == a.Tr || tagAtom == a.Template { p.oe = p.oe[:i+1] return } case tableBodyScope: - if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead { + if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead || tagAtom == a.Template { p.oe = p.oe[:i+1] return } @@ -183,7 +185,7 @@ func (p *parser) clearStackToContext(s scope) { } // generateImpliedEndTags pops nodes off the stack of open elements as long as -// the top node has a tag name of dd, dt, li, option, optgroup, p, rp, or rt. +// the top node has a tag name of dd, dt, li, optgroup, option, p, rb, rp, rt or rtc. // If exceptions are specified, nodes with that name will not be popped off. 
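The parser struct above gains a templateStack field alongside oe and afe, and the scope tables now treat <template> as a hard boundary for popUntil, indexOfElementInScope and clearStackToContext. The template stack is just a LIFO of insertion-mode functions: push when a <template> start tag is seen, pop at the matching end tag, and consult top() when the insertion mode is reset. A minimal stand-alone sketch of that pattern follows; the mode names are invented for the example and are not the vendored identifiers.

package main

import "fmt"

// mode is a stand-in for the parser's insertionMode function type.
type mode func() string

type modeStack []mode

func (s *modeStack) push(m mode) { *s = append(*s, m) }

// pop removes and returns the top mode; like the vendored pop, it assumes
// the stack is non-empty.
func (s *modeStack) pop() mode {
	i := len(*s)
	m := (*s)[i-1]
	*s = (*s)[:i-1]
	return m
}

// top returns the top mode without removing it, or nil when the stack is empty.
func (s *modeStack) top() mode {
	if i := len(*s); i > 0 {
		return (*s)[i-1]
	}
	return nil
}

func main() {
	inTemplate := func() string { return "in template" }

	var templates modeStack
	templates.push(inTemplate)          // <template> start tag seen
	fmt.Println(templates.top()())      // current template mode
	templates.pop()                     // matching </template> seen
	fmt.Println(templates.top() == nil) // true: no open template remains
}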
func (p *parser) generateImpliedEndTags(exceptions ...string) { var i int @@ -192,7 +194,7 @@ loop: n := p.oe[i] if n.Type == ElementNode { switch n.DataAtom { - case a.Dd, a.Dt, a.Li, a.Option, a.Optgroup, a.P, a.Rp, a.Rt: + case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc: for _, except := range exceptions { if n.Data == except { break loop @@ -234,9 +236,9 @@ func (p *parser) shouldFosterParent() bool { } // fosterParent adds a child node according to the foster parenting rules. -// Section 12.2.5.3, "foster parenting". +// Section 12.2.6.1, "foster parenting". func (p *parser) fosterParent(n *Node) { - var table, parent, prev *Node + var table, parent, prev, template *Node var i int for i = len(p.oe) - 1; i >= 0; i-- { if p.oe[i].DataAtom == a.Table { @@ -245,6 +247,19 @@ func (p *parser) fosterParent(n *Node) { } } + var j int + for j = len(p.oe) - 1; j >= 0; j-- { + if p.oe[j].DataAtom == a.Template { + template = p.oe[j] + break + } + } + + if template != nil && (table == nil || j > i) { + template.AppendChild(n) + return + } + if table == nil { // The foster parent is the html element. parent = p.oe[0] @@ -304,7 +319,7 @@ func (p *parser) addElement() { }) } -// Section 12.2.3.3. +// Section 12.2.4.3. func (p *parser) addFormattingElement() { tagAtom, attr := p.tok.DataAtom, p.tok.Attr p.addElement() @@ -351,7 +366,7 @@ findIdenticalElements: p.afe = append(p.afe, p.top()) } -// Section 12.2.3.3. +// Section 12.2.4.3. func (p *parser) clearActiveFormattingElements() { for { n := p.afe.pop() @@ -361,7 +376,7 @@ func (p *parser) clearActiveFormattingElements() { } } -// Section 12.2.3.3. +// Section 12.2.4.3. func (p *parser) reconstructActiveFormattingElements() { n := p.afe.top() if n == nil { @@ -390,12 +405,12 @@ func (p *parser) reconstructActiveFormattingElements() { } } -// Section 12.2.4. +// Section 12.2.5. func (p *parser) acknowledgeSelfClosingTag() { p.hasSelfClosingToken = false } -// An insertion mode (section 12.2.3.1) is the state transition function from +// An insertion mode (section 12.2.4.1) is the state transition function from // a particular state in the HTML5 parser's state machine. It updates the // parser's fields depending on parser.tok (where ErrorToken means EOF). // It returns whether the token was consumed. @@ -403,7 +418,7 @@ type insertionMode func(*parser) bool // setOriginalIM sets the insertion mode to return to after completing a text or // inTableText insertion mode. -// Section 12.2.3.1, "using the rules for". +// Section 12.2.4.1, "using the rules for". func (p *parser) setOriginalIM() { if p.originalIM != nil { panic("html: bad parser state: originalIM was set twice") @@ -411,18 +426,35 @@ func (p *parser) setOriginalIM() { p.originalIM = p.im } -// Section 12.2.3.1, "reset the insertion mode". +// Section 12.2.4.1, "reset the insertion mode". func (p *parser) resetInsertionMode() { for i := len(p.oe) - 1; i >= 0; i-- { n := p.oe[i] - if i == 0 && p.context != nil { + last := i == 0 + if last && p.context != nil { n = p.context } switch n.DataAtom { case a.Select: + if !last { + for ancestor, first := n, p.oe[0]; ancestor != first; { + ancestor = p.oe[p.oe.index(ancestor)-1] + switch ancestor.DataAtom { + case a.Template: + p.im = inSelectIM + return + case a.Table: + p.im = inSelectInTableIM + return + } + } + } p.im = inSelectIM case a.Td, a.Th: + // TODO: remove this divergence from the HTML5 spec. 
+ // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 p.im = inCellIM case a.Tr: p.im = inRowIM @@ -434,25 +466,41 @@ func (p *parser) resetInsertionMode() { p.im = inColumnGroupIM case a.Table: p.im = inTableIM + case a.Template: + // TODO: remove this divergence from the HTML5 spec. + if n.Namespace != "" { + continue + } + p.im = p.templateStack.top() case a.Head: - p.im = inBodyIM + // TODO: remove this divergence from the HTML5 spec. + // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 + p.im = inHeadIM case a.Body: p.im = inBodyIM case a.Frameset: p.im = inFramesetIM case a.Html: - p.im = beforeHeadIM + if p.head == nil { + p.im = beforeHeadIM + } else { + p.im = afterHeadIM + } default: + if last { + p.im = inBodyIM + return + } continue } return } - p.im = inBodyIM } const whitespace = " \t\r\n\f" -// Section 12.2.5.4.1. +// Section 12.2.6.4.1. func initialIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -479,7 +527,7 @@ func initialIM(p *parser) bool { return false } -// Section 12.2.5.4.2. +// Section 12.2.6.4.2. func beforeHTMLIM(p *parser) bool { switch p.tok.Type { case DoctypeToken: @@ -517,7 +565,7 @@ func beforeHTMLIM(p *parser) bool { return false } -// Section 12.2.5.4.3. +// Section 12.2.6.4.3. func beforeHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -560,7 +608,7 @@ func beforeHeadIM(p *parser) bool { return false } -// Section 12.2.5.4.4. +// Section 12.2.6.4.4. func inHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -590,19 +638,41 @@ func inHeadIM(p *parser) bool { case a.Head: // Ignore the token. return true + case a.Template: + p.addElement() + p.afe = append(p.afe, &scopeMarker) + p.framesetOK = false + p.im = inTemplateIM + p.templateStack = append(p.templateStack, inTemplateIM) + return true } case EndTagToken: switch p.tok.DataAtom { case a.Head: - n := p.oe.pop() - if n.DataAtom != a.Head { - panic("html: bad parser state: element not found, in the in-head insertion mode") - } + p.oe.pop() p.im = afterHeadIM return true case a.Body, a.Html, a.Br: p.parseImpliedToken(EndTagToken, a.Head, a.Head.String()) return false + case a.Template: + if !p.oe.contains(a.Template) { + return true + } + // TODO: remove this divergence from the HTML5 spec. + // + // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 + p.generateImpliedEndTags() + for i := len(p.oe) - 1; i >= 0; i-- { + if n := p.oe[i]; n.Namespace == "" && n.DataAtom == a.Template { + p.oe = p.oe[:i] + break + } + } + p.clearActiveFormattingElements() + p.templateStack.pop() + p.resetInsertionMode() + return true default: // Ignore the token. return true @@ -622,7 +692,7 @@ func inHeadIM(p *parser) bool { return false } -// Section 12.2.5.4.6. +// Section 12.2.6.4.6. func afterHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -648,7 +718,7 @@ func afterHeadIM(p *parser) bool { p.addElement() p.im = inFramesetIM return true - case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title: + case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title: p.oe = append(p.oe, p.head) defer p.oe.remove(p.head) return inHeadIM(p) @@ -660,6 +730,8 @@ func afterHeadIM(p *parser) bool { switch p.tok.DataAtom { case a.Body, a.Html, a.Br: // Drop down to creating an implied tag. + case a.Template: + return inHeadIM(p) default: // Ignore the token. 
return true @@ -697,7 +769,7 @@ func copyAttributes(dst *Node, src Token) { } } -// Section 12.2.5.4.7. +// Section 12.2.6.4.7. func inBodyIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -727,10 +799,16 @@ func inBodyIM(p *parser) bool { case StartTagToken: switch p.tok.DataAtom { case a.Html: + if p.oe.contains(a.Template) { + return true + } copyAttributes(p.oe[0], p.tok) - case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Title: + case a.Base, a.Basefont, a.Bgsound, a.Command, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title: return inHeadIM(p) case a.Body: + if p.oe.contains(a.Template) { + return true + } if len(p.oe) >= 2 { body := p.oe[1] if body.Type == ElementNode && body.DataAtom == a.Body { @@ -767,9 +845,13 @@ func inBodyIM(p *parser) bool { // The newline, if any, will be dealt with by the TextToken case. p.framesetOK = false case a.Form: - if p.form == nil { - p.popUntil(buttonScope, a.P) - p.addElement() + if p.form != nil && !p.oe.contains(a.Template) { + // Ignore the token + return true + } + p.popUntil(buttonScope, a.P) + p.addElement() + if !p.oe.contains(a.Template) { p.form = p.top() } case a.Li: @@ -819,7 +901,7 @@ func inBodyIM(p *parser) bool { case a.A: for i := len(p.afe) - 1; i >= 0 && p.afe[i].Type != scopeMarkerNode; i-- { if n := p.afe[i]; n.Type == ElementNode && n.DataAtom == a.A { - p.inBodyEndTagFormatting(a.A) + p.inBodyEndTagFormatting(a.A, "a") p.oe.remove(n) p.afe.remove(n) break @@ -833,7 +915,7 @@ func inBodyIM(p *parser) bool { case a.Nobr: p.reconstructActiveFormattingElements() if p.elementInScope(defaultScope, a.Nobr) { - p.inBodyEndTagFormatting(a.Nobr) + p.inBodyEndTagFormatting(a.Nobr, "nobr") p.reconstructActiveFormattingElements() } p.addFormattingElement() @@ -903,6 +985,14 @@ func inBodyIM(p *parser) bool { p.acknowledgeSelfClosingTag() p.popUntil(buttonScope, a.P) p.parseImpliedToken(StartTagToken, a.Form, a.Form.String()) + if p.form == nil { + // NOTE: The 'isindex' element has been removed, + // and the 'template' element has not been designed to be + // collaborative with the index element. + // + // Ignore the token. + return true + } if action != "" { p.form.Attr = []Attribute{{Key: "action", Val: action}} } @@ -952,11 +1042,16 @@ func inBodyIM(p *parser) bool { } p.reconstructActiveFormattingElements() p.addElement() - case a.Rp, a.Rt: + case a.Rb, a.Rtc: if p.elementInScope(defaultScope, a.Ruby) { p.generateImpliedEndTags() } p.addElement() + case a.Rp, a.Rt: + if p.elementInScope(defaultScope, a.Ruby) { + p.generateImpliedEndTags("rtc") + } + p.addElement() case a.Math, a.Svg: p.reconstructActiveFormattingElements() if p.tok.DataAtom == a.Math { @@ -993,15 +1088,29 @@ func inBodyIM(p *parser) bool { case a.Address, a.Article, a.Aside, a.Blockquote, a.Button, a.Center, a.Details, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Listing, a.Menu, a.Nav, a.Ol, a.Pre, a.Section, a.Summary, a.Ul: p.popUntil(defaultScope, p.tok.DataAtom) case a.Form: - node := p.form - p.form = nil - i := p.indexOfElementInScope(defaultScope, a.Form) - if node == nil || i == -1 || p.oe[i] != node { - // Ignore the token. - return true + if p.oe.contains(a.Template) { + i := p.indexOfElementInScope(defaultScope, a.Form) + if i == -1 { + // Ignore the token. + return true + } + p.generateImpliedEndTags() + if p.oe[i].DataAtom != a.Form { + // Ignore the token. 
+ return true + } + p.popUntil(defaultScope, a.Form) + } else { + node := p.form + p.form = nil + i := p.indexOfElementInScope(defaultScope, a.Form) + if node == nil || i == -1 || p.oe[i] != node { + // Ignore the token. + return true + } + p.generateImpliedEndTags() + p.oe.remove(node) } - p.generateImpliedEndTags() - p.oe.remove(node) case a.P: if !p.elementInScope(buttonScope, a.P) { p.parseImpliedToken(StartTagToken, a.P, a.P.String()) @@ -1014,7 +1123,7 @@ func inBodyIM(p *parser) bool { case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6: p.popUntil(defaultScope, a.H1, a.H2, a.H3, a.H4, a.H5, a.H6) case a.A, a.B, a.Big, a.Code, a.Em, a.Font, a.I, a.Nobr, a.S, a.Small, a.Strike, a.Strong, a.Tt, a.U: - p.inBodyEndTagFormatting(p.tok.DataAtom) + p.inBodyEndTagFormatting(p.tok.DataAtom, p.tok.Data) case a.Applet, a.Marquee, a.Object: if p.popUntil(defaultScope, p.tok.DataAtom) { p.clearActiveFormattingElements() @@ -1022,20 +1131,37 @@ func inBodyIM(p *parser) bool { case a.Br: p.tok.Type = StartTagToken return false + case a.Template: + return inHeadIM(p) default: - p.inBodyEndTagOther(p.tok.DataAtom) + p.inBodyEndTagOther(p.tok.DataAtom, p.tok.Data) } case CommentToken: p.addChild(&Node{ Type: CommentNode, Data: p.tok.Data, }) + case ErrorToken: + // TODO: remove this divergence from the HTML5 spec. + if len(p.templateStack) > 0 { + p.im = inTemplateIM + return false + } else { + for _, e := range p.oe { + switch e.DataAtom { + case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc, a.Tbody, a.Td, a.Tfoot, a.Th, + a.Thead, a.Tr, a.Body, a.Html: + default: + return true + } + } + } } return true } -func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) { +func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom, tagName string) { // This is the "adoption agency" algorithm, described at // https://html.spec.whatwg.org/multipage/syntax.html#adoptionAgency @@ -1057,7 +1183,7 @@ func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) { } } if formattingElement == nil { - p.inBodyEndTagOther(tagAtom) + p.inBodyEndTagOther(tagAtom, tagName) return } feIndex := p.oe.index(formattingElement) @@ -1160,11 +1286,19 @@ func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom) { } // inBodyEndTagOther performs the "any other end tag" algorithm for inBodyIM. -// "Any other end tag" handling from 12.2.5.5 The rules for parsing tokens in foreign content +// "Any other end tag" handling from 12.2.6.5 The rules for parsing tokens in foreign content // https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inforeign -func (p *parser) inBodyEndTagOther(tagAtom a.Atom) { +func (p *parser) inBodyEndTagOther(tagAtom a.Atom, tagName string) { for i := len(p.oe) - 1; i >= 0; i-- { - if p.oe[i].DataAtom == tagAtom { + // Two element nodes have the same tag if they have the same Data (a + // string-typed field). As an optimization, for common HTML tags, each + // Data string is assigned a unique, non-zero DataAtom (a uint32-typed + // field), since integer comparison is faster than string comparison. + // Uncommon (custom) tags get a zero DataAtom. + // + // The if condition here is equivalent to (p.oe[i].Data == tagName). + if (p.oe[i].DataAtom == tagAtom) && + ((tagAtom != 0) || (p.oe[i].Data == tagName)) { p.oe = p.oe[:i] break } @@ -1174,7 +1308,7 @@ func (p *parser) inBodyEndTagOther(tagAtom a.Atom) { } } -// Section 12.2.5.4.8. +// Section 12.2.6.4.8. 
func textIM(p *parser) bool { switch p.tok.Type { case ErrorToken: @@ -1203,12 +1337,9 @@ func textIM(p *parser) bool { return p.tok.Type == EndTagToken } -// Section 12.2.5.4.9. +// Section 12.2.6.4.9. func inTableIM(p *parser) bool { switch p.tok.Type { - case ErrorToken: - // Stop parsing. - return true case TextToken: p.tok.Data = strings.Replace(p.tok.Data, "\x00", "", -1) switch p.oe.top().DataAtom { @@ -1249,7 +1380,7 @@ func inTableIM(p *parser) bool { } // Ignore the token. return true - case a.Style, a.Script: + case a.Style, a.Script, a.Template: return inHeadIM(p) case a.Input: for _, t := range p.tok.Attr { @@ -1261,7 +1392,7 @@ func inTableIM(p *parser) bool { } // Otherwise drop down to the default action. case a.Form: - if p.form != nil { + if p.oe.contains(a.Template) || p.form != nil { // Ignore the token. return true } @@ -1291,6 +1422,8 @@ func inTableIM(p *parser) bool { case a.Body, a.Caption, a.Col, a.Colgroup, a.Html, a.Tbody, a.Td, a.Tfoot, a.Th, a.Thead, a.Tr: // Ignore the token. return true + case a.Template: + return inHeadIM(p) } case CommentToken: p.addChild(&Node{ @@ -1301,6 +1434,8 @@ func inTableIM(p *parser) bool { case DoctypeToken: // Ignore the token. return true + case ErrorToken: + return inBodyIM(p) } p.fosterParenting = true @@ -1309,7 +1444,7 @@ func inTableIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.11. +// Section 12.2.6.4.11. func inCaptionIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1355,7 +1490,7 @@ func inCaptionIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.12. +// Section 12.2.6.4.12. func inColumnGroupIM(p *parser) bool { switch p.tok.Type { case TextToken: @@ -1386,11 +1521,13 @@ func inColumnGroupIM(p *parser) bool { p.oe.pop() p.acknowledgeSelfClosingTag() return true + case a.Template: + return inHeadIM(p) } case EndTagToken: switch p.tok.DataAtom { case a.Colgroup: - if p.oe.top().DataAtom != a.Html { + if p.oe.top().DataAtom == a.Colgroup { p.oe.pop() p.im = inTableIM } @@ -1398,17 +1535,21 @@ func inColumnGroupIM(p *parser) bool { case a.Col: // Ignore the token. return true + case a.Template: + return inHeadIM(p) } + case ErrorToken: + return inBodyIM(p) } - if p.oe.top().DataAtom != a.Html { - p.oe.pop() - p.im = inTableIM - return false + if p.oe.top().DataAtom != a.Colgroup { + return true } - return true + p.oe.pop() + p.im = inTableIM + return false } -// Section 12.2.5.4.13. +// Section 12.2.6.4.13. func inTableBodyIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1460,7 +1601,7 @@ func inTableBodyIM(p *parser) bool { return inTableIM(p) } -// Section 12.2.5.4.14. +// Section 12.2.6.4.14. func inRowIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1511,7 +1652,7 @@ func inRowIM(p *parser) bool { return inTableIM(p) } -// Section 12.2.5.4.15. +// Section 12.2.6.4.15. func inCellIM(p *parser) bool { switch p.tok.Type { case StartTagToken: @@ -1560,12 +1701,9 @@ func inCellIM(p *parser) bool { return inBodyIM(p) } -// Section 12.2.5.4.16. +// Section 12.2.6.4.16. func inSelectIM(p *parser) bool { switch p.tok.Type { - case ErrorToken: - // Stop parsing. - return true case TextToken: p.addText(strings.Replace(p.tok.Data, "\x00", "", -1)) case StartTagToken: @@ -1586,8 +1724,12 @@ func inSelectIM(p *parser) bool { } p.addElement() case a.Select: - p.tok.Type = EndTagToken - return false + if p.popUntil(selectScope, a.Select) { + p.resetInsertionMode() + } else { + // Ignore the token. 
+ return true + } case a.Input, a.Keygen, a.Textarea: if p.elementInScope(selectScope, a.Select) { p.parseImpliedToken(EndTagToken, a.Select, a.Select.String()) @@ -1597,7 +1739,7 @@ func inSelectIM(p *parser) bool { p.tokenizer.NextIsNotRawText() // Ignore the token. return true - case a.Script: + case a.Script, a.Template: return inHeadIM(p) } case EndTagToken: @@ -1617,7 +1759,12 @@ func inSelectIM(p *parser) bool { case a.Select: if p.popUntil(selectScope, a.Select) { p.resetInsertionMode() + } else { + // Ignore the token. + return true } + case a.Template: + return inHeadIM(p) } case CommentToken: p.addChild(&Node{ @@ -1627,30 +1774,107 @@ func inSelectIM(p *parser) bool { case DoctypeToken: // Ignore the token. return true + case ErrorToken: + return inBodyIM(p) } return true } -// Section 12.2.5.4.17. +// Section 12.2.6.4.17. func inSelectInTableIM(p *parser) bool { switch p.tok.Type { case StartTagToken, EndTagToken: switch p.tok.DataAtom { case a.Caption, a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr, a.Td, a.Th: - if p.tok.Type == StartTagToken || p.elementInScope(tableScope, p.tok.DataAtom) { - p.parseImpliedToken(EndTagToken, a.Select, a.Select.String()) - return false - } else { + if p.tok.Type == EndTagToken && !p.elementInScope(tableScope, p.tok.DataAtom) { // Ignore the token. return true } + // This is like p.popUntil(selectScope, a.Select), but it also + // matches , not just brx